Space: hynt / k2-automatic-speech-recognition-demo (duplicated from k2-fsa/automatic-speech-recognition) · 2 likes · Running
Branch: main
Repository: k2-automatic-speech-recognition-demo · 253 MB · 2 contributors · History: 59 commits
Latest commit: Update model.py (56596c3, verified) by hynt · 17 days ago

Files
test_wavs/ · test waves · 7 months ago
.gitattributes · 1.26 kB · first commit · 7 months ago
README.md · 292 Bytes · fixes · 7 months ago
app.py · 12.8 kB · Update app.py · 18 days ago
config.json · 23.2 kB · Rename tokens.txt to config.json · 17 days ago
decode.py · 3.56 kB · first commit · 7 months ago
decoder-epoch-20-avg-10.int8.onnx · 1.31 MB · Upload 8 files · 19 days ago
decoder-epoch-20-avg-10.onnx · 5.17 MB · Upload 8 files · 19 days ago
encoder-epoch-20-avg-10.int8.onnx · 27.7 MB · Upload 8 files · 19 days ago
encoder-epoch-20-avg-10.onnx · 92.2 MB · Upload 8 files · 19 days ago
examples.py · 15.8 kB · first commit · 7 months ago
jit_script.pt · 107 MB · Upload 8 files · 19 days ago
  (pickle: the Hub scanner detects 172 pickle imports in this TorchScript archive, mainly __torch__.zipformer.* modules such as Zipformer2, Zipformer2Encoder/Zipformer2EncoderLayer, DownsampledZipformer2Encoder, SelfAttention, RelPositionMultiheadAttentionWeights, NonlinAttention, FeedforwardModule, ConvolutionModule, BypassModule, SimpleDownsample, SimpleUpsample and CompactRelPositionalEncoding; __torch__.subsampling.Conv2dSubsampling and ConvNeXt; __torch__.scaling.* helpers (BiasNorm, SwooshL, SwooshR, ScheduledFloat, Dropout2, ActivationDropoutAndLinear, Identity); __torch__.model.AsrModel, __torch__.EncoderModel, __torch__.decoder.Decoder and __torch__.joiner.Joiner; standard torch.nn Linear/Conv1d/Conv2d/Embedding/Tanh/Sigmoid/Sequential/ModuleList wrappers; plus torch.FloatStorage, collections.OrderedDict, torch._utils._rebuild_tensor_v2 and many repeated torch.jit._pickle.build_intlist entries. A loading sketch follows the file list.)
joiner-epoch-20-avg-10.int8.onnx · 1.03 MB · Upload 8 files · 19 days ago
joiner-epoch-20-avg-10.onnx · 4.1 MB · Upload 8 files · 19 days ago
model.py · 70 kB · Update model.py · 17 days ago
requirements.txt · 965 Bytes · update sherpa-onnx · 4 months ago
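The listing amounts to a zipformer transducer export: a TorchScript archive (jit_script.pt) plus ONNX encoder/decoder/joiner models in float and int8 variants, with sherpa-onnx pinned in requirements.txt. Two hedged sketches follow; neither is code taken from the Space itself.

The pickle imports reported for jit_script.pt are what the Hub scanner sees inside a TorchScript zip archive. A minimal sketch of inspecting it locally (loading executes pickled code, so only do this with files you trust):

```python
import torch

# Minimal sketch, not the Space's own code: jit_script.pt is a TorchScript
# archive, which is why the scanner lists __torch__.zipformer.* and
# __torch__.model.AsrModel among its pickle imports.
model = torch.jit.load("jit_script.pt", map_location="cpu")
model.eval()
print(model)  # prints the scripted module tree (encoder, decoder, joiner)
```

For the ONNX trio, the usual entry point in sherpa-onnx is the offline transducer recognizer. A minimal sketch, assuming config.json still has the tokens.txt format it was renamed from, and using a hypothetical wav path (the actual file names under test_wavs/ are not listed here):

```python
import sherpa_onnx
import soundfile as sf

# Minimal sketch, not the Space's app.py: decode one file with the int8
# transducer models listed above.
recognizer = sherpa_onnx.OfflineRecognizer.from_transducer(
    encoder="encoder-epoch-20-avg-10.int8.onnx",
    decoder="decoder-epoch-20-avg-10.int8.onnx",
    joiner="joiner-epoch-20-avg-10.int8.onnx",
    tokens="config.json",  # assumption: renamed from tokens.txt, format unchanged
    num_threads=2,
    decoding_method="greedy_search",
)

# "example.wav" is a placeholder path, not a file known to exist in the repo.
samples, sample_rate = sf.read("test_wavs/example.wav", dtype="float32")
stream = recognizer.create_stream()
stream.accept_waveform(sample_rate, samples)
recognizer.decode_stream(stream)
print(stream.result.text)
```

The int8 variants trade some accuracy for a much smaller download (27.7 MB vs 92.2 MB for the encoder); swapping in the non-int8 file names works the same way.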