
Commit f710c3b

Add fine-tuned whisper on aishell.
See also k2-fsa/icefall#1466
1 parent 0b18ccf commit f710c3b

File tree

2 files changed (+26, -4 lines)


.github/workflows/export-whisper-to-onnx.yaml

+10 -3

@@ -15,9 +15,9 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-        os: [macos-latest]
+        os: [ubuntu-latest]
        # model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "large", "large-v1", "large-v2", "distil-large-v2"]
-        model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium"]
+        model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "medium-aishell"]
        python-version: ["3.8"]

    steps:
@@ -49,6 +49,9 @@ jobs:
        elif [[ $model == distil-small.en ]]; then
          wget -q -O distil-small-en-original-model.bin https://huggingface.co/distil-whisper/distil-small.en/resolve/main/original-model.bin
          ls -lh
+        elif [[ $model == medium-aishell ]]; then
+          wget -q -O medium-aishell.pt https://huggingface.co/yuekai/icefall_asr_aishell_whisper/resolve/main/exp_medium/epoch-10-avg-4.pt
+          ls -lh
        fi
        python3 ./export-onnx.py --model ${{ matrix.model }}
        # python3 -m onnxruntime.tools.convert_onnx_models_to_ort --optimization_style=Fixed ./
@@ -59,6 +62,7 @@ jobs:
        ls -lh distil*original-model.bin || true
        rm -rf ~/.cache/whisper
        rm -f distil*original-model.bin
+        rm -f medium-aishell.pt

        src=sherpa-onnx-whisper-${{ matrix.model }}
@@ -132,7 +136,10 @@ jobs:
        git config --global user.name "Fangjun Kuang"

        GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/sherpa-onnx-whisper-${{ matrix.model }} huggingface
-        rm -rf huggingface/*
+
+        if [[ $model != medium-aishell ]]; then
+          rm -rf huggingface/*
+        fi

        if [[ $model == large || $model == large-v1 || $model == large-v2 || $model == distil-large-v2 ]]; then
          mv $src.tar* ./huggingface
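
For the new matrix entry, the workflow simply wgets the fine-tuned checkpoint and then calls the existing export script. A rough local equivalent, sketched in Python (huggingface_hub and the local copy step are assumptions; the repo id, file path, and export command come from the diff):

    # Sketch: fetch the AIShell fine-tuned checkpoint and run the export script,
    # mirroring the CI step (which uses wget rather than huggingface_hub).
    import shutil
    import subprocess

    from huggingface_hub import hf_hub_download  # assumption; not used by the CI

    ckpt = hf_hub_download(
        repo_id="yuekai/icefall_asr_aishell_whisper",
        filename="exp_medium/epoch-10-avg-4.pt",
    )

    # export-onnx.py looks for the checkpoint at ./medium-aishell.pt
    shutil.copyfile(ckpt, "./medium-aishell.pt")
    subprocess.run(
        ["python3", "./export-onnx.py", "--model", "medium-aishell"], check=True
    )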

scripts/whisper/export-onnx.py

+16 -1

@@ -44,7 +44,9 @@ def get_args():
            "tiny", "tiny.en", "base", "base.en",
            "small", "small.en", "medium", "medium.en",
            "large", "large-v1", "large-v2",
-            "distil-medium.en", "distil-small.en", "distil-large-v2"
+            "distil-medium.en", "distil-small.en", "distil-large-v2",
+            # for fine-tuned models from icefall
+            "medium-aishell",
        ],
        # fmt: on
    )
@@ -340,6 +342,19 @@ def main():
            """
        )
        model = whisper.load_model(filename)
+    elif name == "medium-aishell":
+        filename = "./medium-aishell.pt"
+        if not Path(filename).is_file():
+            raise ValueError(
+                """
+                Please go to https://huggingface.co/yuekai/icefall_asr_aishell_whisper/tree/main/exp_medium
+                to download epoch-10-avg-4.pt
+                You can use the following command to do that:
+
+                wget -O medium-aishell.pt https://huggingface.co/yuekai/icefall_asr_aishell_whisper/resolve/main/exp_medium/epoch-10-avg-4.pt
+                """
+            )
+        model = whisper.load_model(filename)
    else:
        model = whisper.load_model(name)
    print(model.dims)
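
The added branch follows the same pattern as the existing distil-whisper cases: if the checkpoint is not on disk, raise with download instructions; otherwise hand the local .pt file to whisper.load_model, which accepts a checkpoint path as well as a stock model name. A minimal standalone sketch of that load path (file name and calls taken from the diff):

    # Sketch: load the fine-tuned AIShell checkpoint from a local file
    # instead of resolving a stock model name.
    from pathlib import Path

    import whisper  # openai-whisper, already used by export-onnx.py

    filename = "./medium-aishell.pt"
    if not Path(filename).is_file():
        raise ValueError(
            "medium-aishell.pt not found; download epoch-10-avg-4.pt from "
            "https://huggingface.co/yuekai/icefall_asr_aishell_whisper/tree/main/exp_medium"
        )

    model = whisper.load_model(filename)
    print(model.dims)  # same sanity check the export script performs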
