     strategy:
       fail-fast: false
       matrix:
-        os: [macos-latest]
+        os: [ubuntu-latest]
         # model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "large", "large-v1", "large-v2", "distil-large-v2"]
-        model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium"]
+        model: ["distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "medium-aishell"]
         python-version: ["3.8"]

     steps:

@@ -49,16 +49,27 @@ jobs:
           elif [[ $model == distil-small.en ]]; then
             wget -q -O distil-small-en-original-model.bin https://huggingface.co/distil-whisper/distil-small.en/resolve/main/original-model.bin
             ls -lh
+          elif [[ $model == medium-aishell ]]; then
+            wget -q -O medium-aishell.pt https://huggingface.co/yuekai/icefall_asr_aishell_whisper/resolve/main/exp_medium/whisper-medium-aishell1-epoch-10-avg-4.pt
+            ls -lh
           fi
           python3 ./export-onnx.py --model ${{ matrix.model }}
           # python3 -m onnxruntime.tools.convert_onnx_models_to_ort --optimization_style=Fixed ./
+          #
+          if [[ $model == medium-aishell ]]; then
+            ls -lh *.onnx
+            rm -fv medium-aishell-encoder.onnx
+            rm -fv medium-aishell-decoder.onnx
+          fi
+

           ls -lh

           ls -lh ~/.cache/whisper || true
           ls -lh distil*original-model.bin || true
           rm -rf ~/.cache/whisper
           rm -f distil*original-model.bin
+          rm -f medium-aishell.pt

           src=sherpa-onnx-whisper-${{ matrix.model }}

@@ -132,7 +143,10 @@ jobs:
           git config --global user.name "Fangjun Kuang"

           GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/sherpa-onnx-whisper-${{ matrix.model }} huggingface
-          rm -rf huggingface/*
+
+          if [[ $model != medium-aishell ]]; then
+            rm -rf huggingface/*
+          fi

           if [[ $model == large || $model == large-v1 || $model == large-v2 || $model == distil-large-v2 ]]; then
             mv $src.tar* ./huggingface
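
For reference, the medium-aishell path added above can be reproduced locally with roughly the following commands. This is a sketch, assuming it is run from the directory that contains export-onnx.py and that the script accepts "medium-aishell" as a model name, as the workflow implies:

    # Download the AIShell-1 fine-tuned Whisper checkpoint from icefall (URL taken from the diff above).
    wget -q -O medium-aishell.pt https://huggingface.co/yuekai/icefall_asr_aishell_whisper/resolve/main/exp_medium/whisper-medium-aishell1-epoch-10-avg-4.pt

    # Export to ONNX; the workflow passes the matrix model name here.
    python3 ./export-onnx.py --model medium-aishell

    # The workflow then removes the float32 encoder/decoder ONNX files for this entry,
    # keeping only the remaining exported files.
    rm -fv medium-aishell-encoder.onnx medium-aishell-decoder.onnx

    # Clean up the downloaded checkpoint, as the workflow does.
    rm -f medium-aishell.pt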