test with run.sh

pull/2034/head
Hui Zhang 3 years ago
parent 1c9eb44558
commit 5a4e35b543

@ -1,2 +1,3 @@
data data
log log
exp

@ -1,7 +1,27 @@
# DeepSpeech2 ONNX model # DeepSpeech2 ONNX model
1. convert ds2 model to ONNX, using Paddle2ONNX. 1. convert deepspeech2 model to ONNX, using Paddle2ONNX.
2. check paddleinference and onnxruntime output equal. 2. check paddleinference and onnxruntime output equal.
3. optimize onnx model
4. check paddleinference and optimized onnxruntime output equal.
Please make sure the [Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX) and [onnx-simplifier](https://github.com/zh794390558/onnx-simplifier/tree/dyn_time_shape) versions are correct.
The example was tested with these packages installed:
```
paddle2onnx 0.9.8rc0 # develop af4354b4e9a61a93be6490640059a02a4499bc7a
paddleaudio 0.2.1
paddlefsl 1.1.0
paddlenlp 2.2.6
paddlepaddle-gpu 2.2.2
paddlespeech 0.0.0 # develop
paddlespeech-ctcdecoders 0.2.0
paddlespeech-feat 0.1.0
onnx 1.11.0
onnx-simplifier 0.0.0 # https://github.com/zh794390558/onnx-simplifier/tree/dyn_time_shape
onnxoptimizer 0.2.7
onnxruntime 1.11.0
```
## Using ## Using
@ -9,8 +29,9 @@
bash run.sh bash run.sh
``` ```
For more details please see `run.sh`.
## Outputs ## Outputs
The optimized onnx model is `exp/model.opt.onnx`.
```
ls exp/model.onnx
```
To show the graph, please use `local/netron.sh`.

@ -17,4 +17,4 @@ input_shape=$3
check_n=3 check_n=3
onnxsim $in $2 $check_n --dynamic-input-shape --input-shape $input_shape onnxsim $in $out $check_n --dynamic-input-shape --input-shape $input_shape

@ -17,11 +17,18 @@ mkdir -p $data $exp
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ];then if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ];then
test -f $data/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz || wget -c https://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz -P $data test -f $data/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz || wget -c https://paddlespeech.bj.bcebos.com/s2t/wenetspeech/asr0/asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz -P $data
# wenetspeech ds2 model
pushd $data pushd $data
tar zxvf asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz tar zxvf asr0_deepspeech2_online_wenetspeech_ckpt_1.0.0a.model.tar.gz
popd popd
fi
# ds2 model demo inputs
pushd $exp
wget -c http://paddlespeech.bj.bcebos.com/speechx/examples/ds2_ol/onnx/static_ds2online_inputs.pickle
popd
fi
dir=$data/exp/deepspeech2_online/checkpoints dir=$data/exp/deepspeech2_online/checkpoints
model=avg_1.jit.pdmodel model=avg_1.jit.pdmodel
@ -46,22 +53,24 @@ if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ];then
--model_filename $model \ --model_filename $model \
--params_filename $param \ --params_filename $param \
--save_dir $exp/shape \ --save_dir $exp/shape \
--input_shape_dict=${input_shape_dict} --input_shape_dict="${input_shape_dict}"
fi fi
input_file=$exp/static_ds2online_inputs.pickle
test -e $input_file
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ];then if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ];then
# to onnx # to onnx
./local/tonnx.sh $dir $model $param $exp/model.onnx ./local/tonnx.sh $dir $model $param $exp/model.onnx
./local/infer_check.py --input_file 'static_ds2online_inputs.pickle' --model_dir $dir --onnx_model $exp/model.onnx ./local/infer_check.py --input_file $input_file --model_dir $dir --onnx_model $exp/model.onnx
fi fi
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ] ;then if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ] ;then
input_shape="audio_chunk:1,-1,161 audio_chunk_lens:1 chunk_state_c_box:5,1,1024 chunk_state_h_box:5,1,1024" input_shape="audio_chunk:1,-1,161 audio_chunk_lens:1 chunk_state_c_box:5,1,1024 chunk_state_h_box:5,1,1024"
# simplifying onnx model # simplifying onnx model
./local/onnx_opt.sh $exp/model.onnx $exp/model.opt.onnx $input_shape ./local/onnx_opt.sh $exp/model.onnx $exp/model.opt.onnx "$input_shape"
./local/infer_check.py --input_file 'static_ds2online_inputs.pickle' --model_dir $dir --onnx_model $exp/model.opt.onnx ./local/infer_check.py --input_file $input_file --model_dir $dir --onnx_model $exp/model.opt.onnx
fi fi
Loading…
Cancel
Save