Add inference scripts.

Two inference scripts are added for the AiShell example: one decodes with a locally trained checkpoint, and one first downloads the released AiShell model and decodes with it.

yangyaming 7 years ago
parent a235b3532d
commit 0a5f123764

@@ -0,0 +1,46 @@
#! /usr/bin/env bash
pushd ../.. > /dev/null
# download language model
pushd models/lm > /dev/null
sh download_lm_ch.sh
if [ $? -ne 0 ]; then
    echo "Failed to download the language model!"
    exit 1
fi
popd > /dev/null
# infer
CUDA_VISIBLE_DEVICES=0 \
python -u infer.py \
--num_samples=10 \
--trainer_count=1 \
--beam_size=300 \
--num_proc_bsearch=8 \
--num_conv_layers=2 \
--num_rnn_layers=3 \
--rnn_layer_size=1024 \
--alpha=1.4 \
--beta=2.4 \
--cutoff_prob=0.99 \
--cutoff_top_n=40 \
--use_gru=False \
--use_gpu=True \
--share_rnn_weights=False \
--infer_manifest='data/aishell/manifest.test' \
--mean_std_path='data/aishell/mean_std.npz' \
--vocab_path='data/aishell/vocab.txt' \
--model_path='checkpoints/aishell/params.latest.tar.gz' \
--lang_model_path='models/lm/zh_giga.no_cna_cmn.prune01244.klm' \
--decoding_method='ctc_beam_search' \
--error_rate_type='cer' \
--specgram_type='linear'
if [ $? -ne 0 ]; then
    echo "Failed in inference!"
    exit 1
fi
exit 0
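A minimal usage sketch for the script above. The file name and location are assumptions (e.g. examples/aishell/run_infer.sh); the leading pushd ../.. only resolves to the repository root if the script sits two directory levels below it:

# Hypothetical invocation from the example directory, two levels below the repo root,
# so that `pushd ../..` inside the script lands at the repository root.
cd examples/aishell
bash run_infer.sh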

@@ -0,0 +1,55 @@
#! /usr/bin/env bash
pushd ../.. > /dev/null
# download language model
pushd models/lm > /dev/null
sh download_lm_ch.sh
if [ $? -ne 0 ]; then
    echo "Failed to download the language model!"
    exit 1
fi
popd > /dev/null
# download well-trained model
pushd models/aishell > /dev/null
sh download_model.sh
if [ $? -ne 0 ]; then
    echo "Failed to download the well-trained model!"
    exit 1
fi
popd > /dev/null
# infer
CUDA_VISIBLE_DEVICES=0 \
python -u infer.py \
--num_samples=10 \
--trainer_count=1 \
--beam_size=300 \
--num_proc_bsearch=8 \
--num_conv_layers=2 \
--num_rnn_layers=3 \
--rnn_layer_size=1024 \
--alpha=1.4 \
--beta=2.4 \
--cutoff_prob=0.99 \
--cutoff_top_n=40 \
--use_gru=False \
--use_gpu=True \
--share_rnn_weights=False \
--infer_manifest='data/aishell/manifest.test' \
--mean_std_path='models/aishell/mean_std.npz' \
--vocab_path='models/aishell/vocab.txt' \
--model_path='models/aishell/params.tar.gz' \
--lang_model_path='models/lm/zh_giga.no_cna_cmn.prune01244.klm' \
--decoding_method='ctc_beam_search' \
--error_rate_type='cer' \
--specgram_type='linear'
if [ $? -ne 0 ]; then
    echo "Failed in inference!"
    exit 1
fi
exit 0
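This second script differs from the first only in the model it decodes with: it additionally runs the download step under models/aishell and points --mean_std_path, --vocab_path and --model_path at the downloaded release instead of a local checkpoint. A hedged usage sketch, assuming it is saved as examples/aishell/run_infer_golden.sh (the name and location are assumptions):

# Hypothetical invocation; like the first script, it must be run from a directory
# two levels below the repository root so that `pushd ../..` lands at the root.
cd examples/aishell
bash run_infer_golden.sh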