add seed in argparse

pull/783/head
huangyuxin 3 years ago
parent 9068c0d4f9
commit 2d3b2aed05

@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Trainer for DeepSpeech2 model."""
-import os
 from paddle import distributed as dist
 from deepspeech.exps.deepspeech2.config import get_cfg_defaults
@@ -55,7 +53,5 @@ if __name__ == "__main__":
     if args.dump_config:
         with open(args.dump_config, 'w') as f:
             print(config, file=f)
-    if config.training.seed is not None:
-        os.environ.setdefault('FLAGS_cudnn_deterministic', 'True')
     main(config, args)

@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Contains DeepSpeech2 and DeepSpeech2Online model."""
-import random
 import time
 from collections import defaultdict
 from pathlib import Path
@@ -54,7 +53,6 @@ class DeepSpeech2Trainer(Trainer):
     weight_decay=1e-6,  # the coeff of weight decay
     global_grad_clip=5.0,  # the global norm clip
     n_epoch=50,  # train epochs
-    seed=1024,  #train seed
 ))
 if config is not None:
@@ -63,13 +61,6 @@ class DeepSpeech2Trainer(Trainer):
     def __init__(self, config, args):
         super().__init__(config, args)
-        if config.training.seed is not None:
-            self.set_seed(config.training.seed)
-
-    def set_seed(self, seed):
-        np.random.seed(seed)
-        random.seed(seed)
-        paddle.seed(seed)
     def train_batch(self, batch_index, batch_data, msg):
         start = time.time()

@@ -16,23 +16,23 @@ import argparse
 def default_argument_parser():
     r"""A simple yet genral argument parser for experiments with parakeet.
     This is used in examples with parakeet. And it is intended to be used by
     other experiments with parakeet. It requires a minimal set of command line
     arguments to start a training script.
     The ``--config`` and ``--opts`` are used for overwrite the deault
     configuration.
     The ``--data`` and ``--output`` specifies the data path and output path.
     Resuming training from existing progress at the output directory is the
     intended default behavior.
     The ``--checkpoint_path`` specifies the checkpoint to load from.
     The ``--device`` and ``--nprocs`` specifies how to run the training.
     See Also
     --------
     parakeet.training.experiment
@@ -53,10 +53,10 @@ def default_argument_parser():
     # load from saved checkpoint
     parser.add_argument("--checkpoint_path", type=str, help="path of the checkpoint to load")
     # save jit model to
     parser.add_argument("--export_path", type=str, help="path of the jit model to save")
     # save asr result to
     parser.add_argument("--result_file", type=str, help="path of save the asr result")
     # running
@@ -65,10 +65,13 @@ def default_argument_parser():
     parser.add_argument("--nprocs", type=int, default=1, help="number of parallel processes to use.")
     # overwrite extra config and default config
     # parser.add_argument("--opts", nargs=argparse.REMAINDER,
     #     help="options to overwrite --config file and the default config, passing in KEY VALUE pairs")
     parser.add_argument("--opts", type=str, default=[], nargs='+',
                         help="options to overwrite --config file and the default config, passing in KEY VALUE pairs")
+    parser.add_argument("--seed", type=int, default=None,
+                        help="seed to use for paddle, np and random. The default value is None")
     # yapd: enable
     return parser
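A minimal sketch of how the new flag is consumed, assuming the parser above lives in deepspeech/training/cli.py (the file path is not shown in this view); only the option names come from the diff:

# Hedged sketch: build the parser from the diff above and read the new --seed flag.
from deepspeech.training.cli import default_argument_parser  # assumed import path

parser = default_argument_parser()
args = parser.parse_args(["--seed", "1024"])

print(args.seed)                    # 1024
print(parser.parse_args([]).seed)   # None, so seeding stays opt-in

Because the default is None, scripts that do not pass --seed keep their previous non-deterministic behaviour.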

@@ -11,9 +11,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import random
 import time
 from pathlib import Path
+import numpy as np
 import paddle
 from paddle import distributed as dist
 from tensorboardX import SummaryWriter
@@ -93,6 +95,13 @@ class Trainer():
         self.checkpoint_dir = None
         self.iteration = 0
         self.epoch = 0
+        if args.seed is not None:
+            self.set_seed(args.seed)
+
+    def set_seed(self, seed):
+        np.random.seed(seed)
+        random.seed(seed)
+        paddle.seed(seed)
     def setup(self):
         """Setup the experiment.

@@ -19,12 +19,22 @@ fi
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
 --output exp/${ckpt_name} \
---model_type ${model_type}
+--model_type ${model_type} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -20,12 +20,22 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
 --output exp/${ckpt_name} \
---model_type ${model_type}
+--model_type ${model_type} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,11 +19,21 @@ echo "using ${device}..."
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -19,12 +19,22 @@ fi
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
 --output exp/${ckpt_name} \
---model_type ${model_type}
+--model_type ${model_type} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"

@@ -18,11 +18,21 @@ fi
 mkdir -p exp
+seed=1024
+if [ ${seed} ]; then
+    export FLAGS_cudnn_deterministic=True
+fi
 python3 -u ${BIN_DIR}/train.py \
 --device ${device} \
 --nproc ${ngpu} \
 --config ${config_path} \
---output exp/${ckpt_name}
+--output exp/${ckpt_name} \
+--seed ${seed}
+if [ ${seed} ]; then
+    unset FLAGS_cudnn_deterministic
+fi
 if [ $? -ne 0 ]; then
     echo "Failed in training!"
