[asr] rm useless comment
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr     # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
@@ -80,7 +80,7 @@ training:
     lr: 0.002
@@ -88,7 +88,7 @@ training:
@@ -83,7 +83,7 @@ training:
     weight_decay: 1e-06
@@ -86,7 +86,7 @@ training:
     lr: 0.004
@@ -75,7 +75,7 @@ optim: adam
   global_grad_clip: 5.0
     weight_decay: 1.0e-06
@@ -82,7 +82,7 @@ training:
     warmup_steps: 1200
@@ -87,7 +87,7 @@ training:
     warmup_steps: 5000
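
Note: the `scheduler: warmuplr` / `warmup_steps` settings these hunks touch usually denote a Noam-style schedule that ramps the learning rate up linearly for `warmup_steps` optimizer steps and then decays it with the inverse square root of the step count. A minimal sketch, assuming that standard formula (the function below is illustrative only, not the project's actual scheduler class):

    # Illustrative Noam-style warmup schedule (assumed formula, not
    # necessarily the exact implementation behind `scheduler: warmuplr`).
    def warmup_lr(step: int, base_lr: float = 0.001, warmup_steps: int = 25000) -> float:
        """Return the learning rate for optimizer step `step` (step >= 1)."""
        # Linear warmup for the first `warmup_steps` steps, then inverse-sqrt decay.
        return base_lr * warmup_steps**0.5 * min(step**-0.5, step * warmup_steps**-1.5)

    # Example: with base_lr=0.001 and warmup_steps=25000 as in the first hunk,
    # the schedule peaks at base_lr exactly when step == warmup_steps.
    for step in (1, 1000, 25000, 100000):
        print(step, warmup_lr(step))

Under that formula the peak learning rate equals `lr` from `optim_conf` and is reached exactly at `warmup_steps`, which is why only the comment, not the schedule itself, is removed in this change.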