From 3a5258f6a00ba0c660bd92d241e3b24cf0554520 Mon Sep 17 00:00:00 2001
From: Hui Zhang
Date: Wed, 15 Sep 2021 11:36:35 +0000
Subject: [PATCH] lr and opt param will restore from ckpt, so we do not set lr
 manually

---
 deepspeech/exps/u2/model.py    | 5 +++--
 deepspeech/training/trainer.py | 4 +++-
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/deepspeech/exps/u2/model.py b/deepspeech/exps/u2/model.py
index 67b666ed..1328a1cb 100644
--- a/deepspeech/exps/u2/model.py
+++ b/deepspeech/exps/u2/model.py
@@ -182,9 +182,10 @@ class U2Trainer(Trainer):
         from_scratch = self.resume_or_scratch()
         if from_scratch:
             # save init model, i.e. 0 epoch
-            self.save(tag='init')
+            self.save(tag='init', infos=None)
 
-        self.lr_scheduler.step(self.iteration)
+        # lr will restore from optimizer ckpt
+        # self.lr_scheduler.step(self.iteration)
         if self.parallel and hasattr(self.train_loader, 'batch_sampler'):
             self.train_loader.batch_sampler.set_epoch(self.epoch)
 
diff --git a/deepspeech/training/trainer.py b/deepspeech/training/trainer.py
index b31ddcad..6587f129 100644
--- a/deepspeech/training/trainer.py
+++ b/deepspeech/training/trainer.py
@@ -194,7 +194,9 @@ class Trainer():
         if from_scratch:
             # save init model, i.e. 0 epoch
             self.save(tag='init', infos=None)
-        self.lr_scheduler.step(self.epoch)
+
+        # lr will restore from optimizer ckpt
+        # self.lr_scheduler.step(self.epoch)
         if self.parallel and hasattr(self.train_loader, "batch_sampler"):
             self.train_loader.batch_sampler.set_epoch(self.epoch)
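
Note: the patch relies on the learning-rate schedule being carried inside the optimizer checkpoint, so stepping the scheduler manually after a resume would advance the schedule twice. Below is a minimal sketch of that save/resume pattern in PaddlePaddle; the `ckpt_prefix` path and the toy `Linear` model are illustrative, not from the deepspeech codebase, and the exact contents of the optimizer state dict may vary by Paddle version.

```python
import paddle

# Illustrative model, scheduler, and optimizer; in deepspeech these are
# built by the Trainer, here we use stand-ins.
model = paddle.nn.Linear(10, 10)
lr_scheduler = paddle.optimizer.lr.ExponentialDecay(
    learning_rate=1e-3, gamma=0.9)
optimizer = paddle.optimizer.Adam(
    learning_rate=lr_scheduler, parameters=model.parameters())

ckpt_prefix = "checkpoints/epoch_5"  # hypothetical checkpoint path

# --- save side ---
paddle.save(model.state_dict(), ckpt_prefix + ".pdparams")
# When the optimizer is constructed with an LRScheduler, its state dict
# also carries the scheduler's position, so saving it preserves the
# learning-rate schedule alongside the moment buffers.
paddle.save(optimizer.state_dict(), ckpt_prefix + ".pdopt")

# --- resume side ---
model.set_state_dict(paddle.load(ckpt_prefix + ".pdparams"))
optimizer.set_state_dict(paddle.load(ckpt_prefix + ".pdopt"))
# The scheduler is now back at its checkpointed position. Calling
# lr_scheduler.step(epoch) here, as the pre-patch Trainer did, would
# move the schedule forward a second time, which is why the patch
# comments that call out.
```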