lr and optimizer params will be restored from the ckpt, so we do not set lr manually

pull/831/head
Hui Zhang 3 years ago
parent 7907319288
commit 3a5258f6a0
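
For context on the hunks below: restoring the optimizer state from a checkpoint also restores the learning-rate schedule state, so after a resume the lr is already where it should be, and stepping the scheduler again would advance the schedule one extra tick. A minimal sketch of that behavior, assuming Paddle 2.x; it is not the repo's Trainer, and the checkpoint filename and dict layout are made up for illustration:

```python
# Minimal sketch of why the manual lr_scheduler.step() below is dropped.
# Assumes Paddle 2.x; "demo.ckpt" and the saved-dict layout are illustrative,
# not the repo's actual checkpoint format.
import paddle

model = paddle.nn.Linear(4, 4)
scheduler = paddle.optimizer.lr.ExponentialDecay(learning_rate=0.1, gamma=0.9)
optimizer = paddle.optimizer.Adam(
    learning_rate=scheduler, parameters=model.parameters())

# Train a few steps, advancing the schedule each step.
for _ in range(3):
    scheduler.step()

# Save side: checkpoint optimizer and scheduler state together.
paddle.save({"opt": optimizer.state_dict(),
             "lr_sched": scheduler.state_dict()}, "demo.ckpt")

# Resume side: restoring the state also restores the current lr, so
# calling scheduler.step(...) again here would double-advance the schedule.
state = paddle.load("demo.ckpt")
optimizer.set_state_dict(state["opt"])
scheduler.set_state_dict(state["lr_sched"])
print(optimizer.get_lr())  # already 0.1 * 0.9**3; no manual step needed
```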

@@ -182,9 +182,10 @@ class U2Trainer(Trainer):
         from_scratch = self.resume_or_scratch()
         if from_scratch:
             # save init model, i.e. 0 epoch
-            self.save(tag='init')
+            self.save(tag='init', infos=None)
-        self.lr_scheduler.step(self.iteration)
+        # lr will be restored from the optimizer ckpt
+        # self.lr_scheduler.step(self.iteration)
         if self.parallel and hasattr(self.train_loader, 'batch_sampler'):
             self.train_loader.batch_sampler.set_epoch(self.epoch)

@@ -194,7 +194,9 @@ class Trainer():
         if from_scratch:
             # save init model, i.e. 0 epoch
             self.save(tag='init', infos=None)
-        self.lr_scheduler.step(self.epoch)
+        # lr will be restored from the optimizer ckpt
+        # self.lr_scheduler.step(self.epoch)
         if self.parallel and hasattr(self.train_loader, "batch_sampler"):
             self.train_loader.batch_sampler.set_epoch(self.epoch)
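
Both hunks sit behind the same resume-or-scratch guard. For readers outside the repo, a hedged sketch of what that guard typically does; `resume_or_scratch` is the repo's method, but the body, checkpoint path, and dict keys below are assumptions, not the actual implementation:

```python
# Hedged sketch of the resume-or-scratch guard used in both hunks above.
# Only the control flow mirrors the diff; the checkpoint layout and the
# "latest.ckpt" path are hypothetical.
import paddle


def resume_or_scratch(self):
    """Try to restore model/optimizer state from the latest checkpoint.

    Returns True when no checkpoint exists (training from scratch),
    False when state was restored -- including the lr, which is why the
    manual lr_scheduler.step() after this call could be removed.
    """
    ckpt = self.checkpoint_dir / "latest.ckpt"  # hypothetical path
    if not ckpt.exists():
        return True
    state = paddle.load(str(ckpt))
    self.model.set_state_dict(state["model"])
    self.optimizer.set_state_dict(state["opt"])  # brings the lr back too
    self.iteration = state.get("iteration", 0)
    return False
```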
