Fix the resume bug: the learning-rate schedule depends on the epoch, not the iteration

pull/753/head
huangyuxin 3 years ago
parent 61fe292c47
commit e1a2cfef7f

@@ -123,10 +123,6 @@ class DeepSpeech2Trainer(Trainer):
def setup_model(self):
config = self.config.clone()
config.defrost()
assert (self.train_loader.collate_fn.feature_size ==
self.test_loader.collate_fn.feature_size)
assert (self.train_loader.collate_fn.vocab_size ==
self.test_loader.collate_fn.vocab_size)
config.model.feat_size = self.train_loader.collate_fn.feature_size
config.model.dict_size = self.train_loader.collate_fn.vocab_size
config.freeze()

@@ -181,8 +181,7 @@ class Trainer():
if from_scratch:
# save init model, i.e. 0 epoch
self.save(tag='init', infos=None)
-        self.lr_scheduler.step(self.iteration)
+        self.lr_scheduler.step(self.epoch)
if self.parallel:
self.train_loader.batch_sampler.set_epoch(self.epoch)

Loading…
Cancel
Save