From 5912ba53e465019d2eb24a1b47d354bc9681678b Mon Sep 17 00:00:00 2001
From: huangyuxin
Date: Sun, 24 Apr 2022 03:10:55 +0000
Subject: [PATCH] fix log_interval and lr when resume training, test=asr

---
 paddlespeech/s2t/training/trainer.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/paddlespeech/s2t/training/trainer.py b/paddlespeech/s2t/training/trainer.py
index de90c9ef..84da251a 100644
--- a/paddlespeech/s2t/training/trainer.py
+++ b/paddlespeech/s2t/training/trainer.py
@@ -289,7 +289,8 @@ class Trainer():
                                 float) else f"{v}"
                             msg += ","
                         msg = msg[:-1]  # remove the last ","
-                        logger.info(msg)
+                        if (batch_index + 1) % self.config.log_interval == 0:
+                            logger.info(msg)
                         data_start_time = time.time()
                 except Exception as e:
                     logger.error(e)
@@ -316,10 +317,10 @@ class Trainer():
                     self.visualizer.add_scalar(
                         tag='eval/lr', value=self.lr_scheduler(), step=self.epoch)
 
-            # after epoch
-            self.save(tag=self.epoch, infos={'val_loss': cv_loss})
             # step lr every epoch
             self.lr_scheduler.step()
+            # after epoch
+            self.save(tag=self.epoch, infos={'val_loss': cv_loss})
             self.new_epoch()
 
     def run(self):