From eb932c6c0d6313d44b4bb143899ce34440040bb4 Mon Sep 17 00:00:00 2001
From: 0x45f
Date: Wed, 22 Feb 2023 09:36:40 +0000
Subject: [PATCH] Fix dy2st error for taco2

---
 paddlespeech/t2s/modules/tacotron2/attentions.py | 2 +-
 paddlespeech/t2s/modules/tacotron2/decoder.py    | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/paddlespeech/t2s/modules/tacotron2/attentions.py b/paddlespeech/t2s/modules/tacotron2/attentions.py
index 2256d8ea8..691139f10 100644
--- a/paddlespeech/t2s/modules/tacotron2/attentions.py
+++ b/paddlespeech/t2s/modules/tacotron2/attentions.py
@@ -194,7 +194,7 @@ class AttLoc(nn.Layer):
             e = masked_fill(e, self.mask, -float("inf"))
 
         # apply monotonic attention constraint (mainly for TTS)
-        if last_attended_idx is not None:
+        if last_attended_idx != -1:
             e = _apply_attention_constraint(e, last_attended_idx,
                                             backward_window, forward_window)
 
diff --git a/paddlespeech/t2s/modules/tacotron2/decoder.py b/paddlespeech/t2s/modules/tacotron2/decoder.py
index 6118a004e..15e29194d 100644
--- a/paddlespeech/t2s/modules/tacotron2/decoder.py
+++ b/paddlespeech/t2s/modules/tacotron2/decoder.py
@@ -556,13 +556,15 @@ class Decoder(nn.Layer):
         if use_att_constraint:
             last_attended_idx = 0
         else:
-            last_attended_idx = None
+            last_attended_idx = -1
 
         # loop for an output sequence
         idx = 0
         outs, att_ws, probs = [], [], []
         prob = paddle.zeros([1])
         while paddle.to_tensor(True):
+            z_list = z_list
+            c_list = c_list
             # updated index
             idx += self.reduction_factor