float_mul_bool type promotion: lhs type promotes to rhs type, https://github.com/PaddlePaddle/Paddle/pull/29265

pull/816/head
Hui Zhang 3 years ago
parent 75cd366ddd
commit 6e4a3aff07

@@ -106,11 +106,9 @@ class ConvBn(nn.Layer):
         # reset padding part to 0
         masks = make_non_pad_mask(x_len)  #[B, T]
         masks = masks.unsqueeze(1).unsqueeze(1)  # [B, 1, 1, T]
-        # TODO(Hui Zhang): not support bool multiply
-        # masks = masks.type_as(x)
-        masks = masks.astype(x.dtype)
-        x = x.multiply(masks)
+        # https://github.com/PaddlePaddle/Paddle/pull/29265
+        # rhs will type promote to lhs
+        x = x * masks
         return x, x_len

@@ -308,7 +308,8 @@ class RNNStack(nn.Layer):
         x, x_len = rnn(x, x_len)
         masks = make_non_pad_mask(x_len)  #[B, T]
         masks = masks.unsqueeze(-1)  # [B, T, 1]
-        # TODO(Hui Zhang): not support bool multiply
-        masks = masks.astype(x.dtype)
-        x = x.multiply(masks)
+        # https://github.com/PaddlePaddle/Paddle/pull/29265
+        # rhs will type promote to lhs
+        x = x * masks
         return x, x_len

@@ -113,11 +113,9 @@ class ConvBn(nn.Layer):
         # reset padding part to 0
         masks = make_non_pad_mask(x_len)  #[B, T]
         masks = masks.unsqueeze(1).unsqueeze(1)  # [B, 1, 1, T]
-        # TODO(Hui Zhang): not support bool multiply
-        # masks = masks.type_as(x)
-        masks = masks.astype(x.dtype)
-        x = x.multiply(masks)
+        # https://github.com/PaddlePaddle/Paddle/pull/29265
+        # rhs will type promote to lhs
+        x = x * masks
         return x, x_len

@@ -46,7 +46,6 @@ class CTCLoss(nn.Layer):
         # warp-ctc need activation with shape [T, B, V + 1]
         # logits: (B, L, D) -> (L, B, D)
         logits = logits.transpose([1, 0, 2])
-        # (TODO:Hui Zhang) ctc loss does not support int64 labels
         ys_pad = ys_pad.astype(paddle.int32)
         loss = self.loss(
             logits, ys_pad, hlens, ys_lens, norm_by_times=self.batch_average)

@@ -308,7 +308,7 @@ class RNNStack(nn.Layer):
         x, x_len = rnn(x, x_len)
         masks = make_non_pad_mask(x_len)  #[B, T]
         masks = masks.unsqueeze(-1)  # [B, T, 1]
-        # TODO(Hui Zhang): not support bool multiply
-        masks = masks.astype(x.dtype)
-        x = x.multiply(masks)
+        # https://github.com/PaddlePaddle/Paddle/pull/29265
+        # rhs will type promote to lhs
+        x = x * masks
         return x, x_len

Loading…
Cancel
Save