fix missing code, test=asr

pull/2755/head
yeyupiaoling 3 years ago
parent ccc1571594
commit 7b1519b858

@@ -55,12 +55,13 @@ class PositionwiseFeedForward(nn.Layer):
         self.dropout = nn.Dropout(dropout_rate)
         self.w_2 = Linear(hidden_units, idim)
         self.adaptive_scale = adaptive_scale
-        ada_scale = self.create_parameter(
-            [1, 1, idim], default_initializer=I.XavierUniform())
-        self.add_parameter('ada_scale', ada_scale)
-        ada_bias = self.create_parameter(
-            [1, 1, idim], default_initializer=I.XavierUniform())
-        self.add_parameter('ada_bias', ada_bias)
+        if self.adaptive_scale:
+            ada_scale = self.create_parameter(
+                [1, 1, idim], default_initializer=I.XavierUniform())
+            self.add_parameter('ada_scale', ada_scale)
+            ada_bias = self.create_parameter(
+                [1, 1, idim], default_initializer=I.XavierUniform())
+            self.add_parameter('ada_bias', ada_bias)
         if init_weights:
             self.init_weights()
@@ -84,4 +85,6 @@ class PositionwiseFeedForward(nn.Layer):
         Returns:
             output tensor, (B, Lmax, D)
         """
+        if self.adaptive_scale:
+            xs = self.ada_scale * xs + self.ada_bias
         return self.w_2(self.dropout(self.activation(self.w_1(xs))))
