fix attention.py validation bug

pull/2543/head
tianhao zhang 3 years ago
parent e1a70ca1ed
commit 62c1b6c6ee

@@ -56,15 +56,17 @@ class MultiHeadedAttention(nn.Layer):
         self.linear_out = Linear(n_feat, n_feat)
         self.dropout = nn.Dropout(p=dropout_rate)
 
-    def _build_once(self, *args, **kwargs):
-        super()._build_once(*args, **kwargs)
-        # if self.self_att:
-        #     self.linear_kv = Linear(self.n_feat, self.n_feat*2)
-        if not self.training:
-            self.weight = paddle.concat(
-                [self.linear_k.weight, self.linear_v.weight], axis=-1)
-            self.bias = paddle.concat([self.linear_k.bias, self.linear_v.bias])
-        self._built = True
+        if self.training:
+            self.train_stage = True
+        else:
+            self.build_kv()
+            self.train_stage = False
+        # self._built = True
+
+    def build_kv(self):
+        self.weight = paddle.concat(
+            [self.linear_k.weight, self.linear_v.weight], axis=-1)
+        self.bias = paddle.concat([self.linear_k.bias, self.linear_v.bias])
 
     def forward_qkv(self,
                     query: paddle.Tensor,
@@ -88,12 +90,23 @@ class MultiHeadedAttention(nn.Layer):
         q = self.linear_q(query).view(n_batch, -1, self.h, self.d_k)
         if self.training:
+            if not self.train_stage:
+                del self.weight
+                del self.bias
+                self.train_stage = True
             k = self.linear_k(key).view(n_batch, -1, self.h, self.d_k)
             v = self.linear_v(value).view(n_batch, -1, self.h, self.d_k)
         else:
-            k, v = F.linear(key, self.weight, self.bias).view(
-                n_batch, -1, 2 * self.h, self.d_k).split(
-                    2, axis=2)
+            if self.train_stage:
+                self.build_kv()
+                self.train_stage = False
+            weight = paddle.concat(
+                [self.linear_k.weight, self.linear_v.weight], axis=-1)
+            bias = paddle.concat([self.linear_k.bias, self.linear_v.bias])
+            k, v = F.linear(key, weight, bias).view(n_batch, -1, 2 * self.h,
+                                                    self.d_k).split(
+                                                        2, axis=2)
         q = q.transpose([0, 2, 1, 3])  # (batch, head, time1, d_k)
         k = k.transpose([0, 2, 1, 3])  # (batch, head, time2, d_k)
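
For reference, a minimal self-contained sketch of the pattern this commit adjusts: the K and V projection weights are concatenated so that, at validation time, both projections run as a single matmul via paddle.nn.functional.linear, and a train_stage flag rebuilds or drops the concatenated copy whenever the layer switches between train and eval so a stale copy is never reused. The FusedKV class below is illustrative only (it is not code from the PR); it assumes plain paddle.nn.Linear layers and uses reshape/paddle.split instead of the repo's view/split wrappers, and it keeps the cached self.weight/self.bias on the eval path rather than rebuilding a local copy each call as the patched forward_qkv does.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F


class FusedKV(nn.Layer):
    """Toy layer: per-head K/V projections with a fused eval-time path."""

    def __init__(self, n_feat: int, n_head: int):
        super().__init__()
        assert n_feat % n_head == 0
        self.h = n_head
        self.d_k = n_feat // n_head
        self.linear_k = nn.Linear(n_feat, n_feat)
        self.linear_v = nn.Linear(n_feat, n_feat)
        self.train_stage = True  # True => the fused weight/bias cache is absent

    def build_kv(self):
        # Cache [W_k | W_v] and [b_k | b_v] so eval computes K and V in one matmul.
        self.weight = paddle.concat(
            [self.linear_k.weight, self.linear_v.weight], axis=-1)
        self.bias = paddle.concat([self.linear_k.bias, self.linear_v.bias])

    def forward(self, key: paddle.Tensor, value: paddle.Tensor):
        n_batch = key.shape[0]
        if self.training:
            if not self.train_stage:
                # Back in training mode: drop the cache so it is rebuilt from
                # the (possibly updated) projection weights on the next eval.
                del self.weight
                del self.bias
                self.train_stage = True
            k = self.linear_k(key).reshape([n_batch, -1, self.h, self.d_k])
            v = self.linear_v(value).reshape([n_batch, -1, self.h, self.d_k])
        else:
            if self.train_stage:
                self.build_kv()
                self.train_stage = False
            kv = F.linear(key, self.weight, self.bias)
            k, v = paddle.split(
                kv.reshape([n_batch, -1, 2 * self.h, self.d_k]), 2, axis=2)
        return k, v

With a toy input such as layer = FusedKV(8, 2); x = paddle.randn([3, 5, 8]), calling layer.eval(); layer(x, x) builds the fused cache on first use, and a later layer.train(); layer(x, x) discards it, which appears to be the train/eval bookkeeping the commit's validation fix restores.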
