Undo location attention update ordering: apply location attention after forward attention

This commit is contained in:
Thomas Werkmeister 2019-07-25 13:04:41 +02:00
parent f3dac0aa84
commit ab42396fbf
1 changed file with 4 additions and 3 deletions

View File

@ -248,14 +248,15 @@ class Attention(nn.Module):
dim=1, keepdim=True)
else:
raise ValueError("Unknown value for attention norm type")
if self.location_attention:
self.update_location_attention(alignment)
# apply forward attention if enabled
if self.forward_attn:
alignment = self.apply_forward_attention(alignment)
self.alpha = alignment
if self.location_attention:
self.update_location_attention(alignment)
context = torch.bmm(alignment.unsqueeze(1), inputs)
context = context.squeeze(1)
self.attention_weights = alignment