Attention bias changed

This commit is contained in:
Eren Golge 2019-01-22 18:18:21 +01:00
parent 83d6f1dcee
commit 66f8d0e260
1 changed files with 3 additions and 3 deletions

View File

@ -52,9 +52,9 @@ class LocationSensitiveAttention(nn.Module):
stride=1,
padding=0,
bias=False))
self.loc_linear = nn.Linear(filters, attn_dim, bias=False)
self.query_layer = nn.Linear(query_dim, attn_dim, bias=False)
self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=False)
self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
self.v = nn.Linear(attn_dim, 1, bias=True)
self.processed_annots = None
# self.init_layers()