From d28bbe09fb6150175f63cf5ed43a604150f6ef4f Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Wed, 6 Feb 2019 16:23:01 +0100
Subject: [PATCH] Attention bias setting

Revert to old
---
 layers/attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/layers/attention.py b/layers/attention.py
index cb2b0f80..d64f6894 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -55,7 +55,7 @@ class LocationSensitiveAttention(nn.Module):
         self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
         self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
         self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
-        self.v = nn.Linear(attn_dim, 1, bias=True)
+        self.v = nn.Linear(attn_dim, 1, bias=False)
         self.processed_annots = None
         # self.init_layers()
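
For context, the change above drops the bias on the final energy projection v. In the
usual Bahdanau-style formulation the alignment energies are
e_i = v^T tanh(W_q q + W_a a_i + W_l f_i); a bias on v adds the same constant to every
e_i, and softmax is invariant to a constant shift, so the parameter is redundant for the
attention weights. Below is a minimal sketch of how the four patched layers typically
fit together in location-sensitive attention; the forward pass, the loc_conv layer, and
all names and tensor shapes are assumptions for illustration, not code from this
repository.

import torch
import torch.nn as nn
import torch.nn.functional as F

class LocationSensitiveAttentionSketch(nn.Module):
    """Illustrative Bahdanau-style location-sensitive attention.

    Layer names mirror the patched module; everything else (conv
    hyperparameters, forward pass, shapes) is assumed for the sketch.
    """

    def __init__(self, query_dim, annot_dim, attn_dim,
                 kernel_size=31, filters=32):
        super().__init__()
        # Convolution over the previous alignment supplies the
        # "location" features f_i (assumed layer, not in the patch).
        self.loc_conv = nn.Conv1d(1, filters, kernel_size,
                                  padding=(kernel_size - 1) // 2, bias=False)
        self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
        self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
        self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
        # bias=False as in the patch: a bias here shifts every energy
        # by the same amount, which softmax cancels out.
        self.v = nn.Linear(attn_dim, 1, bias=False)

    def forward(self, query, annots, prev_align):
        # query: (B, query_dim), annots: (B, T, annot_dim), prev_align: (B, T)
        loc_feats = self.loc_conv(prev_align.unsqueeze(1)).transpose(1, 2)
        energies = self.v(torch.tanh(
            self.query_layer(query).unsqueeze(1)   # broadcast over T
            + self.annot_layer(annots)
            + self.loc_linear(loc_feats)
        )).squeeze(-1)                              # (B, T)
        align = F.softmax(energies, dim=-1)         # attention weights
        # Context vector: alignment-weighted sum of the annotations.
        context = torch.bmm(align.unsqueeze(1), annots).squeeze(1)
        return context, align

A quick shape check with dummy tensors, again purely illustrative:

attn = LocationSensitiveAttentionSketch(query_dim=256, annot_dim=512, attn_dim=128)
q = torch.randn(2, 256)
a = torch.randn(2, 40, 512)
prev = torch.softmax(torch.randn(2, 40), dim=-1)
context, align = attn(q, a, prev)  # context: (2, 512), align: (2, 40)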