diff --git a/layers/attention.py b/layers/attention.py
index cb2b0f80..d64f6894 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -55,7 +55,7 @@ class LocationSensitiveAttention(nn.Module):
         self.loc_linear = nn.Linear(filters, attn_dim, bias=True)
         self.query_layer = nn.Linear(query_dim, attn_dim, bias=True)
         self.annot_layer = nn.Linear(annot_dim, attn_dim, bias=True)
-        self.v = nn.Linear(attn_dim, 1, bias=True)
+        self.v = nn.Linear(attn_dim, 1, bias=False)
         self.processed_annots = None
         # self.init_layers()
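
This change drops the scalar bias on the final `v` projection. In the additive (Bahdanau-style) energy used by location-sensitive attention, `v` reduces each `attn_dim` vector to a single alignment score, and the scores are then normalized with a softmax over the encoder timesteps. A bias on `v` adds the same constant to every score, and softmax is shift-invariant, so that parameter can never affect the alignment weights: it is dead weight. A minimal sketch of why, assuming the usual energy-then-softmax pipeline (the tensor names and sizes below are illustrative, not taken from the repo):

```python
import torch
import torch.nn as nn

# Illustrative shapes; `processed` stands in for the summed, pre-tanh
# projections (query + annotations + location features) that feed self.v.
attn_dim, batch, steps = 128, 2, 7
processed = torch.randn(batch, steps, attn_dim)

v_bias = nn.Linear(attn_dim, 1, bias=True)
v_nobias = nn.Linear(attn_dim, 1, bias=False)
with torch.no_grad():
    v_nobias.weight.copy_(v_bias.weight)  # identical weights; only the bias differs

# Energies per timestep: the biased version is the unbiased one shifted by a constant.
e_bias = v_bias(torch.tanh(processed)).squeeze(-1)      # (batch, steps)
e_nobias = v_nobias(torch.tanh(processed)).squeeze(-1)  # (batch, steps)

# softmax(x + c) == softmax(x), so the alignments are identical either way.
a_bias = torch.softmax(e_bias, dim=-1)
a_nobias = torch.softmax(e_nobias, dim=-1)
print(torch.allclose(a_bias, a_nobias, atol=1e-6))  # True
```

The other linear layers keep `bias=True` because their outputs pass through the `tanh` nonlinearity before the reduction to a score, where a bias does shift the result.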