From f377cd3cb8652adc061ac2c2933e603410733895 Mon Sep 17 00:00:00 2001 From: Eren Date: Thu, 6 Sep 2018 15:27:15 +0200 Subject: [PATCH] larger attention filter size and more filters --- layers/attention.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/layers/attention.py b/layers/attention.py index 26e6c5d2..f8fecb83 100644 --- a/layers/attention.py +++ b/layers/attention.py @@ -37,8 +37,8 @@ class LocationSensitiveAttention(nn.Module): annot_dim, query_dim, attn_dim, - kernel_size=7, - filters=20): + kernel_size=31, + filters=32): super(LocationSensitiveAttention, self).__init__() self.kernel_size = kernel_size self.filters = filters