From 0f933106caccdb5854db2d334733eb284a41893f Mon Sep 17 00:00:00 2001 From: Eren Golge Date: Wed, 23 May 2018 06:17:01 -0700 Subject: [PATCH] Configurable alignment method --- layers/attention.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/layers/attention.py b/layers/attention.py index e0d5e52c..6b9ee47b 100644 --- a/layers/attention.py +++ b/layers/attention.py @@ -105,7 +105,7 @@ class AttentionRNN(nn.Module): # Alignment # (batch, max_time) # e_{ij} = a(s_{i-1}, h_j) - if attnetion_vec is None: + if self.align_model == 'b': alignment = self.alignment_model(annotations, rnn_output) else: alignment = self.alignment_model(annotations, rnn_output, attention_vec)