From d8c460442a178464c0cfbfe85b45d00d07d4fa48 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Wed, 23 May 2018 06:16:39 -0700
Subject: [PATCH] Commenting the attention code

---
 layers/attention.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/layers/attention.py b/layers/attention.py
index 2a0cec3d..e0d5e52c 100644
--- a/layers/attention.py
+++ b/layers/attention.py
@@ -70,6 +70,15 @@ class LocationSensitiveAttention(nn.Module):
 
 class AttentionRNN(nn.Module):
     def __init__(self, out_dim, annot_dim, memory_dim, align_model):
+        r"""
+        General Attention RNN wrapper.
+
+        Args:
+            out_dim (int): context vector feature dimension.
+            annot_dim (int): annotation vector feature dimension.
+            memory_dim (int): memory vector (decoder autoregression) feature dimension.
+            align_model (str): 'b' for Bahdanau, 'ls' for Location Sensitive alignment.
+        """
         super(AttentionRNN, self).__init__()
         self.rnn_cell = nn.GRUCell(out_dim + memory_dim, out_dim)
         # pick bahdanau or location sensitive attention
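
For reference, a minimal usage sketch of the constructor documented by this patch. It assumes layers/attention.py is importable as layers.attention from the repository root; the dimension values are illustrative placeholders, not taken from the patch:

    from layers.attention import AttentionRNN

    # Illustrative dimensions only; choose values matching your encoder/decoder.
    attn_rnn = AttentionRNN(
        out_dim=256,       # context vector feature dimension
        annot_dim=256,     # annotation vector feature dimension
        memory_dim=256,    # memory vector (decoder autoregression) feature dimension
        align_model='ls',  # 'b' for Bahdanau, 'ls' for Location Sensitive alignment
    )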