From 82cde95cfa3c1c58f2aabc3c4e2a6048693a65e2 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Tue, 19 Mar 2019 12:21:36 +0100
Subject: [PATCH] add bias to attention v

---
 layers/tacotron2.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/layers/tacotron2.py b/layers/tacotron2.py
index 9f582432..8275788b 100644
--- a/layers/tacotron2.py
+++ b/layers/tacotron2.py
@@ -118,7 +118,7 @@ class Attention(nn.Module):
             attention_rnn_dim, attention_dim, bias=False, init_gain='tanh')
         self.inputs_layer = Linear(
             embedding_dim, attention_dim, bias=False, init_gain='tanh')
-        self.v = Linear(attention_dim, 1, bias=False)
+        self.v = Linear(attention_dim, 1, bias=True)
         self.location_layer = LocationLayer(attention_location_n_filters,
                                             attention_location_kernel_size,
                                             attention_dim)
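
For context, a minimal sketch of where the `v` projection sits in additive
(Bahdanau-style) attention scoring. Only the three projection layers mirror the
patched constructor; the class name, forward signature, and tensor shapes are
illustrative assumptions, and the location-sensitive term from the real
Attention module is omitted for brevity.

    import torch
    import torch.nn as nn

    class AdditiveAttentionSketch(nn.Module):
        """Sketch of the additive scoring that self.v participates in.

        Names and shapes are assumptions for illustration; only the three
        projections mirror the patched constructor in layers/tacotron2.py.
        """

        def __init__(self, attention_rnn_dim, embedding_dim, attention_dim):
            super().__init__()
            self.query_layer = nn.Linear(attention_rnn_dim, attention_dim, bias=False)
            self.inputs_layer = nn.Linear(embedding_dim, attention_dim, bias=False)
            # The patch flips this bias from False to True, so every energy
            # gains the same learned offset b: e_i = v^T tanh(Wq + Vx_i) + b.
            self.v = nn.Linear(attention_dim, 1, bias=True)

        def forward(self, query, inputs):
            # query: [B, attention_rnn_dim]; inputs: [B, T, embedding_dim]
            processed = self.query_layer(query).unsqueeze(1) + self.inputs_layer(inputs)
            energies = self.v(torch.tanh(processed)).squeeze(-1)  # [B, T]
            return torch.softmax(energies, dim=-1)  # alignment over encoder steps

One design note: since the bias is added to every energy equally and softmax is
shift-invariant, the offset does not change a plain softmax alignment; it only
matters if the energies feed something non-shift-invariant, such as a sigmoid
normalization.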