From 736f169cc99f13e2fd7534df3a43d12147bc367b Mon Sep 17 00:00:00 2001 From: erogol Date: Tue, 28 Apr 2020 18:16:37 +0200 Subject: [PATCH] TF LSTM does not match the Torch LSTM w.r.t. bias vectors, so bias is disabled in the LSTM as an easy workaround. --- layers/tacotron2.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/layers/tacotron2.py b/layers/tacotron2.py index 3e439b9b..35a5c0bb 100644 --- a/layers/tacotron2.py +++ b/layers/tacotron2.py @@ -61,6 +61,7 @@ class Encoder(nn.Module): int(output_input_dim / 2), num_layers=1, batch_first=True, + bias=False, bidirectional=True) self.rnn_state = None @@ -121,7 +122,8 @@ class Decoder(nn.Module): bias=False) self.attention_rnn = nn.LSTMCell(self.prenet_dim + input_dim, - self.query_dim) + self.query_dim, + bias=False) self.attention = init_attn(attn_type=attn_type, query_dim=self.query_dim,