From b78fc96115c5a4dfec38d501fc02155ecce90b1c Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Tue, 15 Jan 2019 15:51:55 +0100
Subject: [PATCH] Change embedding layer init to old version

---
 models/tacotron.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/models/tacotron.py b/models/tacotron.py
index 844f69b3..b3b04205 100644
--- a/models/tacotron.py
+++ b/models/tacotron.py
@@ -21,9 +21,7 @@ class Tacotron(nn.Module):
         self.embedding = nn.Embedding(
             len(phonemes), embedding_dim, padding_idx=padding_idx)
         print(" | > Number of characters : {}".format(len(phonemes)))
-        std = sqrt(2.0 / (len(phonemes) + embedding_dim))
-        val = sqrt(3.0) * std  # uniform bounds for std
-        self.embedding.weight.data.uniform_(-val, val)
+        self.embedding.weight.data.normal_(0, 0.3)
         self.encoder = Encoder(embedding_dim)
         self.decoder = Decoder(256, mel_dim, r, attn_windowing)
         self.postnet = PostCBHG(mel_dim)
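
Note (not part of the patch): a minimal standalone sketch of the two embedding
initializations this change swaps, assuming PyTorch; num_phonemes and
embedding_dim are illustrative placeholders for the values used in
models/tacotron.py.

    # Sketch only: compares the removed Xavier-style uniform init with the
    # restored normal(0, 0.3) init on an nn.Embedding of illustrative size.
    from math import sqrt

    import torch.nn as nn

    num_phonemes = 130    # placeholder for len(phonemes)
    embedding_dim = 256   # placeholder for the model's embedding size

    embedding = nn.Embedding(num_phonemes, embedding_dim, padding_idx=0)

    # Old init (removed by this patch): Xavier/Glorot-style uniform bounds
    # chosen so the weights have std = sqrt(2 / (fan_in + fan_out)).
    std = sqrt(2.0 / (num_phonemes + embedding_dim))
    val = sqrt(3.0) * std  # uniform bounds that yield that std
    embedding.weight.data.uniform_(-val, val)

    # New init (restored by this patch): plain normal with a fixed std of 0.3.
    embedding.weight.data.normal_(0, 0.3)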