bug fix for Tacotron, adapting it to the new common layers

Eren Golge 2019-05-28 14:29:17 +02:00
parent 0b5a00d29e
commit 013ec2f168
2 changed files with 2 additions and 2 deletions


@@ -454,7 +454,7 @@ class Decoder(nn.Module):
             if t > 0:
                 new_memory = outputs[-1]
                 self._update_memory_queue(new_memory)
-            output, stop_token, attention = self.decode(inputs, t, None)
+            output, stop_token, attention = self.decode(inputs, None)
             stop_token = torch.sigmoid(stop_token.data)
             outputs += [output]
             attentions += [attention]
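This hunk drops the explicit timestep argument from the decode() call, which suggests the refactored common-layer decoder now carries its step state on the module itself rather than receiving it per call. Below is a minimal, self-contained sketch of the inference loop this hunk touches, assuming a decode(inputs, mask) signature; ToyDecoder and all its internals are illustrative stand-ins, not the project's real common layers.

import torch
import torch.nn as nn

class ToyDecoder(nn.Module):
    def __init__(self, frame_dim=80, hidden_dim=128):
        super().__init__()
        self.rnn = nn.GRUCell(frame_dim, hidden_dim)
        self.proj = nn.Linear(hidden_dim, frame_dim)
        self.stop = nn.Linear(hidden_dim, 1)
        self.memory = None   # last generated frame (the "memory queue")
        self.hidden = None   # recurrent state, carried across decode() calls

    def _update_memory_queue(self, new_memory):
        # keep the most recent output frame as the next decoder input
        self.memory = new_memory

    def decode(self, inputs, mask=None):
        # new-style call: no explicit timestep; state lives on the module
        self.hidden = self.rnn(self.memory, self.hidden)
        output = self.proj(self.hidden)
        stop_token = self.stop(self.hidden)
        # dummy uniform "attention" over encoder steps, for shape only
        attention = torch.full((inputs.size(0), inputs.size(1)),
                               1.0 / inputs.size(1))
        return output, stop_token, attention

    def inference(self, inputs, max_steps=5):
        batch = inputs.size(0)
        self.memory = torch.zeros(batch, self.proj.out_features)
        self.hidden = torch.zeros(batch, self.rnn.hidden_size)
        outputs, attentions = [], []
        for t in range(max_steps):
            if t > 0:
                self._update_memory_queue(outputs[-1])
            output, stop_token, attention = self.decode(inputs, None)
            stop_token = torch.sigmoid(stop_token.data)
            outputs += [output]
            attentions += [attention]
        return torch.stack(outputs, dim=1)

# e.g. ToyDecoder().inference(torch.randn(2, 10, 256)) -> shape (2, 5, 80)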


@@ -113,7 +113,7 @@ class Decoder(nn.Module):
         self.prenet = Prenet(self.mel_channels * r, prenet_type,
                              prenet_dropout,
-                             [self.prenet_dim, self.prenet_dim])
+                             [self.prenet_dim, self.prenet_dim], bias=False)
         self.attention_rnn = nn.LSTMCell(self.prenet_dim + in_features,
                                          self.attention_rnn_dim)
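The second hunk passes bias=False through to the shared Prenet. A hedged sketch of what that flag plausibly controls, assuming it toggles the bias terms of the prenet's Linear layers; this stand-in only mirrors the constructor shape used in the diff, not the project's actual common-layers module.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Prenet(nn.Module):
    # constructor shape mirrors the diff's call site:
    # Prenet(in_dim, prenet_type, prenet_dropout, out_dims, bias=False)
    def __init__(self, in_dim, prenet_type="original", prenet_dropout=True,
                 out_dims=(256, 256), bias=True):
        super().__init__()
        dims = [in_dim] + list(out_dims)
        # bias=False strips the additive bias from every Linear layer
        self.layers = nn.ModuleList(
            [nn.Linear(d_in, d_out, bias=bias)
             for d_in, d_out in zip(dims[:-1], dims[1:])])
        self.prenet_dropout = prenet_dropout
        self.prenet_type = prenet_type  # unused in this sketch

    def forward(self, x):
        for linear in self.layers:
            x = F.relu(linear(x))
            if self.prenet_dropout:
                # Tacotron 2 keeps prenet dropout active at inference too,
                # hence training=True regardless of module mode
                x = F.dropout(x, p=0.5, training=True)
        return x

# e.g. Prenet(80 * 5, "original", True, [256, 256], bias=False)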