Remove variables

This commit is contained in:
Eren Golge 2018-04-25 08:00:48 -07:00
parent e257bd7278
commit 07f71b1761
2 changed files with 8 additions and 10 deletions

View File

@@ -1,6 +1,5 @@
# coding: utf-8 # coding: utf-8
import torch import torch
from torch.autograd import Variable
from torch import nn from torch import nn
from .attention import AttentionRNN from .attention import AttentionRNN
@@ -263,16 +262,16 @@ class Decoder(nn.Module):
self.memory_dim, self.r) self.memory_dim, self.r)
T_decoder = memory.size(1) T_decoder = memory.size(1)
# go frame - 0 frames starting the sequence # go frame - 0 frames starting the sequence
initial_memory = Variable( initial_memory = \
inputs.data.new(B, self.memory_dim * self.r).zero_()) inputs.data.new(B, self.memory_dim * self.r).zero_().requires_grad_()
# Init decoder states # Init decoder states
attention_rnn_hidden = Variable( attention_rnn_hidden = \
inputs.data.new(B, 256).zero_()) inputs.data.new(B, 256).zero_().requires_grad_()
decoder_rnn_hiddens = [Variable( decoder_rnn_hiddens = [
inputs.data.new(B, 256).zero_()) inputs.data.new(B, 256).zero_().requires_grad_()
for _ in range(len(self.decoder_rnns))] for _ in range(len(self.decoder_rnns))]
current_context_vec = Variable( current_context_vec = \
inputs.data.new(B, 256).zero_()) inputs.data.new(B, 256).zero_().requires_grad_()
# Time first (T_decoder, B, memory_dim) # Time first (T_decoder, B, memory_dim)
if memory is not None: if memory is not None:
memory = memory.transpose(0, 1) memory = memory.transpose(0, 1)

View File

@@ -1,6 +1,5 @@
# coding: utf-8 # coding: utf-8
import torch import torch
from torch.autograd import Variable
from torch import nn from torch import nn
from TTS.utils.text.symbols import symbols from TTS.utils.text.symbols import symbols
from TTS.layers.tacotron import Prenet, Encoder, Decoder, CBHG from TTS.layers.tacotron import Prenet, Encoder, Decoder, CBHG