remove requires_grad_()

Eren Golge 2018-04-26 05:27:08 -07:00
parent 0482a66a98
commit 7bfdc32b7b
2 changed files with 4 additions and 10 deletions
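Rationale (a minimal sketch, not part of the commit): calling requires_grad_() on zero-initialized decoder states and on input batches is unnecessary. Autograd already tracks the model's parameters, so gradients reach them regardless of whether the inputs or initial states require grad, and skipping the call avoids allocating gradient buffers for tensors that never need them. The snippet below demonstrates this with a stand-in nn.GRUCell; the module and shapes are illustrative and not taken from this repository.

import torch
import torch.nn as nn

# Stand-in recurrent cell; the diff below initializes Tacotron decoder
# states the same way (a zeroed tensor created from an existing one).
rnn = nn.GRUCell(8, 16)

x = torch.randn(4, 8)        # input batch, requires_grad=False by default
h0 = x.new_zeros(4, 16)      # zero-initialized state, no requires_grad_() needed

h1 = rnn(x, h0)
h1.sum().backward()

print(rnn.weight_ih.grad is not None)  # True: parameters still receive gradients
print(x.grad is None)                  # True: no gradient buffer wasted on the input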


@@ -263,15 +263,15 @@ class Decoder(nn.Module):
         T_decoder = memory.size(1)
         # go frame - 0 frames starting the sequence
         initial_memory = \
-            inputs.data.new(B, self.memory_dim * self.r).zero_().requires_grad_()
+            inputs.data.new(B, self.memory_dim * self.r).zero_()
         # Init decoder states
         attention_rnn_hidden = \
-            inputs.data.new(B, 256).zero_().requires_grad_()
+            inputs.data.new(B, 256).zero_()
         decoder_rnn_hiddens = [
-            inputs.data.new(B, 256).zero_().requires_grad_()
+            inputs.data.new(B, 256).zero_()
             for _ in range(len(self.decoder_rnns))]
         current_context_vec = \
-            inputs.data.new(B, 256).zero_().requires_grad_()
+            inputs.data.new(B, 256).zero_()
         # Time first (T_decoder, B, memory_dim)
         if memory is not None:
             memory = memory.transpose(0, 1)


@@ -93,12 +93,6 @@ def train(model, criterion, data_loader, optimizer, epoch):
         optimizer.zero_grad()
-        # convert inputs to variables
-        text_input.requires_grad_()
-        mel_spec.requires_grad_()
-        # mel_lengths.requires_grad_()
-        # linear_spec.requires_grad_()
         # dispatch data to GPU
         if use_cuda:
             text_input = text_input.cuda()