larger attn loss

Eren Golge 2018-04-24 13:21:10 -07:00
parent 85bbab74b3
commit 17ef3de1b9
1 changed file with 1 addition and 1 deletion

@@ -129,7 +129,7 @@ def train(model, criterion, data_loader, optimizer, epoch):
                               linear_spec_var[:, :, :n_priority_freq],
                               mel_lengths_var)
         attention_loss = criterion(alignments, M, mel_lengths_var)
-        loss = mel_loss + linear_loss + 0.2 * attention_loss
+        loss = mel_loss + linear_loss + attention_loss
         # backpass and check the grad norm
         loss.backward()
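The one-line change drops the 0.2 scaling on the attention loss, so that term now enters the total loss at full weight, i.e. five times its previous contribution. Below is a minimal sketch of the effect, assuming PyTorch scalar losses; the helper total_loss and the dummy values are illustrative, not the repository's code.

import torch

def total_loss(mel_loss, linear_loss, attention_loss, attn_weight=1.0):
    # Weighted sum of the three training losses. Before this commit the
    # attention term was scaled by attn_weight=0.2; after it, the implicit
    # weight is 1.0.
    return mel_loss + linear_loss + attn_weight * attention_loss

# Dummy scalar losses, purely for illustration.
mel_loss = torch.tensor(1.0)
linear_loss = torch.tensor(0.8)
attention_loss = torch.tensor(0.5)

before = total_loss(mel_loss, linear_loss, attention_loss, attn_weight=0.2)
after = total_loss(mel_loss, linear_loss, attention_loss)
print(before.item(), after.item())  # 1.9 vs. 2.3: attention now counts 5x more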