From 17ef3de1b95fcdd141109d5d5abc104aac05d6c3 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Tue, 24 Apr 2018 13:21:10 -0700
Subject: [PATCH] larger attn loss

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 6d4af558..1e3a0a59 100644
--- a/train.py
+++ b/train.py
@@ -129,7 +129,7 @@ def train(model, criterion, data_loader, optimizer, epoch):
                                   linear_spec_var[:, :, :n_priority_freq],
                                   mel_lengths_var)
         attention_loss = criterion(alignments, M, mel_lengths_var)
-        loss = mel_loss + linear_loss + 0.2 * attention_loss
+        loss = mel_loss + linear_loss + attention_loss
 
         # backpass and check the grad norm
         loss.backward()
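
Note: the commit removes the 0.2 down-weighting on the attention loss, so the alignment term now enters the total loss at full strength, matching the "larger attn loss" subject line. Below is a minimal runnable sketch of the loss combination this hunk changes; the tensor shapes, the guided-attention target M, and the use of nn.L1Loss in place of the repo's masked criterion (which also takes mel_lengths_var) are assumptions for illustration, not the actual train.py code.

    import torch
    import torch.nn as nn

    # Stand-in for the repo's masked criterion, which additionally takes
    # sequence lengths (mel_lengths_var); plain L1 is assumed for this sketch.
    criterion = nn.L1Loss()

    batch, frames, mel_dim, linear_dim, chars = 4, 120, 80, 1025, 60
    mel_out = torch.rand(batch, frames, mel_dim, requires_grad=True)
    mel_tgt = torch.rand(batch, frames, mel_dim)
    linear_out = torch.rand(batch, frames, linear_dim, requires_grad=True)
    linear_tgt = torch.rand(batch, frames, linear_dim)
    alignments = torch.rand(batch, frames, chars, requires_grad=True)
    M = torch.rand(batch, frames, chars)  # guided-attention target (assumed shape)

    mel_loss = criterion(mel_out, mel_tgt)
    linear_loss = criterion(linear_out, linear_tgt)
    attention_loss = criterion(alignments, M)

    # Before this commit the attention term was scaled by 0.2:
    #   loss = mel_loss + linear_loss + 0.2 * attention_loss
    # After it, the term contributes unweighted:
    loss = mel_loss + linear_loss + attention_loss
    loss.backward()

Giving the attention term full weight pushes the decoder alignments harder toward the target M, which can help alignment converge earlier, at the cost of letting alignment error claim a larger share of the gradient relative to the spectrogram terms.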