From 91795cc0f190f90027c3a3c7e5e6d1fbc0e6af00 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Mon, 22 Jul 2019 15:44:09 +0200
Subject: [PATCH] config update

---
 config.json            | 1 +
 utils/generic_utils.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/config.json b/config.json
index 17b9207f..807c4c60 100644
--- a/config.json
+++ b/config.json
@@ -43,6 +43,7 @@
     "prenet_type": "original",  // ONLY TACOTRON2 - "original" or "bn".
     "prenet_dropout": true,     // ONLY TACOTRON2 - enable/disable dropout at prenet.
     "use_forward_attn": true,   // ONLY TACOTRON2 - if it uses forward attention. In general, it aligns faster.
+    "forward_attn_mask": false,
     "transition_agent": false,  // ONLY TACOTRON2 - enable/disable transition agent of forward attention.
     "location_attn": false,     // ONLY TACOTRON2 - enable/disable location sensitive attention. It is enabled for TACOTRON by default.
     "loss_masking": true,       // enable/disable loss masking against the sequence padding.
diff --git a/utils/generic_utils.py b/utils/generic_utils.py
index 64414765..6cf4f420 100644
--- a/utils/generic_utils.py
+++ b/utils/generic_utils.py
@@ -117,7 +117,7 @@ def save_checkpoint(model, optimizer, optimizer_st, model_loss, out_path,
     new_state_dict = model.state_dict()
     state = {
         'model': new_state_dict,
-        'optimizer': optimizer.state_dict(),
+        'optimizer': optimizer.state_dict() if optimizer is not None else None,
         'step': current_step,
         'epoch': epoch,
         'linear_loss': model_loss,
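
Note on the config.json hunk: the new "forward_attn_mask" key lands without an inline comment, unlike its neighbors. Judging from the surrounding forward-attention options, this flag presumably toggles an additional mask over the forward-attention alignments (keeping them monotonic at inference); defaulting it to false preserves the previous behavior. This reading is an assumption from context, not stated in the patch itself.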
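
Note on the utils/generic_utils.py hunk: the change guards save_checkpoint against being called with optimizer=None, e.g. when a model is saved for inference only and no optimizer state exists. A minimal sketch of the failure mode the conditional fixes, assuming only torch plus the save_checkpoint signature visible in the hunk header (the model, step, and output path below are made up for illustration):

    import torch

    model = torch.nn.Linear(4, 4)
    optimizer = None  # e.g. checkpointing outside a training loop

    # Before the patch, building this dict raised
    # AttributeError: 'NoneType' object has no attribute 'state_dict'.
    # The conditional stores None in the checkpoint instead.
    state = {
        'model': model.state_dict(),
        'optimizer': optimizer.state_dict() if optimizer is not None else None,
        'step': 0,
        'epoch': 0,
    }
    torch.save(state, 'checkpoint.pth')  # None pickles fine

Loading code then has to tolerate a None 'optimizer' entry, which is the usual trade-off with this kind of guard.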