config update

Eren Golge 2019-07-22 15:44:09 +02:00
parent 54fb236c86
commit 91795cc0f1
2 changed files with 2 additions and 1 deletion


@@ -43,6 +43,7 @@
     "prenet_type": "original", // ONLY TACOTRON2 - "original" or "bn".
     "prenet_dropout": true, // ONLY TACOTRON2 - enable/disable dropout at prenet.
     "use_forward_attn": true, // ONLY TACOTRON2 - if it uses forward attention. In general, it aligns faster.
+    "forward_attn_mask": false,
     "transition_agent": false, // ONLY TACOTRON2 - enable/disable transition agent of forward attention.
     "location_attn": false, // ONLY TACOTRON2 - enable_disable location sensitive attention. It is enabled for TACOTRON by default.
     "loss_masking": true, // enable / disable loss masking against the sequence padding.


@@ -117,7 +117,7 @@ def save_checkpoint(model, optimizer, optimizer_st, model_loss, out_path,
     new_state_dict = model.state_dict()
     state = {
         'model': new_state_dict,
-        'optimizer': optimizer.state_dict(),
+        'optimizer': optimizer.state_dict() if optimizer is not None else None,
         'step': current_step,
         'epoch': epoch,
         'linear_loss': model_loss,
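
The guard added above lets a checkpoint be written when no optimizer is at hand (for example, when exporting a model outside of a training run) instead of failing on None.state_dict(). A minimal, self-contained sketch of the same pattern with stand-in objects, not the project's own save_checkpoint:

import torch

model = torch.nn.Linear(4, 2)  # stand-in model
optimizer = None               # no optimizer available, e.g. inference-only export

state = {
    'model': model.state_dict(),
    # Same guard as in the diff: only serialize optimizer state when one exists.
    'optimizer': optimizer.state_dict() if optimizer is not None else None,
}
torch.save(state, "checkpoint_example.pth")

# Loading mirrors the guard: restore optimizer state only if it was stored.
checkpoint = torch.load("checkpoint_example.pth")
model.load_state_dict(checkpoint['model'])
if checkpoint['optimizer'] is not None:
    torch.optim.Adam(model.parameters()).load_state_dict(checkpoint['optimizer'])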