mirror of https://github.com/coqui-ai/TTS.git
formatting for pylint
This commit is contained in:
parent b22c7d4a29
commit 728b97da3a
@@ -78,6 +78,7 @@
     "use_phonemes": true,        // use phonemes instead of raw characters. It is suggested for better pronunciation.
     "phoneme_language": "en-us", // depending on your target language, pick one from https://github.com/bootphon/phonemizer#languages
     "text_cleaner": "phoneme_cleaners",
-    "use_speaker_embedding": false
+    "use_speaker_embedding": false, // use speaker embedding to enable multi-speaker learning.
+    "style_wav_for_test": null      // path to style wav file to be used in TacotronGST inference.
 }
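The three phoneme settings above lean on the phonemizer package linked in the comment. As a hedged sketch of what "use_phonemes": true implies (the standalone phonemize call shown here is an assumption for illustration; TTS wraps the package internally and may call it differently):

    # Sketch: convert raw text to phonemes with the phonemizer package.
    # "language" mirrors the config's "phoneme_language" value.
    from phonemizer import phonemize

    text = "Better pronunciation through phonemes."
    phones = phonemize(text, language="en-us", backend="espeak", strip=True)
    print(phones)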
14 train.py
@@ -190,7 +190,7 @@ def train(model, criterion, criterion_st, optimizer, optimizer_st, scheduler,
           "LoaderTime:{:.2f} LR:{:.6f}".format(
               num_iter, batch_n_iter, global_step, loss.item(),
               postnet_loss.item(), decoder_loss.item(), stop_loss.item(),
               grad_norm, grad_norm_st, avg_text_length, avg_spec_length, step_time,
               loader_time, current_lr),
           flush=True)

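The hunk above is the tail of one multi-line print call: Python concatenates adjacent string literals into a single format string before .format fills the placeholders. A minimal self-contained sketch of the same logging pattern, with made-up values:

    # Sketch of the logging pattern in the hunk above. Adjacent string
    # literals merge into one format string; all values here are invented.
    num_iter, batch_n_iter, global_step = 10, 500, 1210
    step_time, loader_time, current_lr = 0.42, 0.03, 0.0001
    print("   | > Step:{}/{} GlobalStep:{} "
          "StepTime:{:.2f} "
          "LoaderTime:{:.2f} LR:{:.6f}".format(
              num_iter, batch_n_iter, global_step, step_time,
              loader_time, current_lr),
          flush=True)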
@@ -259,9 +259,9 @@ def train(model, criterion, criterion_st, optimizer, optimizer_st, scheduler,
           "AvgPostnetLoss:{:.5f} AvgDecoderLoss:{:.5f} "
           "AvgStopLoss:{:.5f} EpochTime:{:.2f} "
           "AvgStepTime:{:.2f} AvgLoaderTime:{:.2f}".format(global_step, avg_total_loss,
                                                            avg_postnet_loss, avg_decoder_loss,
                                                            avg_stop_loss, epoch_time, avg_step_time,
                                                            avg_loader_time),
           flush=True)

     # Plot Epoch Stats
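This epoch summary prints averages of the per-step losses. A sketch of the bookkeeping behind such averages, assuming a plain accumulate-and-divide over the epoch (the real train() may track these differently):

    # Sketch: accumulate per-step losses, then average once per epoch.
    # Variable names mirror the hunk; the lists of values are invented.
    postnet_losses = [0.71, 0.65, 0.60]  # one entry per training step
    decoder_losses = [0.90, 0.84, 0.80]
    avg_postnet_loss = sum(postnet_losses) / len(postnet_losses)
    avg_decoder_loss = sum(decoder_losses) / len(decoder_losses)
    print("AvgPostnetLoss:{:.5f} AvgDecoderLoss:{:.5f}".format(
        avg_postnet_loss, avg_decoder_loss), flush=True)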
@@ -539,12 +539,12 @@ def main(args): #pylint: disable=redefined-outer-name
         if c.gradual_training is not None:
             r, c.batch_size = gradual_training_scheduler(global_step, c)
             c.r = r
-            model.decoder._set_r(r)
+            model.decoder.set_r(r)
         print(" > Number of outputs per iteration:", model.decoder.r)

         train_loss, global_step = train(model, criterion, criterion_st,
                                         optimizer, optimizer_st, scheduler,
                                         ap, global_step, epoch)
         val_loss = evaluate(model, criterion, criterion_st, ap, global_step, epoch)
         print(
             " | > Training Loss: {:.5f} Validation Loss: {:.5f}".format(
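For context on the hunk above, gradual_training_scheduler steps the decoder's reduction factor r (outputs per iteration) and the batch size down as training progresses. A hedged sketch, assuming the schedule is a list of [first_step, r, batch_size] triples; the repo's actual function takes the config object and its implementation may differ:

    # Sketch: pick the last schedule entry whose start step has been reached.
    # The [first_step, r, batch_size] layout is an assumption for illustration.
    def gradual_training_scheduler(global_step, schedule):
        new_values = schedule[0]
        for values in schedule:
            if global_step >= values[0]:
                new_values = values
        return new_values[1], new_values[2]  # (r, batch_size)

    # Example: r shrinks from 7 to 5 once training passes step 10000.
    schedule = [[0, 7, 64], [10000, 5, 64], [50000, 3, 32]]
    print(gradual_training_scheduler(12000, schedule))  # -> (5, 64)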