use 'enabled' argument to control autocast

erogol 2020-11-17 14:22:01 +01:00
parent 060dc21808
commit aa2b31a1b0
3 changed files with 3 additions and 3 deletions
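
For context: torch.cuda.amp.autocast accepts an `enabled` argument (it has had one since autocast landed in PyTorch 1.6), and with enabled=False the context manager is a no-op. That makes a conditional wrapper such as the removed set_amp_context redundant. The helper itself is not shown in this diff; a plausible reconstruction, an assumption rather than the repo's exact code, would be:

    from contextlib import contextmanager

    import torch

    # Hypothetical reconstruction of the removed set_amp_context helper:
    # enter autocast only when mixed precision is requested, otherwise
    # run the block unchanged.
    @contextmanager
    def set_amp_context(mixed_precision):
        if mixed_precision:
            with torch.cuda.amp.autocast():
                yield
        else:
            yield

With the `enabled` flag, that gating collapses into the single autocast line that each hunk below switches to.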

@@ -182,7 +182,7 @@ def train(model, criterion, optimizer, scheduler,
         optimizer.zero_grad()
         # forward pass model
-        with set_amp_context(c.mixed_precision):
+        with torch.cuda.amp.autocast(enabled=c.mixed_precision):
             z, logdet, y_mean, y_log_scale, alignments, o_dur_log, o_total_dur = model.forward(
                 text_input, text_lengths, mel_input, mel_lengths, attn_mask, g=speaker_ids)

@@ -158,7 +158,7 @@ def train(model, criterion, optimizer, optimizer_st, scheduler,
         if optimizer_st:
             optimizer_st.zero_grad()
-        with set_amp_context(c.mixed_precision):
+        with torch.cuda.amp.autocast(enabled=c.mixed_precision):
             # forward pass model
             if c.bidirectional_decoder or c.double_decoder_consistency:
                 decoder_output, postnet_output, alignments, stop_tokens, decoder_backward_output, alignments_backward = model(

@@ -109,7 +109,7 @@ def train(model, criterion, optimizer,
         global_step += 1
-        with set_amp_context(c.mixed_precision):
+        with torch.cuda.amp.autocast(enabled=c.mixed_precision):
             # compute noisy input
             if hasattr(model, 'module'):
                 noise, x_noisy, noise_scale = model.module.compute_y_n(x)
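
For reference, autocast's `enabled` flag pairs with torch.cuda.amp.GradScaler, which takes the same argument, so a single code path can serve both full and mixed precision. A minimal sketch of that standard PyTorch AMP step, with generic names rather than this repo's code:

    import torch

    def train_step(model, criterion, optimizer, scaler, inputs, targets,
                   mixed_precision):
        optimizer.zero_grad()
        # Run only the forward pass and loss under autocast; with
        # mixed_precision=False the context is a no-op.
        with torch.cuda.amp.autocast(enabled=mixed_precision):
            outputs = model(inputs)
            loss = criterion(outputs, targets)
        # A GradScaler built with the same flag degrades gracefully to a
        # plain backward/step when mixed precision is off.
        scaler.scale(loss).backward()
        scaler.step(optimizer)
        scaler.update()
        return loss.item()

    # Usage: construct the scaler once, gated by the same config flag.
    scaler = torch.cuda.amp.GradScaler(enabled=True)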