From 8ef8ddb91566f17861dc06e714c3bc46c6273291 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Wed, 25 Apr 2018 08:12:14 -0700
Subject: [PATCH] bug fix

---
 train.py               | 2 +-
 utils/generic_utils.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index a9945592..1e1613c4 100644
--- a/train.py
+++ b/train.py
@@ -144,7 +144,7 @@ def train(model, criterion, data_loader, optimizer, epoch):
         progbar_display['total_loss'] = loss.item()
         progbar_display['linear_loss'] = linear_loss.item()
         progbar_display['mel_loss'] = mel_loss.item()
-        progbar_display['grad_norm'] = grad_norm
+        progbar_display['grad_norm'] = grad_norm.item()
 
         # update
         progbar.update(num_iter+1, values=list(progbar_display.items()))
diff --git a/utils/generic_utils.py b/utils/generic_utils.py
index 0103b169..d4d27875 100644
--- a/utils/generic_utils.py
+++ b/utils/generic_utils.py
@@ -113,7 +113,7 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
 def check_update(model, grad_clip, grad_top):
     r'''Check model gradient against unexpected jumps and failures'''
     skip_flag = False
-    grad_norm = torch.nn.utils.clip_grad_norm(model.parameters(), grad_clip)
+    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
     if np.isinf(grad_norm):
         print(" | > Gradient is INF !!")
         skip_flag = True
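
Note (not part of the patch): a minimal, self-contained sketch of why both
call sites change, assuming PyTorch >= 0.4, where clip_grad_norm was
deprecated in favor of the in-place clip_grad_norm_ and the returned total
norm became a 0-dim tensor, so logging code needs .item() to get a plain
Python float. The tiny model and data below are hypothetical stand-ins, not
the project's actual model.

    import torch
    import torch.nn as nn

    model = nn.Linear(10, 1)  # hypothetical stand-in model
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

    x, y = torch.randn(4, 10), torch.randn(4, 1)
    loss = nn.functional.mse_loss(model(x), y)

    optimizer.zero_grad()
    loss.backward()

    # Clips gradients in place and returns the total gradient norm
    # (a 0-dim tensor on PyTorch >= 0.4, hence .item() below).
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)

    print("grad_norm: %.4f" % grad_norm.item())  # .item() -> Python float
    optimizer.step()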