From db50d395ed8dd9a1d460209c71e885413c312096 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Thu, 10 May 2018 16:25:48 -0700
Subject: [PATCH] grad norm with new pytorch function

---
 utils/generic_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/generic_utils.py b/utils/generic_utils.py
index b49b6d27..1cc547ab 100644
--- a/utils/generic_utils.py
+++ b/utils/generic_utils.py
@@ -112,7 +112,7 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
 def check_update(model, grad_clip, grad_top):
     r'''Check model gradient against unexpected jumps and failures'''
     skip_flag = False
-    grad_norm = torch.nn.utils.clip_grad_norm(model.parameters(), grad_clip)
+    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
     if np.isinf(grad_norm):
         print(" | > Gradient is INF !!")
         skip_flag = True