From d5a909807e0b0d3f8bd32d6f7aec4065636c0314 Mon Sep 17 00:00:00 2001 From: Eren Date: Fri, 10 Aug 2018 17:49:48 +0200 Subject: [PATCH] Disabling capping gradient value --- utils/generic_utils.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/utils/generic_utils.py b/utils/generic_utils.py index c58efd75..18968d9d 100644 --- a/utils/generic_utils.py +++ b/utils/generic_utils.py @@ -123,16 +123,13 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path, return best_loss -def check_update(model, grad_clip, grad_top): +def check_update(model, grad_clip): r'''Check model gradient against unexpected jumps and failures''' skip_flag = False grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip) if np.isinf(grad_norm): print(" | > Gradient is INF !!") skip_flag = True - elif grad_norm > grad_top: - print(" | > Gradient is above the top limit !!") - skip_flag = True return grad_norm, skip_flag