diff --git a/utils/generic_utils.py b/utils/generic_utils.py
index c58efd75..18968d9d 100644
--- a/utils/generic_utils.py
+++ b/utils/generic_utils.py
@@ -123,16 +123,13 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
     return best_loss
 
 
-def check_update(model, grad_clip, grad_top):
+def check_update(model, grad_clip):
     r'''Check model gradient against unexpected jumps and failures'''
     skip_flag = False
     grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
     if np.isinf(grad_norm):
         print(" | > Gradient is INF !!")
         skip_flag = True
-    elif grad_norm > grad_top:
-        print(" | > Gradient is above the top limit !!")
-        skip_flag = True
    return grad_norm, skip_flag
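
For context, a minimal usage sketch of the simplified `check_update(model, grad_clip)` after this patch. Only `check_update` is reproduced from the patched file; the `train_step` wrapper, its argument names, and the `grad_clip=1.0` value are illustrative assumptions, not code from the repository.

```python
import numpy as np
import torch


def check_update(model, grad_clip):
    # Reproduced from utils/generic_utils.py after this patch: clip gradients
    # and flag the update for skipping if the resulting norm is infinite.
    r'''Check model gradient against unexpected jumps and failures'''
    skip_flag = False
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
    if np.isinf(grad_norm):
        print(" | > Gradient is INF !!")
        skip_flag = True
    return grad_norm, skip_flag


def train_step(model, optimizer, criterion, batch, grad_clip=1.0):
    """Hypothetical training-step wrapper; the names and the grad_clip value
    are assumptions for illustration, not taken from the repository."""
    inputs, targets = batch
    optimizer.zero_grad()
    loss = criterion(model(inputs), targets)
    loss.backward()

    # Clip gradients and decide whether this update should be skipped.
    grad_norm, skip_flag = check_update(model, grad_clip)
    if skip_flag:
        # INF gradient: skip optimizer.step(); the stale gradients are
        # discarded by zero_grad() at the start of the next step.
        return None, grad_norm

    optimizer.step()
    return loss.item(), grad_norm
```

With the `grad_top` threshold gone, large-but-finite gradient norms are simply clipped to `grad_clip` rather than causing the whole update to be skipped; only an infinite norm still aborts the step.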