Grad norm with new PyTorch function

Eren Golge 2018-05-10 16:25:48 -07:00
parent 3970491451
commit db50d395ed
1 changed file with 1 addition and 1 deletion


@@ -112,7 +112,7 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
 def check_update(model, grad_clip, grad_top):
     r'''Check model gradient against unexpected jumps and failures'''
     skip_flag = False
-    grad_norm = torch.nn.utils.clip_grad_norm(model.parameters(), grad_clip)
+    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
     if np.isinf(grad_norm):
         print(" | > Gradient is INF !!")
         skip_flag = True
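
PyTorch 0.4 deprecated torch.nn.utils.clip_grad_norm in favor of clip_grad_norm_, the trailing underscore following PyTorch's convention for functions that modify their arguments in place. Below is a minimal sketch (not part of the commit) of how the renamed helper typically sits in a training step; the model, data, and max_norm value are hypothetical placeholders, not taken from this repository:

    # Hypothetical training step illustrating clip_grad_norm_ usage.
    import torch
    import torch.nn as nn

    model = nn.Linear(10, 1)  # placeholder model, not from this repo
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    x = torch.randn(8, 10)
    target = torch.randn(8, 1)

    optimizer.zero_grad()
    loss = nn.functional.mse_loss(model(x), target)
    loss.backward()

    # clip_grad_norm_ rescales the gradients in place so their total
    # norm does not exceed max_norm, and returns the total norm
    # computed before clipping.
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)

    optimizer.step()

Like the old name, clip_grad_norm_ returns the pre-clipping total gradient norm, which is the value check_update inspects for inf before deciding whether to skip the update.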