Disabling capping gradient value

This commit is contained in:
Eren 2018-08-10 17:49:48 +02:00
parent 1f74df7a50
commit d5a909807e
1 changed file with 1 addition and 4 deletions

View File

@ -123,16 +123,13 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
return best_loss
def check_update(model, grad_clip):
    r'''Clip model gradients and flag non-finite gradient norms.

    Clips the total gradient norm of ``model``'s parameters to
    ``grad_clip`` in place, and reports whether the optimizer step
    should be skipped because the pre-clip norm is not finite.

    Args:
        model: module whose parameter gradients are clipped in place.
        grad_clip: maximum allowed total gradient norm.

    Returns:
        tuple: ``(grad_norm, skip_flag)`` — the total gradient norm
        before clipping, and ``True`` when that norm is INF or NaN
        (i.e. the update should be skipped).
    '''
    skip_flag = False
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
    # np.isinf(nan) is False, so a NaN gradient norm (e.g. from a NaN
    # loss) would slip through unskipped; isfinite catches INF and NaN.
    if not np.isfinite(grad_norm):
        print(" | > Gradient is INF or NaN !!")
        skip_flag = True
    return grad_norm, skip_flag