mirror of https://github.com/coqui-ai/TTS.git
Disabling capping of the gradient value
This commit is contained in:
parent
9e2baa4f03
commit
9100e5762a
|
@ -123,16 +123,13 @@ def save_best_model(model, optimizer, model_loss, best_loss, out_path,
|
||||||
return best_loss
|
return best_loss
|
||||||
|
|
||||||
|
|
||||||
def check_update(model, grad_clip):
    """Clip model gradients and flag non-finite gradient norms.

    Clips the total gradient norm of ``model``'s parameters to ``grad_clip``
    and reports whether the optimizer step should be skipped because the
    gradient norm came out infinite.

    Args:
        model: ``torch.nn.Module`` whose parameters already hold ``.grad``
            values (i.e. ``backward()`` has been called).
        grad_clip: max total norm passed to ``torch.nn.utils.clip_grad_norm_``.

    Returns:
        tuple: ``(grad_norm, skip_flag)`` — the total gradient norm as
        reported by ``clip_grad_norm_`` and ``True`` when that norm is INF,
        meaning the update should be skipped.
    """
    skip_flag = False
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
    # clip_grad_norm_ returns the total norm computed *before* clipping, so
    # an INF here means the backward pass produced non-finite gradients.
    if np.isinf(grad_norm):
        print(" | > Gradient is INF !!")
        skip_flag = True
    return grad_norm, skip_flag
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue