From bbd90c3b4dd85dcc005b6c33bcb589a8fbe04f42 Mon Sep 17 00:00:00 2001
From: Eren Golge
Date: Tue, 12 Mar 2019 16:54:42 +0100
Subject: [PATCH] use grad_clip from config.json

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 693cef8a..524bacbf 100644
--- a/train.py
+++ b/train.py
@@ -134,7 +134,7 @@ def train(model, criterion, criterion_st, optimizer, optimizer_st, scheduler,
         # backpass and check the grad norm for spec losses
         loss.backward(retain_graph=True)
         optimizer, current_lr = weight_decay(optimizer, c.wd)
-        grad_norm, _ = check_update(model, 1.0)
+        grad_norm, _ = check_update(model, c.grad_clip)
         optimizer.step()
 
         # backpass and check the grad norm for stop loss
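
Note: the patch replaces a hardcoded clipping threshold of 1.0 with c.grad_clip, read from config.json, so the gradient-clipping bound becomes a tunable hyperparameter. For context, below is a minimal sketch of what a check_update-style helper typically does in PyTorch: clip the gradient norm to the given threshold and flag non-finite norms so the caller can skip optimizer.step(). This is an assumption about the helper's behavior, not the verbatim source of check_update in this repo; its actual signature and return values may differ.

import math
import torch


def check_update(model, grad_clip):
    """Clip the gradient norm to `grad_clip`; flag NaN/inf norms (sketch)."""
    skip_flag = False
    # clip_grad_norm_ rescales gradients in place and returns the total norm
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
    if math.isinf(grad_norm) or math.isnan(grad_norm):
        skip_flag = True  # caller may skip optimizer.step() on a bad update
    return grad_norm, skip_flag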