optional priority_freq

This commit is contained in:
Eren Golge 2018-04-26 05:35:25 -07:00
parent b076960c84
commit 379ff17a9a
2 changed files with 7 additions and 2 deletions

View File

@ -17,6 +17,7 @@
"eval_batch_size":32,
"r": 5,
"mk": 1.0,
"priority_freq": false,
"griffin_lim_iters": 60,
"power": 1.2,

View File

@ -54,6 +54,12 @@ pickle.dump(c, open(tmp_path, "wb"))
LOG_DIR = OUT_PATH
tb = SummaryWriter(LOG_DIR)
if c.priority_freq:
n_priority_freq = int(3000 / (c.sample_rate * 0.5) * c.num_freq)
print(" > Using num priority freq. : {}".format(n_priority_freq))
else:
print(" > Priority freq. is disabled.")
def signal_handler(signal, frame):
"""Ctrl+C handler to remove empty experiment folder"""
@ -71,7 +77,6 @@ def train(model, criterion, data_loader, optimizer, epoch):
print(" | > Epoch {}/{}".format(epoch, c.epochs))
progbar = Progbar(len(data_loader.dataset) / c.batch_size)
n_priority_freq = int(3000 / (c.sample_rate * 0.5) * c.num_freq)
progbar_display = {}
for num_iter, data in enumerate(data_loader):
start_time = time.time()
@ -214,7 +219,6 @@ def evaluate(model, criterion, data_loader, current_step):
print("\n | > Validation")
progbar = Progbar(len(data_loader.dataset) / c.batch_size)
n_priority_freq = int(3000 / (c.sample_rate * 0.5) * c.num_freq)
with torch.no_grad():
for num_iter, data in enumerate(data_loader):
start_time = time.time()