From fb88cb060909661f490cdeb1095eef7ed6a79edf Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Wed, 17 Apr 2019 17:29:23 +0200
Subject: [PATCH] updates

---
 train.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/train.py b/train.py
index 0c93eba0..f3f54803 100644
--- a/train.py
+++ b/train.py
@@ -89,9 +89,9 @@ def train(
     # Scheduler (reduce lr at epochs 218, 245, i.e. batches 400k, 450k)
     # lf = lambda x: 1 - x / epochs  # linear ramp to zero
     # lf = lambda x: 10 ** (-2 * x / epochs)  # exp ramp to lr0 * 1e-2
-    # lf = lambda x: 1 - 10 ** (-2 * (1 - x / epochs))  # inv exp ramp to lr0 * 1e-2
-    # scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
-    scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[218, 245], gamma=0.1, last_epoch=start_epoch - 1)
+    lf = lambda x: 1 - 10 ** (-2 * (1 - x / epochs))  # inv exp ramp to lr0 * 1e-2
+    scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
+    # scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[218, 245], gamma=0.1, last_epoch=start_epoch - 1)
 
     # y = []
     # for epoch in range(epochs):
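
Note (not part of the patch): a minimal standalone sketch of the LambdaLR schedule this change switches to, to show the per-epoch learning-rate curve it produces. The values epochs=273 and lr0=1e-3, and the dummy SGD optimizer, are assumptions for illustration only; the real values come from the training arguments in train.py.

    # Sketch: inspect the inverse-exponential ramp used by the new scheduler
    import torch
    import torch.optim as optim

    epochs = 273   # assumed total epochs (~500k batches, matching the 218/245 milestone comment)
    lr0 = 1e-3     # assumed initial learning rate

    model = torch.nn.Linear(1, 1)  # dummy model so the optimizer has parameters
    optimizer = optim.SGD(model.parameters(), lr=lr0)

    lf = lambda x: 1 - 10 ** (-2 * (1 - x / epochs))  # inv exp ramp, as in the patch
    scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)

    for epoch in range(epochs):
        # optimizer.step() would run here during real training
        if epoch in (0, 100, 200, epochs - 1):
            print(f'epoch {epoch:3d}: lr = {optimizer.param_groups[0]["lr"]:.6f}')
        scheduler.step()

The printed values decay smoothly from roughly lr0 toward lr0 * 1e-2 by the final epoch, replacing the two hard 10x drops of the commented-out MultiStepLR schedule.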