From b3dfd89878397fc347f74cec9e782cb1c3ca049f Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Thu, 23 Apr 2020 10:35:08 -0700
Subject: [PATCH] scheduler resume bug fix

---
 train.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/train.py b/train.py
index c0f21dfa..e65161e5 100644
--- a/train.py
+++ b/train.py
@@ -146,11 +146,11 @@ def train():
     if mixed_precision:
         model, optimizer = amp.initialize(model, optimizer, opt_level='O1', verbosity=0)
 
-    # Scheduler https://github.com/ultralytics/yolov3/issues/238
-    lf = lambda x: (((1 + math.cos(
-        x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05  # cosine https://arxiv.org/pdf/1812.01187.pdf
-    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
-    # scheduler = lr_scheduler.MultiStepLR(optimizer, [round(epochs * x) for x in [0.8, 0.9]], 0.1, start_epoch - 1)
+    # Scheduler https://arxiv.org/pdf/1812.01187.pdf
+    lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05  # cosine
+    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
+    scheduler.last_epoch = start_epoch - 1  # see link below
+    # https://discuss.pytorch.org/t/a-problem-occured-when-resuming-an-optimizer/28822
 
     # Plot lr schedule
     # y = []
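
Why the fix works: PyTorch's _LRScheduler only writes an 'initial_lr' key into each optimizer param group when it is constructed with the default last_epoch=-1. Constructing it with last_epoch=start_epoch - 1 on a fresh optimizer therefore raises KeyError: "param 'initial_lr' is not specified in param_groups[0] when resuming an optimizer" (the discuss.pytorch.org thread linked in the patch). The patch sidesteps this by building the scheduler normally, then rewinding its epoch counter by hand. Below is a minimal, self-contained sketch of the same pattern; the nn.Linear stand-in, the SGD hyperparameters, and the epochs/start_epoch values are illustrative placeholders, not the ones used in train.py.

    import math

    from torch import nn
    from torch.optim import SGD, lr_scheduler

    model = nn.Linear(10, 2)        # placeholder model
    optimizer = SGD(model.parameters(), lr=0.01, momentum=0.9)
    epochs, start_epoch = 300, 100  # pretend we resume at epoch 100

    # Same cosine schedule as the patch: scales lr from 1.0x down to 0.05x.
    lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05

    # Broken resume path (the line the patch removes) raises
    #   KeyError: "param 'initial_lr' is not specified in param_groups[0] ..."
    # because 'initial_lr' is only recorded when last_epoch is the default -1:
    # scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)

    # Working resume path (what the patch adds): construct normally, which
    # records 'initial_lr', then rewind the counter before training resumes.
    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
    scheduler.last_epoch = start_epoch - 1

    for epoch in range(start_epoch, epochs):
        # ... train for one epoch ...
        scheduler.step()  # first call advances to start_epoch, not to 0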