scheduler resume bug fix

Glenn Jocher 2020-04-23 10:35:08 -07:00
parent c29be7f85d
commit b3dfd89878
1 changed file with 5 additions and 5 deletions


@@ -146,11 +146,11 @@ def train():
     if mixed_precision:
         model, optimizer = amp.initialize(model, optimizer, opt_level='O1', verbosity=0)
 
-    # Scheduler https://github.com/ultralytics/yolov3/issues/238
-    lf = lambda x: (((1 + math.cos(
-        x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05  # cosine https://arxiv.org/pdf/1812.01187.pdf
-    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
-    # scheduler = lr_scheduler.MultiStepLR(optimizer, [round(epochs * x) for x in [0.8, 0.9]], 0.1, start_epoch - 1)
+    # Scheduler https://arxiv.org/pdf/1812.01187.pdf
+    lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05  # cosine
+    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
+    scheduler.last_epoch = start_epoch - 1  # see link below
+    # https://discuss.pytorch.org/t/a-problem-occured-when-resuming-an-optimizer/28822
 
     # Plot lr schedule
     # y = []
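
For context, a minimal, self-contained sketch of the resume pattern this commit lands on. The model, optimizer, and the epochs/start_epoch values are illustrative stand-ins, not code from this repository:

import math

from torch import nn
from torch.optim import SGD, lr_scheduler

epochs, start_epoch = 100, 30  # hypothetical run, resumed at epoch 30
model = nn.Linear(10, 2)       # stand-in for the actual YOLOv3 model
optimizer = SGD(model.parameters(), lr=0.01)

# Cosine schedule from the diff: the LR factor decays from 1.0 at x=0 to 0.05 at x=epochs.
lf = lambda x: (((1 + math.cos(x * math.pi / epochs)) / 2) ** 1.0) * 0.95 + 0.05

# Passing last_epoch=start_epoch - 1 to the constructor fails on a freshly built
# optimizer ("param 'initial_lr' is not specified ..."), because that code path
# expects param groups restored from a checkpoint. Assigning the attribute after
# construction fast-forwards the schedule without that requirement.
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
scheduler.last_epoch = start_epoch - 1

for epoch in range(start_epoch, epochs):
    ...  # train for one epoch
    scheduler.step()  # advances to lf(start_epoch), lf(start_epoch + 1), ...

This is the point of the fix: the constructor's resume path raises the KeyError discussed in the linked PyTorch forum thread unless each param group already carries an 'initial_lr' key, while setting scheduler.last_epoch after construction sidesteps that check on resume.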