This commit is contained in:
Glenn Jocher 2019-07-23 15:08:28 +02:00
parent 308eda38fd
commit b025b3123e
2 changed files with 2 additions and 2 deletions

View File

@@ -138,7 +138,7 @@ def train(cfg,
     # lf = lambda x: 10 ** (hyp['lrf'] * x / epochs)  # exp ramp
     # lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inverse exp ramp
     # scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
-    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(opt.epochs * x) for x in (0.8, 0.9)], gamma=0.1)
+    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(opt.epochs * x) for x in [0.8]], gamma=0.1)
     scheduler.last_epoch = start_epoch - 1
     # # Plot lr schedule

View File

@@ -18,7 +18,7 @@ def select_device(force_cpu=False):
     if cuda:
         try:  # Mixed precision training https://github.com/NVIDIA/apex
             from apex import amp
-            apex_str = 'with Apex '
+            apex_str = 'Apex '
        except:
            apex_str = ''