From 6ab753a9e7a9c88ff26530ffa23d7350f3bda552 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Wed, 4 Mar 2020 13:06:31 -0800
Subject: [PATCH] updates

---
 train.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/train.py b/train.py
index 9e919f88..b2d6cb10 100644
--- a/train.py
+++ b/train.py
@@ -28,7 +28,7 @@ hyp = {'giou': 3.54,  # giou loss gain
        'obj': 64.3,  # obj loss gain (*=img_size/320 if img_size != 320)
        'obj_pw': 1.0,  # obj BCELoss positive_weight
        'iou_t': 0.225,  # iou training threshold
-       'lr0': 0.00579,  # initial learning rate (SGD=5E-3, Adam=5E-4)
+       'lr0': 0.01,  # initial learning rate (SGD=5E-3, Adam=5E-4)
        'lrf': -4.,  # final LambdaLR learning rate = lr0 * (10 ** lrf)
        'momentum': 0.937,  # SGD momentum
        'weight_decay': 0.000484,  # optimizer weight decay
@@ -141,7 +141,7 @@ def train():
     # lf = lambda x: 1 - x / epochs  # linear ramp to zero
     # lf = lambda x: 10 ** (hyp['lrf'] * x / epochs)  # exp ramp
     # lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inverse exp ramp
-    lf = lambda x: 0.5 * (1 + math.cos(x * math.pi / epochs))  # cosine https://arxiv.org/pdf/1812.01187.pdf
+    lf = lambda x: (1 + math.cos(x * math.pi / epochs)) / 2 * 0.99 + 0.01  # cosine https://arxiv.org/pdf/1812.01187.pdf
     scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
     # scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(epochs * x) for x in [0.8, 0.9]], gamma=0.1)
     scheduler.last_epoch = start_epoch
@@ -311,7 +311,7 @@ def train():
                               batch_size=batch_size * 2,
                               img_size=img_size_test,
                               model=model,
-                              conf_thres=1E-3 if opt.evolve or (final_epoch and is_coco) else 0.1,  # 0.1 faster
+                              conf_thres=0.001,  # 0.001 if opt.evolve or (final_epoch and is_coco) else 0.01,
                               iou_thres=0.6,
                               save_json=final_epoch and is_coco,
                               single_cls=opt.single_cls,
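
For reference, a minimal standalone sketch (not part of the patch) of what the new scheduler lambda does: the old form annealed the learning rate from lr0 down to 0, while the new form rescales the cosine so it decays from lr0 to a floor of lr0 * 0.01. This sketch assumes epochs = 300 and standard PyTorch LambdaLR semantics; the stand-in model and values are illustrative only:

    import math
    import torch
    from torch import optim
    from torch.optim import lr_scheduler

    epochs = 300                   # assumed value; not specified in this patch
    lr0 = 0.01                     # initial learning rate from the patched hyp dict

    model = torch.nn.Linear(1, 1)  # stand-in model for illustration only
    optimizer = optim.SGD(model.parameters(), lr=lr0, momentum=0.937)

    # New form: cosine decay rescaled to end at a 0.01 floor, so the final
    # learning rate is lr0 * 0.01 rather than decaying all the way to zero.
    lf = lambda x: (1 + math.cos(x * math.pi / epochs)) / 2 * 0.99 + 0.01
    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)

    print(lf(0))       # 1.00 -> training starts at lr0
    print(lf(epochs))  # 0.01 -> training ends at lr0 * 0.01, not 0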