diff --git a/train.py b/train.py
index 0db2f89b..43388078 100644
--- a/train.py
+++ b/train.py
@@ -20,7 +20,7 @@
 last = wdir + 'last.pt'
 best = wdir + 'best.pt'
 results_file = 'results.txt'
 
-# Hyperparameters (results68: 59.2 mAP@0.5 yolov3-spp-416) https://github.com/ultralytics/yolov3/issues/310
+# Hyperparameters (results68: 59.9 mAP@0.5 yolov3-spp-416) https://github.com/ultralytics/yolov3/issues/310
 hyp = {'giou': 3.54,  # giou loss gain
        'cls': 37.4,  # cls loss gain
@@ -28,7 +28,7 @@ hyp = {'giou': 3.54,  # giou loss gain
        'obj': 49.5,  # obj loss gain (*=img_size/320 if img_size != 320)
        'obj_pw': 1.0,  # obj BCELoss positive_weight
        'iou_t': 0.225,  # iou training threshold
-       'lr0': 0.00579,  # initial learning rate (SGD=1E-3, Adam=9E-5)
+       'lr0': 0.00579,  # initial learning rate (SGD=5E-3, Adam=5E-4)
        'lrf': -4.,  # final LambdaLR learning rate = lr0 * (10 ** lrf)
        'momentum': 0.937,  # SGD momentum
        'weight_decay': 0.000484,  # optimizer weight decay
@@ -94,6 +94,7 @@ def train():
             pg0 += [v]  # all else
 
     if opt.adam:
+        hyp['lr0'] *= 0.1  # reduce lr (i.e. SGD=5E-3, Adam=5E-4)
         optimizer = optim.Adam(pg0, lr=hyp['lr0'])
         # optimizer = AdaBound(pg0, lr=hyp['lr0'], final_lr=0.1)
     else: