From 9ef8d42f0e689f655897b845036e917e078ebc5f Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 30 Jul 2019 18:25:53 +0200
Subject: [PATCH] updates

---
 train.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/train.py b/train.py
index 8f624b1f..df7b61d4 100644
--- a/train.py
+++ b/train.py
@@ -153,7 +153,7 @@ def train(cfg,
     # lf = lambda x: 10 ** (hyp['lrf'] * x / epochs)  # exp ramp
     # lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inverse exp ramp
     # scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
-    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(opt.epochs * x) for x in [0.8]], gamma=0.1)
+    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(opt.epochs * x) for x in [0.8, 0.9]], gamma=0.1)
     scheduler.last_epoch = start_epoch - 1
 
     # # Plot lr schedule
@@ -333,9 +333,9 @@ def train(cfg,
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument('--epochs', type=int, default=100, help='number of epochs')
-    parser.add_argument('--batch-size', type=int, default=16, help='batch size')
-    parser.add_argument('--accumulate', type=int, default=4, help='number of batches to accumulate before optimizing')
+    parser.add_argument('--epochs', type=int, default=273, help='number of epochs')
+    parser.add_argument('--batch-size', type=int, default=32, help='batch size')
+    parser.add_argument('--accumulate', type=int, default=2, help='number of batches to accumulate before optimizing')
     parser.add_argument('--cfg', type=str, default='cfg/yolov3-spp.cfg', help='cfg file path')
     parser.add_argument('--data', type=str, default='data/coco_64img.data', help='coco.data file path')
     parser.add_argument('--multi-scale', action='store_true', help='train at (1/1.5)x - 1.5x sizes')
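
Note on the change: the patch adds a second LR decay step (milestones at 80% and 90% of training instead of 80% only) and changes the argparse defaults; the effective batch size is unchanged, since 16 x 4 and 32 x 2 both accumulate to 64 images per optimizer step. Below is a minimal, standalone sketch (not part of the patch) of the schedule the new MultiStepLR line produces, assuming a placeholder model and an assumed base LR of 0.01 (the real base LR in train.py comes from the hyp dict, which is outside this hunk). With the new --epochs default of 273, the milestones land at round(273 * 0.8) = 218 and round(273 * 0.9) = 246, each cutting the LR by 10x.

```python
# Sketch only: reproduces the new MultiStepLR schedule in isolation.
# Assumptions (not from the patch): base LR 0.01, a dummy nn.Linear model.
import torch
from torch import nn
from torch.optim import lr_scheduler

epochs = 273  # new --epochs default from the patch
model = nn.Linear(1, 1)  # placeholder model
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # assumed base LR

# Decay LR by 10x at 80% and 90% of training: epochs 218 and 246 for 273 epochs
scheduler = lr_scheduler.MultiStepLR(optimizer,
                                     milestones=[round(epochs * x) for x in [0.8, 0.9]],
                                     gamma=0.1)

lrs = []
for epoch in range(epochs):
    # optimizer.step() would run here during real training
    lrs.append(optimizer.param_groups[0]['lr'])
    scheduler.step()

print(lrs[217], lrs[218], lrs[245], lrs[246])  # 0.01, 0.001, 0.001, 0.0001
```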