From a7e21b43152793773f16543192fb1adeb6ddadb0 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 21 Jun 2019 10:24:06 +0200
Subject: [PATCH] updates

---
 train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index c0efe1c6..e5247683 100644
--- a/train.py
+++ b/train.py
@@ -208,7 +208,7 @@ def train(
 
         # Multi-Scale training
         if multi_scale:
-            if ((i + 1) / accumulate + nb * epoch) % 10 == 0:  # adjust (67% - 150%) every 10 batches
+            if (i + 1 + nb * epoch) / accumulate % 10 == 0:  # adjust (67% - 150%) every 10 batches
                 img_size = random.choice(range(img_size_min, img_size_max + 1)) * 32
                 print('img_size = %g' % img_size)
                 scale_factor = img_size / max(imgs.shape[-2:])
@@ -318,7 +318,7 @@ if __name__ == '__main__':
     parser.add_argument('--img-size', type=int, default=416, help='inference size (pixels)')
     parser.add_argument('--resume', action='store_true', help='resume training flag')
     parser.add_argument('--transfer', action='store_true', help='transfer learning flag')
-    parser.add_argument('--num-workers', type=int, default=4, help='number of Pytorch DataLoader workers')
+    parser.add_argument('--num-workers', type=int, default=0, help='number of Pytorch DataLoader workers')
     parser.add_argument('--dist-url', default='tcp://127.0.0.1:9999', type=str, help='distributed training init method')
     parser.add_argument('--rank', default=0, type=int, help='distributed training node rank')
     parser.add_argument('--world-size', default=1, type=int, help='number of nodes for distributed training')
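
Notes:

The revised multi-scale test first counts batches seen across all epochs
(i + 1 + nb * epoch, reading nb as the number of batches per epoch), then
divides by the gradient-accumulation interval, so the training resolution is
re-drawn once every 10 accumulated optimizer steps instead of following the
old mixed expression. Below is a minimal standalone sketch of that schedule,
using an equivalent integer-modulo form (for integer batch counts, the
patched float expression equals zero exactly when the count is a multiple of
accumulate * 10); the size_min/size_max defaults of 9 and 19 are assumptions
derived from the "67% - 150%" comment at a 416-pixel base, not values taken
from this patch:

    import random

    def maybe_rescale(i, epoch, nb, accumulate, size_min=9, size_max=19):
        # Batches seen so far across all epochs.
        batches_done = i + 1 + nb * epoch
        # Integer form of the patched test:
        # (batches_done / accumulate) % 10 == 0 holds exactly when
        # batches_done is a multiple of accumulate * 10.
        if batches_done % (accumulate * 10) == 0:
            # Re-draw a stride-32 training resolution, e.g. 288-608 px.
            return random.choice(range(size_min, size_max + 1)) * 32
        return None  # keep the current img_size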
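On the second hunk: a --num-workers default of 0 makes the PyTorch DataLoader
load batches in the main process, which sidesteps worker start-up issues
(e.g. on Windows or inside notebooks) at the cost of input-pipeline
throughput. Parallel loading can still be requested explicitly, for example:

    python3 train.py --num-workers 4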