diff --git a/train.py b/train.py
index ff02e154..4f96e291 100644
--- a/train.py
+++ b/train.py
@@ -417,10 +417,10 @@ def prebias():
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument('--epochs', type=int, default=273)  # 500200 batches at bs 16, 117263 images = 273 epochs
-    parser.add_argument('--batch-size', type=int, default=16)  # effective bs = batch_size * accumulate = 16 * 4 = 64
+    parser.add_argument('--batch-size', type=int, default=1)  # effective bs = batch_size * accumulate = 16 * 4 = 64
     parser.add_argument('--accumulate', type=int, default=4, help='batches to accumulate before optimizing')
     parser.add_argument('--cfg', type=str, default='cfg/yolov3-spp.cfg', help='cfg file path')
-    parser.add_argument('--data', type=str, default='data/coco.data', help='*.data file path')
+    parser.add_argument('--data', type=str, default='data/coco_64img.data', help='*.data file path')
     parser.add_argument('--multi-scale', action='store_true', help='adjust (67% - 150%) img_size every 10 batches')
     parser.add_argument('--img-size', type=int, default=416, help='inference size (pixels)')
     parser.add_argument('--rect', action='store_true', help='rectangular training')
diff --git a/utils/gcp.sh b/utils/gcp.sh
index f39d3a64..c898ab60 100755
--- a/utils/gcp.sh
+++ b/utils/gcp.sh
@@ -4,10 +4,10 @@
 rm -rf sample_data yolov3 darknet apex coco cocoapi knife knifec
 git clone https://github.com/ultralytics/yolov3
 git clone https://github.com/NVIDIA/apex && cd apex && pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" . --user && cd .. && rm -rf apex
-conda install -yc conda-forge scikit-image pycocotools
+sudo conda install -yc conda-forge scikit-image pycocotools
 python3 -c "
 from yolov3.utils.google_utils import gdrive_download
-gdrive_download('1HaXkef9z6y5l4vUnCYgdmEAj61c6bfWO','coco.zip')"
+gdrive_download('1WQT6SOktSe8Uw6r10-2JhbEhMY5DJaph','coco.zip')"
 sudo shutdown

 # Re-clone
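
For context, a minimal sketch of the gradient-accumulation behaviour that the --accumulate flag describes: gradients from `accumulate` consecutive batches are summed before a single optimizer step, so the effective batch size is batch_size * accumulate (16 * 4 = 64 with the previous defaults). The loop below is illustrative only and is not the repository's actual training loop; the function name and the compute_loss argument are placeholders.

def train_one_epoch(model, dataloader, optimizer, compute_loss, accumulate=4):
    """Sketch: accumulate gradients over `accumulate` batches before each optimizer step."""
    model.train()
    optimizer.zero_grad()
    for i, (imgs, targets) in enumerate(dataloader):
        loss = compute_loss(model(imgs), targets)  # loss function supplied by the caller
        loss.backward()                            # gradients sum across successive batches
        if (i + 1) % accumulate == 0:              # step once per `accumulate` batches
            optimizer.step()
            optimizer.zero_grad()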