This commit is contained in:
Glenn Jocher 2018-11-23 19:45:39 +01:00
parent 82124805f8
commit ab9ee6aa9a
3 changed files with 11 additions and 7 deletions

View File

@ -5,7 +5,7 @@ from utils.datasets import *
from utils.utils import *
parser = argparse.ArgumentParser(prog='test.py')
parser.add_argument('-batch_size', type=int, default=64, help='size of each image batch')
parser.add_argument('-batch_size', type=int, default=32, help='size of each image batch')
parser.add_argument('-cfg', type=str, default='cfg/yolov3.cfg', help='path to model config file')
parser.add_argument('-data_config_path', type=str, default='cfg/coco.data', help='path to data config file')
parser.add_argument('-weights_path', type=str, default='weights/yolov3.pt', help='path to weights file')

View File

@ -14,6 +14,7 @@ parser.add_argument('-cfg', type=str, default='cfg/yolov3.cfg', help='cfg file p
parser.add_argument('-img_size', type=int, default=32 * 13, help='size of each image dimension')
parser.add_argument('-resume', default=False, help='resume training flag')
parser.add_argument('-batch_report', default=False, help='report TP, FP, FN, P and R per batch (slower)')
parser.add_argument('-optimizer', default='SGD', help='Optimizer')
opt = parser.parse_args()
print(opt)
@ -68,9 +69,10 @@ def main(opt):
# p.requires_grad = False
# Set optimizer
# optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()))
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()),
lr=1e-3, momentum=.9, weight_decay=5e-4)
if opt.optimizer == 'Adam':
optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-4, weight_decay=5e-4)
else:
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-3, momentum=.9, weight_decay=5e-4)
start_epoch = checkpoint['epoch'] + 1
if checkpoint['optimizer'] is not None:
@ -91,8 +93,10 @@ def main(opt):
model.to(device).train()
# Set optimizer
# optimizer = torch.optim.Adam(model.parameters(), lr=1e-4, weight_decay=5e-4)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3, momentum=.9, weight_decay=5e-4)
if opt.optimizer == 'Adam':
optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-4, weight_decay=5e-4)
else:
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-3, momentum=.9, weight_decay=5e-4)
# Set scheduler
# scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[54, 61], gamma=0.1)

View File

@ -11,7 +11,7 @@ gsutil cp gs://ultralytics/yolov3.pt yolov3/weights
python3 detect.py
# Test
python3 test.py -img_size 416 -weights_path weights/latest.pt -conf_thres 0.5
python3 test.py -img_size 416 -weights_path weights/latest.pt
# Download and Test
sudo rm -rf yolov3 && git clone https://github.com/ultralytics/yolov3 && cd yolov3