select GPU0 if multiple available

This commit is contained in:
Glenn Jocher 2019-02-16 14:33:52 +01:00
parent ee4abc8cdf
commit c828f5459f
2 changed files with 12 additions and 9 deletions

View File

@@ -43,10 +43,10 @@ def train(
     if resume:
         checkpoint = torch.load(latest, map_location='cpu')

+        # Load weights to resume from
         model.load_state_dict(checkpoint['model'])
-        if torch.cuda.device_count() > 1:
-            raise Exception('Multi-GPU issue: https://github.com/ultralytics/yolov3/issues/21')
+        # if torch.cuda.device_count() > 1:
+        #     print('Using ', torch.cuda.device_count(), ' GPUs')
         # model = nn.DataParallel(model)
         model.to(device).train()
@@ -72,9 +72,7 @@ def train(
     # Initialize model with darknet53 weights (optional)
     load_darknet_weights(model, os.path.join(weights, 'darknet53.conv.74'))

-    if torch.cuda.device_count() > 1:
-        raise Exception('Multi-GPU not currently supported: https://github.com/ultralytics/yolov3/issues/21')
-        # print('Using ', torch.cuda.device_count(), ' GPUs')
+    # if torch.cuda.device_count() > 1:
     # model = nn.DataParallel(model)
     model.to(device).train()

View File

@@ -6,7 +6,6 @@ def init_seeds(seed=0):
     if torch.cuda.is_available():
         torch.cuda.manual_seed(seed)
         torch.cuda.manual_seed_all(seed)
-        # torch.cuda.set_device(0)  # OPTIONAL: Set your GPU if multiple available


 def select_device(force_cpu=False):
@@ -14,5 +13,11 @@ def select_device(force_cpu=False):
         device = torch.device('cpu')
     else:
         device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
+        if torch.cuda.device_count() > 1:
+            print('WARNING Using GPU0 Only. Multi-GPU issue: https://github.com/ultralytics/yolov3/issues/21')
+            torch.cuda.set_device(0)  # OPTIONAL: Set your GPU if multiple available
+            # print('Using ', torch.cuda.device_count(), ' GPUs')

     print('Using ' + str(device) + '\n')
     return device