Glenn Jocher 2019-04-18 21:56:50 +02:00
parent cf5bbc97ee
commit 9b6347ac6c
2 changed files with 13 additions and 11 deletions

View File

@@ -18,7 +18,7 @@ hyp = {'k': 8.4875,  # loss multiple
        'conf': 0.88873,  # conf loss fraction
        'iou_t': 0.10,  # iou target-anchor training threshold
        'lr0': 0.001,  # initial learning rate
-       'lrf': -4,  # final learning rate = lr0 * (10 ** lrf)
+       'lrf': -5,  # final learning rate = lr0 * (10 ** lrf)
        'momentum': 0.9,  # SGD momentum
        'weight_decay': 0.0005,  # optimizer weight decay
        }
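
A minimal sketch, not from the repo: per the inline comment, 'lrf' is the exponent of the target final learning rate, lr0 * (10 ** lrf), so moving it from -4 to -5 tightens that target from 1e-7 to 1e-8 (values taken from this diff).

hyp = {'lr0': 0.001, 'lrf': -5}            # values from this diff
final_lr = hyp['lr0'] * 10 ** hyp['lrf']   # 0.001 * 10**-5 = 1e-8 (was 1e-7 with lrf=-4)
print(final_lr)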
@@ -88,11 +88,14 @@ def train(
     # Scheduler (reduce lr at epochs 218, 245, i.e. batches 400k, 450k)
     # lf = lambda x: 1 - x / epochs  # linear ramp to zero
     # lf = lambda x: 10 ** (-2 * x / epochs)  # exp ramp to lr0 * 1e-2
-    lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inv exp ramp to lr0 * 1e-2
-    scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
-    # scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[218, 245], gamma=0.1, last_epoch=start_epoch - 1)
+    # lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inv exp ramp to lr0 * 1e-2
+    # scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
+    scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
+                                               milestones=[218, 245],
+                                               gamma=0.1,
+                                               last_epoch=start_epoch - 1)
 
-    # # Plot lr schedule
+    # Plot lr schedule
     # y = []
     # for _ in range(epochs):
     #     scheduler.step()

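A minimal sketch of the effect of this change, under assumptions not in the diff (a dummy one-layer model and epochs = 273, roughly matching the "batches 400k, 450k" note above); it is not the repo's training loop. It shows the MultiStepLR now enabled: the learning rate is multiplied by gamma = 0.1 at epochs 218 and 245, instead of decaying continuously via the commented-out LambdaLR ramp.

import torch
import torch.optim as optim

model = torch.nn.Linear(1, 1)  # stand-in for the real detection model
optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9, weight_decay=0.0005)
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[218, 245], gamma=0.1)

lrs = []
for epoch in range(273):                         # assumed total epoch count
    lrs.append(optimizer.param_groups[0]['lr'])  # LR used for this epoch
    # ... one epoch of training would run here ...
    scheduler.step()

print(sorted(set(lrs), reverse=True))  # three plateaus: ~1e-3, ~1e-4, ~1e-5
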
View File

@@ -142,8 +142,8 @@ class LoadImagesAndLabels(Dataset):  # for training/testing
             x.replace('images', 'labels').replace('.bmp', '.txt').replace('.jpg', '.txt').replace('.png', '.txt')
             for x in self.img_files]
 
-        if n < 200:  # preload all images into memory if possible
-            self.imgs = [cv2.imread(img_files[i]) for i in range(n)]
+        # if n < 200:  # preload all images into memory if possible
+        #     self.imgs = [cv2.imread(img_files[i]) for i in range(n)]
 
     def __len__(self):
         return len(self.img_files)
@@ -152,10 +152,9 @@ class LoadImagesAndLabels(Dataset):  # for training/testing
         img_path = self.img_files[index]
         label_path = self.label_files[index]
 
-        if hasattr(self, 'imgs'):
-            img = self.imgs[index]  # BGR
-        else:
-            img = cv2.imread(img_path)  # BGR
+        # if hasattr(self, 'imgs'):
+        #     img = self.imgs[index]  # BGR
+        img = cv2.imread(img_path)  # BGR
         assert img is not None, 'File Not Found ' + img_path
 
         augment_hsv = True
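
A minimal sketch of the on-demand loading that __getitem__ now performs (the example path is hypothetical, not from the repo): the label path comes from the replace chain shown in the context lines above, and the image is read from disk on every access instead of being served from a preloaded self.imgs cache.

import cv2

img_path = 'coco/images/train2014/000000000009.jpg'  # hypothetical example path
label_path = (img_path.replace('images', 'labels')
              .replace('.bmp', '.txt')
              .replace('.jpg', '.txt')
              .replace('.png', '.txt'))
print(label_path)  # coco/labels/train2014/000000000009.txt

img = cv2.imread(img_path)  # BGR ndarray, or None if the file is missing
assert img is not None, 'File Not Found ' + img_path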