commit c1ad7e6c2b (parent 8fe2bf1d7f)
Author: Glenn Jocher
Date: 2019-09-10 11:35:46 +02:00
2 changed files with 8 additions and 6 deletions

train.py

@@ -27,6 +27,7 @@ hyp = {'giou': 1.582,  # giou loss gain
        'lrf': -4.,  # final LambdaLR learning rate = lr0 * (10 ** lrf)
        'momentum': 0.97,  # SGD momentum
        'weight_decay': 0.0004569,  # optimizer weight decay
+       'fl_gamma': 0.5,  # focal loss gamma
        'hsv_s': 0.5703,  # image HSV-Saturation augmentation (fraction)
        'hsv_v': 0.3174,  # image HSV-Value augmentation (fraction)
        'degrees': 1.113,  # image rotation (+/- deg)
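
Note: fl_gamma is the focusing parameter gamma from the focal loss paper (https://arxiv.org/abs/1708.02002), FL(pt) = -alpha * (1 - pt) ** gamma * log(pt). gamma = 0 recovers the unweighted base loss, while larger gamma down-weights easy, well-classified examples. A minimal illustration of the modulating factor (the tensor pt below is hypothetical, not taken from the diff):

```python
import torch

# (1 - pt) ** gamma: pt near 1 (easy examples) drives the factor toward 0,
# so confident predictions contribute less to the total loss.
pt = torch.tensor([0.1, 0.5, 0.9, 0.99])
for gamma in (0.0, 0.5, 2.0):
    print(gamma, ((1 - pt) ** gamma).tolist())
```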
@@ -420,14 +421,14 @@ if __name__ == '__main__':
             # Mutate
             init_seeds(seed=int(time.time()))
-            s = [.15, .15, .15, .15, .15, .15, .15, .00, .02, .20, .20, .20, .20, .20, .20, .20]  # sigmas
+            s = [.15, .15, .15, .15, .15, .15, .15, .00, .02, .20, .15, .20, .20, .20, .20, .20, .20]  # sigmas
             for i, k in enumerate(hyp.keys()):
                 x = (np.random.randn(1) * s[i] + 1) ** 2.0  # plt.hist(x.ravel(), 300)
                 hyp[k] *= float(x)  # vary by sigmas

             # Clip to limits
-            keys = ['lr0', 'iou_t', 'momentum', 'weight_decay', 'hsv_s', 'hsv_v', 'translate', 'scale']
-            limits = [(1e-4, 1e-2), (0.00, 0.70), (0.60, 0.98), (0, 0.001), (0, .9), (0, .9), (0, .9), (0, .9)]
+            keys = ['lr0', 'iou_t', 'momentum', 'weight_decay', 'hsv_s', 'hsv_v', 'translate', 'scale', 'fl_gamma']
+            limits = [(1e-4, 1e-2), (0.00, 0.70), (0.60, 0.98), (0, 0.001), (0, .9), (0, .9), (0, .9), (0, .9), (0, 3)]
             for k, v in zip(keys, limits):
                 hyp[k] = np.clip(hyp[k], v[0], v[1])
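
The mutation step above perturbs every hyperparameter with a squared Gaussian multiplier centered near 1, then clips a whitelist of keys (now including fl_gamma, bounded to [0, 3]) back into range. A self-contained sketch of the same scheme, using a trimmed-down hyp dict for illustration:

```python
import time
import numpy as np

np.random.seed(int(time.time()) % 2 ** 32)  # stands in for init_seeds(seed=...)

hyp = {'lr0': 0.001, 'weight_decay': 0.0004569, 'fl_gamma': 0.5}  # trimmed example
s = [.15, .02, .15]  # one sigma per key, in dict order

for i, k in enumerate(hyp.keys()):
    x = (np.random.randn(1) * s[i] + 1) ** 2.0  # multiplicative noise, mean ~1
    hyp[k] *= float(x)  # vary by sigmas

# Clip mutated values back to their allowed ranges
keys = ['lr0', 'weight_decay', 'fl_gamma']
limits = [(1e-4, 1e-2), (0, 0.001), (0, 3)]
for k, v in zip(keys, limits):
    hyp[k] = float(np.clip(hyp[k], v[0], v[1]))

print(hyp)
```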

utils/utils.py

@@ -291,12 +291,12 @@ def wh_iou(box1, box2):
 class FocalLoss(nn.Module):
     # Wraps focal loss around existing loss_fcn() https://arxiv.org/pdf/1708.02002.pdf
     # i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=2.5)
-    def __init__(self, loss_fcn, alpha=1, gamma=0.5, reduction='mean'):
+    def __init__(self, loss_fcn, gamma=0.5, alpha=1, reduction='mean'):
         super(FocalLoss, self).__init__()
         loss_fcn.reduction = 'none'  # required to apply FL to each element
         self.loss_fcn = loss_fcn
-        self.alpha = alpha
         self.gamma = gamma
+        self.alpha = alpha
         self.reduction = reduction

     def forward(self, input, target):
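
The hunk ends at the forward() signature, so its body is not shown here. Below is a minimal sketch of how such a wrapper can apply the focal term, assuming the wrapped loss_fcn is a BCE-with-logits loss whose per-element value equals -log(pt); the class name FocalLossSketch and the recovery pt = exp(-loss) are assumptions for illustration, not the repo's code:

```python
import torch
import torch.nn as nn

class FocalLossSketch(nn.Module):
    # Illustrative stand-in for the FocalLoss wrapper above, based on
    # FL(pt) = -alpha * (1 - pt) ** gamma * log(pt).
    def __init__(self, loss_fcn, gamma=0.5, alpha=1, reduction='mean'):
        super().__init__()
        loss_fcn.reduction = 'none'  # need per-element losses to reweight them
        self.loss_fcn = loss_fcn
        self.gamma = gamma
        self.alpha = alpha
        self.reduction = reduction

    def forward(self, input, target):
        loss = self.loss_fcn(input, target)           # per-element -log(pt) for BCE
        pt = torch.exp(-loss)                         # recover pt from the BCE loss
        loss *= self.alpha * (1 - pt) ** self.gamma   # down-weight easy examples
        if self.reduction == 'mean':
            return loss.mean()
        if self.reduction == 'sum':
            return loss.sum()
        return loss

# Usage, mirroring the comment in the class above:
criteria = FocalLossSketch(nn.BCEWithLogitsLoss(), gamma=0.5)
loss = criteria(torch.randn(8), torch.empty(8).random_(2))
```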
@@ -325,7 +325,8 @@ def compute_loss(p, targets, model):  # predictions, targets, model
     CE = nn.CrossEntropyLoss()  # weight=model.class_weights
     if 'F' in arc:  # add focal loss
-        BCEcls, BCEobj, BCE, CE = FocalLoss(BCEcls), FocalLoss(BCEobj), FocalLoss(BCE), FocalLoss(CE)
+        g = h['fl_gamma']
+        BCEcls, BCEobj, BCE, CE = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g), FocalLoss(BCE, g), FocalLoss(CE, g)

     # Compute losses
     for i, pi in enumerate(p):  # layer index, layer predictions
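
Putting the two files together: the evolved fl_gamma travels from the hyp dict in train.py into compute_loss(), which wraps each criterion when the architecture string arc contains 'F'. A hedged sketch of that wiring, reusing FocalLossSketch from the previous block (h and arc are stand-ins for the locals inside compute_loss()):

```python
import torch.nn as nn

h = {'fl_gamma': 0.5}  # in the repo this comes from the model's hyp dict
arc = 'Fdefault'       # any arc string containing 'F' enables focal loss

BCEcls = nn.BCEWithLogitsLoss()
BCEobj = nn.BCEWithLogitsLoss()

if 'F' in arc:  # add focal loss with the evolved gamma
    g = h['fl_gamma']
    BCEcls, BCEobj = FocalLossSketch(BCEcls, g), FocalLossSketch(BCEobj, g)
```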