From 418269d739b0fc990565e54e7c55128674a313f2 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 13 Mar 2020 16:51:30 -0700
Subject: [PATCH] FocalLoss() gamma and alpha default values

---
 train.py       | 2 +-
 utils/utils.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index 0a1d5525..b34c2293 100644
--- a/train.py
+++ b/train.py
@@ -32,7 +32,7 @@ hyp = {'giou': 3.54,  # giou loss gain
        'lrf': -4.,  # final LambdaLR learning rate = lr0 * (10 ** lrf)
        'momentum': 0.937,  # SGD momentum
        'weight_decay': 0.000484,  # optimizer weight decay
-       'fl_gamma': 0.5,  # focal loss gamma
+       'fl_gamma': 1.5,  # focal loss gamma
        'hsv_h': 0.0138,  # image HSV-Hue augmentation (fraction)
        'hsv_s': 0.678,  # image HSV-Saturation augmentation (fraction)
        'hsv_v': 0.36,  # image HSV-Value augmentation (fraction)
diff --git a/utils/utils.py b/utils/utils.py
index 99e44665..788fd63d 100755
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -340,7 +340,7 @@ def wh_iou(wh1, wh2):
 class FocalLoss(nn.Module):
     # Wraps focal loss around existing loss_fcn() https://arxiv.org/pdf/1708.02002.pdf
     # i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=2.5)
-    def __init__(self, loss_fcn, gamma=0.5, alpha=1):
+    def __init__(self, loss_fcn, gamma=1.5, alpha=0.25):
         super(FocalLoss, self).__init__()
         self.loss_fcn = loss_fcn
         self.gamma = gamma