This commit is contained in:
Glenn Jocher 2020-03-12 01:09:17 -07:00
parent 41bf46a419
commit 8a1f35eac6
1 changed file with 18 additions and 15 deletions


@@ -364,6 +364,11 @@ class FocalLoss(nn.Module):
         return loss


+def smooth_BCE(eps=0.1):  # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441
+    # return positive, negative label smoothing BCE targets
+    return 1.0 - 0.5 * eps, 0.5 * eps
+
+
 def compute_loss(p, targets, model):  # predictions, targets, model
     ft = torch.cuda.FloatTensor if p[0].is_cuda else torch.Tensor
     lcls, lbox, lobj = ft([0]), ft([0]), ft([0])
@@ -379,12 +384,7 @@ def compute_loss(p, targets, model):  # predictions, targets, model
     CE = nn.CrossEntropyLoss(reduction=red)  # weight=model.class_weights

     # class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3
-    smooth = False
-    if smooth:
-        e = 0.1  # class label smoothing epsilon
-        cp, cn = 1.0 - e, e / (model.nc - 0.99)  # class positive and negative labels
-    else:
-        cp, cn = 1.0, 0.0
+    cp, cn = smooth_BCE(eps=0.0)

     if 'F' in arc:  # add focal loss
         g = h['fl_gamma']
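For context on the new helper: smooth_BCE splits the smoothing budget eps evenly between the positive and negative BCE targets, per the linked issue. Below is a minimal, self-contained sketch (not part of the commit) of how such targets would typically fill a one-hot class-target matrix before nn.BCEWithLogitsLoss; the sizes and names (n, nc, tcls, t) are illustrative assumptions, not the repository's variables.

import torch
import torch.nn as nn

def smooth_BCE(eps=0.1):
    # return positive, negative label smoothing BCE targets
    return 1.0 - 0.5 * eps, 0.5 * eps

cp, cn = smooth_BCE(eps=0.1)               # cp = 0.95, cn = 0.05
n, nc = 4, 80                              # assumed: 4 target boxes, 80 classes
tcls = torch.randint(0, nc, (n,))          # assumed ground-truth class indices
t = torch.full((n, nc), cn)                # every class starts at the negative target
t[range(n), tcls] = cp                     # the true class gets the positive target
loss = nn.BCEWithLogitsLoss()(torch.zeros(n, nc), t)  # zeros stand in for class logits

With eps=0.0, as compute_loss now calls it, cp and cn reduce to 1.0 and 0.0, so the default behaviour (plain one-hot targets) is unchanged; smoothing only takes effect when a non-zero eps is passed.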
@@ -656,6 +656,7 @@ def print_model_biases(model):
     print('\nModel Bias Summary: %8s%18s%18s%18s' % ('layer', 'regression', 'objectness', 'classification'))
     multi_gpu = type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel)
     for l in model.yolo_layers:  # print pretrained biases
+        try:
             if multi_gpu:
                 na = model.module.module_list[l].na  # number of anchors
                 b = model.module.module_list[l - 1][0].bias.view(na, -1)  # bias 3x85
@@ -665,6 +666,8 @@ def print_model_biases(model):
             print(' ' * 20 + '%8g %18s%18s%18s' % (l, '%5.2f+/-%-5.2f' % (b[:, :4].mean(), b[:, :4].std()),
                                                     '%5.2f+/-%-5.2f' % (b[:, 4].mean(), b[:, 4].std()),
                                                     '%5.2f+/-%-5.2f' % (b[:, 5:].mean(), b[:, 5:].std())))
+        except:
+            pass


 def strip_optimizer(f='weights/last.pt'):  # from utils.utils import *; strip_optimizer()
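The last two hunks wrap the per-layer bias summary in a try/except so that a layer whose bias cannot be read is skipped instead of aborting the whole printout. A minimal sketch of the same pattern, with a made-up layer list standing in for model.module_list:

import torch.nn as nn

layers = [nn.Conv2d(3, 255, 1), nn.ReLU()]  # the ReLU has no bias to summarize
for i, layer in enumerate(layers):
    try:
        b = layer.bias.view(3, -1)          # 3 anchors x 85 outputs, as in the 3x85 bias above
        print(i, float(b.mean()), float(b.std()))
    except:                                 # bare except mirrors the commit
        pass                                # skip layers without a usable bias (here: the ReLU)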