diff --git a/train.py b/train.py
index 718bfeba..deb74928 100644
--- a/train.py
+++ b/train.py
@@ -136,7 +136,7 @@ def train():
         for p in optimizer.param_groups:
             # lower param count allows more aggressive training settings: i.e. SGD ~0.1 lr0, ~0.9 momentum
-            p['lr'] *= 100 if opt.prebias else 10  # lr gain
+            p['lr'] *= 100  # lr gain
             if p.get('momentum') is not None:  # for SGD but not Adam
                 p['momentum'] *= 0.9