diff --git a/train.py b/train.py
index 9551db3c..2099dd60 100644
--- a/train.py
+++ b/train.py
@@ -93,11 +93,11 @@ def train(
     scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lf, last_epoch=start_epoch - 1)
     # scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[218, 245], gamma=0.1, last_epoch=start_epoch - 1)
 
-    # y = []
-    # for epoch in range(epochs):
-    #     scheduler.step()
-    #     y.append(optimizer.param_groups[0]['lr'])
-    # plt.plot(y)
+    y = []
+    for _ in range(epochs):
+        scheduler.step()
+        y.append(optimizer.param_groups[0]['lr'])
+    plt.plot(y)
 
     # Dataset
     dataset = LoadImagesAndLabels(train_path, img_size=img_size, augment=True)
@@ -246,11 +246,11 @@ def train(
 def print_mutation(hyp, results):
     # Write mutation results
     a = '%11s' * len(hyp) % tuple(hyp.keys())  # hyperparam keys
-    b = '%11.3g' * len(results) % results  # results (P, R, mAP, F1, test_loss)
-    c = '%11.4g' * len(hyp) % tuple(hyp.values())  # hyperparam values
+    b = '%11.4g' * len(hyp) % tuple(hyp.values())  # hyperparam values
+    c = '%11.3g' * len(results) % results  # results (P, R, mAP, F1, test_loss)
     print('\n%s\n%s\nEvolved fitness: %s\n' % (a, b, c))
     with open('evolve.txt', 'a') as f:
-        f.write(a + b + '\n')
+        f.write(c + a + '\n')
 
 
 if __name__ == '__main__':
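
Note on the second hunk (not part of the diff itself): print_mutation builds fixed-width columns by repeating a printf-style format specifier once per field and applying it to a tuple, so the keys, hyperparameter values, and result metrics line up when printed or appended to evolve.txt. A minimal sketch of that pattern follows; the hyperparameter names and result numbers are illustrative only, not taken from train.py.

    # Sketch only: hyperparameter names and result values below are illustrative.
    hyp = {'lr0': 0.001, 'momentum': 0.9, 'weight_decay': 0.0005}
    results = (0.52, 0.61, 0.58, 0.56, 1.23)  # e.g. P, R, mAP, F1, test_loss

    a = '%11s' * len(hyp) % tuple(hyp.keys())      # one 11-char column per key
    b = '%11.4g' * len(hyp) % tuple(hyp.values())  # values aligned under the keys
    c = '%11.3g' * len(results) % results          # result metrics, 3 significant digits

    print(a)  # header row of hyperparam keys
    print(b)  # row of hyperparam values
    print(c)  # results row, the string written to evolve.txt in the diff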