updates

This commit is contained in:
parent f88b2bd153
commit b04ee34035

train.py: 2 changed lines (1 addition, 1 deletion)
@@ -68,7 +68,7 @@ def main(opt):
     # optimizer = torch.optim.SGD(model.parameters(), lr=.001, momentum=.9, weight_decay=5e-4, nesterov=True)
     # optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()))
     optimizer = torch.optim.Adam(model.parameters())
-    #optimizer.load_state_dict(checkpoint['optimizer'])
+    optimizer.load_state_dict(checkpoint['optimizer'])

     start_epoch = checkpoint['epoch'] + 1
     best_loss = checkpoint['best_loss']
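For context, the uncommented line restores the optimizer's internal state (e.g. Adam's moment estimates and step counts) when resuming training from a checkpoint. Below is a minimal resume sketch of the logic this change enables; the checkpoint path and the 'model' key are assumptions for illustration, while 'optimizer', 'epoch', and 'best_loss' are the keys visible in the diff.

import torch

def resume(model, checkpoint_path='checkpoint.pth'):
    # Load the saved checkpoint dict (path is a hypothetical example).
    checkpoint = torch.load(checkpoint_path, map_location='cpu')

    # Restore network weights ('model' key is an assumption, not shown in the diff).
    model.load_state_dict(checkpoint['model'])

    # Recreate the optimizer and restore its saved state so training
    # continues with the same moment estimates instead of starting cold.
    optimizer = torch.optim.Adam(model.parameters())
    optimizer.load_state_dict(checkpoint['optimizer'])

    # Resume bookkeeping from the saved epoch and best loss, as in train.py.
    start_epoch = checkpoint['epoch'] + 1
    best_loss = checkpoint['best_loss']
    return optimizer, start_epoch, best_loss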