import torch

optimizer = torch.optim.Adam(net.parameters(), lr=0.01)  # optimizer with an initial learning rate of 0.01
# StepLR: every step_size (here 30) epochs, the learning rate is multiplied by gamma
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.9)
for epoch in range(100):
    for step, (x, y) in enumerate(train_loader):
        train(...)
        optimizer.step()
    scheduler.step()  # call once per epoch, after the optimizer updates
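A minimal, self-contained sketch of the StepLR behavior above; the single dummy parameter standing in for net.parameters() is an assumption made only so the snippet runs on its own:

import torch

param = torch.nn.Parameter(torch.zeros(1))  # stand-in for net.parameters() (assumption for illustration)
optimizer = torch.optim.Adam([param], lr=0.01)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.9)

for epoch in range(100):
    optimizer.step()   # placeholder for the real forward/backward/update on each batch
    scheduler.step()
    if (epoch + 1) % 30 == 0:
        # after every 30 scheduler.step() calls the learning rate has been multiplied by gamma=0.9
        print(epoch + 1, scheduler.get_last_lr())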
# ReduceLROnPlateau: with mode='min', the learning rate is multiplied by factor when the monitored
# loss stops decreasing; with mode='max' it reacts when the monitored metric stops increasing
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.9)
for epoch in range(100):
    for step, (x, y) in enumerate(train_loader):
        train(...)
        optimizer.step()
    scheduler.step(loss)  # pass the monitored metric (e.g. validation loss) once per epoch
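A similar hedged sketch for ReduceLROnPlateau; the constant hand-crafted loss is an assumption used only to force a plateau, so that with the default patience of 10 the learning rate is cut once the metric has failed to improve for that many epochs:

import torch

param = torch.nn.Parameter(torch.zeros(1))  # stand-in for net.parameters() (assumption for illustration)
optimizer = torch.optim.Adam([param], lr=0.01)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.9)

for epoch in range(30):
    optimizer.step()                               # placeholder for the real training step
    val_loss = 1.0                                 # constant loss -> no improvement is ever seen
    scheduler.step(val_loss)
    # LR stays at 0.01 until patience is exhausted, then is multiplied by factor=0.9
    print(epoch, optimizer.param_groups[0]['lr'])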