Testing LR schedule

Testing LR schedule

import torch
import matplotlib.pyplot as plt

Lambda LR

Sets the learning rate of each parameter group to the initial lr times a given function. When last_epoch=-1, sets the initial lr as lr.

# Demonstrate LambdaLR: at each epoch, lr_t = initial_lr * lr_lambda(epoch).
model = torch.nn.Linear(2, 1)
# Deliberately large initial lr (100) so the decay curve is easy to see.
optimizer = torch.optim.SGD(model.parameters(), lr=100)


# PEP 8 (E731): use a def instead of assigning a lambda to a name.
def decay_factor(epoch):
    """Multiplicative factor applied to the initial lr: 0.65 ** epoch."""
    return 0.65 ** epoch


scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=decay_factor)
lrs = []

for i in range(10):
    optimizer.step()
    # Record the lr *before* scheduler.step() so lrs[i] is the lr used at epoch i.
    lrs.append(optimizer.param_groups[0]["lr"])
    scheduler.step()

plt.plot(range(10), lrs)
[<matplotlib.lines.Line2D at 0x7f7c3dfc59f0>]
../../_images/bdb7464f65720c7f49a917014c0ca4404045c4d8e6fe37ac401e558f51ac2af7.png

Exponential LR

# Demonstrate ExponentialLR: every scheduler.step() multiplies the lr by gamma.
# With gamma=1.2 the lr *grows* each epoch, which is what the plot shows.
model = torch.nn.Linear(2, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=100)
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=1.2)

lrs = []
for epoch in range(10):
    optimizer.step()
    # Capture the lr in effect for this epoch, then advance the schedule.
    lrs.append(optimizer.param_groups[0]["lr"])
    scheduler.step()

plt.plot(lrs)
[<matplotlib.lines.Line2D at 0x7f7c3bd38e20>]
../../_images/c013bae98fae4e8443201b7fd4fa79d893472e4e9e0c37cb46aa651c7f05e0b0.png

LinearLR

# Demonstrate LinearLR: the lr ramps linearly from start_factor * base_lr
# up to base_lr over total_iters steps (here: 50 -> 100 over 5 epochs),
# then stays constant.
model = torch.nn.Linear(2, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=100)
scheduler = torch.optim.lr_scheduler.LinearLR(
    optimizer, start_factor=0.5, total_iters=5
)

lrs = []
for epoch in range(10):
    optimizer.step()
    # Record the lr used this epoch before the scheduler advances it.
    lrs.append(optimizer.param_groups[0]["lr"])
    scheduler.step()

plt.plot(lrs)
[<matplotlib.lines.Line2D at 0x7f7c3bc1cbb0>]
../../_images/bb7c0c359979413e389da2725785c5840fbffe11eaa567f6d4a9d2ac8fdfab1c.png