# adjust_learning_rate() — originally defined in main.py [0:0]

def adjust_learning_rate(optimizer, epoch, params):
    """Set the learning rate of every param group for the given epoch.

    During warmup (epoch < params.warmup_epochs) a constant ``params.warmup_lr``
    is used; afterwards the base ``params.lr`` is decayed by a factor of
    ``params.lr_decay`` every ``params.step_size`` epochs (step decay).

    Args:
        optimizer: Optimizer exposing ``param_groups`` (a list of dicts with an
            ``'lr'`` key), e.g. a ``torch.optim.Optimizer``.
        epoch: Current epoch index (0-based).
        params: Config object with attributes ``lr``, ``lr_decay``,
            ``step_size``, ``warmup_epochs``, ``warmup_lr``.

    Returns:
        The learning rate that was applied (handy for logging). Callers that
        ignore the return value behave exactly as before.
    """
    if epoch < params.warmup_epochs:
        # Flat warmup rate; NOTE(review): this is constant, not a linear ramp —
        # confirm that is the intended warmup schedule.
        lr = params.warmup_lr
    else:
        # Step decay: multiply by lr_decay once per completed step_size window.
        lr = params.lr * (params.lr_decay ** (epoch // params.step_size))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return lr