def adjust_learning_rate()

in distributed_training/src_dir/util.py
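
Sets the learning rate for the current training step: the base rate args.lr is decayed by 10x every 30 epochs (with an extra 10x drop from epoch 80 onward), ramped up linearly over the first 5 epochs as warmup, logged on rank 0, and written into every parameter group of the optimizer.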


def adjust_learning_rate(optimizer, epoch, step, len_epoch, args):
    # Step decay: drop the learning rate by 10x every 30 epochs.
    factor = epoch // 30

    # Extra 10x drop once training reaches epoch 80.
    if epoch >= 80:
        factor = factor + 1

    lr = args.lr * (0.1 ** factor)

    # Linear warmup over the first 5 epochs: scale by the fraction of
    # warmup steps completed so far.
    if epoch < 5:
        lr = lr * float(1 + step + epoch * len_epoch) / (5. * len_epoch)

    # Only the rank-0 process logs the current learning rate.
    if args.rank == 0:
        print("epoch = {}, step = {}, lr = {}".format(epoch, step, lr))

    # Apply the new learning rate to every parameter group.
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
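
For context, a minimal usage sketch follows. It assumes an argparse-style args namespace exposing lr and rank (as the function requires); the model, criterion, and synthetic train_loader are hypothetical stand-ins for the real distributed training objects, not part of the original code.

import argparse
import torch

# Hypothetical args; the real script builds this from argparse / launcher env vars.
args = argparse.Namespace(lr=0.1, rank=0)

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, momentum=0.9)
criterion = torch.nn.CrossEntropyLoss()

# Synthetic batches standing in for a real DataLoader.
train_loader = [(torch.randn(8, 10), torch.randint(0, 2, (8,))) for _ in range(4)]
len_epoch = len(train_loader)

for epoch in range(90):
    for step, (inputs, targets) in enumerate(train_loader):
        # Recompute the LR every step so the warmup ramps smoothly within an epoch.
        adjust_learning_rate(optimizer, epoch, step, len_epoch, args)
        optimizer.zero_grad()
        loss = criterion(model(inputs), targets)
        loss.backward()
        optimizer.step()

Calling the function at every step (rather than once per epoch) matters only during warmup, where the rate depends on step; after epoch 5 it is constant within an epoch.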