def scheduler_from_config()

in seamseg/utils/misc.py
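
The function builds a PyTorch learning-rate scheduler from a configuration section exposing configparser-style typed getters (getint, getfloat, and a getstruct converter that parses Python-literal values). Below is a sketch of such a section; the section name and the concrete values are illustrative assumptions, and only the key names come from the code that follows:

[scheduler]
type = poly
update_mode = batch
epochs = 30
params = {"gamma": 0.9}
burn_in_steps = 500
burn_in_start = 0.25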


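# Note: `lr_scheduler` is referenced but not defined in this excerpt; at the top
# of misc.py it must be bound to a module exposing LambdaLR, StepLR, MultiStepLR
# and the custom BurnInLR wrapper used at the end of the function.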
def scheduler_from_config(scheduler_config, optimizer, epoch_length):
    # "params" carries the per-type hyper-parameters ("gamma", "from", "to",
    # "step_size", "milestones"); getstruct() parses it as a Python literal.
    # The original also asserted the type up front, but that made the final
    # ValueError branch unreachable, so validation is left to that branch.
    params = scheduler_config.getstruct("params")

    if scheduler_config["type"] == "linear":
        # In "batch" mode the schedule advances once per optimizer update,
        # otherwise once per epoch.
        if scheduler_config["update_mode"] == "batch":
            count = epoch_length * scheduler_config.getint("epochs")
        else:
            count = scheduler_config.getint("epochs")

        # Interpolate linearly from params["from"] to params["to"] over `count` steps.
        beta = float(params["from"])
        alpha = (float(params["to"]) - beta) / count

        # LambdaLR scales the optimizer's base lr by the returned factor, so
        # "from" and "to" are lr multipliers rather than absolute rates.
        scheduler = lr_scheduler.LambdaLR(optimizer, lambda it: it * alpha + beta)
    elif scheduler_config["type"] == "step":
        # Step decay: multiply the lr by params["gamma"] every params["step_size"] steps.
        scheduler = lr_scheduler.StepLR(optimizer, params["step_size"], params["gamma"])
    elif scheduler_config["type"] == "poly":
        if scheduler_config["update_mode"] == "batch":
            count = epoch_length * scheduler_config.getint("epochs")
        else:
            count = scheduler_config.getint("epochs")

        # Polynomial decay: the lr multiplier falls from 1 to 0 over `count`
        # steps, with curvature controlled by params["gamma"].
        scheduler = lr_scheduler.LambdaLR(optimizer, lambda it: (1 - float(it) / count) ** params["gamma"])
    elif scheduler_config["type"] == "multistep":
        # Decay by params["gamma"] at each step index listed in params["milestones"].
        scheduler = lr_scheduler.MultiStepLR(optimizer, params["milestones"], params["gamma"])
    else:
        raise ValueError("Unrecognized scheduler type {}, valid options: 'linear', 'step', 'poly', 'multistep'"
                         .format(scheduler_config["type"]))

    if scheduler_config.getint("burn_in_steps") != 0:
        # Optional burn-in wrapper: BurnInLR adjusts the lr during the first
        # burn_in_steps updates, starting from the burn_in_start factor, before
        # handing control back to the base schedule.
        scheduler = lr_scheduler.BurnInLR(scheduler,
                                          scheduler_config.getint("burn_in_steps"),
                                          scheduler_config.getfloat("burn_in_start"))

    return scheduler
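
A minimal usage sketch, assuming only the standard library and PyTorch. The getstruct accessor is emulated here with a configparser converter; the section name, model, optimizer, and epoch_length value are placeholders rather than part of the original code:

import ast
from configparser import ConfigParser

import torch.nn as nn
from torch.optim import SGD

# Registering a "struct" converter gives section proxies a getstruct() method,
# matching the accessor used by scheduler_from_config.
config = ConfigParser(converters={"struct": ast.literal_eval})
config.read_string("""
[scheduler]
type = linear
update_mode = batch
epochs = 10
params = {"from": 1.0, "to": 0.01}
burn_in_steps = 0
burn_in_start = 0.1
""")

model = nn.Linear(8, 2)                       # placeholder model
optimizer = SGD(model.parameters(), lr=0.01)

# epoch_length = batches per epoch; with this config the lr multiplier decays
# from 1.0 to 0.01 linearly over epochs * epoch_length updates.
scheduler = scheduler_from_config(config["scheduler"], optimizer, epoch_length=500)

for _ in range(10 * 500):
    optimizer.step()
    scheduler.step()  # stepped once per batch because update_mode = "batch"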