# build_optimizer() — from solver/build.py


def build_optimizer(args, model):
    """
    Create a torch optimizer from the run configuration.

    Trainable parameters are split into two groups: BatchNorm/bias
    parameters, which receive ``args.weight_decay_bn_bias``, and all
    remaining weights, which receive ``args.weight_decay_weight``.

    Args:
        args: config object exposing ``weight_decay_weight``,
            ``weight_decay_bn_bias``, ``lr_scheduler.base_lr`` and an
            ``optimizer`` namespace (``method``, ``momentum``, ``nesterov``).
        model: the ``nn.Module`` whose parameters will be optimized.

    Returns:
        A configured ``torch.optim`` optimizer instance.

    Raises:
        ValueError: if ``args.optimizer.method`` is not a supported method.
    """
    decay, no_decay = [], []
    for param_name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        # BN and bias parameters are exempt from the regular weight decay.
        is_bn_or_bias = ".bn" in param_name or ".bias" in param_name
        (no_decay if is_bn_or_bias else decay).append(param)

    params_group = [
        {
            "params": nn.ParameterList(decay),
            "weight_decay": float(args.weight_decay_weight),
            'group_name': 'weight',
        },
        {
            "params": nn.ParameterList(no_decay),
            "weight_decay": float(args.weight_decay_bn_bias),
            'group_name': 'bn_bias',
        },
    ]

    if args.optimizer.method == 'sgd':
        return torch.optim.SGD(
            params_group,
            lr=args.lr_scheduler.base_lr,
            momentum=args.optimizer.momentum,
            nesterov=args.optimizer.nesterov,
        )
    raise ValueError(f'no optimizer {args.optimizer.method}')