def _get_optimizer()

in utils.py


import torch


def _get_optimizer(model,
                   optim: str,
                   lr: float,
                   momentum: float,
                   grad_clip: float):
    """Build the requested optimizer over the model's trainable parameters
    and attach a `grad_clip` attribute for the training loop to read."""
    if optim == 'sgd':
        optimizer = torch.optim.SGD(_get_grad_requiring_params(model),
                                    lr=lr,
                                    momentum=momentum)
        # clipping is left to the caller, which reads this attribute
        optimizer.grad_clip = grad_clip
        return optimizer
    elif optim == 'adagrad':
        optimizer = AdagradWithGradClip(_get_grad_requiring_params(model),
                                        lr=lr,
                                        grad_clip=grad_clip)
        # clipping is done internally by AdagradWithGradClip
        optimizer.grad_clip = 0
        return optimizer
    elif optim == 'adam':
        optimizer = torch.optim.Adam(_get_grad_requiring_params(model),
                                     lr=lr)
        optimizer.grad_clip = grad_clip
        return optimizer
    else:
        raise RuntimeError(f"unknown optimizer '{optim}' "
                           "- must be 'sgd', 'adagrad' or 'adam'")