bitsandbytes/optim/adagrad.py [29:38]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if not 0.0 <= lr:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if not 0.0 <= weight_decay:
            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
        if not 0.0 <= eps:
            raise ValueError("Invalid epsilon value: {}".format(eps))
        if initial_accumulator_value != 0.0:
            raise ValueError('Initial accumulator value != 0.0 not supported!')
        if lr_decay != 0.0:
            raise ValueError('Lr Decay != 0.0 not supported!')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



bitsandbytes/optim/adagrad.py [46:55]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if not 0.0 <= lr:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if not 0.0 <= weight_decay:
            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
        if not 0.0 <= eps:
            raise ValueError("Invalid epsilon value: {}".format(eps))
        if initial_accumulator_value != 0.0:
            raise ValueError('Initial accumulator value != 0.0 not supported!')
        if lr_decay != 0.0:
            raise ValueError('Lr Decay != 0.0 not supported!')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
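
A minimal deduplication sketch, assuming both snippets are the argument checks at the top of the `__init__` methods of the Adagrad variants in this file: the shared validation could move into one module-level helper that each constructor calls. The helper name `_check_args` is hypothetical and not part of the bitsandbytes API; the checks themselves are copied verbatim from the duplicated blocks above.

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _check_args(lr, weight_decay, eps, initial_accumulator_value, lr_decay):
    # Hypothetical helper collecting the validation duplicated above,
    # so the supported ranges are defined in exactly one place.
    if not 0.0 <= lr:
        raise ValueError("Invalid learning rate: {}".format(lr))
    if not 0.0 <= weight_decay:
        raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
    if not 0.0 <= eps:
        raise ValueError("Invalid epsilon value: {}".format(eps))
    if initial_accumulator_value != 0.0:
        raise ValueError("Initial accumulator value != 0.0 not supported!")
    if lr_decay != 0.0:
        raise ValueError("Lr Decay != 0.0 not supported!")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Each constructor would then replace its inline checks with a single call, e.g. `_check_args(lr, weight_decay, eps, initial_accumulator_value, lr_decay)`, so a future change to the accepted values happens once instead of in every class.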



