higher/optim.py [305:314]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    r"""A differentiable version of the Adam optimizer.

    This optimizer creates a gradient tape as it updates parameters."""

    def _update(self, grouped_grads: _GroupedGradsType, **kwargs) -> None:

        zipped = zip(self.param_groups, grouped_grads)
        for group_idx, (group, grads) in enumerate(zipped):
            amsgrad = group['amsgrad']
            beta1, beta2 = group['betas']
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
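
Usage sketch (not part of optim.py): how a differentiable Adam like the one excerpted above is typically obtained and stepped via higher's innerloop_ctx, so that each parameter update stays on the autograd graph and a meta-loss can be backpropagated through the inner loop. The model, data, loop length, and learning rates below are placeholders.

    import torch
    import torch.nn.functional as F
    import higher

    model = torch.nn.Linear(4, 1)
    meta_opt = torch.optim.SGD(model.parameters(), lr=1e-2)
    inner_opt = torch.optim.Adam(model.parameters(), lr=1e-1)

    x, y = torch.randn(8, 4), torch.randn(8, 1)

    meta_opt.zero_grad()
    with higher.innerloop_ctx(model, inner_opt) as (fmodel, diffopt):
        for _ in range(3):
            inner_loss = F.mse_loss(fmodel(x), y)
            diffopt.step(inner_loss)   # differentiable Adam update, kept on the tape
        meta_loss = F.mse_loss(fmodel(x), y)
        meta_loss.backward()           # gradients flow back through every inner step
    meta_opt.step()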



higher/optim.py [380:389]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    r"""A differentiable version of the AdamW optimizer.

    This optimizer creates a gradient tape as it updates parameters."""

    def _update(self, grouped_grads: _GroupedGradsType, **kwargs) -> None:

        zipped = zip(self.param_groups, grouped_grads)
        for group_idx, (group, grads) in enumerate(zipped):
            amsgrad = group['amsgrad']
            beta1, beta2 = group['betas']
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
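
For contrast, a minimal single-tensor sketch (not the library's implementation) of the update rule that distinguishes AdamW from Adam: the weight decay is applied directly to the parameter ("decoupled") rather than added to the gradient. All ops are out-of-place tensor ops, so the new parameter stays differentiable with respect to its inputs; the function name, argument names, and hyperparameter defaults are illustrative.

    import torch

    def adamw_style_step(p, g, exp_avg, exp_avg_sq, step, lr=1e-3,
                         beta1=0.9, beta2=0.999, eps=1e-8, weight_decay=1e-2):
        # Decoupled weight decay (AdamW): shrink the parameter directly,
        # rather than folding the decay term into the gradient as Adam does.
        p = p * (1 - lr * weight_decay)
        # Standard Adam moment updates with bias correction.
        exp_avg = beta1 * exp_avg + (1 - beta1) * g
        exp_avg_sq = beta2 * exp_avg_sq + (1 - beta2) * g * g
        bias_c1 = 1 - beta1 ** step
        bias_c2 = 1 - beta2 ** step
        denom = (exp_avg_sq / bias_c2).sqrt() + eps
        new_p = p - lr * (exp_avg / bias_c1) / denom
        return new_p, exp_avg, exp_avg_sq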



