opacus/grad_sample/utils.py [13:32]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
):
    """
    Registers the decorated function as the ``grad_sampler`` of ``target_class_or_classes``,
    i.e. the function that will be invoked every time you want to compute a per-sample
    gradient for a module of one of those classes. The signature of every grad_sampler is
    always the same:

    >>> @register_grad_sampler(MyCustomModel)
    ... def compute_grad_sample(module, activations, backprops):
    ...    pass

    It may help to take a look at the existing grad_samplers inside Opacus, under ``opacus.grad_sample``.
    """

    def decorator(f):
        target_classes = (
            target_class_or_classes
            if isinstance(target_class_or_classes, Sequence)
            else [target_class_or_classes]
        )
        for target_class in target_classes:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
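
For context, here is a minimal sketch of how this decorator registers a per-sample gradient computation for a custom layer. ``MyLinear`` and its ``weight``/``bias`` attributes are hypothetical, invented for illustration; the einsums mirror the built-in ``nn.Linear`` sampler under ``opacus.grad_sample``. The sketch assumes ``activations`` arrives as a single tensor and that samplers return a dict mapping each parameter to its per-sample gradients; in some Opacus versions ``activations`` is a list of tensors instead.

import torch
import torch.nn as nn
import torch.nn.functional as F

from opacus.grad_sample.utils import register_grad_sampler


class MyLinear(nn.Module):
    # Toy layer used only to illustrate registration; not part of Opacus.
    def __init__(self, in_features: int, out_features: int):
        super().__init__()
        self.weight = nn.Parameter(torch.randn(out_features, in_features))
        self.bias = nn.Parameter(torch.zeros(out_features))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return F.linear(x, self.weight, self.bias)


@register_grad_sampler(MyLinear)
def compute_my_linear_grad_sample(module, activations, backprops):
    # Batched outer product over the leading (per-sample) dimension:
    # backprops (n, ..., out) x activations (n, ..., in) -> (n, out, in).
    return {
        module.weight: torch.einsum("n...i,n...j->nij", backprops, activations),
        module.bias: torch.einsum("n...k->nk", backprops),
    }

Note that registration alone only populates the registry; the sampler fires once the model is wrapped (e.g. by ``GradSampleModule`` or the ``PrivacyEngine``).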



opacus/validators/utils.py [46:67]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
):
    """
    Registers the decorated function as the ``fixer`` of ``target_class_or_classes``,
    i.e. the function that will be invoked every time you want to fix an incompatible
    module so that it can be trained with Opacus.
    You may supply your own ``validator_class`` that holds the registry of FIXERS.
    The signature of every fixer is always the same:

    >>> @register_module_fixer(MyCustomModel)
    ... def fix(module: nn.Module, **kwargs) -> nn.Module:
    ...    pass

    It may help to take a look at the existing fixers inside Opacus, under ``opacus.validators``.
    """

    def decorator(f):
        target_classes = (
            target_class_or_classes
            if isinstance(target_class_or_classes, Sequence)
            else [target_class_or_classes]
        )
        for target_class in target_classes:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
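
And a sketch of a fixer registration, mirroring the BatchNorm-to-GroupNorm replacement that Opacus itself ships under ``opacus.validators`` (re-registering it as below would override the built-in fixer). The ``min(32, ...)`` group count follows the library's convention and assumes it divides ``num_features``.

import torch.nn as nn

from opacus.validators.utils import register_module_fixer


@register_module_fixer([nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d])
def fix_batch_norm(module: nn.Module, **kwargs) -> nn.Module:
    # GroupNorm normalizes within each sample, so unlike BatchNorm it does not
    # mix information across the batch, which is what makes BatchNorm
    # incompatible with per-sample gradient computation.
    return nn.GroupNorm(
        num_groups=min(32, module.num_features),
        num_channels=module.num_features,
        affine=module.affine,
    )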



