def backward_hook_activation()

in captum/attr/_utils/lrp_rules.py


    def backward_hook_activation(module, grad_input, grad_output):
        """Backward hook to propagate relevance over non-linear activations."""
        if (
            isinstance(grad_input, tuple)
            and isinstance(grad_output, tuple)
            and len(grad_input) > len(grad_output)
        ):
            # Append any additional elements of grad_input if applicable.
            # This occurs when registering a backward hook on nn.Dropout
            # modules, which have an additional element of None in
            # grad_input.
            return grad_output + grad_input[len(grad_output) :]
        return grad_output
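
For context, below is a minimal, self-contained sketch of how a hook with this signature can be attached to an activation module via PyTorch's register_full_backward_hook. The toy ReLU module and input values are illustrative assumptions, not part of the Captum source.

    import torch
    import torch.nn as nn

    def backward_hook_activation(module, grad_input, grad_output):
        """Backward hook to propagate relevance over non-linear activations."""
        if (
            isinstance(grad_input, tuple)
            and isinstance(grad_output, tuple)
            and len(grad_input) > len(grad_output)
        ):
            # Preserve any extra grad_input entries (e.g. the None that
            # nn.Dropout contributes) while passing grad_output through.
            return grad_output + grad_input[len(grad_output) :]
        return grad_output

    relu = nn.ReLU()
    handle = relu.register_full_backward_hook(backward_hook_activation)

    x = torch.tensor([-1.0, 2.0, -3.0], requires_grad=True)
    relu(x).sum().backward()

    # Without the hook, x.grad would be [0., 1., 0.] (ReLU masks the
    # gradient at negative inputs). With the hook, grad_output replaces
    # grad_input, so the activation acts as an identity in the backward pass:
    print(x.grad)  # tensor([1., 1., 1.])

    handle.remove()

Returning grad_output in place of grad_input makes the non-linearity transparent to the backward pass, which matches the docstring's intent: relevance is propagated over the activation unchanged, so the LRP rule applied at the adjacent linear layer determines the attribution.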