in crypten/cryptensor.py [0:0]
def __getattribute__(self, name):
    """
    Makes sure that any function call on the tensor gets recorded in order
    to facilitate gradient computation using autograd.

    For clarity, this function attempts to fetch functions with the following priority:

    1. If name is in PROTECTED_ATTRIBUTES, fetch from the CrypTensor object.
    2. If requires_grad:
        a. Fetch from grad_fn.forward; if none exists
        b. raise NotImplementedError telling user to use `detach()`
    3. If no_grad or not requires_grad:
        a. Try to fetch function from CrypTensor object
            - If this fails and function is REQUIRED, raise error
        b. Fetch from grad_fn.forward, ignoring AutogradContext
    """
    # 1. If name is in PROTECTED_ATTRIBUTES, fetch from the CrypTensor object.
    if name in CrypTensor.PROTECTED_ATTRIBUTES:
        return object.__getattribute__(self, name)

    # Special case for copy_ inplace.
    if name == "copy_":
        return object.__getattribute__(self, "copy_")
    # replace Python built-in methods with corresponding method name:
    name = CrypTensor.PYTHON_BUILTIN.get(name, name)

    # determine inplace and modify name accordingly
    inplace = name.endswith("_") and not name.endswith("__")
    if inplace:
        if CrypTensor.AUTOGRAD_ENABLED and self.requires_grad:
            raise RuntimeError("Autograd is not supported for in-place functions.")

        # Note: native in-place support is now deprecated.
        # Instead, CrypTensors now compute out-of-place and
        # copy_ in-place.
        name = name[:-1]
        func = self.__getattribute__(name)

        # wrap the out-of-place function so that its result overwrites self:
        def oop_and_copy(*args, **kwargs):
            result = func(*args, **kwargs)
            self.copy_(result)
            return self

        return oop_and_copy
    # identify the AutogradFunction corresponding to the function name:
    grad_fn = get_grad_fn(name)

    # dispatch calls to size(), etc. without going through AutogradFunction:
    if grad_fn is None:
        return object.__getattribute__(self, name)

    # 2. If requires_grad:
    #     a. Fetch from grad_fn.forward; if none exists
    #     b. raise NotImplementedError telling user to use `detach()`
    if CrypTensor.AUTOGRAD_ENABLED and self.requires_grad:
        if not hasattr(grad_fn, "forward"):
            raise NotImplementedError(
                f"Autograd forward not implemented for {name}. Please use detach()."
            )
        return self._get_autograd_forward_function(name, grad_fn, inplace)
    # TODO: Add validation_mode / validate_correctness

    # 3. If no_grad or not requires_grad:
    #     a. Try to fetch function from CrypTensor object
    #         - If this fails and function is REQUIRED, raise error
    #     b. Fetch from grad_fn.forward, ignoring AutogradContext
    try:
        return object.__getattribute__(self, name)
    except AttributeError as e:
        if name in CrypTensor.REQUIRED_FUNCTIONS:
            raise e
        assert hasattr(grad_fn, "forward"), f"{name} has no forward function"
        return self._get_forward_function_no_ctx(grad_fn)
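

# ---------------------------------------------------------------------------
# Illustrative sketch appended to this excerpt (not part of the method above):
# it shows how the dispatch in __getattribute__ looks from a caller's side.
# Assumptions: a standard CrypTen setup with crypten.init(), crypten.cryptensor()
# and crypten.no_grad() available, and a factory that forwards the requires_grad
# keyword to the CrypTensor constructor; exact arguments may differ by version.
def _dispatch_example():
    import torch

    import crypten

    crypten.init()
    x = crypten.cryptensor(torch.tensor([1.0, 2.0]), requires_grad=True)

    # "mul" has an AutogradFunction, so with autograd enabled and
    # requires_grad=True the call is routed through grad_fn.forward
    # (priority 2) and recorded for a later backward().
    y = x.mul(2)

    # "size" has no AutogradFunction (get_grad_fn returns None), so it is
    # fetched directly from the object without any autograd bookkeeping.
    n = x.size()

    # In-place calls are rewritten as out-of-place + copy_. With autograd
    # enabled on a tensor that requires grad they raise RuntimeError, so the
    # in-place update is done under no_grad here (priority 3 path).
    with crypten.no_grad():
        x.add_(1)

    return y, n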