in bindings/python/py_src/safetensors/torch.py [0:0]
def storage_ptr(tensor: torch.Tensor) -> int:
    """Return the data pointer of *tensor*'s underlying storage.

    Prefers the modern ``untyped_storage()`` accessor; on older torch
    releases (e.g. 1.10) that lack it, falls back to the legacy
    ``storage()`` API. Storages that expose no real memory (meta
    device) yield 0.
    """
    try:
        ptr = tensor.untyped_storage().data_ptr()
    except Exception:
        # torch==1.10 has no untyped_storage(); use the legacy accessor.
        try:
            ptr = tensor.storage().data_ptr()
        except NotImplementedError:
            # Meta storages cannot produce a pointer; treat as address 0.
            ptr = 0
    return ptr