in src/sagemaker_huggingface_inference_toolkit/transformers_utils.py [0:0]
def _is_gpu_available():
    """Return ``True`` if a GPU is visible to the installed DL framework.

    Returns:
        bool: whether at least one GPU device is available, as reported by
        TensorFlow (``tf.config.list_physical_devices``) when TF is
        installed, otherwise by PyTorch (``torch.cuda.is_available``).

    Raises:
        RuntimeError: if neither TensorFlow 2.0 nor PyTorch is installed.
    """
    # NOTE(review): when BOTH frameworks are installed, TensorFlow wins the
    # check — confirm this matches the framework actually used for serving.
    if is_tf_available():
        # The length comparison already yields a bool; no ternary needed.
        return len(tf.config.list_physical_devices("GPU")) > 0
    elif is_torch_available():
        return torch.cuda.is_available()
    else:
        raise RuntimeError(
            "At least one of TensorFlow 2.0 or PyTorch should be installed. "
            "To install TensorFlow 2.0, read the instructions at https://www.tensorflow.org/install/ "
            "To install PyTorch, read the instructions at https://pytorch.org/."
        )