in cookbooks/aws-parallelcluster-slurm/files/default/head_node_slurm/slurm/config_utils.py [0:0]
def get_min_gpu_count_and_type(instance_types, instance_types_info, logger) -> Tuple[int, str]:
    """Return min value for GPU and associated type in the instance type list.

    Only NVIDIA GPUs are counted; GPUs from other manufacturers are logged and
    ignored (count stays 0 for them). Iteration stops early as soon as an
    instance type with 0 supported GPUs is found, since 0 is the lower bound.

    :param instance_types: iterable of instance type names to inspect.
    :param instance_types_info: dict mapping instance type name to its EC2
        DescribeInstanceTypes-style info dict (may contain a "GpuInfo" key).
    :param logger: logger used to report unsupported GPU manufacturers.
    :return: tuple (min GPU count, GPU type of the instance with that min).
        NOTE: returns (None, "no_gpu_type") when instance_types is empty.
    :raises KeyError: if an instance type is missing from instance_types_info.
    """
    min_gpu_count = None
    gpu_type_min_count = "no_gpu_type"
    for instance_type in instance_types:
        gpu_info = instance_types_info[instance_type].get("GpuInfo", None)
        gpu_count = 0
        gpu_type = "no_gpu_type"
        if gpu_info:
            for gpus in gpu_info.get("Gpus", []):
                gpu_manufacturer = gpus.get("Manufacturer", "")
                if gpu_manufacturer.upper() == "NVIDIA":
                    gpu_count += gpus.get("Count", 0)
                    # Default to "" so a GPU entry without a "Name" key does not
                    # raise AttributeError on None.replace(...).
                    gpu_type = gpus.get("Name", "").replace(" ", "").lower()
                else:
                    logger.info(
                        "ParallelCluster currently does not offer native support for '%s' GPUs. "
                        "Please make sure to use a custom AMI with the appropriate drivers in order to leverage "
                        "GPUs functionalities",
                        gpu_manufacturer,
                    )
        if min_gpu_count is None or gpu_count < min_gpu_count:
            min_gpu_count = gpu_count
            gpu_type_min_count = gpu_type
            if min_gpu_count == 0:
                # gpu number lower bound
                break
    return min_gpu_count, gpu_type_min_count