in lisa/sut_orchestrator/azure/platform_.py [0:0]
def _get_location_info(self, location: str, log: Logger) -> AzureLocation:
    cached_file_name = constants.CACHE_PATH.joinpath(
        f"azure_locations_{location}.json"
    )
    should_refresh: bool = True
    key = self._get_location_key(location)
    location_data = self._locations_data_cache.get(key, None)
    if not location_data:
        location_data = self._load_location_info_from_file(
            cached_file_name=cached_file_name, log=log
        )
    if location_data:
        delta = datetime.now() - location_data.updated_time
        # refresh cached locations every 1 day.
        if delta.days < 1:
            should_refresh = False
            log.debug(
                f"{key}: cache used: {location_data.updated_time}, "
                f"sku count: {len(location_data.capabilities)}"
            )
        else:
            log.debug(
                f"{key}: cache timeout: {location_data.updated_time}, "
                f"sku count: {len(location_data.capabilities)}"
            )
    else:
        log.debug(f"{key}: no cache found")
    if should_refresh:
        compute_client = get_compute_client(self)
        log.debug(f"{key}: querying")
        all_skus: List[AzureCapability] = []
        paged_skus = compute_client.resource_skus.list(
            f"location eq '{location}'"
        ).by_page()
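        # resource_skus.list() takes an OData filter, so "location eq '...'"
        # restricts the result to SKUs offered in this location; by_page()
        # yields the response one page at a time rather than as a flat list.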
        for skus in paged_skus:
            for sku_obj in skus:
                try:
                    if sku_obj.resource_type == "virtualMachines":
                        if sku_obj.restrictions and any(
                            restriction.type == "Location"
                            for restriction in sku_obj.restrictions
                        ):
                            # restricted on this location
                            continue
                        resource_sku = sku_obj.as_dict()
                        capability = self._resource_sku_to_capability(
                            location, sku_obj
                        )
                        # estimate vm cost for priority
                        assert isinstance(capability.core_count, int)
                        assert isinstance(capability.gpu_count, int)
                        estimated_cost = (
                            capability.core_count + capability.gpu_count * 100
                        )
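                        # For illustration (numbers are hypothetical): a
                        # 6-core, 1-GPU size would score 6 + 1 * 100 = 106,
                        # so GPU sizes sort after CPU-only sizes with a
                        # similar core count when ordering by estimated_cost.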
                        azure_capability = AzureCapability(
                            location=location,
                            vm_size=sku_obj.name,
                            capability=capability,
                            resource_sku=resource_sku,
                            estimated_cost=estimated_cost,
                        )
                        all_skus.append(azure_capability)
                except Exception as identifier:
                    log.error(f"unknown sku: {sku_obj}")
                    raise identifier
        location_data = AzureLocation(location=location, capabilities=all_skus)
        log.debug(f"{location}: saving to disk")
        with open(cached_file_name, "w") as f:
            json.dump(location_data.to_dict(), f)  # type: ignore
        log.debug(
            f"{key}: new data, sku count: {len(location_data.capabilities)}"
        )

    assert location_data
    self._locations_data_cache[key] = location_data
    return location_data
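
For context, a minimal sketch of how the returned AzureLocation might be consumed; `platform` (an initialized AzurePlatform) and `log` (a Logger) are assumed names, "westus2" is only an example location, and the method is normally called internally rather than like this. It orders capabilities by the estimated_cost heuristic computed above to surface the cheapest VM sizes first.

location_info = platform._get_location_info("westus2", log)
cheapest = sorted(location_info.capabilities, key=lambda c: c.estimated_cost)
for azure_cap in cheapest[:5]:
    log.info(
        f"{azure_cap.vm_size}: cores={azure_cap.capability.core_count}, "
        f"estimated cost={azure_cap.estimated_cost}"
    )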