in common/init_deployment_config.py [0:0]

import logging
import typing
import uuid

from google.api_core.exceptions import (BadRequest, Forbidden, ServerError,
                                        Unauthorized)

# Internal Cortex helper modules (import paths assumed from the repo layout).
from common.py_libs import bq_helper
from common.py_libs import cortex_bq_client
from common.py_libs import resource_validation_helper


def _validate_config_resources(config: typing.Dict[str, typing.Any]) -> bool:
source = config["projectIdSource"]
target = config["projectIdTarget"]
location = config["location"]
    # Check that we can create datasets in both the source and target projects.
projects = [source]
if source != target:
projects.append(target)
for project in projects:
bq_client = cortex_bq_client.CortexBQClient(project=project,
location=location)
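        # A random hex suffix keeps the temporary probe dataset name unique.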
temp_dataset_name = f"tmp_cortex_{uuid.uuid4().hex}"
full_temp_dataset_name = f"{project}.{temp_dataset_name}"
try:
bq_helper.create_dataset(bq_client, full_temp_dataset_name,
location, True)
logging.info(
"✅ BigQuery in project `%s` is available "
"for writing.", project)
except (Forbidden, Unauthorized):
logging.exception(
"🛑 Insufficient permissions to create datasets "
"in project `%s`. 🛑", project)
return False
except (BadRequest, ServerError):
logging.exception(
"🛑 Error when trying to create a BigQuery dataset "
"in project `%s`. 🛑", project)
return False
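        # The finally block removes the probe dataset even when creation
        # failed; not_found_ok=True makes the cleanup tolerant of that case.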
finally:
try:
bq_client.delete_dataset(full_temp_dataset_name,
not_found_ok=True)
except BadRequest:
logging.warning(
"⚠️ Couldn't delete temporary dataset `%s`. "
"Please delete it manually. ⚠️", full_temp_dataset_name)
# targetBucket must exist and be writable
buckets = [
resource_validation_helper.BucketConstraints(
str(config["targetBucket"]), True, location)
]
    # The K9 datasets must be writable if they already exist.
    # If they don't exist, they will be created later.
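    # DatasetConstraints fields (inferred from usage here): full dataset name,
    # whether it must already exist, whether it must be writable, location.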
datasets = [
resource_validation_helper.DatasetConstraints(
f'{source}.{config["k9"]["datasets"]["processing"]}', False, True,
location),
resource_validation_helper.DatasetConstraints(
f'{target}.{config["k9"]["datasets"]["reporting"]}', False, True,
location),
        # The Vertex AI processing dataset must be in the Vertex AI region.
resource_validation_helper.DatasetConstraints(
f'{source}.{config["VertexAI"]["processingDataset"]}', False, True,
config["VertexAI"]["region"])
]
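    # validate_resources presumably returns True only when every bucket and
    # dataset constraint above is satisfied.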
return resource_validation_helper.validate_resources(buckets, datasets)
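
# A minimal, hypothetical usage sketch. The config keys mirror exactly what
# _validate_config_resources reads above; the concrete values are invented
# placeholders, not defaults from the repo.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    sample_config = {
        "projectIdSource": "my-source-project",      # hypothetical project ID
        "projectIdTarget": "my-target-project",      # hypothetical project ID
        "location": "US",
        "targetBucket": "my-cortex-target-bucket",   # must exist and be writable
        "k9": {
            "datasets": {
                "processing": "K9_PROCESSING",
                "reporting": "K9_REPORTING",
            }
        },
        "VertexAI": {
            "processingDataset": "CORTEX_VERTEX_AI_PROCESSING",
            "region": "us-central1",
        },
    }
    if not _validate_config_resources(sample_config):
        raise SystemExit("🛑 Resource validation failed. 🛑")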