# src/utils/interactive/apply_config.py

# NOTE: imports reconstructed for this excerpt (HttpError.status_code and
# .reason assume google-api-python-client 2.x). The helpers referenced below
# (enable_apis, get_cloud_build_account, add_project_roles,
# create_bq_dataset_with_roles, create_storage_bucket_with_roles) and the
# SOURCE_PROJECT_APIS / TARGET_PROJECT_APIS / PROJECT_ROLES constants are
# assumed to be defined elsewhere in this package.
import logging
import typing

from google.api_core.exceptions import Forbidden, Unauthorized
from googleapiclient.errors import HttpError


def apply_all(config: typing.Dict[str, typing.Any]) -> bool:
"""Applies Cortex Data Foundation configuration changes:
* enables APIs
* adds necessary role bindings on projects for Cloud Build account
* creates datasets
* adds necessary role bindings on these datasets for Cloud Build account
* creates buckets
* adds necessary role bindings on these buckets for Cloud Build account
Args:
config (typing.Dict[str, typing.Any]): Data Foundation config dictionary
Returns:
bool: True if configuration was successful, False otherwise.
"""
source_project = config["projectIdSource"]
target_project = config["projectIdTarget"]
location = config["location"]
try:
logging.info("Enabling APIs in %s.", source_project)
try:
enable_apis(source_project, SOURCE_PROJECT_APIS)
except HttpError as ex:
if ex.status_code == 400 and "billing account" in ex.reason.lower():
logging.critical(("Project %s doesn't have "
"a Billing Account linked to it."),
source_project)
return False
else:
raise
if target_project != source_project:
try:
logging.info("Enabling APIs in %s.", target_project)
enable_apis(target_project, TARGET_PROJECT_APIS)
except HttpError as ex:
if (ex.status_code == 400 and
"billing account" in ex.reason.lower()):
logging.critical(("Project %s doesn't have "
"a Billing Account linked to it."),
source_project)
return False
else:
raise
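        # The source project's Cloud Build service account (by default
        # <project-number>@cloudbuild.gserviceaccount.com) runs the
        # deployment, so it needs roles on both projects and all resources.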
cloud_build_account = get_cloud_build_account(source_project)
logging.info("Using Cloud Build account %s.", cloud_build_account)
# Add project-wide role binding for Cloud Build account
add_project_roles(source_project, cloud_build_account, PROJECT_ROLES)
if target_project != source_project:
add_project_roles(target_project, cloud_build_account,
PROJECT_ROLES)
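        # Collect dataset names from every enabled workload: "reporting"
        # datasets are created in the target project, everything else in
        # the source project.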
dataset_dicts = []
source_datasets = []
reporting_datasets = []
dataset_dicts.append(config["k9"]["datasets"])
if config.get("deploySAP"):
dataset_dicts.append(config["SAP"]["datasets"])
if config.get("deploySFDC"):
dataset_dicts.append(config["SFDC"]["datasets"])
if config.get("deployOracleEBS"):
dataset_dicts.append(config["OracleEBS"]["datasets"])
if config.get("deployMarketing"):
if config["marketing"].get("deployGoogleAds"):
dataset_dicts.append(
config["marketing"]["GoogleAds"]["datasets"])
if config["marketing"].get("deployCM360"):
dataset_dicts.append(config["marketing"]["CM360"]["datasets"])
if config["marketing"].get("deployTikTok"):
dataset_dicts.append(config["marketing"]["TikTok"]["datasets"])
if config["marketing"].get("deployLiveRamp"):
dataset_dicts.append(
config["marketing"]["LiveRamp"]["datasets"])
if config["marketing"].get("deployMeta"):
dataset_dicts.append(config["marketing"]["Meta"]["datasets"])
if config["marketing"].get("deploySFMC"):
dataset_dicts.append(config["marketing"]["SFMC"]["datasets"])
if config["marketing"].get("deployDV360"):
dataset_dicts.append(config["marketing"]["DV360"]["datasets"])
if config["marketing"].get("deployGA4"):
                ga4_datasets = config["marketing"]["GA4"]["datasets"]
                dataset_dicts.append({
                    "cdc": ga4_datasets["cdc"][0]["name"],
                    "reporting": ga4_datasets["reporting"],
                })
        for dataset_dict in dataset_dicts:
            for key, dataset in dataset_dict.items():
                add_to = (reporting_datasets
                          if key == "reporting" else source_datasets)
                # Deduplicate and skip empty names. The membership test must
                # be on the dataset name, not on the (key, name) tuple.
                if dataset and dataset not in add_to:
                    add_to.append(dataset)
# Create datasets (if needed),
# and add "roles/bigquery.dataEditor" binding on them
# for the source project's Cloud Build account.
logging.info("Creating datasets in %s.", source_project)
for ds in source_datasets:
create_bq_dataset_with_roles(source_project, location, ds,
cloud_build_account,
["roles/bigquery.dataEditor"])
# If Cross Media is enabled, create VertexAI processing dataset.
# It cannot be in a multi-location.
if config["k9"].get("deployCrossMedia"):
ds = config["VertexAI"]["processingDataset"]
vertexai_region = location.lower()
if vertexai_region == "us":
vertexai_region = "us-central1"
elif vertexai_region == "eu":
vertexai_region = "europe-west4"
create_bq_dataset_with_roles(source_project, vertexai_region, ds,
cloud_build_account,
["roles/bigquery.dataEditor"])
        if target_project != source_project:
            # The check only gates this log line; reporting datasets are
            # always created in the target project below.
            logging.info("Creating datasets in %s.", target_project)
for ds in reporting_datasets:
create_bq_dataset_with_roles(target_project, location, ds,
cloud_build_account,
["roles/bigquery.dataEditor"])
# Create target storage bucket (if needed),
# and add "roles/storage.admin" binding on it for Cloud Build account.
create_storage_bucket_with_roles(source_project, location,
config["targetBucket"],
cloud_build_account,
["roles/storage.admin"])
if config.get("deployMarketing"):
marketing = config["marketing"]
if marketing.get("deployCM360"):
create_storage_bucket_with_roles(
source_project, location,
marketing["CM360"]["dataTransferBucket"],
cloud_build_account, ["roles/storage.admin"])
if marketing.get("deploySFMC"):
create_storage_bucket_with_roles(
source_project, location,
marketing["SFMC"]["fileTransferBucket"],
cloud_build_account, ["roles/storage.admin"])
except (HttpError, Forbidden, Unauthorized) as ex:
if isinstance(ex, HttpError):
message = ex.reason
if ex.status_code not in (401, 403):
raise
else:
message = ex.message
logging.critical("You do not have sufficient permissions: %s", message)
return False
return True
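

# A minimal sketch of driving apply_all() by hand, with hypothetical project
# IDs and bucket/dataset names; the real deployment builds this dictionary
# from config.json. Note that running it makes real changes in the projects.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    sample_config = {
        "projectIdSource": "my-source-project",  # hypothetical
        "projectIdTarget": "my-target-project",  # hypothetical
        "location": "US",
        "targetBucket": "my-cortex-target-bucket",  # hypothetical
        # k9 datasets are always read; workload-specific keys are only
        # read when the matching deploy* flag is set.
        "k9": {
            "datasets": {
                "processing": "K9_PROCESSING",  # hypothetical names
                "reporting": "K9_REPORTING",
            },
        },
    }
    if not apply_all(sample_config):
        logging.critical("Applying the configuration failed.")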