in src/marketing/src/TikTok/src/cdc/deploy_cdc_layer.py [0:0]
def main(parsed_args):
    dag_start_date = datetime.now(timezone.utc).date()
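    # `parsed_args` is assumed to be an argparse.Namespace exposing a boolean
    # `debug` attribute (see the hypothetical CLI sketch at the end of this file).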
    debug = parsed_args.debug
    level = logging.DEBUG if debug else logging.INFO
    logging.getLogger().setLevel(level)
logging.info("Deploying CDC layer...")
logging.info(
"\n---------------------------------------\n"
"Using the following parameters from config:\n"
" RAW_PROJECT = %s\n"
" RAW_DATASET = %s\n"
" CDC_PROJECT = %s\n"
" CDC_DATASET = %s\n"
"---------------------------------------\n", RAW_PROJECT, RAW_DATASET,
CDC_PROJECT, CDC_DATASET)
logging.info("Creating required directories for generated files...")
_create_output_dir_structure()
_create_sql_output_dir_structure()
    client = cortex_bq_client.CortexBQClient(project=CDC_PROJECT,
                                             location=PROJECT_LOCATION)
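    # CortexBQClient appears to wrap the BigQuery client, adding the
    # `allow_telemetry` flag that is checked further below.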
if not "raw_to_cdc_tables" in SETTINGS:
logging.warning(
"❗ File '%s' is missing property `raw_to_cdc_tables`. "
"Skipping CDC DAG generation.", SETTINGS)
sys.exit()
cdc_layer_settings = SETTINGS.get("raw_to_cdc_tables")
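    # Each entry in `raw_to_cdc_tables` must provide base_table, raw_table,
    # row_identifiers, schema_file and load_frequency; partition_details and
    # cluster_details are optional (see the validation below).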
logging.info("Processing CDC tables...")
for cdc_table_settings in cdc_layer_settings:
# Making sure all required setting attributes are provided.
missing_cdc_setting_attr = []
for attr in ("base_table", "raw_table", "row_identifiers",
"schema_file", "load_frequency"):
if cdc_table_settings.get(attr) is None or cdc_table_settings.get(
attr) == "":
missing_cdc_setting_attr.append(attr)
if missing_cdc_setting_attr:
raise ValueError(
"Setting file is missing or has empty value for one or more "
f"attributes: {missing_cdc_setting_attr} ")

        table_name = cdc_table_settings.get("base_table")
        raw_table = cdc_table_settings.get("raw_table")
        row_identifiers = cdc_table_settings["row_identifiers"]
        schema_file = cdc_table_settings.get("schema_file")
        load_frequency = cdc_table_settings.get("load_frequency")
        partition_details = cdc_table_settings.get("partition_details")
        cluster_details = cdc_table_settings.get("cluster_details")
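        # partition_details and cluster_details are optional; .get() returns None
        # when they are missing, and create_table_from_schema is expected to treat
        # None as "no partitioning / no clustering".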
logging.info("-- Processing table '%s' --", table_name)
table_mapping_path = Path(SCHEMA_DIR, schema_file)
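        # create_bq_schema presumably derives the BigQuery schema for the CDC layer
        # from the field-mapping file referenced by `schema_file`.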
logging.info("Creating schema...")
schema = create_bq_schema(table_mapping_path, "cdc")
logging.debug("CDC Table schema: %s\n", repr_schema(schema))
logging.info("Creating CDC table...")
full_table_name = CDC_PROJECT + "." + CDC_DATASET + "." + table_name
# Check if CDC table exists.
cdc_table_exists = table_exists(client, full_table_name)
        if not cdc_table_exists:
            create_table_from_schema(bq_client=client,
                                     full_table_name=full_table_name,
                                     schema=schema,
                                     partition_details=partition_details,
                                     cluster_details=cluster_details)
            logging.info("Table created successfully.")
        else:
            logging.warning("❗ Table already exists. Not creating table.")

        # DAG PY file generation.
        logging.info("Generating DAG python file...")
        subs = {
            "cdc_sql_path": Path("cdc_sql_scripts", f"{table_name}.sql"),
            "load_frequency": load_frequency,
            "table_name": table_name,
            "cdc_dataset": CDC_DATASET,
            "project_id": CDC_PROJECT,
            "start_date": dag_start_date,
            "runtime_labels_dict": "",  # Placeholder for the label dict string.
            "bq_location": PROJECT_LOCATION
        }

        # If telemetry is opted in, convert the CORTEX JOB LABEL dict to a string
        # and assign it to the substitution.
        if client.allow_telemetry:
            subs["runtime_labels_dict"] = str(constants.CORTEX_JOB_LABEL)

        _generate_dag_from_template(
            template_file=DAG_TEMPLATE_PATH,
            generation_target_directory=OUTPUT_DIR_FOR_CDC,
            table_name=table_name,
            subs=subs)
        logging.info("Generated DAG python file.")

        # DAG SQL file generation.
        logging.info("Generating DAG SQL file...")
        template_vals = {
            "source_project_id": RAW_PROJECT,
            "target_project_id": CDC_PROJECT,
            "row_identifiers": row_identifiers,
            "source_ds": RAW_DATASET,
            "target_ds": CDC_DATASET,
            "target_table": table_name,
            "source_table": raw_table,
        }
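        # The CDC SQL template presumably renders a MERGE from the RAW table into
        # the CDC table, with `row_identifiers` acting as the join keys.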
        sql_code = render_template_file(template_path=CDC_SQL_TEMPLATE_PATH,
                                        mapping_path=table_mapping_path,
                                        subs=template_vals)
        generated_sql_path = Path(CDC_SQL_SCRIPTS_OUTPUT_DIR,
                                  f"{table_name}.sql")
        write_generated_sql_to_disk(path=generated_sql_path,
                                    generated_sql=sql_code)
        logging.info("Generated DAG SQL file: %s", generated_sql_path)

        # Populate table with test data using generated SQL script.
        if POPULATE_TEST_DATA:
            logging.info("Populating table with test data...")
            _populate_test_data(client=client,
                                path_to_script=generated_sql_path,
                                project=CDC_PROJECT,
                                dataset=CDC_DATASET,
                                table_name=table_name)
            logging.info("Test data populated.")
logging.info("Table processed successfully.")
logging.info("----------------------------")
logging.info("Processed all tables successfully.")
logging.info("Copying DAG config file...")
shutil.copyfile(src=DAG_CONFIG_INI_INPUT_PATH,
dst=DAG_CONFIG_INI_OUTPUT_PATH)
logging.info("DAG config file copied successfully.")
logging.info("✅ CDC layer deployed successfully!")