# cicd-deployers/metadata_deployer.py
def run_deploy_data_mesh(config_file, tag_template_directories, policy_directories, lake_directories,
                         annotation_directories, overwrite):
    """Runs the 'deploy_data_mesh.py' script with provided arguments.

    Checks out the Cortex Data Foundation repo as a git submodule (replacing
    any stale checkout), ensures the Data Mesh dependencies are listed in the
    repo's requirements file, installs them, and finally invokes the deploy
    script.

    Args:
        config_file (str): Path to the configuration JSON file.
        tag_template_directories (str): Path to the tag template directories.
        policy_directories (str): Path to the policy taxonomies directories.
        lake_directories (str): Path to the lake directories.
        annotation_directories (str): Path to the annotation directories.
        overwrite (bool | str): Whether to overwrite existing data. Treated as
            false for a falsy bool or the string "false" (case-insensitive),
            since CI pipelines often pass booleans through as strings.

    Raises:
        subprocess.CalledProcessError: If any git/pip/deploy command fails.
    """
    src_code_path = "metadata/metadata-deployer/cortex_src_code"
    # Remove any previous checkout (working tree and git index entry) so the
    # submodule add below starts from a clean state.
    if os.path.exists(src_code_path):
        shutil.rmtree(src_code_path)
        subprocess.run(["git", "rm", "-rf", "--cached", src_code_path], check=True)
    # check=True added: previously a failed submodule add was silently ignored
    # and the deploy step would fail later with a confusing error.
    subprocess.run(
        ["git", "submodule", "add", "-f",
         "https://github.com/GoogleCloudPlatform/cortex-data-foundation.git",
         src_code_path],
        check=True)
    command = [
        "python3",
        "metadata/metadata-deployer/cortex_src_code/src/common/data_mesh/deploy_data_mesh.py",
        "--config-file", config_file,
        "--tag-template-directories", tag_template_directories,
        "--policy-directories", policy_directories,
        "--lake-directories", lake_directories,
        "--annotation-directories", annotation_directories
    ]
    # Normalize so that both the bool False and the strings "false"/"False"
    # suppress the flag (the docstring declares overwrite as a bool, but the
    # original comparison only handled the exact string "false").
    if str(overwrite).lower() != "false":
        command.append("--overwrite")
    requirements_path = "metadata/metadata-deployer/cortex_src_code/requirements.in"
    required_packages = [
        "exceptiongroup",
        "google-api-core",
        "google-cloud-bigquery",
        "google-cloud-bigquery-datapolicies",
        "google-cloud-datacatalog",
        "google-cloud-dataplex",
    ]
    # Bug fix: the original called os.path.exists() on each line of the
    # requirements file — package names are not filesystem paths, so the check
    # was meaningless — and it leaked the open file handle. Instead, read the
    # file once and append only the packages it does not already list.
    with open(requirements_path) as req_file:
        existing = {line.strip() for line in req_file}
    missing = [pkg for pkg in required_packages if pkg not in existing]
    if missing:
        add_lines_to_file(requirements_path, missing)
    subprocess.run(["pip", "install", "-r", requirements_path], check=True)
    subprocess.run(command, check=True)