in cloud-composer/dags/sample-dataplex-with-hms-deploy.py [0:0]
def delete_environment(deploy_or_destroy):
    """Decide whether a previously deployed environment should be auto-deleted.

    Reads the deployment-metadata JSON file written at deploy time and
    compares its recorded deployment timestamp against the module-level
    ``auto_delete_hours`` threshold.

    Args:
        deploy_or_destroy: Only the exact value ``"destroy"`` triggers the
            age check; any other value skips it entirely.

    Returns:
        The string ``"true"`` when the environment is older than the
        auto-delete threshold, otherwise ``"false"``. String (not bool)
        return values are kept for backward compatibility with callers.
    """
    print("BEGIN: delete_environment")
    # Fix: the original used a local named `delete_environment`, shadowing
    # the enclosing function's own name.
    should_delete = False
    if deploy_or_destroy == "destroy":
        # Metadata file written by the deploy run; `dag_prefix_name` is a
        # module-level global — assumed set at import time, TODO confirm.
        file_path = '/home/airflow/gcs/data/' + dag_prefix_name + '.json'
        if os.path.exists(file_path):
            with open(file_path) as f:
                data = json.load(f)
            print("deployment_datetime: ", data['deployment_datetime'])
            deployment_datetime = datetime.strptime(
                data['deployment_datetime'], "%m/%d/%Y %H:%M:%S")
            # abs() guards against clock skew making the delta negative.
            elapsed_seconds = abs(
                (deployment_datetime - datetime.now()).total_seconds())
            print("difference.total_seconds(): ", elapsed_seconds)
            # A threshold of 0 means auto-delete is disabled.
            if auto_delete_hours == 0:
                print("No auto delete set auto_delete_hours:", auto_delete_hours)
            elif elapsed_seconds > (auto_delete_hours * 60 * 60):
                print("Deleting Environment >", auto_delete_hours, " hours")
                should_delete = True
        else:
            # Fix: original message read "Json files does not exist".
            print("JSON file does not exist (no environment deployed)")
    else:
        print("delete_environment is skipped since this DAG is not a destroy DAG.")
    return "true" if should_delete else "false"