in fast/project-templates/secops-anonymization-pipeline/source/main.py [0:0]
def anonymize_data(export_date):
    """
    Trigger a DLP de-identification job for each finished SecOps export.

    Checks that every SecOps data export for the given date has reached the
    FINISHED_SUCCESS stage; if so, splits/renames the exported CSVs into log
    files and creates one DLP inspection job per export, writing anonymized
    output to the configured output bucket. If any export is still running,
    logs an error and returns without triggering jobs.

    :param export_date: date for which data should be anonymized
    :return: None
    :raises SystemExit: if creating a DLP job fails.
    """
    client = SecOpsClient()
    chronicle = client.chronicle(customer_id=SECOPS_SOURCE_CUSTOMER_ID,
                                 project_id=SECOPS_SOURCE_PROJECT,
                                 region=SECOPS_REGION)
    export_ids = utils.get_secops_export_folders_for_date(SECOPS_EXPORT_BUCKET,
                                                          export_date=export_date)
    # Verify every export reached FINISHED_SUCCESS before touching any data.
    for export_id in export_ids:
        export = chronicle.get_data_export(data_export_id=export_id)
        LOGGER.info(f"Export response: {export}.")
        if "dataExportStatus" in export and export["dataExportStatus"][
                "stage"] == "FINISHED_SUCCESS":
            export_state = export["dataExportStatus"]["stage"]
            LOGGER.info(f"Export status: {export_state}.")
        else:
            # Early return: at least one export is not done yet.
            LOGGER.error("Export is not finished yet, please try again later.")
            return

    # DLP client is loop-invariant — create it once, not per export.
    dlp_client = dlp_v2.DlpServiceClient(
        client_options={'quota_project_id': GCP_PROJECT_ID})
    with open("dlp_job_template.json.tpl", "r") as template_file:
        template = Template(template_file.read())

    for export_id in export_ids:
        utils.split_and_rename_csv_to_log_files(SECOPS_EXPORT_BUCKET, export_id)
        rendered_str = template.render({
            "export_bucket": SECOPS_EXPORT_BUCKET,
            "output_bucket": SECOPS_OUTPUT_BUCKET,
            "deidentify_template_id": DLP_DEIDENTIFY_TEMPLATE_ID,
            "inspect_template_id": DLP_INSPECT_TEMPLATE_ID,
            "export_id": export_id
        })
        LOGGER.info(f"Filled template: {rendered_str}")
        dlp_job = json.loads(rendered_str)
        LOGGER.info(dlp_job)
        job_request = {
            "parent": f"projects/{GCP_PROJECT_ID}/locations/{DLP_REGION}",
            "inspect_job": dlp_job
        }
        try:
            response = dlp_client.create_dlp_job(request=job_request)
            LOGGER.info(response)
        except Exception as e:
            # Fixed stray apostrophe in the original message ("export':").
            LOGGER.error(f"Error during export: {e}")
            raise SystemExit(f'Error during secops export: {e}')
    LOGGER.info("Triggered all DLP jobs successfully.")