in src/findings-writer/main.py [0:0]
def write_to_bq(input_bucket, findings_files, project):
    # Flatten the per-file arrays of findings into a single list
    all_findings = []
    for file in findings_files.values():
        if file:
            print(f"Processing file: {file}")
            # Read the findings file from GCS into memory
            bucket = storage_client.get_bucket(input_bucket)
            blob = bucket.get_blob(file)
            # Parse the JSON and append its findings to all_findings
            blob_findings = blob.download_as_bytes()
            findings_string = blob_findings.decode('utf-8')
            all_findings.extend(json.loads(findings_string))
            print(f"Findings file '{file}' parsed correctly")
    # Configure the destination table
    full_table_name = f"{project}.{BQ_DATASET}.{BQ_TABLE}"
    # Save the metadata about redacted fields to BigQuery
    bq_result = bq_client.insert_rows_json(table=full_table_name,
                                           json_rows=all_findings,
                                           ignore_unknown_values=True)
    # Check whether the write to BigQuery was successful
    if len(bq_result) == 0:
        print(f"Findings inserted into BQ table: {full_table_name}")
    else:
        print(f"BQ insert errors: {bq_result}")