# run_sql_queries_from_gcs — from cicd-deployers/bigquery_ddl_runner.py

def run_sql_queries_from_gcs(project_id, location, bucket, ddl_project_id, ddl_dataset_id, ddl_data_bucket_name,
                             ddl_connection_name):
    """Find .sql files at the top level of a GCS bucket and run each in BigQuery.

    Args:
        project_id (str): Google Cloud project ID used for the BigQuery and
            Storage clients.
        location (str): BigQuery location (e.g. "US") in which to run each job.
        bucket (str): Name of the GCS bucket that holds the .sql files.
        ddl_project_id (str): Project ID substituted into each query.
        ddl_dataset_id (str): Dataset ID substituted into each query.
        ddl_data_bucket_name (str): Data bucket name substituted into each query.
        ddl_connection_name (str): Connection name substituted into each query.

    Raises:
        google.cloud.exceptions.GoogleCloudError: if a query job fails;
            ``query_job.result()`` re-raises job errors.
    """
    bigquery_client = bigquery.Client(project=project_id)
    storage_client = storage.Client(project=project_id)

    # Use a distinct local name instead of rebinding the `bucket` str argument.
    # delimiter="/" restricts the listing to the bucket's top level.
    gcs_bucket = storage_client.get_bucket(bucket)
    blobs = gcs_bucket.list_blobs(prefix="", delimiter="/")

    for blob in blobs:
        if not blob.name.endswith(".sql"):
            continue

        # download_as_text() supersedes the deprecated download_as_string().
        file_content = blob.download_as_text(encoding="utf-8")

        # Substitute deployment-specific placeholders before execution.
        updated_query = replace_variables_in_query(file_content, ddl_project_id, ddl_dataset_id,
                                                   ddl_data_bucket_name, ddl_connection_name)

        # Bug fix: `location` was previously accepted but never used, so jobs
        # ran in the client's default location. Pass it explicitly.
        query_job = bigquery_client.query(
            updated_query,
            job_config=bigquery.QueryJobConfig(),
            location=location,
        )

        # Block until the job completes; print any returned rows.
        for row in query_job.result():
            print(row)