functions/data-processing-engines/dataflow-flextemplate-job-executor/main.py [26:62]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
storage_client = storage.Client()
function_name = os.environ.get('K_SERVICE')


# df_client = dataflow.FlexTemplatesServiceClient()


@functions_framework.http
def main(request):
    """
    Cloud Function entry point for handling Dataflow job requests.

    This function processes an incoming HTTP request, extracting details about a Dataflow Flex Template job.
    It either launches a new Dataflow job or retrieves the status of an existing job based on the request.

    Args:
        request: The incoming HTTP request object.  Expected to contain a JSON payload with the following keys:
            - workflow_properties: A dictionary containing Dataflow job configuration:
                - dataflow_location: The GCP region for the Dataflow job.
                - dataflow_project_id: The GCP project ID for the Dataflow job.
                - dataflow_template_gcs_path: The GCS path to the Dataflow Flex Template.
                - dataflow_job_name: The name to assign to the Dataflow job.
                - dataflow_job_params: (Optional) A dictionary of parameters for the Dataflow job.
            - workflow_name: The name of the workflow triggering the Dataflow job.
            - job_name:  A unique identifier for the job within the workflow.
            - job_id: (Optional) The ID of an existing Dataflow job (if checking status).

    Returns:
        str:
            - If launching a new job: The Dataflow job ID (prefixed with "aef_").
            - If getting job status: The current state of the Dataflow job (e.g., "JOB_STATE_RUNNING").
            - If an error occurs: A JSON object with error details.
    """
    request_json = request.get_json(silent=True)
    print("event:" + str(request_json))

    try:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
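
The body elided above is what actually launches the Flex Template job or polls an existing one. As a rough sketch only (not the excerpted code), the two paths described in the docstring could be served with the google.cloud.dataflow_v1beta3 client roughly as follows; the helper names, argument wiring, and the "aef_" prefix handling are illustrative assumptions.

from google.cloud import dataflow_v1beta3


def launch_flex_template(project_id, location, template_gcs_path, job_name, job_params=None):
    """Launch a Flex Template job; return its ID with the aef_ prefix noted in the docstring."""
    client = dataflow_v1beta3.FlexTemplatesServiceClient()
    response = client.launch_flex_template(
        request=dataflow_v1beta3.LaunchFlexTemplateRequest(
            project_id=project_id,
            location=location,
            launch_parameter=dataflow_v1beta3.LaunchFlexTemplateParameter(
                job_name=job_name,
                container_spec_gcs_path=template_gcs_path,
                parameters=job_params or {},
            ),
        )
    )
    return "aef_" + response.job.id


def get_job_state(project_id, location, job_id):
    """Return the current state of an existing job, e.g. JOB_STATE_RUNNING."""
    client = dataflow_v1beta3.JobsV1Beta3Client()
    job = client.get_job(
        request=dataflow_v1beta3.GetJobRequest(
            project_id=project_id,
            location=location,
            job_id=job_id,
        )
    )
    return job.current_state.name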



functions/data-processing-engines/dataform-tag-executor/main.py [29:48]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
storage_client = storage.Client()
function_name = os.environ.get('K_SERVICE')

@functions_framework.http
def main(request):
    """
    Cloud Function entry point triggered by an HTTP request. Extracts parameters from the request, reads a
    file from a Dataform repository, executes its contents as a BigQuery query, and reports the result
    status or the job ID.

    Args:
        request: The incoming HTTP request object.

    Returns:
        str: The status of the query execution or the job ID (if asynchronous).
    """

    request_json = request.get_json(silent=True)
    print("event:" + str(request_json))

    try:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
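
The elided body ends by running the retrieved SQL on BigQuery. A minimal sketch of that last step, assuming a plain google.cloud.bigquery client; the run_query helper and its wait flag are illustrative and not part of the function above.

from google.cloud import bigquery


def run_query(sql: str, project_id: str, wait: bool = False) -> str:
    """Submit `sql` to BigQuery; return the job state (sync) or the job ID (async)."""
    bq_client = bigquery.Client(project=project_id)
    query_job = bq_client.query(sql)   # starts the job immediately
    if wait:
        query_job.result()             # block until the query finishes
        return query_job.state         # e.g. "DONE"
    return query_job.job_id            # caller can poll this later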



