datasets/census_bureau_international/pipelines/midyear_population/pipeline.yaml
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
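
# This pipeline declares the BigQuery destination table and a daily Airflow DAG
# with two tasks: a Kubernetes pod that transforms the source CSVs, and a
# GCS-to-BigQuery load of the transformed output.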
resources:
  - type: bigquery_table
    table_id: "midyear_population"
description: "census_bureau_internationalspc"
dag:
  airflow_version: 2
  initialize:
    dag_id: midyear_population
    default_args:
      owner: "Google"
      depends_on_past: False
      start_date: '2021-03-01'
    max_active_runs: 1
    schedule_interval: "@daily"
    catchup: False
    default_view: graph

  tasks:
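    # Step 1: run the dataset's CSV transform image in a Kubernetes pod,
    # parameterized through the env_vars below.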
- operator: "KubernetesPodOperator"
description: "Run CSV transform within kubernetes pod"
args:
task_id: "transform_csv"
name: "midyear_population"
namespace: "default"
image_pull_policy: "Always"
image: "{{ var.json.census_bureau_international.container_registry.run_csv_transform_kub_midyear_population }}"
        env_vars:
          SOURCE_URL: |
            "gs://pdp-feeds-staging/Census/idbzip/IDBext001.csv",
            "gs://pdp-feeds-staging/Census/idbzip/IDBextCTYS.csv"
          SOURCE_FILE: "files/data.csv"
          TARGET_FILE: "files/data_output.csv"
          CHUNKSIZE: "750000"
          TARGET_GCS_BUCKET: "{{ var.value.composer_bucket }}"
          TARGET_GCS_PATH: "data/census_bureau_international/midyear_population/data_output.csv"
          TRANSFORM_LIST: >-
            [ "obtain_population", "obtain_country", "reorder_headers" ]
          REORDER_HEADERS: >-
            [ "country_code", "country_name", "year", "midyear_population" ]
          PIPELINE_ENGLISH_NAME: >-
            "International Database (Country Names - Total Midyear Population) Delivery"
        resources:
          limit_memory: "8G"
          limit_cpu: "3"
- operator: "GoogleCloudStorageToBigQueryOperator"
description: "Task to load CSV data to a BigQuery table"
args:
task_id: "load_to_bq"
bucket: "{{ var.value.composer_bucket }}"
source_objects: ["data/census_bureau_international/midyear_population/data_output.csv"]
source_format: "CSV"
destination_project_dataset_table: "{{ var.json.census_bureau_international.container_registry.midyear_population_destination_table }}"
skip_leading_rows: 1
allow_quoted_newlines: True
write_disposition: "WRITE_TRUNCATE"
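        # Destination table schema; the columns appear in the same order as
        # REORDER_HEADERS above.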
        schema_fields:
          - name: "country_code"
            type: "STRING"
            description: "Federal Information Processing Standard (FIPS) country/area code"
            mode: "REQUIRED"
          - name: "country_name"
            type: "STRING"
            description: "Country or area name"
            mode: "NULLABLE"
          - name: "year"
            type: "INTEGER"
            description: "Year"
            mode: "REQUIRED"
- name: "midyear_population"
type: "INTEGER"
description: "Population as-of mid-year"
mode: "NULLABLE"
  graph_paths:
    - "transform_csv >> load_to_bq"