in code/main.py [0:0]
def load_csv_to_bq(bucket, object):
    """Load a CSV object from Cloud Storage into a BigQuery table.

    The object path encodes the destination table: the first "/"-separated
    segment is the dataset name and the second is the table name, e.g.
    ``my_dataset/my_table/file.csv`` loads into
    ``<DW_PROJECT_ID>.my_dataset.my_table``. The table schema is
    auto-detected from the CSV contents and the first row is skipped as a
    header.

    Args:
        bucket: Name of the GCS bucket containing the CSV object.
        object: Object path within the bucket; must contain at least two
            "/"-separated segments (``<dataset>/<table>/...``).
            NOTE(review): the name shadows the ``object`` builtin; kept
            unchanged because renaming would break keyword callers.

    Raises:
        KeyError: If the ``DW_PROJECT_ID`` environment variable is unset.
        ValueError: If the object path has fewer than two segments.
    """
    # Construct a BigQuery client object.
    client = bigquery.Client()
    project_id = os.environ['DW_PROJECT_ID']

    # Derive the destination dataset and table from the object path.
    params = object.split("/")
    if len(params) < 2:
        # Fail early with a clear message instead of an IndexError below.
        raise ValueError(
            f"Object path {object!r} must look like '<dataset>/<table>/...'"
        )
    table_id = f"{project_id}.{params[0]}.{params[1]}"
    print(f"Table ID: {table_id}")

    job_config = bigquery.LoadJobConfig(
        autodetect=True,       # infer the schema from the CSV contents
        skip_leading_rows=1,   # first row is the header
        # The source format defaults to CSV, so the line below is optional.
        source_format=bigquery.SourceFormat.CSV,
    )

    uri = f"gs://{bucket}/{object}"
    load_job = client.load_table_from_uri(
        uri, table_id, job_config=job_config
    )  # Make an API request.
    load_job.result()  # Waits for the job to complete.

    destination_table = client.get_table(table_id)  # Make an API request.
    print(f"Table has now {destination_table.num_rows} rows.")