in blueprints/apigee/bigquery-analytics/functions/gcs2bq/index.js [39:63]
/**
 * Loads a CSV file from Cloud Storage into a specific time partition of a
 * BigQuery table, using the module-level `schema` and `LOCATION` for the
 * load-job configuration.
 *
 * @param {string} datasetId - Target BigQuery dataset ID.
 * @param {string} tableId - Target BigQuery table ID.
 * @param {string} timePartition - Partition decorator value (appended as `table$partition`).
 * @param {string} bucket - Source GCS bucket name.
 * @param {string} filename - Source object path within the bucket.
 * @throws {Error} If the completed load job reports row-level errors.
 */
async function loadCSVFromGCS(datasetId, tableId, timePartition, bucket, filename) {
  // Load-job configuration: CSV with one header row to skip, tolerating up
  // to 1000 malformed rows before the job itself fails.
  const metadata = {
    sourceFormat: 'CSV',
    skipLeadingRows: 1,
    maxBadRecords: 1000,
    schema: {
      fields: schema
    },
    location: LOCATION
  };
  // Fixed: was `$(unknown)` — `$(...)` is not template-literal syntax, so the
  // log printed that literal text instead of the object being loaded.
  logger.info(`Trying to load ${bucket}/${filename} in ${timePartition} time partition of table ${tableId}...`);
  const bigquery = new BigQuery();
  const storage = new Storage();
  // `table$partition` is BigQuery's partition-decorator syntax targeting a
  // single time partition (`\$` is a template-literal escape for `$`).
  const [job] = await bigquery
    .dataset(datasetId)
    .table(`${tableId}\$${timePartition}`)
    .load(storage.bucket(bucket).file(filename), metadata);
  logger.info(`Job ${job.id} completed.`);
  // load() resolves when the job finishes, but row-level failures are
  // surfaced on job.status.errors rather than thrown — check explicitly.
  const errors = job.status.errors;
  if (errors && errors.length > 0) {
    logger.info('Errors occurred:' + JSON.stringify(errors));
    throw new Error('File could not be loaded in time partition');
  }
}