in blueprints/cloud-operations/scheduled-asset-inventory-export-bq/cf/main.py [0:0]
import datetime
import logging

import googleapiclient.errors

from google.api_core.exceptions import GoogleAPIError
from google.cloud import asset_v1


def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None,
          bq_table_overwrite=None, target_node=None, read_time=None,
          verbose=False):
  'Module entry point used by cli and cloud function wrappers.'
  _configure_logging(verbose)
  output_config = asset_v1.OutputConfig()
  client = asset_v1.AssetServiceClient()
  if bq_table_overwrite is False:
    # Keep one dated table per export, pinning read_time so the table name
    # suffix matches the snapshot time.
    read_time = datetime.datetime.now()
    output_config.bigquery_destination.table = '%s_%s' % (
        bq_table, read_time.strftime('%Y%m%d'))
  else:
    # Overwrite mode: write every export to a single '<table>_latest' table.
    output_config.bigquery_destination.table = '%s_latest' % bq_table
  content_type = asset_v1.ContentType.RESOURCE
  output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
      bq_project, bq_dataset)
  # Split results into one table per asset type, and overwrite any existing
  # destination tables.
  output_config.bigquery_destination.separate_tables_per_asset_type = True
  output_config.bigquery_destination.force = True
  try:
    response = client.export_assets(
        request={
            'parent': target_node,
            'read_time': read_time,
            'content_type': content_type,
            'output_config': output_config
        })
  except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
    logging.debug('API Error: %s', e, exc_info=True)
    # Chain the original exception instead of packing it into the args tuple.
    raise RuntimeError(
        'Error fetching Asset Inventory entries (resource manager node: %s)' %
        target_node) from e
  # export_assets returns a long-running operation; hand it back to callers.
  return response
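

# Illustrative sketches below are assumptions, not part of the original
# module. `_main` calls `_configure_logging`, which is defined elsewhere in
# this file; a minimal version might look like the first sketch. The
# docstring also mentions cli and cloud function wrappers: `main_cf_pubsub`
# is a hypothetical Pub/Sub-triggered wrapper showing one plausible way to
# invoke `_main` with a base64-encoded JSON payload. Both names and the
# payload format are assumptions for illustration only.

import base64  # used only by the illustrative wrapper below
import json  # used only by the illustrative wrapper below


def _configure_logging(verbose=True):
  'Minimal sketch: set the log level from the verbose flag (assumed).'
  logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)


def main_cf_pubsub(event, context):
  'Hypothetical Cloud Function entry point triggered by a Pub/Sub message.'
  # Pub/Sub delivers the message payload base64-encoded under the 'data' key;
  # here it is assumed to decode to a JSON object of _main keyword arguments.
  payload = json.loads(base64.b64decode(event['data']).decode('utf-8'))
  _main(**payload)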