in courses/understanding_spanner/spanner-challenge-lab/import-customers-to-spanner.py [0:0]
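# Imports and helper definitions assumed by the pipeline below. This is a
# minimal sketch: the CustomerRow field types and the reverse_bits
# implementation are reconstructed from how the pipeline uses them, not
# necessarily the lab's exact code.
import argparse
import os
import typing

import apache_beam as beam
from apache_beam import coders
from apache_beam.dataframe import convert
from apache_beam.dataframe.io import read_csv
from apache_beam.io.gcp.spanner import SpannerInsert
from apache_beam.options.pipeline_options import PipelineOptions, SetupOptions


class CustomerRow(typing.NamedTuple):
    """One row of the Spanner customers table (field types assumed)."""
    cust_id: int
    cust_name: str
    cust_address: str
    cust_state: str
    cust_zip: str
    cust_email: str
    cust_phone: str


# SpannerInsert is a cross-language transform; registering a RowCoder for the
# NamedTuple lets Beam hand it the rows as schema'd rows.
coders.registry.register_coder(CustomerRow, coders.RowCoder)


def reverse_bits(n, width=64):
    """Reverse the bit order of an integer (assumed implementation).

    Bit-reversing a sequential cust_id spreads primary keys across the
    keyspace so inserts do not hotspot a single Spanner split.
    """
    result = 0
    for _ in range(width):
        result = (result << 1) | (n & 1)
        n >>= 1
    return result
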
def main(argv=None, save_main_session=True):
    """Main entry point: read a customers CSV file and write it to Spanner."""
    projectid = os.environ.get('GOOGLE_CLOUD_PROJECT')
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--input',
        dest='input',
        default='customers.csv',
        help='Input filename.')
    parser.add_argument(
        '--instance',
        dest='instance',
        default='challenge-lab-instance',
        help='Spanner instance ID.')
    parser.add_argument(
        '--database',
        dest='database',
        default='orders-db',
        help='Spanner database.')
    parser.add_argument(
        '--table',
        dest='table',
        default='customers',
        help='Spanner table.')
    known_args, pipeline_args = parser.parse_known_args(argv)
    pipeline_options = PipelineOptions(pipeline_args)
    pipeline_options.view_as(SetupOptions).save_main_session = save_main_session

    with beam.Pipeline(options=pipeline_options) as p:
        # Read the CSV into a deferred dataframe, convert it to a PCollection
        # of CustomerRow named tuples, and bit-reverse cust_id so sequential
        # IDs do not hotspot a single Spanner split.
        customers = p | 'Read CSV to dataframe' >> read_csv(known_args.input)
        customers = (
            convert.to_pcollection(customers)
            | 'Convert to CustomerRow class' >> beam.Map(
                lambda x: CustomerRow(**(x._asdict())))
            | 'Reverse bits in cust_id' >> beam.Map(
                lambda x: CustomerRow(
                    reverse_bits(x.cust_id), x.cust_name, x.cust_address,
                    x.cust_state, x.cust_zip, x.cust_email, x.cust_phone))
        )
        customers | 'Write to Spanner' >> SpannerInsert(
            project_id=projectid,
            instance_id=known_args.instance,
            database_id=known_args.database,
            table=known_args.table)