in aurora-serverless-s3-ingestion/cdk/lambda/DataIngest.py [0:0]
def lambda_handler(event, context):
    """Ingest CSV files uploaded to S3 into Aurora via the RDS Data API.

    Triggered by S3 ObjectCreated events. For each event record: downloads
    the CSV to a temp dir, skips the header row, replaces empty fields
    with 0, and inserts each remaining row into the target database.

    Args:
        event: S3 event notification payload (``event['Records']``).
        context: Lambda context object (unused).
    """
    # Function-scope import so this fix is self-contained; move to the
    # file's import block if preferred.
    from urllib.parse import unquote_plus

    wake_aurora()   # resume the serverless cluster before issuing SQL
    create_table()  # presumably idempotent table setup — TODO confirm
    for record in event['Records']:
        source_bucket = record['s3']['bucket']['name']
        # S3 event keys are URL-encoded (e.g. spaces arrive as '+');
        # decode before calling the S3 API with the key.
        key = unquote_plus(record['s3']['object']['key'])
        with tempfile.TemporaryDirectory() as tmpdir:
            download_path = os.path.join(tmpdir, key)
            # Keys may contain '/' separators; create intermediate
            # directories so download_file can open the target path.
            os.makedirs(os.path.dirname(download_path), exist_ok=True)
            s3.download_file(source_bucket, key, download_path)
            with open(download_path, 'r') as read_obj:
                csv_reader = reader(read_obj)
                # Skip the header row; default avoids StopIteration on
                # an empty file (the row loop is then simply a no-op).
                next(csv_reader, None)
                for row in csv_reader:
                    # Transform null (empty-string) values into 0.
                    values = tuple(0 if field == '' else field
                                   for field in row)
                    # NOTE(review): insert_rows() appears to interpolate
                    # values into the SQL string — prefer the Data API's
                    # `parameters` argument to avoid SQL injection from
                    # untrusted file contents.
                    sql_stm = insert_rows(values)
                    response2 = rdsdata.execute_statement(
                        resourceArn=my_resource_arn,
                        secretArn=my_secret_arn,
                        database=my_database,
                        sql=sql_stm,
                    )