in src/load_data.py [0:0]
import json
import logging
from datetime import datetime

# Constant, get_account_data, generate_account_updates, batch_write,
# notify_msg, and log_error are assumed to be defined or imported
# elsewhere in this module (the excerpt starts at the handler).
logger = logging.getLogger(__name__)


def lambda_handler(event, context):
    logger.debug(f'Lambda event: {event}')
    company_name = event['CompanyName']
    s3_url = f"s3://{Constant.SHARED_RESOURCE_BUCKET}/{event['FilePath']}"
    try:
        # Read the uploaded file from S3 and build the per-account updates.
        account_data = get_account_data(s3_url)
        account_updates = generate_account_updates(company_name, account_data)
        if account_updates:
            account_ids = [account['AccountId'] for account in account_updates]
            logger.info(f"Loading account data for company {company_name} for the following AccountIds: {account_ids}")
            batch_write(Constant.DB_TABLE, account_updates, is_account_data=True)
        else:
            logger.warning(f"No new accounts for company {company_name} included in {s3_url}")
        # Notify via Slack that the migration engine has started for this company.
        notify_data = {
            'SlackHandle': None,
            'SlackMessage': {
                'attachments': [
                    {
                        'color': '#0ec1eb',
                        'author_name': Constant.AUTHOR_NAME,
                        'author_icon': Constant.AUTHOR_ICON,
                        'title': 'Migration Engine Started',
                        'text': f"Migration Engine Started for company {company_name}",
                        'footer': Constant.NOTIFICATION_NOTES,
                        'ts': datetime.now().timestamp()
                    }
                ]
            }
        }
        notify_msg(Constant.NOTIFICATION_TOPIC, Constant.NOTIFICATION_TITLE, json.dumps(notify_data))
    except Exception as ex:
        log_error(logger=logger, account_id=None, company_name=company_name,
                  error_type=Constant.ErrorType.LDE, notify=True, error=ex)
        # Bare raise preserves the original traceback for upstream handlers.
        raise
    return {"Status": Constant.StateMachineStates.COMPLETED, "CompanyName": company_name}