def lambda_handler()

in src/app.py


def lambda_handler(event, context):

    log.debug('Running in Lambda Function')
    log.debug(json.dumps(event))
    S3_BUCKET = event['Records'][0]['s3']['bucket']['name']
    S3_KEY = event['Records'][0]['s3']['object']['key']
    S3_REGION = event['Records'][0]['awsRegion']
    OBJ_SIZE = event['Records'][0]['s3']['object']['size']
    # Validate the S3 notification fields before constructing the file objects
    if S3_BUCKET is None or S3_KEY is None or S3_REGION is None or OBJ_SIZE is None:
        log.error(f'Empty S3 input values; S3_BUCKET={S3_BUCKET}, '
                  f'S3_KEY={S3_KEY}, S3_REGION={S3_REGION}, OBJ_SIZE={OBJ_SIZE}')
        raise ValueError('S3 event record is missing bucket, key, region or object size')
    log.info(
        f'S3 input values; S3_BUCKET={S3_BUCKET}, S3_KEY={S3_KEY}, S3_REGION={S3_REGION} FileSize={OBJ_SIZE}')
    dcm = dcmfile(source_s3_bucket=S3_BUCKET, source_s3_bucket_region=S3_REGION,
                  source_s3_key=S3_KEY, source_s3_size=OBJ_SIZE)
    ds = s3file(s3bucket=dcm.source_s3_bucket, s3key=dcm.source_s3_key,
                s3region=dcm.source_s3_bucket_region, size=dcm.source_s3_size)
    ds.eval_ext()
    # DCM files can be processed in Lambda because only their first 10 MB needs to be
    # downloaded; any other file larger than the Lambda limit is forwarded to AWS Batch
    if OBJ_SIZE > (MAX_LAMBDA_SIZE * 1024 * 1024) and ds.file_ext != '.dcm':
        # AWS Batch job names are limited to 128 characters; strip disallowed characters
        job_name = re.sub(r'\W+', '', S3_KEY[:128])
        log.info(
            f'Filesize greater than {MAX_LAMBDA_SIZE}MB, submit to AWS Batch queue: {AWS_BATCH_QUEUE} JobName: {job_name}')
        try:
            batch = boto3.client('batch')
            result = batch.submit_job(
                jobName=job_name,
                jobQueue=AWS_BATCH_QUEUE,
                jobDefinition=AWS_BATCH_DEFINITION,
                containerOverrides={
                    'environment': [
                        {
                            'name': 'S3_BUCKET',
                            'value': S3_BUCKET
                        },
                        {
                            'name': 'S3_KEY',
                            'value': S3_KEY
                        },
                        {
                            'name': 'OBJ_SIZE',
                            'value': str(OBJ_SIZE)
                        },
                        {
                            'name': 'S3_REGION',
                            'value': S3_REGION
                        },
                        {
                            'name': 'GLUE_TABLE_NAME',
                            'value': GLUE_TABLE_NAME
                        },
                        {
                            'name': 'GLUE_DATABASE_NAME',
                            'value': GLUE_DATABASE_NAME
                        },
                        {
                            'name': 'S3_OUTPUT_BUCKET',
                            'value': S3_OUTPUT_BUCKET
                        },
                        {
                            'name': 'S3_OUTPUT_BUCKET_REGION',
                            'value': S3_OUTPUT_BUCKET_REGION
                        },
                        {
                            'name': 'LOGLEVEL',
                            'value': logging.getLevelName(log.level)
                        },
                        {
                            'name': 'PARTITION_COL',
                            'value': PARTITION_COL
                        },
                    ]
                }
            )
            log.info(
                f'Forwarded request to AWS Batch {dcm}, JOB_ARN: {result["jobArn"]}')
            return {
                'code': 200,
                'message': f'Forwarded request to AWS Batch {dcm}, JOB_ARN: {result["jobArn"]}'
            }
        except Exception as e:
            log.error(e)
            raise
    # DCM files and objects small enough for Lambda are processed directly here
    output_location = inspect(dcm, ds)
    return {
        'code': 200,
        'message': f'Completed job INPUT {dcm}, OUTPUT {output_location["paths"]}'
    }
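
For reference, the handler only reads the bucket name, object key, object size, and awsRegion from the S3 notification. Below is a minimal sketch of a local test invocation; it assumes src/app.py is importable as app and that the module-level configuration it relies on (log, AWS_BATCH_QUEUE, GLUE_TABLE_NAME, S3_OUTPUT_BUCKET, and so on) is already set up. The bucket, key, and size values are illustrative placeholders, not real resources.

# Minimal S3 put-notification payload containing only the fields lambda_handler reads.
# The bucket, key, region, and size below are placeholders for illustration.
sample_event = {
    'Records': [
        {
            'awsRegion': 'us-east-1',
            's3': {
                'bucket': {'name': 'example-input-bucket'},
                'object': {'key': 'incoming/study-0001.dcm', 'size': 5 * 1024 * 1024},
            },
        }
    ]
}

if __name__ == '__main__':
    # Assumes src/app.py is on the import path and its environment is configured.
    from app import lambda_handler
    print(lambda_handler(sample_event, None))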