def handler()

in src/s3_trigger/index.py


import json
import os

import boto3

# Module-level setup assumed by this handler (not shown in the excerpt).
sqs = boto3.resource('sqs')


def handler(event, _):
    for record in event['Records']:
        # Read the bucket and key of the input file, and derive the job_id.
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        key_prefix, job_id = os.path.split(key)
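        # For reference, each record follows the standard S3 event shape
        # (names illustrative):
        #   {'s3': {'bucket': {'name': 'my-bucket'},
        #           'object': {'key': 'jobs/run-001/job.sh'}}}
        # so key_prefix becomes 'jobs/run-001' and job_id becomes 'job.sh'.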

        # Default job configuration. Read from environment variables for now;
        # these could come from Parameter Store instead.
        command_type = os.getenv('COMMANDTYPE', '')  # 'PowerShell' or 'ShellScript'
        retry = os.getenv('RETRY', '1')  # default 1
        # Expected name or file type of the result file, or ''. Defaults:
        # 'upload:' for Linux, 'file(s) copied' for Windows (from the S3 write output).
        job_success_string = os.getenv('JOBSUCCESSSTRING')
        # Working directory on the instance. Windows paths need '\\';
        # 'C:\\ProgramData\\Amazon\\SSM' is a default that works with user rights on Windows.
        ec2_work_directory = os.getenv('EC2WORKINGDIRECTORY')
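        # Illustrative settings using the documented defaults (the working
        # directory path is an example, not a default):
        #   COMMANDTYPE=ShellScript  RETRY=1  JOBSUCCESSSTRING='upload:'
        #   EC2WORKINGDIRECTORY=/tmp/jobs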

        # Prepare the commands SSM Run Command will execute on the instance:
        # sync S3 to the working directory, run the executable, then write the
        # results folder back to the S3 location.
        if 'PowerShell' in command_type:
            ssm_document = 'AWS-RunPowerShellScript'
            command0 = 'cd {}'.format(ec2_work_directory)
            command1 = 'Copy-S3Object -Bucket {} -KeyPrefix {} -LocalFolder .\\'.format(bucket, key_prefix)
            command2 = ec2_work_directory + '\\' + job_id  # full path to the job executable; running it is the job
            command3 = 'Remove-Item -Path {} -Force'.format(command2)
            command4 = 'Write-S3Object -Bucket {} -KeyPrefix {} -Folder .\\'.format(bucket, key_prefix)
            command5 = 'Remove-Item -Path {}{} -Force'.format(ec2_work_directory, '\\*.*')
            commands = [command0, command1, command2, command3, command4, command5]
        elif 'ShellScript' in command_type:
            ssm_document = 'AWS-RunShellScript'
            command0 = 'mkdir {}; cd {}'.format(ec2_work_directory, ec2_work_directory)
            command1 = 'sudo aws s3 sync s3://{}/{}/ ./'.format(bucket, key_prefix)
            command2 = 'sudo chmod +x ' + ec2_work_directory + '/' + job_id
            command3 = 'sudo ' + ec2_work_directory + '/' + job_id
            command4 = 'sudo aws s3 sync ./ s3://{}/{}/'.format(bucket, key_prefix)
            command5 = 'sudo rm {}{}'.format(ec2_work_directory, '/*.*')
            commands = [command0, command1, command2, command3, command4, command5]
        else:
            return 'unknown command type'

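        # Traced example: with bucket 'my-bucket', key 'jobs/run-001/job.sh',
        # and EC2WORKINGDIRECTORY '/tmp/jobs' (all illustrative), the
        # ShellScript branch builds:
        #   mkdir /tmp/jobs; cd /tmp/jobs
        #   sudo aws s3 sync s3://my-bucket/jobs/run-001/ ./
        #   sudo chmod +x /tmp/jobs/job.sh
        #   sudo /tmp/jobs/job.sh
        #   sudo aws s3 sync ./ s3://my-bucket/jobs/run-001/
        #   sudo rm /tmp/jobs/*.*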

        # Create the message for the job and send it to SQS.
        message = json.dumps({
            'job_id': key,
            'retry': retry,
            'job_success_string': job_success_string,
            'ssm_document': ssm_document,
            'commands': commands,
        })
        print(message)
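        # Example of the printed message (illustrative; note job_id carries the
        # full S3 key, and env-derived values such as retry are strings):
        #   {"job_id": "jobs/run-001/job.sh", "retry": "1",
        #    "job_success_string": "upload:", "ssm_document": "AWS-RunShellScript",
        #    "commands": ["mkdir /tmp/jobs; cd /tmp/jobs", "..."]}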

        # Get the queue
        sqs_name = os.getenv('SQSINPUTNAME')
        queue = sqs.get_queue_by_name(QueueName=sqs_name)

        # Send a new job message
        response = queue.send_message(MessageBody=message)
        
    # Note: only the send_message response for the last record is returned.
    return response
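
A minimal local invocation sketch (illustrative, not part of the module), assuming the environment variables above are set, AWS credentials are available, and the queue named by SQSINPUTNAME exists:

if __name__ == '__main__':
    # Fake S3 put event with illustrative bucket and key names.
    fake_event = {'Records': [{'s3': {'bucket': {'name': 'my-bucket'},
                                      'object': {'key': 'jobs/run-001/job.sh'}}}]}
    print(handler(fake_event, None))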