# src/trigger_step_function/index.py — Lambda entry point: handler()

def handler(event, context):
    """Start the scheduler Step Functions workflow for an EC2 instance.

    Triggered by an EC2 state-change event (EventBridge). Reads the
    instance id from the event, resolves the scheduler queue name and
    autoscaling group from the instance's tags, and starts the Step
    Functions execution configured via environment variables.

    Args:
        event: EventBridge event carrying the instance id at
            ``event['detail']['instance-id']``.
        context: Lambda context object (unused).

    Returns:
        ``"Not tagged for scheduler"`` when the instance carries no
        scheduler tag, otherwise the ARN of the started execution.
    """
    print(event)
    instance_id = event['detail']['instance-id']
    print(instance_id)

    # Resolve the scheduler queue and autoscaling group from the instance
    # tags. ec2_r is a module-level boto3 EC2 resource (defined elsewhere
    # in this file).
    ec2instance = ec2_r.Instance(instance_id)
    queue_name = None
    autoscaling_group = 'nothing'
    tag_key = os.getenv('TAGKEY')
    # Instance.tags is None for an untagged instance — iterating it
    # directly would raise TypeError, so fall back to an empty list.
    for tag in ec2instance.tags or []:
        print(tag)
        # NOTE(review): substring match preserved from the original
        # behavior; an exact key comparison may be intended — TODO confirm.
        # The tag_key guard also avoids a TypeError when TAGKEY is unset.
        if tag_key and tag_key in tag["Key"]:
            queue_name = tag["Value"]
        if 'aws:autoscaling:groupName' in tag["Key"]:
            autoscaling_group = tag["Value"]

    # None sentinel instead of matching the literal string 'nothing',
    # which would also (wrongly) skip any real queue name that happens
    # to contain that substring.
    if queue_name is None:
        print('did not start sf')
        return "Not tagged for scheduler"

    print(queue_name)

    # Workflow configuration comes from environment variables.
    timeout_job = os.getenv('TIMEOUTJOB')
    region = os.getenv('REGION')
    state_machine_name = os.getenv('STATEMACHINENAME')
    state_machine_arn = os.getenv('STATEMACHINEARN')
    table = os.getenv('TABLENAME')

    # JSON input payload for the Step Functions workflow. The three SQS
    # queue names follow the <queue> / <queue>-finished / <queue>-failed
    # naming convention.
    sf_input = {
        "input": {
            "sqs_name": queue_name,
            "sqs_name_out": queue_name + '-finished',
            "sqs_name_failed": queue_name + '-failed',
            "region": region,
            "state_machine_arn": state_machine_arn,
            "state_machine_name": state_machine_name,
            "Timeout_Job": timeout_job,
            "instance_id": instance_id,
            "autoscaling_group": autoscaling_group,
            "table": table,
        }
    }

    # Start the wrapped Step Functions workflow for this instance.
    # sf_client is a module-level boto3 Step Functions client.
    response = sf_client.start_execution(
        stateMachineArn=state_machine_arn,
        input=json.dumps(sf_input))
    print(response)
    # Return the execution ARN so the invocation result is meaningful
    # (the original implicitly returned None on success).
    return response.get('executionArn')