# Excerpt from pipeline/iam_generator_deploy.py — Lambda handler main().


def main(event, context):
    """Lambda entry point for the CodePipeline IAM-generator deploy action.

    Downloads the build artifact referenced by the CodePipeline job,
    copies every ``*.template`` file it contains into the shared
    deployment bucket, deploys one CloudFormation stack per target
    account, waits for all stacks to settle, then reports success or
    failure back to CodePipeline.

    Args:
        event: CodePipeline job event (``CodePipeline.job`` payload).
        context: Lambda context object, used to derive the local region.

    Raises:
        ValueError: If a template filename does not embed an account id
            in the expected ``(123456789012)`` form.
        Exception: Any exception is reported to CodePipeline via
            ``put_job_failure_result`` and then re-raised so the Lambda
            invocation itself is also marked failed.
    """
    # Python-3-safe print (also valid Python 2 syntax).
    print("Raw event: " + json.dumps(event))

    local_region = determine_region(context)

    # CodePipeline agent so we can send an exception.
    cp_c = boto3_agent_from_sts("codepipeline", "client", local_region)

    try:
        # Extract our credentials and locate our artifact from our build.
        credentials = event['CodePipeline.job']['data']['artifactCredentials']
        artifact_s3_r = boto3_agent_from_sts(
            "s3",
            "resource",
            local_region,
            credentials
        )

        input_artifact = \
            event['CodePipeline.job']['data']['inputArtifacts'][0]
        artifact_location = input_artifact['location']['s3Location']

        artifact_s3_r.meta.client.download_file(
            artifact_location['bucketName'],
            artifact_location['objectKey'],
            "/tmp/artifact"
        )

        s3_c = boto3_agent_from_sts(
            "s3",
            "client",
            os.environ["deployment_region"]
        )

        artifacts = {}
        # We need to move our CFN artifacts from our build bucket
        # to our deployment bucket which is accessible by all accounts
        # we will deploy to.
        # ZipFile supports opening a filehandle so we can copy using
        # the upload_fileobj() method.  Context managers ensure both
        # the archive and each member handle are closed (the original
        # leaked them).
        with zipfile.ZipFile('/tmp/artifact') as zf:
            for filename in zf.namelist():
                # Skip anything in our artifact that doesn't end in .template
                if not filename.endswith(".template"):
                    continue
                # Determine the account name and populate our status
                # dictionary.  Raw string so the regex escapes survive
                # Python 3's invalid-escape deprecation.
                m = re.match(r"^.*\((\d+)\).*\.template$", filename)
                if m:
                    account_id = m.group(1)
                    artifacts[account_id] = {
                        "template_url": "https://s3.{}.amazonaws.com/{}/{}/{}".format(
                            os.environ["deployment_region"],
                            os.environ["deployment_bucket"],
                            os.environ["deployment_key_prefix"],
                            filename
                        )
                    }
                else:
                    raise ValueError(
                        "Cannot derive account number from filename {}".format(
                            filename
                        )
                    )
                # Copy our build objects to our deployment bucket.
                with zf.open(filename) as member:
                    s3_c.upload_fileobj(
                        member,
                        os.environ["deployment_bucket"],
                        '{}/{}'.format(
                            os.environ["deployment_key_prefix"],
                            filename
                        )
                    )

        waiters = []
        for account_id in artifacts:
            # build_clients returns (client, resource); only the client
            # is needed here.
            (cfn_c, _cfn_r) = build_clients(
                account_id,
                "cloudformation",
                os.environ['assume_role'],
                region=os.environ["deployment_region"]
            )

            waiter = deploy_stack(
                cfn_c,
                os.environ["stack_name"],
                artifacts[account_id]['template_url'],
                ["CAPABILITY_NAMED_IAM"]
            )

            waiters.append({
                "stack": os.environ['stack_name'],
                "account_id": account_id,
                "region": os.environ['deployment_region'],
                "waiter": waiter
            })

        # Stacks are deployed, lets call our waiters
        wait_for_stacks(waiters)

        cp_c.put_job_success_result(
            jobId=event['CodePipeline.job']['id'],
            executionDetails={
                'summary': "Successful deployment",
                'percentComplete': 100
            }
        )

    except Exception as e:
        # Report the failure back to CodePipeline, then re-raise so the
        # Lambda invocation is also recorded as failed.
        cp_c.put_job_failure_result(
            jobId=event['CodePipeline.job']['id'],
            failureDetails={
                'type': 'JobFailed',
                'message': 'Exception: {}'.format(e)
            }
        )
        raise