code/cfn_ftp_port.py [258:361]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    print("----------------")

    # Note: the job result is reported once, in s3_next_step, whose
    # thresholds treat risk < 5 as minimal or no risk
    if risk < 5:
        print("Minimal or no risk detected")

    return risk, failedRules
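
# Illustrative only, not part of the original file: a hypothetical rule check
# of the kind evaluate_template could apply. It flags security group ingress
# rules that open FTP (port 21) to the world; the function name and risk
# weight are assumptions, not the original rule set.
def check_ftp_open_to_world(template_dict):
    """Return (risk, failed) for world-open FTP ingress rules."""
    risk = 0
    failed = []
    for name, res in template_dict.get('Resources', {}).items():
        if res.get('Type') != 'AWS::EC2::SecurityGroup':
            continue
        for rule in res.get('Properties', {}).get('SecurityGroupIngress', []):
            try:
                from_port = int(rule.get('FromPort', -1))
                to_port = int(rule.get('ToPort', -1))
            except (TypeError, ValueError):
                continue  # ports set via intrinsic functions are not handled
            if rule.get('CidrIp') == '0.0.0.0/0' and from_port <= 21 <= to_port:
                risk += 10  # assumed weight
                failed.append('FTP port 21 open to 0.0.0.0/0 in ' + name)
    return risk, failed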

def s3_next_step(s3, bucket, risk, failedRules, template, job_id):
    # Store the template in a temporary physical file so it can be zipped.
    # A dedicated SigV4 client is used for the upload rather than the
    # passed-in artifact client.
    s3Client = boto3.client('s3', config=botocore.client.Config(signature_version='s3v4'))
    tmp_file = tempfile.NamedTemporaryFile()
    tmp_zip = tempfile.NamedTemporaryFile()
    # NamedTemporaryFile opens in binary mode, so encode text templates
    tmp_file.write(template.encode('utf-8') if isinstance(template, str) else template)
    tmp_file.flush()
    # Process the file based on the risk value
    if risk < 5:
        with zipfile.ZipFile(tmp_zip.name, 'w') as zf:
            zf.write(tmp_file.name, "valid.template.json")
        s3Client.upload_file(  # TODO: add encryption support via ExtraArgs
            tmp_zip.name,
            bucket,
            'valid.template.zip')
        tmp_file.close()
        put_job_success(job_id, 'Job successful, minimal or no risk detected.')
    elif 5 <= risk < 50:
        with zipfile.ZipFile(tmp_zip.name, 'w') as zf:
            zf.write(tmp_file.name, "flagged.template.json")
        s3Client.upload_file(  # TODO: add encryption support via ExtraArgs
            tmp_zip.name,
            bucket,
            'flagged.template.zip')
        tmp_file.close()
        put_job_success(job_id, 'Job successful, medium risk detected, manual approval needed.')
    else:  # risk >= 50
        tmp_file.close()
        print("High risk file, fail pipeline")
        put_job_failure(job_id, 'High risk detected, failed rules: ' + str(failedRules))
    return 0
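
# Illustrative only, not part of the original file: the encryption-support
# TODO above could be addressed by passing ExtraArgs to upload_file. A
# minimal sketch assuming SSE-KMS; the key alias is a hypothetical
# placeholder.
def upload_encrypted(s3Client, file_name, bucket, key):
    s3Client.upload_file(
        file_name,
        bucket,
        key,
        ExtraArgs={
            'ServerSideEncryption': 'aws:kms',
            'SSEKMSKeyId': 'alias/my-pipeline-key',  # hypothetical key alias
        })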


def lambda_handler(event, context):
    """The Lambda function handler

    Validate input template for security vulnerables.  Route as appropriate based on risk assesment.

    Args:
        event: The event passed by Lambda
        context: The context passed by Lambda

    """
    # Extract the Job ID up front so the except block can still report a
    # failure to CodePipeline
    job_id = event['CodePipeline.job']['id']
    try:
        # Print the entire event for tracking
        print("Received event: " + json.dumps(event, indent=2))

        # Extract the Job Data
        job_data = event['CodePipeline.job']['data']

        # Get the list of artifacts passed to the function
        input_artifacts = job_data['inputArtifacts']

        # Decode the UserParameters JSON configured on the pipeline action
        params = get_user_params(job_data)

        input_artifact = params['input']
        template_file = params['file']
        output_bucket = params['output']

        # Get the artifact details
        input_artifact_data = find_artifact(input_artifacts, input_artifact)

        # Get S3 client to access artifact with
        s3 = setup_s3_client(job_data)

        # Get the JSON template file out of the artifact
        template = get_template(s3, input_artifact_data, template_file)
        #print("Template: " + str(template))


        # Evaluate the template from a risk perspective; failedRules can be
        # used to expand the script to report the failed rules
        risk, failedRules = evaluate_template(template, job_id)

        # Based on risk, store the template in the matching S3 location for
        # the next pipeline stage
        s3_next_step(s3, output_bucket, risk, failedRules, template, job_id)

    except Exception as e:
        # If any unexpected exception is raised, fail the job and log the
        # exception message.
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job_id, 'Function exception: ' + str(e))

    print('Function complete.')
    return "Complete."
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



code/cfn_secrets.py [262:365]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    print("----------------")

    # Note: the job result is reported once, in s3_next_step, whose
    # thresholds treat risk < 5 as minimal or no risk
    if risk < 5:
        print("Minimal or no risk detected")

    return risk, failedRules
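
# Illustrative only, not part of the original file: a hypothetical check of
# the kind evaluate_template could apply in this file, flagging hardcoded
# credentials in the raw template text; the patterns and risk weight are
# assumptions, not the original rule set.
import re

def check_hardcoded_secrets(template_text):
    """Return (risk, failed) for credential-looking literals."""
    patterns = [
        r'AKIA[0-9A-Z]{16}',              # AWS access key ID shape
        r'(?i)"password"\s*:\s*"[^"]+"',  # literal password value
    ]
    risk = 0
    failed = []
    for pattern in patterns:
        if re.search(pattern, template_text):
            risk += 10  # assumed weight
            failed.append('Hardcoded secret matched pattern: ' + pattern)
    return risk, failed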

def s3_next_step(s3, bucket, risk, failedRules, template, job_id):
    # Store the template in a temporary physical file so it can be zipped.
    # A dedicated SigV4 client is used for the upload rather than the
    # passed-in artifact client.
    s3Client = boto3.client('s3', config=botocore.client.Config(signature_version='s3v4'))
    tmp_file = tempfile.NamedTemporaryFile()
    tmp_zip = tempfile.NamedTemporaryFile()
    # NamedTemporaryFile opens in binary mode, so encode text templates
    tmp_file.write(template.encode('utf-8') if isinstance(template, str) else template)
    tmp_file.flush()
    # Process the file based on the risk value
    if risk < 5:
        with zipfile.ZipFile(tmp_zip.name, 'w') as zf:
            zf.write(tmp_file.name, "valid.template.json")
        s3Client.upload_file(  # TODO: add encryption support via ExtraArgs
            tmp_zip.name,
            bucket,
            'valid.template.zip')
        tmp_file.close()
        put_job_success(job_id, 'Job successful, minimal or no risk detected.')
    elif 5 <= risk < 50:
        with zipfile.ZipFile(tmp_zip.name, 'w') as zf:
            zf.write(tmp_file.name, "flagged.template.json")
        s3Client.upload_file(  # TODO: add encryption support via ExtraArgs
            tmp_zip.name,
            bucket,
            'flagged.template.zip')
        tmp_file.close()
        put_job_success(job_id, 'Job successful, medium risk detected, manual approval needed.')
    else:  # risk >= 50
        tmp_file.close()
        print("High risk file, fail pipeline")
        put_job_failure(job_id, 'High risk detected, failed rules: ' + str(failedRules))
    return 0


def lambda_handler(event, context):
    """The Lambda function handler

    Validate input template for security vulnerables.  Route as appropriate based on risk assesment.

    Args:
        event: The event passed by Lambda
        context: The context passed by Lambda

    """
    # Extract the Job ID up front so the except block can still report a
    # failure to CodePipeline
    job_id = event['CodePipeline.job']['id']
    try:
        # Print the entire event for tracking
        print("Received event: " + json.dumps(event, indent=2))

        # Extract the Job Data
        job_data = event['CodePipeline.job']['data']

        # Get the list of artifacts passed to the function
        input_artifacts = job_data['inputArtifacts']

        # Decode the UserParameters JSON configured on the pipeline action
        params = get_user_params(job_data)

        input_artifact = params['input']
        template_file = params['file']
        output_bucket = params['output']

        # Get the artifact details
        input_artifact_data = find_artifact(input_artifacts, input_artifact)

        # Get S3 client to access artifact with
        s3 = setup_s3_client(job_data)

        # Get the JSON template file out of the artifact
        template = get_template(s3, input_artifact_data, template_file)
        #print("Template: " + str(template))


        # Evaluate the template from a risk perspective; failedRules can be
        # used to expand the script to report the failed rules
        risk, failedRules = evaluate_template(template, job_id)

        # Based on risk, store the template in the matching S3 location for
        # the next pipeline stage
        s3_next_step(s3, output_bucket, risk, failedRules, template, job_id)

    except Exception as e:
        # If any unexpected exception is raised, fail the job and log the
        # exception message.
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job_id, 'Function exception: ' + str(e))

    print('Function complete.')
    return "Complete."
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



