in configurations/RedshiftConfigTestingLambda.py [0:0]
import boto3


def run_replay(client, what_if_timestamp, cluster_identifier, extract_s3_path, simple_replay_log_location,
               simple_replay_overwrite_s3_path, bucket_name, redshift_user_name,
               redshift_iam_role, db, extract_prefix, replay_prefix, script_prefix, snapshot_account_id,
               replay_bootstrap_script, job_definition, job_queue):
    # Skip the replay step entirely when no audit log location was provided.
    if simple_replay_log_location is None or simple_replay_log_location == "N/A":
        return "N/A"

    if simple_replay_overwrite_s3_path is None:
        simple_replay_overwrite_s3_path = "N/A"

    # Resolve the target cluster's endpoint as host:port/database for the replay job.
    desc = client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0]
    cluster_endpoint = (desc['Endpoint']['Address'] + ":"
                        + str(desc['Endpoint']['Port']) + "/" + db)
    # get_workload_location is a helper defined elsewhere in this module.
    workload_location = get_workload_location(extract_s3_path)

    # Submit an AWS Batch job that downloads and runs the replay bootstrap script;
    # all replay parameters are passed to the container as environment variables.
    response = boto3.client('batch').submit_job(
        jobName='AmazonRedshiftReplay',
        jobQueue=job_queue,
        jobDefinition=job_definition,
        containerOverrides={
            "command": ["sh", "-c",
                        "yum install -y awscli; aws s3 cp $BOOTSTRAP_SCRIPT ./bootstrap.sh; sh ./bootstrap.sh"],
            "environment": [
                {"name": "BOOTSTRAP_SCRIPT", "value": replay_bootstrap_script},
                {"name": "WHAT_IF_TIMESTAMP", "value": what_if_timestamp},
                {"name": "CLUSTER_IDENTIFIER", "value": cluster_identifier},
                {"name": "CLUSTER_ENDPOINT", "value": cluster_endpoint},
                {"name": "WORKLOAD_LOCATION", "value": workload_location},
                {"name": "SIMPLE_REPLAY_OVERWRITE_S3_PATH", "value": simple_replay_overwrite_s3_path},
                {"name": "SIMPLE_REPLAY_LOG_LOCATION", "value": simple_replay_log_location},
                {"name": "BUCKET_NAME", "value": bucket_name},
                {"name": "REDSHIFT_USER_NAME", "value": redshift_user_name},
                {"name": "REDSHIFT_IAM_ROLE", "value": redshift_iam_role},
                {"name": "EXTRACT_PREFIX", "value": extract_prefix},
                {"name": "REPLAY_PREFIX", "value": replay_prefix},
                {"name": "SCRIPT_PREFIX", "value": script_prefix},
                {"name": "SNAPSHOT_ACCOUNT_ID", "value": snapshot_account_id}
            ]
        })
    # The Batch job ID is returned so callers can poll the replay's status.
    return response['jobId']
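
For context, a minimal usage sketch of how a caller might invoke run_replay; this is not from the source file, and every literal value below (bucket names, ARNs, prefixes, queue and job-definition names) is a hypothetical placeholder illustrating the expected argument shapes.

# Hypothetical invocation sketch; all values are placeholders.
import boto3

redshift_client = boto3.client('redshift')
job_id = run_replay(
    client=redshift_client,
    what_if_timestamp='2023-01-01T00:00:00Z',
    cluster_identifier='replay-target-cluster',
    extract_s3_path='s3://example-bucket/extract/2023-01-01T00:00:00Z/',
    simple_replay_log_location='s3://example-bucket/audit-logs/',
    simple_replay_overwrite_s3_path=None,   # falls back to "N/A" inside run_replay
    bucket_name='example-bucket',
    redshift_user_name='awsuser',
    redshift_iam_role='arn:aws:iam::123456789012:role/RedshiftReplayRole',
    db='dev',
    extract_prefix='extract',
    replay_prefix='replay',
    script_prefix='scripts',
    snapshot_account_id='123456789012',
    replay_bootstrap_script='s3://example-bucket/scripts/replay_bootstrap.sh',
    job_definition='redshift-replay-job-def',
    job_queue='redshift-replay-queue',
)
# Returns "N/A" when no audit logs were supplied, otherwise the Batch job ID.
print(f"Submitted AWS Batch replay job: {job_id}")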