in configurations/RedshiftConfigTestingLambda.py [0:0]
def run_redshift_performance_test(client, cluster_identifier, bucket_name, performance_test_bootstrap_script,
                                  performance_test_python_script,
                                  sql_script_s3_path, number_of_parallel_sessions_list, job_definition, job_queue,
                                  redshift_iam_role, redshift_user_name, db,
                                  disable_result_cache, default_output_limit, max_number_of_queries,
                                  max_parallel_sessions, query_label_prefix):
    """Submit an AWS Batch job that runs a performance test against a Redshift cluster.

    The Batch container downloads a bootstrap script from S3 and runs it; all test
    configuration is passed to the container as environment variables.

    Parameters:
        client: boto3 Redshift client, used to resolve the cluster endpoint.
        cluster_identifier: identifier of the target Redshift cluster.
        bucket_name: S3 bucket holding test assets/results.
        performance_test_bootstrap_script: S3 URI of the bootstrap shell script.
        performance_test_python_script: S3 URI of the python test driver.
        sql_script_s3_path: S3 path of the SQL workload; None or "N/A" skips the test.
        number_of_parallel_sessions_list: parallel-session counts to test (string).
        job_definition / job_queue: AWS Batch job definition and queue to submit to.
        redshift_iam_role: IAM role the test assumes for Redshift access.
        redshift_user_name: database user to connect as.
        db: database name, appended to the endpoint.
        disable_result_cache / default_output_limit / max_number_of_queries /
        max_parallel_sessions / query_label_prefix: test tuning knobs (strings).

    Returns:
        The Batch job id, or "N/A" when no SQL script is configured.
    """
    # Guard clause: performance testing is optional — skip when not configured.
    if sql_script_s3_path is None or sql_script_s3_path == "N/A":
        return "N/A"

    # Resolve the cluster's connection string as host:port/db for the container.
    cluster = client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0]
    endpoint = cluster['Endpoint']
    cluster_endpoint = f"{endpoint['Address']}:{endpoint['Port']}/{db}"

    # Environment consumed by the bootstrap/python scripts inside the container.
    # Insertion order matches the original hand-written list.
    env = {
        "BOOTSTRAP_SCRIPT": performance_test_bootstrap_script,
        "BUCKET_NAME": bucket_name,
        "PYTHON_SCRIPT": performance_test_python_script,
        "REDSHIFT_CLUSTER_ENDPOINT": cluster_endpoint,
        "REDSHIFT_IAM_ROLE": redshift_iam_role,
        "REDSHIFT_USER_NAME": redshift_user_name,
        "SQL_SCRIPT_S3_PATH": sql_script_s3_path,
        "NUMBER_OF_PARALLEL_SESSIONS_LIST": number_of_parallel_sessions_list,
        "DISABLE_RESULT_CACHE": disable_result_cache,
        "DEFAULT_OUTPUT_LIMIT": default_output_limit,
        "MAX_NUMBER_OF_QUERIES": max_number_of_queries,
        "MAX_PARALLEL_SESSIONS": max_parallel_sessions,
        "QUERY_LABEL_PREFIX": query_label_prefix,
    }

    response = boto3.client('batch').submit_job(
        jobName='AmazonRedshiftPerformanceTesting',
        jobQueue=job_queue,
        jobDefinition=job_definition,
        containerOverrides={
            "command": ["sh", "-c",
                        "yum install -y awscli; aws s3 cp $BOOTSTRAP_SCRIPT ./bootstrap.sh; sh ./bootstrap.sh"],
            "environment": [{"name": name, "value": value} for name, value in env.items()],
        })
    return response['jobId']