def get_concurrency_scripts_from_s3()

in use-cases/ec2-redshift-access/scripts/redshift-concurrency-test.py [0:0]


def get_concurrency_scripts_from_s3(cluster_identifier, number_of_parallel_sessions):
    # SQL_SCRIPT_S3_PATH is a module-level constant of the form s3://<bucket>/<key>.
    bucket, key = SQL_SCRIPT_S3_PATH.replace("s3://", "").split("/", 1)
    obj = boto3.client('s3').get_object(Bucket=bucket, Key=key)
    scripts = obj['Body'].read().decode('utf-8')
    # scripts = scripts.format(redshift_iam_role=redshift_iam_role, bucket_name=bucket_name, cluster_identifier=cluster_identifier)
    # Split on ';' and drop the trailing empty fragment after the last terminator.
    split_scripts = scripts.split(';')[:-1]
    # If there are fewer statements than requested sessions, keep doubling the list
    # until it reaches at least number_of_parallel_sessions entries. This assumes
    # the script file contains at least one statement; otherwise the loop would not terminate.
    while len(split_scripts) < number_of_parallel_sessions:
        split_scripts.extend(split_scripts)
    return split_scripts
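
For context, a minimal sketch of how the returned list might be fanned out across parallel sessions with a thread pool. The run_sql_statement helper, the Redshift Data API call inside it, and the database name and DB user are assumptions for illustration, not taken from the original script.

import concurrent.futures

import boto3


def run_sql_statement(cluster_identifier, sql):
    # Hypothetical helper: submit one statement through the Redshift Data API.
    # 'dev' and 'awsuser' are placeholder values, not from the original script.
    client = boto3.client('redshift-data')
    return client.execute_statement(
        ClusterIdentifier=cluster_identifier,
        Database='dev',
        DbUser='awsuser',
        Sql=sql,
    )


def run_concurrency_test(cluster_identifier, number_of_parallel_sessions):
    scripts = get_concurrency_scripts_from_s3(cluster_identifier, number_of_parallel_sessions)
    # One worker per requested session; each worker submits one statement
    # from the padded list returned above.
    with concurrent.futures.ThreadPoolExecutor(max_workers=number_of_parallel_sessions) as pool:
        futures = [
            pool.submit(run_sql_statement, cluster_identifier, sql)
            for sql in scripts[:number_of_parallel_sessions]
        ]
        return [f.result() for f in futures]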