def run_sql_script_from_s3()

in configurations/RedshiftConfigTestingLambda.py


import boto3


def run_sql_script_from_s3(script_s3_path, action, cluster_identifier, redshift_iam_role, bucket_name, db,
                           user, run_type='async', result_cache='true', with_event=False, what_if_timestamp=None,
                           comparison_stats_s3_path=None, comparison_results_s3_path=None,
                           raw_comparison_results_s3_path=None, query_label_prefix=None):
    """Download a SQL script from S3, fill in its placeholders, and execute it."""
    if script_s3_path is None or script_s3_path == "N/A":
        return "N/A"
    # Split "s3://bucket/key" into its bucket and key components.
    bucket, key = script_s3_path.replace("s3://", "").split("/", 1)
    obj = boto3.client('s3').get_object(Bucket=bucket, Key=key)
    script = obj['Body'].read().decode('utf-8')
    # Substitute the {placeholder} tokens embedded in the script body.
    script = script.format(redshift_iam_role=redshift_iam_role,
                           bucket_name=bucket_name,
                           cluster_identifier=cluster_identifier,
                           what_if_timestamp=what_if_timestamp,
                           comparison_stats_s3_path=comparison_stats_s3_path,
                           comparison_results_s3_path=comparison_results_s3_path,
                           raw_comparison_results_s3_path=raw_comparison_results_s3_path,
                           query_label_prefix=query_label_prefix)
    # Prepend session settings: tag the run with a query group named after the
    # action, and enable or disable the session result cache.
    query_group_statement = f"set query_group to '{action}';\n"
    result_cache_statement = f"set enable_result_cache_for_session to {result_cache};\n"
    script = query_group_statement + result_cache_statement + script
    # run_sql is defined elsewhere in this module; it submits the script to the
    # cluster and returns the statement id.
    sql_id = run_sql(cluster_identifier, db, user, script, with_event, run_type)
    return sql_id
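
For context, the downloaded script is expected to contain Python str.format placeholders that match the keyword arguments above. Below is a minimal usage sketch; the templated script, bucket, cluster, role, and timestamp values are illustrative assumptions, not names taken from this repository:

# Hypothetical script stored at s3://example-bucket/scripts/setup.sql:
#
#   unload ('select * from sample_table')
#   to 's3://{bucket_name}/unload/{what_if_timestamp}/'
#   iam_role '{redshift_iam_role}';

sql_id = run_sql_script_from_s3(
    script_s3_path='s3://example-bucket/scripts/setup.sql',  # illustrative path
    action='setup',
    cluster_identifier='example-cluster',
    redshift_iam_role='arn:aws:iam::123456789012:role/example-role',
    bucket_name='example-bucket',
    db='dev',
    user='awsuser',
    run_type='async',      # return a statement id without waiting for completion
    result_cache='false',  # disable the result cache, e.g. for benchmarking runs
    what_if_timestamp='2024-01-01T00:00:00',
)

Note that str.format raises a KeyError if the script contains a brace token that is not among the keyword arguments, so literal braces in the stored SQL would need to be escaped as {{ and }}.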