def configure_aws_batch()

in metaflow/cmd/configure_cmd.py [0:0]
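
The snippet below depends on click and on two color helpers, cyan and yellow, defined elsewhere in configure_cmd.py. A minimal sketch of that context, assuming the helpers are thin wrappers around click.style (the exact definitions in the source file may differ):

import click

def cyan(string):
    # Assumed helper: render prompt labels in cyan.
    return click.style(string, fg="cyan")

def yellow(string):
    # Assumed helper: render "(optional)" markers in yellow.
    return click.style(string, fg="yellow")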


def configure_aws_batch(existing_env):
    empty_profile = not existing_env
    env = {}

    # Set AWS Batch Job Queue.
    env["METAFLOW_BATCH_JOB_QUEUE"] = click.prompt(
        cyan("[METAFLOW_BATCH_JOB_QUEUE]") + " AWS Batch Job Queue.",
        default=existing_env.get("METAFLOW_BATCH_JOB_QUEUE"),
        show_default=True,
    )
    # Set IAM role for AWS Batch jobs to assume.
    env["METAFLOW_ECS_S3_ACCESS_IAM_ROLE"] = click.prompt(
        cyan("[METAFLOW_ECS_S3_ACCESS_IAM_ROLE]")
        + " IAM role for AWS Batch jobs to access AWS "
        + "resources (Amazon S3 etc.).",
        default=existing_env.get("METAFLOW_ECS_S3_ACCESS_IAM_ROLE"),
        show_default=True,
    )
    # Set default Docker repository for AWS Batch jobs.
    env["METAFLOW_BATCH_CONTAINER_REGISTRY"] = click.prompt(
        cyan("[METAFLOW_BATCH_CONTAINER_REGISTRY]")
        + yellow(" (optional)")
        + " Default Docker image repository for AWS "
        + "Batch jobs. If nothing is specified, "
        + "dockerhub (hub.docker.com/) is "
        + "used as default.",
        default=existing_env.get("METAFLOW_BATCH_CONTAINER_REGISTRY", ""),
        show_default=True,
    )
    # Set default Docker image for AWS Batch jobs.
    env["METAFLOW_BATCH_CONTAINER_IMAGE"] = click.prompt(
        cyan("[METAFLOW_BATCH_CONTAINER_IMAGE]")
        + yellow(" (optional)")
        + " Default Docker image for AWS Batch jobs. "
        + "If nothing is specified, an appropriate "
        + "python image is used as default.",
        default=existing_env.get("METAFLOW_BATCH_CONTAINER_IMAGE", ""),
        show_default=True,
    )

    # Configure AWS Step Functions for scheduling.
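    # The prompt defaults to "yes" for a fresh profile, or when Step Functions
    # was configured previously (detected via the METAFLOW_SFN_IAM_ROLE key).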
    if click.confirm(
        "\nMetaflow can "
        + yellow("schedule your flows on AWS Step " "Functions")
        + " and trigger them at a specific cadence using "
        "Amazon EventBridge.\nTo support flows involving "
        "foreach steps, you would need access to AWS "
        "DynamoDB.\nWould you like to configure AWS Step "
        "Functions for scheduling?",
        default=empty_profile or "METAFLOW_SFN_IAM_ROLE" in existing_env,
        abort=False,
    ):
        # Configure IAM role for AWS Step Functions.
        env["METAFLOW_SFN_IAM_ROLE"] = click.prompt(
            cyan("[METAFLOW_SFN_IAM_ROLE]")
            + " IAM role for AWS Step Functions to "
            + "access AWS resources (AWS Batch, "
            + "AWS DynamoDB).",
            default=existing_env.get("METAFLOW_SFN_IAM_ROLE"),
            show_default=True,
        )
        # Configure IAM role for Amazon EventBridge.
        env["METAFLOW_EVENTS_SFN_ACCESS_IAM_ROLE"] = click.prompt(
            cyan("[METAFLOW_EVENTS_SFN_ACCESS_IAM_ROLE]")
            + " IAM role for Amazon EventBridge to "
            + "access AWS Step Functions.",
            default=existing_env.get("METAFLOW_EVENTS_SFN_ACCESS_IAM_ROLE"),
            show_default=True,
        )
        # Configure AWS DynamoDB Table for AWS Step Functions.
        env["METAFLOW_SFN_DYNAMO_DB_TABLE"] = click.prompt(
            cyan("[METAFLOW_SFN_DYNAMO_DB_TABLE]")
            + " AWS DynamoDB table name for tracking "
            + "AWS Step Functions execution metadata.",
            default=existing_env.get("METAFLOW_SFN_DYNAMO_DB_TABLE"),
            show_default=True,
        )
    return env
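
A usage sketch for context: the function returns only the keys it collected, so a caller merges them into the existing profile and writes the result out. The config path and the inline persistence below are illustrative assumptions, not the actual Metaflow plumbing:

import json
import os

CONFIG_PATH = os.path.expanduser("~/.metaflowconfig/config.json")  # assumed path

# Load the stored profile if one exists, otherwise start from an empty dict.
existing_env = {}
if os.path.exists(CONFIG_PATH):
    with open(CONFIG_PATH) as f:
        existing_env = json.load(f)

# Prompt for AWS Batch (and optionally Step Functions) settings, then merge.
merged = dict(existing_env)
merged.update(configure_aws_batch(existing_env))

# Hypothetical persistence step; the real CLI handles this elsewhere.
with open(CONFIG_PATH, "w") as f:
    json.dump(merged, f, indent=4)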