def _get_parser()

in awsbatch-cli/src/awsbatch/awsbsub.py


import argparse
import sys


def _get_parser():
    """
    Parse input parameters and return the ArgumentParser object.

    If the command is executed without the --cluster parameter, it uses the default cluster_name
    specified in the [main] section of the user's awsbatch-cli.cfg configuration file and searches
    for the corresponding [cluster cluster-name] section. If that section doesn't exist, it queries
    CloudFormation for the required information.

    If the --cluster parameter is set, the command searches for the [cluster cluster-name] section
    in the user's awsbatch-cli.cfg configuration file or, if the file doesn't exist, queries
    CloudFormation for the required information.

    :return: the ArgumentParser object
    """
    parser = argparse.ArgumentParser(description="Submits jobs to the cluster's Job Queue.")
    parser.add_argument(
        "-jn",
        "--job-name",
        help="The name of the job. The first character must be alphanumeric, and up to 128 letters "
        "(uppercase and lowercase), numbers, hyphens, and underscores are allowed",
    )
    parser.add_argument("-c", "--cluster", help="Cluster to use")
    parser.add_argument(
        "-cf",
        "--command-file",
        help="Identifies that the command is a file to be transferred to the compute instances",
        action="store_true",
    )
    parser.add_argument(
        "-w",
        "--working-dir",
        help="The folder to use as job working directory. "
        "If not specified the job will be executed in the job-<AWS_BATCH_JOB_ID> subfolder of the user's home",
    )
    parser.add_argument(
        "-pw",
        "--parent-working-dir",
        help="Parent folder for the job working directory. If not specified it is the user's home. "
        "A subfolder named job-<AWS_BATCH_JOB_ID> will be created in it. Alternative to the --working-dir parameter",
    )
    parser.add_argument(
        "-if",
        "--input-file",
        help="File to be transferred to the compute instances, in the job working directory. "
        "It can be expressed multiple times",
        action="append",
    )
    parser.add_argument(
        "-p",
        "--vcpus",
        help="The number of vCPUs to reserve for the container. When used in conjunction with --nodes it identifies "
        "the number of vCPUs per node. Default is 1",
        type=int,
        default=1,
    )
    parser.add_argument(
        "-m",
        "--memory",
        help="The hard limit (in MiB) of memory to present to the job. If your job attempts to exceed the memory "
        "specified here, the job is killed. Default is 128",
        type=int,
        default=128,
    )
    parser.add_argument(
        "-e",
        "--env",
        help="Comma separated list of environment variable names to export to the Job environment. "
        "Use 'all' to export all the environment variables, except the ones listed to the --env-blacklist parameter "
        "and variables starting with PCLUSTER_* and AWS_* prefix.",
    )
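    # e.g. --env "MY_VAR1,MY_VAR2" to export specific variables, or --env all
    # (the variable names above are illustrative, not defaults of this CLI).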
    parser.add_argument(
        "-eb",
        "--env-blacklist",
        help="Comma separated list of environment variable names to NOT export to the Job environment. "
        "Default: HOME, PWD, USER, PATH, LD_LIBRARY_PATH, TERM, TERMCAP.",
    )
    parser.add_argument(
        "-r",
        "--retry-attempts",
        help="The number of times to move a job to the RUNNABLE status. You may specify between 1 and 10 attempts. "
        "If the value of attempts is greater than one, the job is retried if it fails until it has moved to RUNNABLE "
        "that many times. Default value is 1",
        type=int,
        default=1,
    )
    parser.add_argument(
        "-t",
        "--timeout",
        help="The time duration in seconds (measured from the job attempt's startedAt timestamp) after which AWS "
        "Batch terminates your jobs if they have not finished. It must be at least 60 seconds",
        type=int,
    )
    # MNP parameter
    parser.add_argument(
        "-n",
        "--nodes",
        help="The number of nodes to reserve for the job. It enables Multi-Node Parallel submission",
        type=int,
    )
    # array parameters
    parser.add_argument(
        "-a",
        "--array-size",
        help="The size of the array. It can be between 2 and 10,000. If you specify array properties for a job, "
        "it becomes an array job",
        type=int,
    )
    parser.add_argument(
        "-d",
        "--depends-on",
        help="A semicolon separated list of dependencies for the job. A job can depend upon a maximum of 20 jobs. "
        "You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child "
        "array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency "
        "with a job ID for array jobs so that each index child of this job must wait for the corresponding index "
        "child of each dependency to complete before it can begin. Syntax: jobId=<string>,type=<string>;...",
    )
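    # e.g. --depends-on "jobId=<job1-id>;jobId=<job2-id>,type=N_TO_N"
    # (placeholder job IDs, following the jobId=<string>,type=<string>;... syntax above)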
    parser.add_argument("-aws", "--awscli", help=argparse.SUPPRESS, action="store_true")
    parser.add_argument("-ll", "--log-level", help=argparse.SUPPRESS, default="ERROR")
    parser.add_argument(
        "command",
        help="The command to submit (it must be available on the compute instances) "
        "or the file name to be transferred (see --command-file option).",
        default=sys.stdin,
        nargs="?",
    )
    parser.add_argument("arguments", help="Arguments for the command or the command-file (optional).", nargs="*")
    return parser
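

Usage sketch (illustrative): parsing a sample submission command line with the
parser returned above. The job name, resource values, and command below are
hypothetical, chosen only to show how the positional and optional arguments land.

if __name__ == "__main__":
    parser = _get_parser()
    # Equivalent to: awsbsub -jn myjob -p 2 -m 256 echo hello
    args = parser.parse_args(["-jn", "myjob", "-p", "2", "-m", "256", "echo", "hello"])
    print(args.job_name, args.vcpus, args.memory)  # myjob 2 256
    print(args.command, args.arguments)  # echo ['hello']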