def parse_arguments(args, **default_args)

in aws/petctl.py
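Builds the petctl command-line interface: a top-level --specs_file option plus subcommands for running, killing, and inspecting torchelastic jobs (run_job, kill_job, list_hosts), uploading scripts to S3 (upload), and one-time configuration (configure, setup). Any arguments trailing the petctl arguments are passed through to the training script via parsed.script_args.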


import argparse

# run_job, kill_job, list_hosts, upload_script, and split_args are defined
# elsewhere in aws/petctl.py.


def parse_arguments(args, **default_args):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--specs_file",
        help="see https://github.com/pytorch/elastic/blob/master/aws/README.md#create-specs-file",  # noqa B950
    )
    parser.set_defaults(**default_args)
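    # default_args (e.g. values loaded from the specs file referenced by
    # --specs_file) pre-populate the namespace; flags given explicitly on
    # the command line still take precedence over these defaults.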

    subparser = parser.add_subparsers(
        title="actions",
        description="run_job | kill_job | list_hosts | upload | configure | setup",
        dest="command",
    )

    # -----------------------------------------
    # Run Job
    # -----------------------------------------
    parser_run_job = subparser.add_parser(
        "run_job", help="runs a torchelastic job on asg"
    )
    parser_run_job.add_argument("--name", required=True, help="name of the job")
    parser_run_job.add_argument(
        "--min_size",
        type=int,
        required=False,
        help="minimum number of worker hosts to continue training",
    )
    parser_run_job.add_argument(
        "--max_size",
        type=int,
        required=False,
        help="maximum number of worker hosts to allow scaling out",
    )
    parser_run_job.add_argument(
        "--size",
        type=int,
        required=True,
        help="number  of worker hosts to start the job with",
    )
    parser_run_job.add_argument(
        "--instance_type", required=False, help="Instance type to run the job on"
    )
    parser_run_job.add_argument(
        dest="script_path",
        help="script or script dir path (e.g. ~/script.py, s3://..., docker://)",
    )
    parser_run_job.set_defaults(func=run_job)

    # -----------------------------------------
    # Kill Job
    # -----------------------------------------
    parser_kill_job = subparser.add_parser(
        "kill_job", help="kills a torchelastic job on asg"
    )

    parser_kill_job.add_argument(dest="job_name", help="name of the job to kill")

    parser_kill_job.set_defaults(func=kill_job)

    # -----------------------------------------
    # List hosts in job
    # -----------------------------------------
    parser_list_hosts = subparser.add_parser(
        "list_hosts", help="lists InService hosts in the job"
    )
    parser_list_hosts.add_argument(
        dest="job_name", help="name of the job to list the hosts for"
    )
    parser_list_hosts.set_defaults(func=list_hosts)

    # -----------------------------------------
    # Upload script
    # -----------------------------------------
    parser_upload = subparser.add_parser("upload", help="uploads the file/dir to s3")
    parser_upload.add_argument(
        dest="script_path",
        help="script or script dir path (e.g. ~/script.py, s3://..., docker://)",
    )
    parser_upload.add_argument(
        dest="s3_dest",
        help="s3 destination (default: s3://{s3_bucket}/{s3_prefix}/{USER}/scripts)",
    )
    parser_upload.set_defaults(func=upload_script)

    # -----------------------------------------
    # Configure
    # -----------------------------------------
    subparser.add_parser("configure", help="configures petctl")

    # -----------------------------------------
    # Setup
    # -----------------------------------------
    parser_setup = subparser.add_parser(
        "setup", help="creates necessary aws resources and outputs a specs file"
    )
    parser_setup.add_argument(
        "--region", default="us-west-2", help="aws region to setup on"
    )
    parser_setup.add_argument(
        "--s3_bucket",
        help="s3 bucket to use for running petctl (if empty, one is created)",
    )
    parser_setup.add_argument(
        "--efs_id", help="efs id to use, if empty, one is created"
    )

    petctl_args, script_args = split_args(args[1:])
    parsed = parser.parse_args(petctl_args)
    parsed.script_args = script_args
    return parsed
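
split_args is defined elsewhere in aws/petctl.py and is not shown here. A minimal sketch of the behavior the code above relies on, assuming the conventional "--" delimiter separates petctl arguments from script arguments:

def split_args(args, delimiter="--"):
    # Split argv into (petctl_args, script_args) at the first delimiter;
    # with no delimiter present, every argument belongs to petctl.
    if delimiter in args:
        idx = args.index(delimiter)
        return args[:idx], args[idx + 1 :]
    return args, []

Under that assumption, a typical invocation parses as follows (the job name and script path are illustrative):

import sys

# e.g. invoked as: petctl run_job --name my_job --size 4 ~/script.py -- --epochs 10
parsed = parse_arguments(sys.argv)
# parsed.func        -> run_job
# parsed.name        -> "my_job"
# parsed.size        -> 4
# parsed.script_path -> "~/script.py"
# parsed.script_args -> ["--epochs", "10"]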