def _populate_cl_params()

in egg/core/util.py


import argparse


def _populate_cl_params(arg_parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    arg_parser.add_argument(
        "--random_seed", type=int, default=None, help="Set random seed"
    )
    # trainer params
    arg_parser.add_argument(
        "--checkpoint_dir",
        type=str,
        default=None,
        help="Where the checkpoints are stored",
    )
    arg_parser.add_argument(
        "--preemptable",
        default=False,
        action="store_true",
        help="If the flag is set, Trainer would always try to initialise itself from a checkpoint",
    )

    arg_parser.add_argument(
        "--checkpoint_freq",
        type=int,
        default=0,
        help="How often the checkpoints are saved",
    )
    arg_parser.add_argument(
        "--validation_freq",
        type=int,
        default=1,
        help="The validation would be run every `validation_freq` epochs",
    )
    arg_parser.add_argument(
        "--n_epochs",
        type=int,
        default=10,
        help="Number of epochs to train (default: 10)",
    )
    arg_parser.add_argument(
        "--load_from_checkpoint",
        type=str,
        default=None,
        help="If the parameter is set, model, trainer, and optimizer states are loaded from the "
        "checkpoint (default: None)",
    )
    # cuda setup
    arg_parser.add_argument(
        "--no_cuda", default=False, help="disable cuda", action="store_true"
    )
    # dataset
    arg_parser.add_argument(
        "--batch_size",
        type=int,
        default=32,
        help="Input batch size for training (default: 32)",
    )

    # optimizer
    arg_parser.add_argument(
        "--optimizer",
        type=str,
        default="adam",
        help="Optimizer to use [adam, sgd, adagrad] (default: adam)",
    )
    arg_parser.add_argument(
        "--lr", type=float, default=1e-2, help="Learning rate (default: 1e-2)"
    )
    arg_parser.add_argument(
        "--update_freq",
        type=int,
        default=1,
        help="Learnable weights are updated every update_freq batches (default: 1)",
    )
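    # Note: as the help string above states, update_freq > 1 means learnable
    # weights are updated only once every update_freq batches, i.e. the Trainer
    # accumulates gradients across batches before each optimizer step.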

    # Channel parameters
    arg_parser.add_argument(
        "--vocab_size",
        type=int,
        default=10,
        help="Number of symbols (terms) in the vocabulary (default: 10)",
    )
    arg_parser.add_argument(
        "--max_len", type=int, default=1, help="Max length of the sequence (default: 1)"
    )

    # Setting up tensorboard
    arg_parser.add_argument(
        "--tensorboard", default=False, help="enable tensorboard", action="store_true"
    )
    arg_parser.add_argument(
        "--tensorboard_dir", type=str, default="runs/", help="Path for tensorboard log"
    )

    arg_parser.add_argument(
        "--distributed_port",
        default=18363,
        type=int,
        help="Port to use in distributed learning",
    )

    arg_parser.add_argument(
        "--fp16",
        default=False,
        help="Use mixed-precision for training/evaluating models",
        action="store_true",
    )
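    # Note: this function only registers the flag; mixed precision itself is
    # handled by the Trainer, typically via torch.cuda.amp (autocast + GradScaler)
    # on CUDA devices.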

    return arg_parser
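
A minimal usage sketch with illustrative flag values; in practice EGG exposes these options through its init() entry point rather than by calling this helper directly, so treat the direct call below as an assumption for demonstration only:

import argparse

from egg.core.util import _populate_cl_params

parser = _populate_cl_params(argparse.ArgumentParser())
opts = parser.parse_args(
    ["--n_epochs", "50", "--lr", "1e-3", "--batch_size", "64", "--tensorboard"]
)

print(opts.n_epochs, opts.lr, opts.batch_size, opts.tensorboard)
# -> 50 0.001 64 True

Arguments not given on the command line fall back to the defaults declared above (e.g. opts.vocab_size == 10, opts.max_len == 1, opts.optimizer == "adam").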