# build_args() — from cp_examples/mip_finetune/train_mip.py


def build_args(arg_defaults=None):
    """Build the argparse namespace for MIP fine-tuning.

    Seeds PyTorch Lightning for reproducibility, assembles a parser that
    combines script-local flags with ``pl.Trainer``, ``XrayDataModule``,
    and ``MIPModule`` arguments, applies defaults, and post-processes the
    parsed namespace (root dir fallback, checkpoint auto-resume, and a
    ``ModelCheckpoint`` callback).

    Args:
        arg_defaults: Optional dict of parser defaults; entries override
            the built-in defaults below on a per-key basis.

    Returns:
        argparse.Namespace: the fully parsed and post-processed arguments.
    """
    pl.seed_everything(1234)

    # Built-in defaults; caller-supplied values take precedence.
    defaults = {
        "accelerator": "ddp",
        "batch_size": 32,
        "max_epochs": 50,
        "gpus": 1,
        "num_workers": 10,
        "callbacks": [],
    }
    if arg_defaults is not None:
        defaults.update(arg_defaults)

    # ------------
    # args
    # ------------
    parser = ArgumentParser()
    parser.add_argument("--im_size", default=224, type=int)
    parser.add_argument("--uncertain_label", default=np.nan, type=float)
    parser.add_argument("--nan_label", default=np.nan, type=float)
    parser = pl.Trainer.add_argparse_args(parser)
    parser = XrayDataModule.add_model_specific_args(parser)
    parser = MIPModule.add_model_specific_args(parser)
    parser.set_defaults(**defaults)
    args = parser.parse_args()

    if args.default_root_dir is None:
        args.default_root_dir = Path.cwd()

    if args.pretrained_file is None:
        warn("Pretrained file not specified, training from scratch.")
    else:
        # Lazy %-formatting: the message is only built if INFO is enabled.
        logging.info("Loading pretrained file from %s", args.pretrained_file)

    # Auto-resume from the most recently modified checkpoint, if any exist.
    checkpoint_dir = Path(args.default_root_dir) / "checkpoints"
    checkpoint_dir.mkdir(exist_ok=True, parents=True)
    ckpt_list = sorted(checkpoint_dir.glob("*.ckpt"), key=os.path.getmtime)
    if ckpt_list:
        args.resume_from_checkpoint = str(ckpt_list[-1])

    args.callbacks.append(
        pl.callbacks.ModelCheckpoint(dirpath=checkpoint_dir, verbose=True)
    )

    return args