def extract_hyperparameters_from_trainer()

in optimum/graphcore/modelcard.py


def extract_hyperparameters_from_trainer(trainer):
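    """Collect the hyperparameters to report in the model card from the trainer's arguments and IPU configuration."""
    # _TRAINING_ARGS_KEYS is a module-level list of TrainingArguments attribute names to report;
    # it includes train_batch_size and eval_batch_size, which are referenced again below.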
    hyperparameters = {k: getattr(trainer.args, k) for k in _TRAINING_ARGS_KEYS}

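    # Training runs on IPU; also record gradient accumulation when it is enabled.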
    hyperparameters["distributed_type"] = "IPU"

    if trainer.args.gradient_accumulation_steps > 1:
        hyperparameters["gradient_accumulation_steps"] = trainer.args.gradient_accumulation_steps

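    # batch_size_factor() scales the per-step batch size by the IPU execution scheme
    # (replication, device iterations and, for training, gradient accumulation), so the
    # combined batch size is recorded whenever it differs from the plain one.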
    total_train_batch_size = trainer.args.train_batch_size * trainer.ipu_config.batch_size_factor()
    if total_train_batch_size != hyperparameters["train_batch_size"]:
        hyperparameters["total_train_batch_size"] = total_train_batch_size
    total_eval_batch_size = trainer.args.eval_batch_size * trainer.ipu_config.batch_size_factor(for_inference=True)
    if total_eval_batch_size != hyperparameters["eval_batch_size"]:
        hyperparameters["total_eval_batch_size"] = total_eval_batch_size

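    # Optimizer: LAMB when requested, otherwise the configured Adam settings.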
    if trainer.args.lamb:
        hyperparameters["optimizer"] = "LAMB"
    else:
        hyperparameters["optimizer"] = (
            f"Adam with betas=({trainer.args.adam_beta1},{trainer.args.adam_beta2}) "
            f"and epsilon={trainer.args.adam_epsilon}"
        )

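    # Learning-rate schedule: record warmup settings and either the fixed step budget or the epoch count.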
    hyperparameters["lr_scheduler_type"] = trainer.args.lr_scheduler_type.value
    if trainer.args.warmup_ratio != 0.0:
        hyperparameters["lr_scheduler_warmup_ratio"] = trainer.args.warmup_ratio
    if trainer.args.warmup_steps != 0.0:
        hyperparameters["lr_scheduler_warmup_steps"] = trainer.args.warmup_steps
    if trainer.args.max_steps != -1:
        hyperparameters["training_steps"] = trainer.args.max_steps
    else:
        hyperparameters["num_epochs"] = trainer.args.num_train_epochs

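    # The fp32 flag selects full-precision training; otherwise training is reported as mixed precision.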
    if trainer.args.fp32:
        hyperparameters["training precision"] = "Full Precision"
    else:
        hyperparameters["training precision"] = "Mixed Precision"
    # if trainer.args.fp16:
    #     if trainer.use_amp:
    #         hyperparameters["mixed_precision_training"] = "Native AMP"
    #     elif trainer.use_apex:
    #         hyperparameters["mixed_precision_training"] = f"Apex, opt level {trainer.args.fp16_opt_level}"

    if trainer.args.label_smoothing_factor != 0.0:
        hyperparameters["label_smoothing_factor"] = trainer.args.label_smoothing_factor

    return hyperparameters
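
A minimal usage sketch (assumption: `trainer` is an already-built optimum.graphcore IPUTrainer; the variable name and the print loop are illustrative, not part of this module):

    hyperparameters = extract_hyperparameters_from_trainer(trainer)
    for name, value in hyperparameters.items():
        print(f"{name}: {value}")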