`__init__` method of the T5 fine-tuner class

Source: src/engine/step4/model_dev/utils/model.py


    def __init__(self, hparams) -> None:
        """
        Creates fine-tuner object.

        Loads the pretrained T5 model and tokenizer, optionally freezes
        embedding/encoder weights, registers task-specific special tokens
        (resizing the embedding matrix to match), and records per-split
        observation limits.

        Args:
            hparams (argparse.Namespace): Hyperparameters of the model object
                to be created. Read here: model_name, tokenizer_name,
                freeze_embeds, freeze_encoder, n_train, n_val, n_test.
        """
        super(T5FineTuner, self).__init__()
        # NOTE(review): newer pytorch-lightning versions disallow assigning
        # self.hparams directly (use self.save_hyperparameters(hparams)) —
        # confirm against the pinned lightning version.
        self.hparams = hparams
        self.model = T5ForConditionalGeneration.from_pretrained(hparams.model_name)
        self.tokenizer = T5Tokenizer.from_pretrained(hparams.tokenizer_name)

        # Freeze before adding tokens so the (resized) embeddings follow the
        # same freezing decision made by freeze_embeds/freeze_params.
        if self.hparams.freeze_embeds:
            self.freeze_embeds()
        if self.hparams.freeze_encoder:
            self.freeze_params(self.model.get_encoder())
            self.assert_all_frozen(self.model.get_encoder())

        # Register task-specific special tokens alongside the tokenizer's
        # existing additional_special_tokens. The original code made a second
        # add_special_tokens call with only the new tokens, which overwrote
        # the combined additional_special_tokens list back to just the new
        # ones — a single call with the combined list is both sufficient and
        # correct.
        self.new_special_tokens = self.added_tokens()
        additional_special_tokens = (
            self.tokenizer.additional_special_tokens + self.new_special_tokens
        )
        self.tokenizer.add_special_tokens(
            {"additional_special_tokens": additional_special_tokens}
        )
        # Grow the model's embedding matrix to cover the enlarged vocab.
        self.model.resize_token_embeddings(len(self.tokenizer))

        # Per-split observation caps; a negative value means "use all data".
        n_observations_per_split = {
            "train": self.hparams.n_train,
            "validation": self.hparams.n_val,
            "test": self.hparams.n_test,
        }
        self.n_obs = {
            k: v if v >= 0 else None for k, v in n_observations_per_split.items()
        }