src/peft/peft_model.py [1555:1605]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            for name, _ in self.base_model.named_children():
                if any(module_name in name for module_name in self.modules_to_save):
                    self.cls_layer_name = name
                    break

        # to make sure the classifier layer is trainable; this may add a new ModulesToSaveWrapper
        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))

    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
        """
        Add an adapter to the model based on the passed configuration.

        This adapter is not trained. To load a trained adapter, check out [`PeftModel.load_adapter`].

        The name for the new adapter should be unique.

        The new adapter is not automatically set as the active adapter. Use [`PeftModel.set_adapter`] to set the active
        adapter.

        Args:
            adapter_name (`str`):
                The name of the adapter to be added.
            peft_config ([`PeftConfig`]):
                The configuration of the adapter to be added.
            low_cpu_mem_usage (`bool`, *optional*, defaults to `False`):
                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
                adapters. Don't use this option when creating a new PEFT adapter for training.

        """
        # ensure that additional adapters also add the classifier layer to modules_to_save
        if hasattr(peft_config, "modules_to_save"):
            classifier_module_names = ["classifier", "score"]
            if peft_config.modules_to_save is None:
                peft_config.modules_to_save = classifier_module_names[:]
            else:
                peft_config.modules_to_save.extend(classifier_module_names)

        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        task_ids=None,
        **kwargs,
    ):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
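
The override above quietly mutates the passed config so that a classification head named "classifier" or "score" is always wrapped and kept trainable alongside the adapter. Below is a minimal usage sketch of that behavior; the checkpoint, adapter name, and LoRA hyperparameters are illustrative choices, not values taken from the excerpt.

from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForSequenceClassification

base = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2)

# First adapter via get_peft_model; BERT's head is named "classifier",
# so the detection loop above records it as cls_layer_name.
peft_model = get_peft_model(base, LoraConfig(task_type=TaskType.SEQ_CLS, r=8, lora_alpha=16))

# Second adapter: this config omits modules_to_save, so the override
# injects ["classifier", "score"] before delegating to PeftModel.add_adapter,
# keeping a per-adapter copy of the head trainable and saved as well.
second = LoraConfig(task_type=TaskType.SEQ_CLS, r=16, lora_alpha=32)
peft_model.add_adapter("lora_two", second)
print(second.modules_to_save)  # ["classifier", "score"]

# As the docstring notes, the new adapter is not activated automatically.
peft_model.set_adapter("lora_two")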



src/peft/peft_model.py [2397:2447]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        for name, _ in self.base_model.named_children():
            if any(module_name in name for module_name in self.modules_to_save):
                self.cls_layer_name = name
                break

        # to make sure the classifier layer is trainable; this may add a new ModulesToSaveWrapper
        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))

    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
        """
        Add an adapter to the model based on the passed configuration.

        This adapter is not trained. To load a trained adapter, check out [`PeftModel.load_adapter`].

        The name for the new adapter should be unique.

        The new adapter is not automatically set as the active adapter. Use [`PeftModel.set_adapter`] to set the active
        adapter.

        Args:
            adapter_name (`str`):
                The name of the adapter to be added.
            peft_config ([`PeftConfig`]):
                The configuration of the adapter to be added.
            low_cpu_mem_usage (`bool`, *optional*, defaults to `False`):
                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
                adapters. Don't use this option when creating a new PEFT adapter for training.

        """
        # ensure that additional adapters also add the classifier layer to modules_to_save
        if hasattr(peft_config, "modules_to_save"):
            classifier_module_names = ["classifier", "score"]
            if peft_config.modules_to_save is None:
                peft_config.modules_to_save = classifier_module_names[:]
            else:
                peft_config.modules_to_save.extend(classifier_module_names)

        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        task_ids=None,
        **kwargs,
    ):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
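
This second excerpt repeats the same pattern for another task head (the range [2397:2447] likely belongs to the token-classification counterpart in the same file). The head-detection loop is a plain substring match over named_children; here is a self-contained sketch of just that loop, using a toy torch module rather than a real transformers model:

import torch.nn as nn

class ToyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.encoder = nn.Linear(4, 4)
        self.classifier = nn.Linear(4, 2)  # name contains "classifier"

modules_to_save = ["classifier", "score"]

cls_layer_name = None
for name, _ in ToyModel().named_children():
    # Substring match, exactly as in the excerpt: a child named
    # "score_head" or "my_classifier" would also match.
    if any(module_name in name for module_name in modules_to_save):
        cls_layer_name = name
        break

print(cls_layer_name)  # "classifier"

Because the match is by substring rather than equality, the first child whose name merely contains one of the saved-module names wins; on standard transformers classification models that is the intended head.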