in src/peft/peft_model.py [0:0]
def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
"""
Add an adapter to the model based on the passed configuration.

This adapter is not trained. To load a trained adapter, check out [`PeftModel.load_adapter`].

The name for the new adapter should be unique.

The new adapter is not automatically set as the active adapter. Use [`PeftModel.set_adapter`] to set the
active adapter.

Args:
    adapter_name (`str`):
        The name of the adapter to be added.
    peft_config ([`PeftConfig`]):
        The configuration of the adapter to be added.
    low_cpu_mem_usage (`bool`, *optional*, defaults to `False`):
        Create empty adapter weights on meta device. Useful to speed up the process when loading saved
        adapters. Don't use this option when creating a new PEFT adapter for training.
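
Example (a minimal sketch of adding a second, untrained LoRA adapter; the checkpoint name and LoRA
settings are only illustrative):

```py
>>> from transformers import AutoModelForCausalLM
>>> from peft import LoraConfig, TaskType, get_peft_model

>>> base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
>>> model = get_peft_model(base_model, LoraConfig(task_type=TaskType.CAUSAL_LM))
>>> # add a second, untrained LoRA adapter under a unique name
>>> model.add_adapter("other", LoraConfig(task_type=TaskType.CAUSAL_LM, r=16))
>>> # the new adapter only becomes active once it is selected explicitly
>>> model.set_adapter("other")
```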
"""
prefix = PEFT_TYPE_TO_PREFIX_MAPPING.get(peft_config.peft_type)
if prefix and adapter_name in prefix:
warnings.warn(
f"Adapter name {adapter_name} should not be contained in the prefix {prefix}."
"This may lead to reinitialization of the adapter weights during loading."
)
if peft_config.peft_type != self.peft_type:
raise ValueError(
f"Cannot combine adapters with different peft types. "
f"Found {self.peft_type} and {peft_config.peft_type}."
)
try:
if peft_config.is_prompt_learning:
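# Prompt learning methods (prompt tuning, p-tuning, prefix tuning) train a prompt encoder rather than
# injecting new layers into the base model.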
self.peft_config[adapter_name] = peft_config
if hasattr(self.config, "to_dict"):
dict_config = self.config.to_dict()
else:
dict_config = self.config
peft_config = _prepare_prompt_learning_config(peft_config, dict_config)
self._setup_prompt_encoder(adapter_name)
set_additional_trainable_modules(
model=self.base_model,
peft_config=peft_config,
model_config=BaseTuner.get_model_config(self),
adapter_name=adapter_name,
)
elif peft_config.is_adaption_prompt:
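# Adaption prompt (LLaMA-Adapter) manages adapter creation itself, so delegate to the wrapped tuner model.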
self.base_model.add_adapter(adapter_name, peft_config)
set_additional_trainable_modules(
model=self.base_model,
peft_config=peft_config,
model_config=BaseTuner.get_model_config(self),
adapter_name=adapter_name,
)
else:
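# All other tuners (LoRA, IA3, etc.) inject their adapter layers directly into the base model's modules.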
self.peft_config[adapter_name] = peft_config
self.base_model.inject_adapter(
self.base_model.model, adapter_name, low_cpu_mem_usage=low_cpu_mem_usage
)
except Exception: # something went wrong, roll back
if adapter_name in self.peft_config:
del self.peft_config[adapter_name]
raise