def get_lora_config()

in src/hyperpod_nemo_adapter/collections/model/sagemaker_base_model.py [0:0]


    def get_lora_config(self):
        """Build a Hugging Face PEFT LoraConfig from the peft section of the model config."""
        target_modules = None
        if self._cfg.peft.get("target_modules", None) is not None:
            target_modules = list(self._cfg.peft.target_modules)
            # PEFT also accepts a single module name as a plain string,
            # so unwrap one-element lists.
            if isinstance(target_modules, list) and len(target_modules) == 1:
                target_modules = target_modules[0]
        lora_config = LoraConfig(
            target_modules=target_modules or "all-linear",
            # Alpha parameter for LoRA scaling
            lora_alpha=self._cfg.peft.alpha,
            # Dropout probability for LoRA layers
            lora_dropout=self._cfg.peft.dropout,
            # LoRA attention dimension
            r=self._cfg.peft.rank,
            bias="none",
            task_type="CAUSAL_LM",
            inference_mode=False,
        )
        return lora_config
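
For context, the returned LoraConfig is typically applied to a Hugging Face causal language model with peft.get_peft_model. The sketch below is illustrative only: the model name and the LoRA hyperparameter values are assumptions, not defaults from hyperpod_nemo_adapter.

# Minimal sketch of applying a LoraConfig like the one returned above.
# Assumption: "facebook/opt-125m" and the hyperparameter values below are
# illustrative placeholders, not hyperpod_nemo_adapter defaults.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")

lora_config = LoraConfig(
    target_modules="all-linear",  # fallback used when peft.target_modules is unset
    lora_alpha=32,                # corresponds to cfg.peft.alpha
    lora_dropout=0.05,            # corresponds to cfg.peft.dropout
    r=16,                         # corresponds to cfg.peft.rank
    bias="none",
    task_type="CAUSAL_LM",
    inference_mode=False,
)

peft_model = get_peft_model(model, lora_config)
peft_model.print_trainable_parameters()  # only the LoRA adapter weights are trainable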