def __post_init__()

in src/peft/tuners/lora/config.py


    def __post_init__(self):
        super().__post_init__()
        self.peft_type = PeftType.LORA
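        # a list of module names is normalized to a set; a str is treated as a regex pattern (see the checks below)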
        self.target_modules = (
            set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
        )
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )

        # if target_modules is a regex expression, then layers_to_transform should be None
        if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
            raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")

        # if target_modules is a regex expression, then layers_pattern should be None
        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")

        # check for layers_to_transform and layers_pattern
        if self.layers_pattern and not self.layers_to_transform:
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

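        # DoRA is not supported in combination with megatron_core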
        if self.use_dora and self.megatron_config:
            raise ValueError("DoRA does not support megatron_core, please set `use_dora=False`.")

        # handle init_lora_weights and loftq_config
        if self.init_lora_weights == "loftq":
            import importlib.util

            if not importlib.util.find_spec("scipy"):
                raise ImportError("The required package 'scipy' is not installed. Please install it to continue.")
            if not self.loftq_config:
                raise ValueError("`loftq_config` must be specified when `init_lora_weights` is 'loftq'.")
            if not isinstance(self.loftq_config, dict):
                # convert loftq_config to dict
                self.loftq_config = vars(self.loftq_config)
        elif self.loftq_config:
            self.loftq_config = {}
            warnings.warn("`loftq_config` specified but will be ignored when `init_lora_weights` is not 'loftq'.")

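        # handle init_lora_weights and eva_config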
        elif self.init_lora_weights == "eva" and self.eva_config is None:
            warnings.warn("`init_lora_weights` is 'eva' but `eva_config` is not specified. Using default EVA config.")
            self.eva_config = EvaConfig()
        elif self.init_lora_weights != "eva" and self.eva_config is not None:
            warnings.warn("`eva_config` specified but will be ignored when `init_lora_weights` is not 'eva'.")

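        # handle init_lora_weights and corda_config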
        elif self.init_lora_weights == "corda" and self.corda_config is None:
            warnings.warn(
                "`init_lora_weights` is 'corda' but `corda_config` is not specified. Using default CorDA config."
            )
            self.corda_config = CordaConfig()
        elif self.init_lora_weights != "corda" and self.corda_config is not None:
            warnings.warn("`corda_config` specified but will be ignored when `init_lora_weights` is not 'corda'.")

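        # lora_bias is only compatible with the simple init schemes and is not supported together with DoRA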
        if self.lora_bias:
            if self.init_lora_weights not in (True, "gaussian"):
                raise ValueError(
                    f"The argument lora_bias=True is only supported with init_lora_weights=True or 'gaussian', got "
                    f"init_lora_weights={self.init_lora_weights} instead."
                )
            if self.use_dora:
                raise ValueError("The argument lora_bias=True is not supported for DoRA, please pass use_dora=False")

        # Post-training conversion of the modified base weights (PiSSA/CorDA/OLoRA) to restore their initial values
        # cannot be done correctly when using rslora + rank_pattern/alpha_pattern. We can't really know if the user
        # intends this when they eventually call save_pretrained (i.e. whether they will pass
        # path_initial_model_for_weight_conversion). Therefore, we only warn but don't raise an error here.
        if (
            self.use_rslora
            and (self.rank_pattern or self.alpha_pattern)
            and (
                (isinstance(self.init_lora_weights, str) and (self.init_lora_weights.startswith("pissa")))
                or (self.init_lora_weights == "olora")
                or (self.init_lora_weights == "corda")
            )
        ):
            msg = (
                "Using Rank-Stabilized LoRA with rank_pattern/alpha_pattern and post-training conversion of modified "
                "base weights PiSSA/CorDA/OLoRA means that you won't be able to pass "
                "`path_initial_model_for_weight_conversion` to `save_pretrained` to restore the initial values of the "
                "base weights; if you intend to do this, please ensure not to use rslora or rank_pattern/alpha_pattern."
            )
            warnings.warn(msg)

        self._custom_modules: Optional[dict[type[nn.Module], type[nn.Module]]] = None
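
A minimal usage sketch (not part of the source file; module names are illustrative) showing how these validations surface when a config is constructed, assuming the public `peft` package is installed:

    from peft import LoraConfig

    # a str target_modules is treated as a regex, so per-layer selection is rejected
    try:
        LoraConfig(target_modules=r".*attn.*", layers_to_transform=[0, 1])
    except ValueError as err:
        print(err)  # `layers_to_transform` cannot be used when `target_modules` is a str.

    # a list of module names is accepted and normalized to a set
    config = LoraConfig(target_modules=["q_proj", "v_proj"], layers_to_transform=[0, 1])
    print(config.target_modules)  # {'q_proj', 'v_proj'} (set order may vary)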