def __post_init__()

in src/peft/tuners/adalora/config.py


    def __post_init__(self):
        super().__post_init__()
        self.peft_type = PeftType.ADALORA

        if self.use_dora:
            raise ValueError(f"{self.peft_type} does not support DoRA.")

        if self.loftq_config:
            raise ValueError(f"{self.peft_type} does not support LOFTQ.")

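        # Normalize list inputs to sets; a plain string is left as-is and is later
        # treated as a regex pattern over module names (see the check below).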
        self.target_modules = (
            set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
        )
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # if target_modules is a regex expression, then layers_to_transform should be None
        if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
            raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")

        # check for layers_to_transform and layers_pattern
        if self.layers_pattern and not self.layers_to_transform:
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")

        # Check if 'r' has been set to a non-default value
        if self.r != 8:  # 8 is the default value for 'r' in LoraConfig
            warnings.warn(
                "Note that `r` is not used in AdaLoRA and will be ignored. "
                "If you intended to set the initial rank, use `init_r` instead."
            )

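        # AdaLoRA's rank-budget schedule prunes ranks between step `tinit` and step
        # `total_step - tfinal`, so the total number of training steps must be known
        # up front and that window must be non-empty.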
        if self.total_step is None or self.total_step <= 0:
            raise ValueError("AdaLoRA does not work when `total_step` is None; supply a value > 0.")

        if self.tinit >= (self.total_step - self.tfinal):
            raise ValueError(
                "The supplied schedule values don't allow for a budgeting phase. Decrease `tfinal`/`tinit` or "
                "increase `total_step`."
            )
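
As a quick illustration, here is a minimal usage sketch of a config that passes all of the checks above. The hyperparameter values and module names are hypothetical and model-dependent; it only assumes `AdaLoraConfig` is importable from `peft`.

    from peft import AdaLoraConfig

    # `total_step` must be a positive integer and the pruning window
    # [tinit, total_step - tfinal) must be non-empty for __post_init__ to pass.
    config = AdaLoraConfig(
        init_r=12,                            # initial rank (use this, not `r`)
        target_r=4,                           # target average rank after pruning
        tinit=200,                            # warmup steps before pruning starts
        tfinal=500,                           # final steps with a fixed budget
        total_step=5000,                      # required: total training steps
        target_modules=["q_proj", "v_proj"],  # example module names, model-dependent
    )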