src/peft/tuners/vera/bnb.py [33:76]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        def __init__(
            self,
            base_layer: torch.nn.Module,
            adapter_name: str,
            vera_A,
            vera_B,
            r: int = 0,
            vera_dropout: float = 0.0,
            fan_in_fan_out: bool = False,
            init_weights: bool = True,
            d_initial: float = 0.1,
            **kwargs,
        ) -> None:
            super().__init__()
            VeraLayer.__init__(self, base_layer)
            self.fan_in_fan_out = fan_in_fan_out

            self._active_adapter = adapter_name
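            # Register this adapter's trainable lambda_d / lambda_b vectors against the shared, frozen vera_A / vera_B projections.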
            self.update_layer(
                adapter_name,
                vera_A,
                vera_B,
                r,
                vera_dropout=vera_dropout,
                init_weights=init_weights,
                d_initial=d_initial,
            )

        def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
            if self.merged:
                warnings.warn(
                    f"Already following adapters were merged {','.join(self.merged_adapters)}. "
                    f"You are now additionally merging {','.join(self.active_adapters)}."
                )

            adapter_names = check_adapters_to_merge(self, adapter_names)
            if not adapter_names:
                return

            for active_adapter in adapter_names:
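                # Only adapters that registered VeRA parameters on this layer can be merged.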
                if active_adapter not in self.vera_lambda_d.keys():
                    continue

                warnings.warn(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
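
The excerpt cuts off just as the merge loop starts emitting its per-adapter warning; the part not shown composes a VeRA delta from the shared `vera_A`/`vera_B` projections and the per-adapter `vera_lambda_d`/`vera_lambda_b` vectors and adds it to the base weight. The sketch below is an illustrative reconstruction of that composition with plain tensors, not the library's exact code; the slicing to the layer's dimensions and the `fan_in_fan_out` transpose are assumptions about how the delta is assembled.

    import torch

    def vera_delta_weight(
        vera_A: torch.Tensor,    # shared projection, shape (r, max_in_features)
        vera_B: torch.Tensor,    # shared projection, shape (max_out_features, r)
        lambda_d: torch.Tensor,  # per-adapter scaling over the rank, shape (r,)
        lambda_b: torch.Tensor,  # per-adapter scaling over outputs, shape (out_features,)
        in_features: int,
        out_features: int,
        fan_in_fan_out: bool = False,
    ) -> torch.Tensor:
        # Slice the shared matrices down to this layer's dimensions.
        sliced_A = vera_A[:, :in_features]    # (r, in_features)
        sliced_B = vera_B[:out_features, :]   # (out_features, r)
        # Scale the rows of B by lambda_b and the rows of A by lambda_d, then project.
        delta = (lambda_b.unsqueeze(-1) * sliced_B) @ (lambda_d.unsqueeze(-1) * sliced_A)
        # Match the weight layout of the wrapped base layer.
        return delta.T if fan_in_fan_out else delta

With a delta of this shape, merging amounts to adding it to the dequantized base weight (and, for the bnb layers in this file, requantizing the result); unmerging subtracts it again.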



src/peft/tuners/vera/bnb.py [247:290]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        def __init__(
            self,
            base_layer: torch.nn.Module,
            adapter_name: str,
            vera_A,
            vera_B,
            r: int = 0,
            vera_dropout: float = 0.0,
            fan_in_fan_out: bool = False,
            init_weights: bool = True,
            d_initial: float = 0.1,
            **kwargs,
        ) -> None:
            super().__init__()
            VeraLayer.__init__(self, base_layer)
            self.fan_in_fan_out = fan_in_fan_out

            self._active_adapter = adapter_name
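            # Register this adapter's trainable lambda_d / lambda_b vectors against the shared, frozen vera_A / vera_B projections.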
            self.update_layer(
                adapter_name,
                vera_A,
                vera_B,
                r,
                vera_dropout=vera_dropout,
                init_weights=init_weights,
                d_initial=d_initial,
            )

        def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
            if self.merged:
                warnings.warn(
                    f"Already following adapters were merged {','.join(self.merged_adapters)}. "
                    f"You are now additionally merging {','.join(self.active_adapters)}."
                )

            adapter_names = check_adapters_to_merge(self, adapter_names)
            if not adapter_names:
                return

            for active_adapter in adapter_names:
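                # Only adapters that registered VeRA parameters on this layer can be merged.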
                if active_adapter not in self.vera_lambda_d.keys():
                    continue

                warnings.warn(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
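
The `safe_merge` flag in the signature suggests the guard used elsewhere in PEFT: compute the merged weight on a copy, verify it is finite, and only then commit it back to the module. Below is a minimal sketch of that pattern with a plain floating-point weight; `commit_if_finite` is a hypothetical helper, not part of the library, and for the actual 8-bit/4-bit layers the base weight would first be dequantized via bitsandbytes and requantized afterwards.

    import torch

    def commit_if_finite(base_weight: torch.Tensor, delta: torch.Tensor, adapter_name: str) -> torch.Tensor:
        # Merge on a copy so the stored weight is untouched if the check fails.
        merged = base_weight.clone() + delta
        if not torch.isfinite(merged).all():
            raise ValueError(
                f"NaNs detected in the merged weights; the adapter {adapter_name} seems to be broken."
            )
        return merged

    # With safe_merge=True the merge raises instead of silently committing a broken weight:
    # new_weight = commit_if_finite(dequantized_weight, delta, "default")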



