src/peft/tuners/randlora/bnb.py [33:83]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        def __init__(
            self,
            base_layer: torch.nn.Module,
            adapter_name: str,
            randlora_A,
            randlora_B,
            r: int = 0,
            randlora_alpha: int = 0,
            randlora_dropout: float = 0.0,
            fan_in_fan_out: bool = False,
            init_weights: bool = True,
            **kwargs,
        ) -> None:
            super().__init__()
            RandLoraLayer.__init__(self, base_layer)
            self.fan_in_fan_out = fan_in_fan_out

            self._active_adapter = adapter_name
            self.update_layer(
                adapter_name,
                randlora_A,
                randlora_B,
                r,
                randlora_alpha=randlora_alpha,
                randlora_dropout=randlora_dropout,
                init_weights=init_weights,
            )

        def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
            """
            Merge the active adapter weights into the base weights.

            Args:
                safe_merge (`bool`, *optional*):
                    If `True`, the merge operation will be performed on a copy of the original weights and checked for
                    NaNs before the merged weights are written back. This is useful if you want to verify that merging
                    will not produce NaNs. Defaults to `False`.
                adapter_names (`list[str]`, *optional*):
                    The list of adapter names that should be merged. If `None`, all active adapters will be merged.
                    Defaults to `None`.
            """

            adapter_names = check_adapters_to_merge(self, adapter_names)
            if not adapter_names:
                return

            for active_adapter in adapter_names:
                if active_adapter not in self.randlora_lambda:
                    continue

                warnings.warn(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
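
For context, a minimal usage sketch of the `merge` API documented above. It assumes a causal LM quantized with bitsandbytes and adapted with PEFT's `RandLoraConfig`/`get_peft_model`; the checkpoint name, target modules, and the hasattr-based layer filter are illustrative assumptions, not taken from this file.

    # Sketch only: calling merge(safe_merge=..., adapter_names=...) on the RandLora
    # bnb layers of a quantized model. Checkpoint and target module names are placeholders.
    from transformers import AutoModelForCausalLM, BitsAndBytesConfig
    from peft import RandLoraConfig, get_peft_model

    base = AutoModelForCausalLM.from_pretrained(
        "facebook/opt-125m",  # placeholder checkpoint
        quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    )
    config = RandLoraConfig(r=32, target_modules=["q_proj", "v_proj"])
    model = get_peft_model(base, config)

    # ... train the adapter ...

    # Merge the active adapter into the 8-bit base weights, checking for NaNs first.
    for module in model.modules():
        if hasattr(module, "randlora_lambda") and hasattr(module, "merge"):
            module.merge(safe_merge=True, adapter_names=None)  # None -> all active adapters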



src/peft/tuners/randlora/bnb.py [261:310]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        def __init__(
            self,
            base_layer: torch.nn.Module,
            adapter_name: str,
            randlora_A,
            randlora_B,
            r: int = 0,
            randlora_alpha: int = 0,
            randlora_dropout: float = 0.0,
            fan_in_fan_out: bool = False,
            init_weights: bool = True,
            **kwargs,
        ) -> None:
            super().__init__()
            RandLoraLayer.__init__(self, base_layer)
            self.fan_in_fan_out = fan_in_fan_out
            self._active_adapter = adapter_name
            self.update_layer(
                adapter_name,
                randlora_A,
                randlora_B,
                r,
                randlora_alpha=randlora_alpha,
                randlora_dropout=randlora_dropout,
                init_weights=init_weights,
            )

        def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
            """
            Merge the active adapter weights into the base weights.

            Args:
                safe_merge (`bool`, *optional*):
                    If `True`, the merge operation will be performed on a copy of the original weights and checked for
                    NaNs before the merged weights are written back. This is useful if you want to verify that merging
                    will not produce NaNs. Defaults to `False`.
                adapter_names (`list[str]`, *optional*):
                    The list of adapter names that should be merged. If `None`, all active adapters will be merged.
                    Defaults to `None`.
            """

            adapter_names = check_adapters_to_merge(self, adapter_names)
            if not adapter_names:
                return

            for active_adapter in adapter_names:
                if active_adapter not in self.randlora_lambda:
                    continue

                warnings.warn(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
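
Both docstrings describe the same safe_merge contract: do the merge on a copy, verify the result is finite, and only then commit it. Below is a generic illustration of that pattern, not the bitsandbytes-specific code in this file (which additionally has to dequantize the quantized base weight, add the adapter delta, and requantize); the function name and error text are illustrative.

    # Illustrative sketch of the safe_merge pattern described in the docstrings above.
    import torch

    def safe_merge_sketch(base_weight: torch.Tensor, delta_weight: torch.Tensor, adapter_name: str) -> torch.Tensor:
        # Work on a copy so the base weights stay untouched if validation fails.
        merged = base_weight.clone() + delta_weight
        # Refuse to commit a merge that produced NaNs/Infs.
        if not torch.isfinite(merged).all():
            raise ValueError(f"NaNs detected in the merged weights. The adapter {adapter_name} seems to be broken")
        return merged  # caller writes this back to the layer only after the check passes

With `safe_merge=False`, the docstring implies the copy-and-check step is skipped and the adapter delta is applied to the base weights directly.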



