src/peft/tuners/vera/bnb.py [203:241]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                for active_adapter in self.active_adapters:
                    if active_adapter not in self.vera_lambda_d.keys():
                        continue

                    lambda_d = self.vera_lambda_d[active_adapter]
                    lambda_b = self.vera_lambda_b[active_adapter]

                    vera_A = self.vera_A[active_adapter]
                    vera_B = self.vera_B[active_adapter]

                    dropout = self.vera_dropout[active_adapter]

                    requires_conversion = not torch.is_autocast_enabled()
                    if requires_conversion:
                        expected_dtype = result.dtype
                        compute_dtype = lambda_d.dtype
                        if x.dtype != compute_dtype:
                            x = x.to(compute_dtype)

                    sliced_A = vera_A[:, : self.in_features].to(x.device)
                    sliced_B = vera_B[: self.out_features, :].to(x.device)

                    x_temp = dropout(x.to(lambda_d.dtype))

                    adapter_output = lambda_b * torch.nn.functional.linear(
                        lambda_d * torch.nn.functional.linear(x_temp, sliced_A), sliced_B
                    )

                    if requires_conversion:
                        adapter_output = adapter_output.to(expected_dtype)

                    result = result + adapter_output

            # Ensure the output tensor has the same dtype as the input tensor
            return result.to(x.dtype)

        def __repr__(self) -> str:
            """Return the parent layer's repr prefixed with "vera." to mark the adapter wrapper."""
            return "vera." + super().__repr__()
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



