def __init__()

in optimum/neuron/peft/tuners/lora/layer.py [0:0]
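
The method body assumes the following module-level imports. The NxD* aliases and the exact neuronx_distributed import paths are inferred from the class names used in the body, not confirmed from the source file:

    from typing import Any

    import torch
    from torch import nn
    from neuronx_distributed.modules.qkv_linear import GQAQKVColumnParallelLinear as NxDGQAQKVColumnParallelLinear
    from neuronx_distributed.parallel_layers.layers import BaseParallelLinear, ParallelEmbedding as NxDParallelEmbedding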


    def __init__(self, base_layer: nn.Module, ephemeral_gpu_offload: bool = False, **kwargs) -> None:
        self.base_layer = base_layer
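        # Per-adapter hyperparameters and low-rank modules, keyed by adapter name.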
        self.r = {}
        self.lora_alpha = {}
        self.scaling = {}
        self.lora_dropout = nn.ModuleDict({})
        self.lora_A = nn.ModuleDict({})
        self.lora_B = nn.ModuleDict({})
        # For Embedding layer
        self.lora_embedding_A = nn.ParameterDict({})
        self.lora_embedding_B = nn.ParameterDict({})
        # Mark the weight as unmerged
        self._disable_adapters = False
        self.merged_adapters = []
        self.use_dora: dict[str, bool] = {}
        self.lora_bias: dict[str, bool] = {}
        self.lora_magnitude_vector = nn.ModuleDict()  # for DoRA
        self._caches: dict[str, Any] = {}
        self.ephemeral_gpu_offload: bool = ephemeral_gpu_offload
        self.kwargs = kwargs

        # Resolve the adapter's input/output dimensions from the wrapped base
        # layer; only the NxD parallel layers are supported with optimum-neuron.
        base_layer = self.get_base_layer()
        if isinstance(base_layer, NxDGQAQKVColumnParallelLinear):
            in_features, out_features = base_layer.input_size, base_layer.output_sizes
        elif isinstance(base_layer, BaseParallelLinear):
            in_features, out_features = base_layer.input_size, base_layer.output_size
        elif isinstance(base_layer, NxDParallelEmbedding):
            in_features, out_features = base_layer.num_embeddings, base_layer.embedding_dim
        elif isinstance(base_layer, (nn.Conv1d, nn.Conv2d, nn.Conv3d)):
            raise NotImplementedError(
                f"{base_layer.__class__.__name__} is not supported for LoRA with optimum-neuron."
            )
        else:
            raise NotImplementedError(
                f"LoRA is not supported for {base_layer.__class__.__name__} with optimum-neuron."
            )

        self.in_features = in_features
        self.out_features = out_features
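
A minimal usage sketch follows, with the mixin referred to as LoraLayer per the upstream peft layout. SketchLoraLinear, its bookkeeping, and the plain nn.Linear A/B pair are illustrative assumptions mirroring the upstream peft wrapper pattern, not optimum-neuron API; the commented example at the end additionally requires an initialized neuronx_distributed tensor-parallel group:

    from torch import nn

    class SketchLoraLinear(nn.Module, LoraLayer):
        # Hypothetical wrapper, not optimum-neuron API.
        def __init__(self, base_layer: nn.Module, adapter_name: str, r: int = 8, lora_alpha: int = 16):
            super().__init__()
            # The mixin resolves self.in_features / self.out_features from the
            # wrapped parallel layer, or raises NotImplementedError otherwise.
            LoraLayer.__init__(self, base_layer)
            self.r[adapter_name] = r
            self.lora_alpha[adapter_name] = lora_alpha
            self.scaling[adapter_name] = lora_alpha / r
            # Plain nn.Linear is a simplification; a real implementation would
            # shard the A/B matrices to match the tensor-parallel base layer.
            self.lora_A[adapter_name] = nn.Linear(self.in_features, r, bias=False)
            self.lora_B[adapter_name] = nn.Linear(r, self.out_features, bias=False)

    # Example (inside an initialized NxD model-parallel context):
    # from neuronx_distributed.parallel_layers.layers import ColumnParallelLinear
    # base = ColumnParallelLinear(input_size=4096, output_size=4096, bias=False, gather_output=False)
    # lora = SketchLoraLinear(base, adapter_name="default", r=16)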