# src/peft/utils/hotswap.py
def _get_padded_conv2d(lora_module: torch.nn.Module, target_rank: int, is_lora_A: bool) -> torch.nn.Conv2d:
    """
    Get a new Conv2d layer for LoRA with its weight padded to the target rank.

    Args:
        lora_module (`nn.Module`):
            The LoRA sub-module (e.g. `module.lora_A[adapter_name]`).
        target_rank (`int`):
            The desired rank to pad to.
        is_lora_A (`bool`):
            `True` if this is the LoRA A matrix, `False` if it is LoRA B.

    Returns:
        `nn.Conv2d`:
            A newly created Conv2d layer with padded weights. If the rank already fits, the original layer is
            returned.
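
    Example (illustrative; the shapes are chosen for demonstration only):

        >>> conv = torch.nn.Conv2d(64, 4, kernel_size=3, padding=1, bias=False)  # a rank-4 lora_A
        >>> padded = _get_padded_conv2d(conv, target_rank=8, is_lora_A=True)
        >>> tuple(padded.weight.shape)
        (8, 64, 3, 3)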
"""
    weight = lora_module.weight
    # For Conv2d, the weight has shape [out_channels, in_channels, kernel_height, kernel_width]
    out_channels, in_channels, kh, kw = weight.shape
    original_rank = out_channels if is_lora_A else in_channels
    if original_rank == target_rank:
        return lora_module

    if original_rank > target_rank:
        raise ValueError(
            f"Trying to pad the adapter to the target rank {target_rank}, but the original rank is larger "
            f"({original_rank}). This is not possible."
        )
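    # Padding with zeros is a no-op for the adapter output: the extra rank rows of lora_A yield all-zero
    # channels, and the corresponding extra input channels of lora_B have zero weights, so B @ A is unchanged.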
    # lora_A and lora_B are always nn.Conv2d
    if is_lora_A:
        # lora_A is padded along the out_channels dimension
        padded = torch.zeros(target_rank, in_channels, kh, kw, device=weight.device, dtype=weight.dtype)
        padded[:out_channels, :, :, :] = weight
        new_layer = torch.nn.Conv2d(
            in_channels,
            target_rank,
            kernel_size=lora_module.kernel_size,
            stride=lora_module.stride,
            padding=lora_module.padding,
            bias=lora_module.bias is not None,
            groups=lora_module.groups,
        )
    else:
        # lora_B is padded along the in_channels dimension
        padded = torch.zeros(out_channels, target_rank, kh, kw, device=weight.device, dtype=weight.dtype)
        padded[:, :in_channels, :, :] = weight
        new_layer = torch.nn.Conv2d(
            target_rank,
            out_channels,
            kernel_size=lora_module.kernel_size,
            stride=lora_module.stride,
            padding=lora_module.padding,
            bias=lora_module.bias is not None,
            groups=lora_module.groups,
        )
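
    # The new layer is created on the default device and dtype; after the shape checks below, its weight is
    # replaced by `padded`, which inherits the device and dtype of the original weight.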
    # Sanity check
    if new_layer.weight.shape != padded.shape:
        raise ValueError(
            "Something went wrong when trying to pad the LoRA weights; the new shape should be "
            f"{padded.shape} but {new_layer.weight.shape} was found. Please open an issue on PEFT "
            "(https://github.com/huggingface/peft/issues) and report this error."
        )
    if (lora_module.bias is not None) and (new_layer.bias.shape != lora_module.bias.shape):
        raise ValueError(
            "Something went wrong when trying to pad the LoRA Conv2d bias; the new shape should be "
            f"{lora_module.bias.shape} but {new_layer.bias.shape} was found. Please open an issue on PEFT "
            "(https://github.com/huggingface/peft/issues) and report this error."
        )
    new_layer.weight.data = padded
    # Copy the bias if present
    if lora_module.bias is not None:
        new_layer.bias.data = lora_module.bias.data
    return new_layer
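
# Illustrative usage sketch (hypothetical ranks and channel counts, not part of the module itself):
# pad a rank-4 LoRA Conv2d pair up to rank 8, e.g. so the adapter can be hot-swapped into a model
# whose graph was compiled for rank-8 adapters.
#
#     lora_A = torch.nn.Conv2d(64, 4, kernel_size=3, padding=1, bias=False)  # weight: [4, 64, 3, 3]
#     lora_B = torch.nn.Conv2d(4, 64, kernel_size=1, bias=False)             # weight: [64, 4, 1, 1]
#     padded_A = _get_padded_conv2d(lora_A, target_rank=8, is_lora_A=True)   # weight: [8, 64, 3, 3]
#     padded_B = _get_padded_conv2d(lora_B, target_rank=8, is_lora_A=False)  # weight: [64, 8, 1, 1]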