in bitsandbytes/nn/modules.py
def maybe_rearrange_weight(state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
    weight = state_dict.get(f"{prefix}weight")
    if weight is None:
        # If the state dict has no weight for this layer (e.g., LoRA fine-tuning), do nothing.
        return

    weight_format = state_dict.pop(f"{prefix}weight_format", "row")
    if isinstance(weight_format, torch.Tensor):
        weight_format = weight_format.item()

    # With the newer storage scheme, weight_format is serialized as an integer;
    # explicitly check that it maps to a known format before decoding it.
    if isinstance(weight_format, int):
        if weight_format not in INVERSE_LINEAR_8BIT_WEIGHTS_FORMAT_MAPPING:
            raise ValueError(f"Expected supported weight format - got {weight_format}")
        weight_format = INVERSE_LINEAR_8BIT_WEIGHTS_FORMAT_MAPPING[weight_format]

    # Non-row layouts are tiled; undo the tiling so the weight loads in row-major order.
    if weight_format != "row":
        tile_indices = get_tile_inds(weight_format, weight.device)
        state_dict[f"{prefix}weight"] = undo_layout(weight, tile_indices)