in optimum/exporters/openvino/model_patcher.py [0:0]
def __enter__(self):
    """Activate the patch.

    Two adjustments are applied on entry:

    1. If the model's ``forward`` signature lacks ``position_ids`` (13B-style
       checkpoints), it is wrapped so the exported signature exposes
       ``position_ids``; the argument itself is deliberately dropped when
       delegating to the original forward.
    2. Every decoder layer's ``self_attn.forward`` is replaced with an
       export-friendly implementation (13B or 7B variant, matching the
       signature check above). The originals are stashed on
       ``_orig_forward`` so ``__exit__`` can restore them.
    """
    super().__enter__()

    if "position_ids" in inspect.signature(self._model.forward).parameters:
        attention_fn = _baichuan7b_attn_forward
    else:
        # Keep a handle to the real forward, then install a shim whose
        # signature advertises position_ids for the export machinery.
        self._model._orig_forward = self._model.forward

        def forward(
            self,
            input_ids: torch.LongTensor = None,
            attention_mask: Optional[torch.Tensor] = None,
            past_key_values: Optional[Tuple[torch.FloatTensor]] = None,
            inputs_embeds: Optional[torch.FloatTensor] = None,
            labels: Optional[torch.LongTensor] = None,
            use_cache: Optional[bool] = None,
            output_attentions: Optional[bool] = False,
            output_hidden_states: Optional[bool] = False,
            return_dict: Optional[bool] = True,
            position_ids: Optional[torch.LongTensor] = None,
        ):
            # position_ids is accepted but intentionally not forwarded; the
            # underlying forward does not take it. use_cache is derived from
            # whether a cache was supplied, and return_dict always follows
            # the model config, regardless of the caller's arguments.
            return self._orig_forward(
                input_ids=input_ids,
                attention_mask=attention_mask,
                past_key_values=past_key_values,
                inputs_embeds=inputs_embeds,
                labels=labels,
                use_cache=past_key_values is not None,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=self.config.return_dict,
            )

        self._model.forward = types.MethodType(forward, self._model)
        attention_fn = _baichuan13b_atten_forward

    # Swap in the matching attention implementation on every decoder layer,
    # remembering the original so it can be restored on exit.
    for layer in self._model.model.layers:
        layer.self_attn._orig_forward = layer.self_attn.forward
        layer.self_attn.forward = types.MethodType(attention_fn, layer.self_attn)