# optimum/amd/ryzenai/modeling.py
def forward(self, **kwargs):
    """Run ONNX Runtime inference on the provided model inputs.

    Args:
        **kwargs: Model inputs, either all ``torch.Tensor`` or all numpy
            arrays (the type is detected from the first input, so inputs
            are assumed to be homogeneous — TODO confirm against callers).

    Returns:
        ModelOutput: model outputs as a namedtuple-like mapping, suitable
        for pipelines post-processing.

    Raises:
        ValueError: If no inputs are given.
    """
    if not kwargs:
        # Without this guard, next(iter(...)) below raises a bare
        # StopIteration, which is confusing and unsafe inside generators.
        raise ValueError("forward() requires at least one model input.")
    # Detect whether callers passed torch tensors (vs. numpy arrays)
    # from the first input.
    use_torch = isinstance(next(iter(kwargs.values())), torch.Tensor)
    # Convert pytorch inputs into numpy inputs for onnx
    onnx_inputs = self._prepare_onnx_inputs(use_torch=use_torch, **kwargs)
    # Run inference
    onnx_outputs = self.model.run(None, onnx_inputs)
    # Convert outputs back to torch tensors when the inputs were torch
    outputs = self._prepare_onnx_outputs(onnx_outputs, use_torch=use_torch)
    # Wrap in ModelOutput (namedtuple-style) for pipelines post-processing
    return ModelOutput(outputs)