in src/sagemaker_huggingface_inference_toolkit/transformers_utils.py [0:0]
def get_pipeline(task: str, device: int, model_dir: Path, **kwargs) -> Pipeline:
    """
    Build a Hugging Face ``Pipeline`` for ``task`` from a locally saved model.

    Args:
        task: pipeline task identifier; must not be ``None``.
        device: device ordinal forwarded to ``transformers.pipeline``.
        model_dir: directory holding the serialized model and tokenizer
            (used for both ``model=`` and ``tokenizer=``).
        **kwargs: passed through verbatim to ``transformers.pipeline``.

    Returns:
        The constructed pipeline. Conversational pipelines are additionally
        wrapped via ``wrap_conversation_pipeline`` for a better UX.

    Raises:
        EnvironmentError: if ``task`` is ``None``.
    """
    if task is None:
        raise EnvironmentError(
            "The task for this model is not set: Please set one: https://huggingface.co/docs#how-is-a-models-type-of-inference-api-and-widget-determined"
        )
    created_pipeline = pipeline(task=task, model=model_dir, tokenizer=model_dir, device=device, **kwargs)
    # Conversational pipelines get an extra wrapper to improve the user experience.
    if task == "conversational":
        return wrap_conversation_pipeline(created_pipeline)
    return created_pipeline