in src/huggingface_inference_toolkit/handler.py [0:0]
# Assumed module-level imports, not shown in the original excerpt; the helper
# check_and_register_custom_pipeline_from_directory and the two handler classes
# live elsewhere in the huggingface_inference_toolkit package / this module.
import os
from pathlib import Path
from typing import Any, Optional

def get_inference_handler_either_custom_or_default_handler(model_dir: Path, task: Optional[str] = None) -> Any:
    """
    Returns the appropriate inference handler based on the given model directory and task.

    Args:
        model_dir (Path): The directory path where the model is stored.
        task (Optional[str]): The task for which the inference handler is required. Defaults to None.

    Returns:
        InferenceHandler: The appropriate inference handler based on the given model directory and task.
    """
    # A custom pipeline bundled with the model takes precedence over the
    # built-in handlers.
    custom_pipeline = check_and_register_custom_pipeline_from_directory(model_dir)
    if custom_pipeline is not None:
        return custom_pipeline
    # Vertex AI prediction containers identify themselves via the AIP_MODE
    # environment variable; anything else falls through to the default handler.
    if os.environ.get("AIP_MODE", None) == "PREDICTION":
        return VertexAIHandler(model_dir=model_dir, task=task)
    return HuggingFaceHandler(model_dir=model_dir, task=task)
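
For context, a minimal usage sketch of this dispatcher at server startup. The environment variable names (HF_MODEL_DIR, HF_TASK), the fallback path, and the callable-handler interface in the final comment are assumptions for illustration, not confirmed by this excerpt:

import os
from pathlib import Path

# Hypothetical startup wiring (assumed env var names): resolve the model
# directory and optional task, then let the dispatcher pick the handler.
model_dir = Path(os.environ.get("HF_MODEL_DIR", "/opt/huggingface/model"))
task = os.environ.get("HF_TASK")  # e.g. "text-classification"; None lets the handler infer it

inference_handler = get_inference_handler_either_custom_or_default_handler(
    model_dir=model_dir, task=task
)

# Assumption: the returned handler is callable with a request payload, e.g.
# prediction = inference_handler({"inputs": "I love this library!"})

Note the precedence order the function encodes: a custom pipeline found in the model directory always wins, then Vertex AI detection, then the default handler, so platform detection never overrides user-supplied code.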