def _need_python_service(self)

Excerpt from tensorflow/inference/docker/build_artifacts/sagemaker/serve.py [0:0]


    def _need_python_service(self):
        """Turn on the Python service when user code or universal MME config is present.

        Sets ``self._enable_python_service`` to True if any of the known
        user-asset paths exist on disk, or if both SageMaker multi-model
        "universal" environment variables are set to non-empty values.
        The flag is never reset to False here — presumably it defaults to
        False elsewhere (TODO: confirm against the enclosing class).
        """
        # Any user-supplied inference asset on disk requires the service.
        has_user_assets = any(
            os.path.exists(path)
            for path in (INFERENCE_PATH, REQUIREMENTS_PATH, PYTHON_LIB_PATH)
        )
        # Both universal-bucket env vars must be set (non-empty) to count.
        has_universal_mme = bool(
            os.environ.get("SAGEMAKER_MULTI_MODEL_UNIVERSAL_BUCKET")
        ) and bool(os.environ.get("SAGEMAKER_MULTI_MODEL_UNIVERSAL_PREFIX"))
        if has_user_assets or has_universal_mme:
            self._enable_python_service = True