tensorflow/inference/docker/build_artifacts/sagemaker/serve.py [32:48]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Location of customer-supplied inference code. When the container runs in
# multi-model mode (env var SAGEMAKER_MULTI_MODEL == "true", compared
# case-insensitively; defaults to "False"), code lives under /opt/ml/code;
# otherwise it is expected next to the model artifact under /opt/ml/model/code.
CODE_DIR = (
    "/opt/ml/code"
    if os.environ.get("SAGEMAKER_MULTI_MODEL", "False").lower() == "true"
    else "/opt/ml/model/code"
)
# Derived paths inside CODE_DIR. Presumably: "lib" holds vendored Python
# dependencies, requirements.txt is pip-installed at startup, and inference.py
# is the customer handler script — verify against the rest of serve.py.
PYTHON_LIB_PATH = os.path.join(CODE_DIR, "lib")
REQUIREMENTS_PATH = os.path.join(CODE_DIR, "requirements.txt")
INFERENCE_PATH = os.path.join(CODE_DIR, "inference.py")


class ServiceManager(object):
    """Manage the serving-side processes of the container.

    NOTE(review): only the first lines of this class are visible in this
    excerpt; the attribute list and responsibilities below may be incomplete.
    Judging by the attribute names, it appears to supervise nginx, TensorFlow
    Serving, and gunicorn processes — confirm against the full serve.py.
    """

    def __init__(self):
        self._state = "initializing"  # lifecycle state label; starts as "initializing"
        self._nginx = None  # nginx process handle — None until started (presumably)
        self._tfs = []  # TensorFlow Serving process handles; a list, so multiple instances are possible
        self._gunicorn = None  # gunicorn process handle — None until started (presumably)
        self._gunicorn_command = None  # command line used to launch gunicorn — set later, TODO confirm
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensorflow/inference/docker/build_artifacts/sagemaker_neuron/serve.py [32:48]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Location of customer-supplied inference code. When the container runs in
# multi-model mode (env var SAGEMAKER_MULTI_MODEL == "true", compared
# case-insensitively; defaults to "False"), code lives under /opt/ml/code;
# otherwise it is expected next to the model artifact under /opt/ml/model/code.
CODE_DIR = (
    "/opt/ml/code"
    if os.environ.get("SAGEMAKER_MULTI_MODEL", "False").lower() == "true"
    else "/opt/ml/model/code"
)
# Derived paths inside CODE_DIR. Presumably: "lib" holds vendored Python
# dependencies, requirements.txt is pip-installed at startup, and inference.py
# is the customer handler script — verify against the rest of serve.py.
PYTHON_LIB_PATH = os.path.join(CODE_DIR, "lib")
REQUIREMENTS_PATH = os.path.join(CODE_DIR, "requirements.txt")
INFERENCE_PATH = os.path.join(CODE_DIR, "inference.py")


class ServiceManager(object):
    """Manage the serving-side processes of the container (Neuron variant).

    NOTE(review): only the first lines of this class are visible in this
    excerpt; the attribute list and responsibilities below may be incomplete.
    Judging by the attribute names, it appears to supervise nginx, TensorFlow
    Serving, and gunicorn processes — confirm against the full serve.py.
    This block is a near-duplicate of the non-Neuron serve.py copy above.
    """

    def __init__(self):
        self._state = "initializing"  # lifecycle state label; starts as "initializing"
        self._nginx = None  # nginx process handle — None until started (presumably)
        self._tfs = []  # TensorFlow Serving process handles; a list, so multiple instances are possible
        self._gunicorn = None  # gunicorn process handle — None until started (presumably)
        self._gunicorn_command = None  # command line used to launch gunicorn — set later, TODO confirm
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



