def _import_custom_modules()

in tensorflow/inference/docker/build_artifacts/sagemaker/python_service.py


    def _import_custom_modules(self, model_name):
        """Load the model's bundled Python libraries and custom inference handlers, if present."""
        inference_script_path = "/opt/ml/models/{}/model/code/inference.py".format(model_name)
        python_lib_path = "/opt/ml/models/{}/model/code/lib".format(model_name)
        if os.path.exists(python_lib_path):
            # Make any bundled dependencies under code/lib importable.
            log.info(
                "Adding Python code library for the model {} found at path {}.".format(
                    model_name, python_lib_path
                )
            )
            sys.path.append(python_lib_path)
        else:
            log.info(
                "Python code library for the model {} not found at path {}.".format(
                    model_name, python_lib_path
                )
            )
        if os.path.exists(inference_script_path):
            log.info(
                "Importing handlers from model-specific inference script for the model {} found at path {}.".format(
                    model_name, inference_script_path
                )
            )
            # Load the handler functions from inference.py and register them for this model.
            handler, input_handler, output_handler = self._import_handlers(inference_script_path)
            model_handlers = self._make_handler(handler, input_handler, output_handler)
            self.model_handlers[model_name] = model_handlers
        else:
            log.info(
                "Model-specific inference script for the model {} not found at path {}.".format(
                    model_name, inference_script_path
                )
            )
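
The helpers _import_handlers and _make_handler are defined elsewhere in python_service.py and are not shown above. As a rough, hypothetical sketch of the dynamic-import step (assuming the standard importlib machinery, an arbitrary module name "inference", and optional handler, input_handler, and output_handler functions in the customer's inference.py), it might look something like this:

    def _import_handlers(self, inference_script_path):
        # Hypothetical sketch only; the real helper lives elsewhere in python_service.py.
        import importlib.util

        # Load the customer's inference.py as a module from its file path.
        spec = importlib.util.spec_from_file_location("inference", inference_script_path)
        inference = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(inference)

        # inference.py may provide a single handler or an input_handler/output_handler pair;
        # anything it does not define is returned as None.
        handler = getattr(inference, "handler", None)
        input_handler = getattr(inference, "input_handler", None)
        output_handler = getattr(inference, "output_handler", None)
        return handler, input_handler, output_handler

Returning None for missing handlers would let a wrapper such as _make_handler fall back to default pre- and post-processing; the real implementation may resolve this differently.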