in tensorflow/inference/docker/build_artifacts/sagemaker/serve.py [0:0]
def _create_tfs_config(self):
    """Render and write the TensorFlow Serving model config file.

    Discovers SavedModel bundles on disk, picks a default model name if
    one was not configured, renders a ``model_config_list`` text-proto
    config (one ``config`` entry per model, with a ``specific`` version
    policy listing every discovered version), and writes the result to
    ``self._tfs_config_path``.

    :raises ValueError: if no SavedModel bundles are found
    """
    models = tfs_utils.find_models()
    if not models:
        raise ValueError("no SavedModel bundles found!")

    # NOTE: the sentinel is the *string* "None" (it originates from an
    # environment variable), not the None singleton.
    if self._tfs_default_model_name == "None":
        default_model = os.path.basename(models[0])
        if default_model:
            self._tfs_default_model_name = default_model
            log.info("using default model name: {}".format(self._tfs_default_model_name))
        else:
            # basename() was empty (e.g. path ends in a separator); leave
            # the configured sentinel untouched.
            log.info("no default model detected")

    # config (may) include duplicate 'config' keys, so we can't just dump a
    # dict. Accumulate the pieces in a list and join once instead of
    # repeated += concatenation (which is quadratic in the worst case).
    parts = ["model_config_list: {\n"]
    for m in models:
        parts.append(" config: {\n")
        parts.append(" name: '{}'\n".format(os.path.basename(m)))
        parts.append(" base_path: '{}'\n".format(m))
        parts.append(" model_platform: 'tensorflow'\n")
        parts.append(" model_version_policy: {\n")
        parts.append(" specific: {\n")
        for version in tfs_utils.find_model_versions(m):
            parts.append(" versions: {}\n".format(version))
        parts.append(" }\n")
        parts.append(" }\n")
        parts.append(" }\n")
    parts.append("}\n")
    config = "".join(parts)

    log.info("tensorflow serving model config: \n%s\n", config)
    with open(self._tfs_config_path, "w", encoding="utf8") as f:
        f.write(config)