# in web_demo.py [0:0]
def fetch_model(model_name: str, dtype=torch.bfloat16):
    """Return the loaded model info for *model_name*, loading and caching it on first use.

    Args:
        model_name: Key into the module-level ``DEPLOY_MODELS`` cache. Also used
            as the load path unless ``args.local_path`` is set, which overrides it.
        dtype: Torch dtype forwarded to ``load_model`` (default: ``torch.bfloat16``).

    Returns:
        The entry ``load_model`` produced for this model; subsequent calls with
        the same name return the cached entry without reloading.
    """
    # No `global` needed: `args` is only read, and DEPLOY_MODELS is mutated
    # in place (item assignment), never rebound.
    if model_name in DEPLOY_MODELS:
        print(f"{model_name} has been loaded.")
    else:
        # A configured local path takes precedence over the model name as the
        # on-disk location to load from.
        model_path = args.local_path if args.local_path else model_name
        print(f"{model_name} is loading...")
        DEPLOY_MODELS[model_name] = load_model(model_path, dtype=dtype)
        print(f"Load {model_name} successfully...")
    return DEPLOY_MODELS[model_name]