in hugegraph-llm/src/hugegraph_llm/models/llms/init_llm.py [0:0]
def get_chat_llm(self):
    """Instantiate and return the chat LLM client selected by ``self.chat_llm_type``.

    Supported types: ``qianfan_wenxin``, ``openai``, ``ollama/local``, ``litellm``.
    Each branch builds its client from the corresponding ``llm_settings`` fields.

    Returns:
        A configured client instance (QianfanClient, OpenAIClient,
        OllamaClient, or LiteLLMClient).

    Raises:
        ValueError: if ``self.chat_llm_type`` matches none of the supported
            types. (ValueError subclasses Exception, so existing broad
            handlers still catch it.)
    """
    if self.chat_llm_type == "qianfan_wenxin":
        return QianfanClient(
            model_name=llm_settings.qianfan_chat_language_model,
            api_key=llm_settings.qianfan_chat_api_key,
            secret_key=llm_settings.qianfan_chat_secret_key,
        )
    if self.chat_llm_type == "openai":
        return OpenAIClient(
            api_key=llm_settings.openai_chat_api_key,
            api_base=llm_settings.openai_chat_api_base,
            model_name=llm_settings.openai_chat_language_model,
            max_tokens=llm_settings.openai_chat_tokens,
        )
    if self.chat_llm_type == "ollama/local":
        return OllamaClient(
            model=llm_settings.ollama_chat_language_model,
            host=llm_settings.ollama_chat_host,
            port=llm_settings.ollama_chat_port,
        )
    if self.chat_llm_type == "litellm":
        return LiteLLMClient(
            api_key=llm_settings.litellm_chat_api_key,
            api_base=llm_settings.litellm_chat_api_base,
            model_name=llm_settings.litellm_chat_language_model,
            max_tokens=llm_settings.litellm_chat_tokens,
        )
    # Include the offending value so misconfiguration is diagnosable from the log.
    raise ValueError(f"Chat LLM type is not supported: {self.chat_llm_type!r}")