def get_text2gql_llm()

in hugegraph-llm/src/hugegraph_llm/models/llms/init_llm.py


    def get_text2gql_llm(self):
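        """Return the LLM client configured for text2gql (text-to-graph-query) generation."""
        # Baidu Qianfan (Wenxin / ERNIE) models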
        if self.text2gql_llm_type == "qianfan_wenxin":
            return QianfanClient(
                model_name=llm_settings.qianfan_text2gql_language_model,
                api_key=llm_settings.qianfan_text2gql_api_key,
                secret_key=llm_settings.qianfan_text2gql_secret_key
            )
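        # OpenAI (or OpenAI-compatible) API endpoints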
        if self.text2gql_llm_type == "openai":
            return OpenAIClient(
                api_key=llm_settings.openai_text2gql_api_key,
                api_base=llm_settings.openai_text2gql_api_base,
                model_name=llm_settings.openai_text2gql_language_model,
                max_tokens=llm_settings.openai_text2gql_tokens,
            )
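        # Locally hosted models served through Ollama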
        if self.text2gql_llm_type == "ollama/local":
            return OllamaClient(
                model=llm_settings.ollama_text2gql_language_model,
                host=llm_settings.ollama_text2gql_host,
                port=llm_settings.ollama_text2gql_port,
            )
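        # Any provider routed through the LiteLLM wrapper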
        if self.text2gql_llm_type == "litellm":
            return LiteLLMClient(
                api_key=llm_settings.litellm_text2gql_api_key,
                api_base=llm_settings.litellm_text2gql_api_base,
                model_name=llm_settings.litellm_text2gql_language_model,
                max_tokens=llm_settings.litellm_text2gql_tokens,
            )
        raise Exception(f"text2gql llm type is not supported: {self.text2gql_llm_type}")
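
The method is a small factory: it dispatches on the configured text2gql_llm_type and returns the matching client wrapper. A minimal usage sketch, assuming the enclosing class is LLMs (defined in init_llm.py) and that the returned clients share a generate()-style interface (an assumption about the client API, not confirmed here):

    from hugegraph_llm.models.llms.init_llm import LLMs

    # The concrete client type (QianfanClient / OpenAIClient / OllamaClient /
    # LiteLLMClient) depends on the configured text2gql_llm_type.
    text2gql_llm = LLMs().get_text2gql_llm()

    # Assumed call shape; check the project's BaseLLM interface for the exact signature.
    gremlin_query = text2gql_llm.generate(prompt="Which vertices are connected to 'marko'?")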