def get_extract_llm(self)

in hugegraph-llm/src/hugegraph_llm/models/llms/init_llm.py
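Factory method that returns the LLM client used for the extraction task, dispatching on the configured extract_llm_type. Supported backends: Qianfan (Wenxin), OpenAI, Ollama (local), and LiteLLM.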


    def get_extract_llm(self):
        # Select and construct the LLM client for the extraction stage,
        # based on the configured extract_llm_type.
        if self.extract_llm_type == "qianfan_wenxin":
            return QianfanClient(
                model_name=llm_settings.qianfan_extract_language_model,
                api_key=llm_settings.qianfan_extract_api_key,
                secret_key=llm_settings.qianfan_extract_secret_key,
            )
        if self.extract_llm_type == "openai":
            return OpenAIClient(
                api_key=llm_settings.openai_extract_api_key,
                api_base=llm_settings.openai_extract_api_base,
                model_name=llm_settings.openai_extract_language_model,
                max_tokens=llm_settings.openai_extract_tokens,
            )
        if self.extract_llm_type == "ollama/local":
            return OllamaClient(
                model=llm_settings.ollama_extract_language_model,
                host=llm_settings.ollama_extract_host,
                port=llm_settings.ollama_extract_port,
            )
        if self.extract_llm_type == "litellm":
            return LiteLLMClient(
                api_key=llm_settings.litellm_extract_api_key,
                api_base=llm_settings.litellm_extract_api_base,
                model_name=llm_settings.litellm_extract_language_model,
                max_tokens=llm_settings.litellm_extract_tokens,
            )
        raise ValueError(f"Extract LLM type is not supported: {self.extract_llm_type}")
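
For context, a minimal usage sketch of this factory. The LLMs class name and its no-argument constructor are assumptions inferred from the file location above, not confirmed by this excerpt:

    # Sketch only: the LLMs class name and import path are assumed from the
    # file location shown above (init_llm.py), not confirmed by this excerpt.
    from hugegraph_llm.models.llms.init_llm import LLMs

    # Returns the extraction-stage client configured in llm_settings; for
    # example, an OpenAIClient instance when extract_llm_type == "openai".
    extract_llm = LLMs().get_extract_llm()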