def semantic_search_input()

in azure/functions/decorators/function_app.py [0:0]


    def semantic_search_input(self,
                              arg_name: str,
                              connection_name: str,
                              collection: str,
                              query: Optional[str] = None,
                              embeddings_model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
                              chat_model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
                              system_prompt: Optional[str] = semantic_search_system_prompt,  # NoQA
                              max_knowledge_count: Optional[int] = 1,
                              data_type: Optional[
                                  Union[DataType, str]] = None,
                              **kwargs) \
            -> Callable[..., Any]:
        """
        Registers a semantic-search input binding on the function.

        Semantic search pairs a vector database with OpenAI: documents are
        imported into the database via an output binding, and this input
        binding queries them so their content can be supplied to the model
        as context (the Retrieval Augmented Generation, or RAG, technique).

        Ref: https://platform.openai.com/docs/guides/embeddings

        :param arg_name: The name of binding parameter in the function code.
        :param connection_name: app setting or environment variable which
        contains a connection string value.
        :param collection: The name of the collection or table to search or
        store.
        :param query: The semantic query text to use for searching.
        :param embeddings_model: The ID of the model to use for embeddings.
        The default value is "text-embedding-ada-002".
        :param chat_model: The name of the Large Language Model to invoke for
        chat responses. The default value is "gpt-3.5-turbo".
        :param system_prompt: Optional. The system prompt to use for prompting
        the large language model.
        :param max_knowledge_count: Optional. The number of knowledge items to
        inject into the SystemPrompt. Default value: 1
        :param data_type: Optional. Defines how Functions runtime should treat
        the parameter value. Default value: None
        :param kwargs: Keyword arguments for specifying additional binding
        fields to include in the binding json

        :return: Decorator function.
        """

        @self._configure_function_builder
        def wrap(builder):
            # Attach the SemanticSearchInput binding and hand the builder
            # back so further decorators can keep chaining.
            binding = SemanticSearchInput(
                name=arg_name,
                connection_name=connection_name,
                collection=collection,
                query=query,
                embeddings_model=embeddings_model,
                chat_model=chat_model,
                system_prompt=system_prompt,
                max_knowledge_count=max_knowledge_count,
                data_type=parse_singular_param_to_enum(data_type, DataType),
                **kwargs)
            builder.add_binding(binding=binding)
            return builder

        return wrap