in azure/functions/decorators/function_app.py [0:0]
def embeddings_store_output(self,
                            arg_name: str,
                            input: str,
                            input_type: InputType,
                            connection_name: str,
                            collection: str,
                            model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
                            max_chunk_length: Optional[int] = 8 * 1024,
                            max_overlap: Optional[int] = 128,
                            data_type: Optional[
                                Union[DataType, str]] = None,
                            **kwargs) \
        -> Callable[..., Any]:
    """
    Registers an embeddings-store output binding on the decorated function.

    The set of supported embeddings stores is extensible; additional
    stores can be added by authoring a specially crafted NuGet package.
    See the vector-store-specific folders for usage details on the
    currently supported targets:
    - Azure AI Search
    - Azure Data Explorer
    - Azure Cosmos DB using MongoDB

    :param arg_name: The name of binding parameter in the function code.
    :param input: The input to generate embeddings for.
    :param input_type: The type of the input.
    :param connection_name: The name of an app setting or environment
    variable which contains a connection string value
    :param collection: The collection or table to search.
    :param model: The ID of the model to use.
    :param max_chunk_length: The maximum number of characters to chunk the
    input into.
    :param max_overlap: The maximum number of characters to overlap between
    chunks.
    :param data_type: Optional. Defines how Functions runtime should treat
    the parameter value. Default value: None
    :param kwargs: Keyword arguments for specifying additional binding
    fields to include in the binding json
    :return: Decorator function.
    """
    @self._configure_function_builder
    def wrap(builder):
        def decorator():
            # Assemble the binding first, then attach it to the builder.
            binding = EmbeddingsStoreOutput(
                name=arg_name,
                input=input,
                input_type=input_type,
                connection_name=connection_name,
                collection=collection,
                model=model,
                max_chunk_length=max_chunk_length,
                max_overlap=max_overlap,
                data_type=parse_singular_param_to_enum(data_type, DataType),
                **kwargs)
            builder.add_binding(binding=binding)
            return builder
        return decorator()
    return wrap