{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "LlrEjmtJNpuX"
},
"source": [
"# Using AutoGen with Elasticsearch\n",
"\n",
"This notebook demonstrates how to use AutoGen with Elasticsearch. This notebook is based on the article [Using AutoGen with Elasticsearch](https://www.elastic.co/search-labs/blog/using-autogen-with-elasticsearch)."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "GNaAN-GNO5qp"
},
"source": [
"## Installing dependencies and importing packages"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"collapsed": true,
"id": "D1SqWMbbASRS",
"outputId": "e47bb704-f9b7-4457-bd13-0819ea4fabe6"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requirement already satisfied: autogen in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (0.7.6)\n",
"Requirement already satisfied: elasticsearch==8.17 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (8.17.0)\n",
"Requirement already satisfied: nest-asyncio in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (1.6.0)\n",
"Requirement already satisfied: elastic-transport<9,>=8.15.1 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from elasticsearch==8.17) (8.17.0)\n",
"Requirement already satisfied: pyautogen==0.7.6 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from autogen) (0.7.6)\n",
"Requirement already satisfied: asyncer==0.0.8 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (0.0.8)\n",
"Requirement already satisfied: diskcache in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (5.6.3)\n",
"Requirement already satisfied: docker in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (7.1.0)\n",
"Requirement already satisfied: fast-depends<3,>=2.4.12 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (2.4.12)\n",
"Requirement already satisfied: httpx<1,>=0.28.1 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (0.28.1)\n",
"Requirement already satisfied: packaging in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (24.2)\n",
"Requirement already satisfied: pydantic<3,>=2.6.1 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (2.10.6)\n",
"Requirement already satisfied: python-dotenv in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (1.0.1)\n",
"Requirement already satisfied: termcolor in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (2.5.0)\n",
"Requirement already satisfied: tiktoken in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pyautogen==0.7.6->autogen) (0.7.0)\n",
"Requirement already satisfied: anyio<5.0,>=3.4.0 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from asyncer==0.0.8->pyautogen==0.7.6->autogen) (4.8.0)\n",
"Requirement already satisfied: urllib3<3,>=1.26.2 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from elastic-transport<9,>=8.15.1->elasticsearch==8.17) (2.3.0)\n",
"Requirement already satisfied: certifi in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from elastic-transport<9,>=8.15.1->elasticsearch==8.17) (2025.1.31)\n",
"Requirement already satisfied: httpcore==1.* in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from httpx<1,>=0.28.1->pyautogen==0.7.6->autogen) (1.0.7)\n",
"Requirement already satisfied: idna in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from httpx<1,>=0.28.1->pyautogen==0.7.6->autogen) (3.10)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from httpcore==1.*->httpx<1,>=0.28.1->pyautogen==0.7.6->autogen) (0.14.0)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pydantic<3,>=2.6.1->pyautogen==0.7.6->autogen) (0.7.0)\n",
"Requirement already satisfied: pydantic-core==2.27.2 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pydantic<3,>=2.6.1->pyautogen==0.7.6->autogen) (2.27.2)\n",
"Requirement already satisfied: typing-extensions>=4.12.2 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from pydantic<3,>=2.6.1->pyautogen==0.7.6->autogen) (4.12.2)\n",
"Requirement already satisfied: requests>=2.26.0 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from docker->pyautogen==0.7.6->autogen) (2.32.3)\n",
"Requirement already satisfied: regex>=2022.1.18 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from tiktoken->pyautogen==0.7.6->autogen) (2024.11.6)\n",
"Requirement already satisfied: sniffio>=1.1 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from anyio<5.0,>=3.4.0->asyncer==0.0.8->pyautogen==0.7.6->autogen) (1.3.1)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/jeffreyrengifo/.pyenv/versions/3.12.2/lib/python3.12/site-packages (from requests>=2.26.0->docker->pyautogen==0.7.6->autogen) (3.4.1)\n",
"\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.0.1\u001b[0m\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install autogen elasticsearch==8.17 nest-asyncio"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "rAesontNXpLu",
"outputId": "ffae8dc1-09fa-4943-b13c-fd26019e11ab"
},
"outputs": [],
"source": [
"import json\n",
"import os\n",
"import nest_asyncio\n",
"import requests\n",
"\n",
"from getpass import getpass\n",
"from autogen import (\n",
" AssistantAgent,\n",
" GroupChat,\n",
" GroupChatManager,\n",
" UserProxyAgent,\n",
" register_function,\n",
")\n",
"from elasticsearch import Elasticsearch\n",
"from elasticsearch.helpers import bulk\n",
"\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NwOmnk99Pfh3"
},
"source": [
"## Declaring variables"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"id": "GVKJKfFpPWuj"
},
"outputs": [],
"source": [
"os.environ[\"ELASTIC_ENDPOINT\"] = getpass(\"Elastic Enpoint: \")\n",
"os.environ[\"ELASTIC_API_KEY\"] = getpass(\"Elastic Api Key: \")\n",
"os.environ[\"SERPER_API_KEY\"] = getpass(\"Seper API Key: \")\n",
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key: \")"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "3O2HclcYHEsS"
},
"source": [
"## Instance a Elasticsearch client"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "1LWiop8NYiQF"
},
"outputs": [],
"source": [
"# Elasticsearch client\n",
"_client = Elasticsearch(\n",
" os.environ[\"ELASTIC_ENDPOINT\"],\n",
" api_key=os.environ[\"ELASTIC_API_KEY\"],\n",
")"
]
},
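{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before creating any resources, it is worth confirming that the client can actually reach the cluster. The cell below is an optional sanity check using the client's standard `info()` API."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional sanity check: confirm the client can reach the cluster\n",
"print(_client.info()[\"version\"][\"number\"])"
]
},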
{
"cell_type": "markdown",
"metadata": {
"id": "9lvPHaXjPlfu"
},
"source": [
"## Creating mappings and inference endpoint"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "jInERNQajoNh",
"outputId": "63644767-5cb2-485e-8904-bc5c412d548e"
},
"outputs": [],
"source": [
"try:\n",
" _client.options(\n",
" request_timeout=60, max_retries=3, retry_on_timeout=True\n",
" ).inference.put(\n",
" task_type=\"sparse_embedding\",\n",
" inference_id=\"jobs-candidates-inference\",\n",
" body={\n",
" \"service\": \"elasticsearch\",\n",
" \"service_settings\": {\n",
" \"adaptive_allocations\": {\"enabled\": True},\n",
" \"num_threads\": 1,\n",
" \"model_id\": \".elser_model_2\",\n",
" },\n",
" },\n",
" )\n",
"\n",
" print(\"Inference endpoint created successfully.\")\n",
"\n",
"except Exception as e:\n",
" print(\n",
" f\"Error creating inference endpoint: {e.info['error']['root_cause'][0]['reason'] }\"\n",
" )"
]
},
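{
"cell_type": "markdown",
"metadata": {},
"source": [
"If you want to confirm that the endpoint was registered, you can fetch it back with the client's `inference.get` API. This is an optional check, sketched below."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: fetch the inference endpoint back to verify it exists\n",
"print(_client.inference.get(inference_id=\"jobs-candidates-inference\"))"
]
},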
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "tc88YzAYw31e",
"outputId": "48c13cab-284f-4d7f-eb45-dee009f51130"
},
"outputs": [],
"source": [
"try:\n",
" _client.indices.create(\n",
" index=\"available-candidates\",\n",
" body={\n",
" \"mappings\": {\n",
" \"properties\": {\n",
" \"candidate_name\": {\"type\": \"text\", \"copy_to\": \"semantic_field\"},\n",
" \"position_title\": {\"type\": \"text\", \"copy_to\": \"semantic_field\"},\n",
" \"profile_description\": {\n",
" \"type\": \"text\",\n",
" \"copy_to\": \"semantic_field\",\n",
" },\n",
" \"expected_salary\": {\"type\": \"text\", \"copy_to\": \"semantic_field\"},\n",
" \"skills\": {\"type\": \"keyword\", \"copy_to\": \"semantic_field\"},\n",
" \"semantic_field\": {\n",
" \"type\": \"semantic_text\",\n",
" \"inference_id\": \"jobs-candidates-inference\",\n",
" },\n",
" }\n",
" }\n",
" },\n",
" )\n",
"\n",
" print(\"index created successfully\")\n",
"except Exception as e:\n",
" print(\n",
" f\"Error creating inference endpoint: {e.info['error']['root_cause'][0]['reason'] }\"\n",
" )"
]
},
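{
"cell_type": "markdown",
"metadata": {},
"source": [
"To confirm that the `semantic_field` is wired to the inference endpoint, you can read the mapping back. The cell below is an optional check."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: read the mapping back and inspect the semantic_text field\n",
"mapping = _client.indices.get_mapping(index=\"available-candidates\")\n",
"print(mapping[\"available-candidates\"][\"mappings\"][\"properties\"][\"semantic_field\"])"
]
},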
{
"cell_type": "markdown",
"metadata": {
"id": "G1634zNrv1OS"
},
"source": [
"## Indexing documents"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "LVr6TR8qlw2M"
},
"outputs": [],
"source": [
"documents = [\n",
" {\n",
" \"candidate_name\": \"John\",\n",
" \"position_title\": \"Software Engineer\",\n",
" \"expected_salary\": \"$85,000 - $120,000\",\n",
" \"profile_description\": \"Experienced software engineer with expertise in backend development, cloud computing, and scalable system architecture.\",\n",
" \"skills\": [\"Python\", \"Java\", \"AWS\", \"Microservices\", \"Docker\", \"Kubernetes\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Emily\",\n",
" \"position_title\": \"Data Scientist\",\n",
" \"expected_salary\": \"$90,000 - $140,000\",\n",
" \"profile_description\": \"Data scientist with strong analytical skills and experience in machine learning and big data processing.\",\n",
" \"skills\": [\"Python\", \"SQL\", \"TensorFlow\", \"Pandas\", \"Hadoop\", \"Spark\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Michael\",\n",
" \"position_title\": \"DevOps Engineer\",\n",
" \"expected_salary\": \"$95,000 - $130,000\",\n",
" \"profile_description\": \"DevOps specialist focused on automation, CI/CD pipelines, and infrastructure as code.\",\n",
" \"skills\": [\"Terraform\", \"Ansible\", \"Jenkins\", \"Docker\", \"Kubernetes\", \"AWS\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Sarah\",\n",
" \"position_title\": \"Product Manager\",\n",
" \"expected_salary\": \"$110,000 - $150,000\",\n",
" \"profile_description\": \"Product manager with a technical background, skilled in agile methodologies and user-centered design.\",\n",
" \"skills\": [\"JIRA\", \"Agile\", \"Scrum\", \"A/B Testing\", \"SQL\", \"UX Research\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"David\",\n",
" \"position_title\": \"UX/UI Designer\",\n",
" \"expected_salary\": \"$70,000 - $110,000\",\n",
" \"profile_description\": \"Creative UX/UI designer with experience in user research, wireframing, and interactive prototyping.\",\n",
" \"skills\": [\"Figma\", \"Adobe XD\", \"Sketch\", \"HTML\", \"CSS\", \"JavaScript\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Jessica\",\n",
" \"position_title\": \"Cybersecurity Analyst\",\n",
" \"expected_salary\": \"$100,000 - $140,000\",\n",
" \"profile_description\": \"Cybersecurity expert with experience in threat detection, penetration testing, and compliance.\",\n",
" \"skills\": [\n",
" \"Python\",\n",
" \"SIEM\",\n",
" \"Penetration Testing\",\n",
" \"Ethical Hacking\",\n",
" \"Nmap\",\n",
" \"Metasploit\",\n",
" ],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Robert\",\n",
" \"position_title\": \"Cloud Architect\",\n",
" \"expected_salary\": \"$120,000 - $180,000\",\n",
" \"profile_description\": \"Cloud architect specializing in designing secure and scalable cloud infrastructures.\",\n",
" \"skills\": [\"AWS\", \"Azure\", \"GCP\", \"Kubernetes\", \"Terraform\", \"CI/CD\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Sophia\",\n",
" \"position_title\": \"AI/ML Engineer\",\n",
" \"expected_salary\": \"$100,000 - $160,000\",\n",
" \"profile_description\": \"Machine learning engineer with experience in deep learning, NLP, and computer vision.\",\n",
" \"skills\": [\"Python\", \"PyTorch\", \"TensorFlow\", \"Scikit-Learn\", \"OpenCV\", \"NLP\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Daniel\",\n",
" \"position_title\": \"QA Engineer\",\n",
" \"expected_salary\": \"$60,000 - $100,000\",\n",
" \"profile_description\": \"Quality assurance engineer focused on automated testing, test-driven development, and software reliability.\",\n",
" \"skills\": [\"Selenium\", \"JUnit\", \"Cypress\", \"Postman\", \"Git\", \"CI/CD\"],\n",
" },\n",
" {\n",
" \"candidate_name\": \"Emma\",\n",
" \"position_title\": \"Technical Support Specialist\",\n",
" \"expected_salary\": \"$50,000 - $85,000\",\n",
" \"profile_description\": \"Technical support specialist with expertise in troubleshooting, customer support, and IT infrastructure.\",\n",
" \"skills\": [\n",
" \"Linux\",\n",
" \"Windows Server\",\n",
" \"Networking\",\n",
" \"SQL\",\n",
" \"Help Desk\",\n",
" \"Scripting\",\n",
" ],\n",
" },\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "N6cI3toLl3DP",
"outputId": "157845b6-e279-4ab3-f46f-989b7d450c32"
},
"outputs": [],
"source": [
"def build_data():\n",
" for doc in documents:\n",
" yield {\"_index\": \"available-candidates\", \"_source\": doc}\n",
"\n",
"\n",
"try:\n",
" success, errors = bulk(_client, build_data())\n",
" if errors:\n",
" print(\"Errors during indexing:\", errors)\n",
" else:\n",
" print(f\"{success} documents indexed successfully\")\n",
"\n",
"except Exception as e:\n",
" print(f\"Error: {str(e)}\")"
]
},
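{
"cell_type": "markdown",
"metadata": {},
"source": [
"Indexing in Elasticsearch is near real time: bulk-indexed documents only become searchable after a refresh. The optional cell below forces a refresh and verifies the document count."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Force a refresh so the documents become searchable immediately,\n",
"# then verify that all of them were indexed\n",
"_client.indices.refresh(index=\"available-candidates\")\n",
"print(_client.count(index=\"available-candidates\")[\"count\"])"
]
},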
{
"cell_type": "markdown",
"metadata": {
"id": "P53v_KzjH9Ab"
},
"source": [
"## AutoGen"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "crbeYLTzVw76"
},
"source": [
"### AI endpoint configuration"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "i2fZZLDgxtPu"
},
"outputs": [],
"source": [
"config_list = [{\"model\": \"gpt-4o-mini\", \"api_key\": os.environ[\"OPENAI_API_KEY\"]}]\n",
"ai_endpoint_config = {\"config_list\": config_list}"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "r7Mnp6ItseCw"
},
"source": [
"## Agents"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "kSrlRYAEWBSA"
},
"outputs": [],
"source": [
"user_proxy = UserProxyAgent(\n",
" name=\"Admin\",\n",
" system_message=\"\"\"You are a human administrator.\n",
" Your role is to interact with agents and tools to execute tasks efficiently.\n",
" Execute tasks and agents in a logical order, ensuring that all agents perform\n",
" their duties correctly. All tasks must be approved by you before proceeding.\"\"\",\n",
" human_input_mode=\"NEVER\",\n",
" code_execution_config=False,\n",
" is_termination_msg=lambda msg: msg.get(\"content\") is not None\n",
" and \"TERMINATE\" in msg[\"content\"],\n",
" llm_config=ai_endpoint_config,\n",
")\n",
"\n",
"researcher = AssistantAgent(\n",
" name=\"Researcher\",\n",
" system_message=\"\"\"You are a Researcher.\n",
" Your role is to use the 'search_in_internet' tool to find individual\n",
" job offers realted to the candidates profiles. Each job offer must include a direct link to a specific position,\n",
" not just a category or group of offers. Ensure that all job offers are relevant and accurate.\"\"\",\n",
" llm_config=ai_endpoint_config,\n",
")\n",
"\n",
"retriever = AssistantAgent(\n",
" name=\"Retriever\",\n",
" llm_config=ai_endpoint_config,\n",
" system_message=\"\"\"You are a Retriever.\n",
" Your task is to use the 'elasticsearch_hybrid_search' tool to retrieve\n",
" candidate profiles from Elasticsearch.\"\"\",\n",
")\n",
"\n",
"matcher = AssistantAgent(\n",
" name=\"Matcher\",\n",
" system_message=\"\"\"Your role is to match job offers with suitable candidates.\n",
" The matches must be accurate and beneficial for both parties.\n",
" Only match candidates with job offers that fit their qualifications.\"\"\",\n",
" llm_config=ai_endpoint_config,\n",
")\n",
"\n",
"critic = AssistantAgent(\n",
" name=\"Critic\",\n",
" system_message=\"\"\"You are the Critic.\n",
" Your task is to verify the accuracy of job-candidate matches.\n",
" If the matches are correct, inform the Admin and include the word 'TERMINATE' to end the process.\"\"\", # End condition\n",
" llm_config=ai_endpoint_config,\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "v3baRjxrtOA4"
},
"source": [
"## Tools setup\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "zDWozwAqXH7B"
},
"outputs": [],
"source": [
"async def elasticsearch_hybrid_search(question: str):\n",
" \"\"\"\n",
" Search in Elasticsearch using semantic search capabilities.\n",
" \"\"\"\n",
"\n",
" response = _client.search(\n",
" index=\"available-candidates\",\n",
" body={\n",
" \"_source\": {\n",
" \"includes\": [\n",
" \"candidate_name\",\n",
" \"position_title\",\n",
" \"profile_description\",\n",
" \"expected_salary\",\n",
" \"skills\",\n",
" ],\n",
" },\n",
" \"size\": 10,\n",
" \"retriever\": {\n",
" \"rrf\": {\n",
" \"retrievers\": [\n",
" {\n",
" \"standard\": {\n",
" \"query\": {\"match\": {\"position_title\": question}}\n",
" }\n",
" },\n",
" {\n",
" \"standard\": {\n",
" \"query\": {\n",
" \"semantic\": {\n",
" \"field\": \"semantic_field\",\n",
" \"query\": question,\n",
" }\n",
" }\n",
" }\n",
" },\n",
" ]\n",
" }\n",
" },\n",
" },\n",
" )\n",
"\n",
" hits = response[\"hits\"][\"hits\"]\n",
"\n",
" if not hits:\n",
" return \"\"\n",
"\n",
" result = json.dumps([hit[\"_source\"] for hit in hits], indent=2)\n",
"\n",
" return result\n",
"\n",
"\n",
"async def search_in_internet(query: str):\n",
" \"\"\"Search in internet using Serper and retrieve results in json format\"\"\"\n",
"\n",
" url = \"https://google.serper.dev/search\"\n",
" headers = {\n",
" \"X-API-KEY\": os.environ[\"SERPER_API_KEY\"],\n",
" \"Content-Type\": \"application/json\",\n",
" }\n",
"\n",
" payload = json.dumps({\"q\": query})\n",
" response = requests.request(\"POST\", url, headers=headers, data=payload)\n",
" original_results = response.json()\n",
"\n",
" related_searches = original_results.get(\"relatedSearches\", [])\n",
" original_organics = original_results.get(\"organic\", [])\n",
"\n",
" for search in related_searches:\n",
" payload = json.dumps({\"q\": search.get(\"query\")})\n",
" response = requests.request(\"POST\", url, headers=headers, data=payload)\n",
" original_organics.extend(response.json().get(\"organic\", []))\n",
"\n",
" return original_organics"
]
},
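{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before handing the tools to the agents, you can exercise `elasticsearch_hybrid_search` directly. Since `nest_asyncio` is already applied, `asyncio.run` works inside the notebook; the query string below is only an example."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import asyncio\n",
"\n",
"# Optional: exercise the retrieval tool directly; the query is only an example\n",
"print(asyncio.run(elasticsearch_hybrid_search(\"software engineer with cloud experience\")))"
]
},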
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "cAYwonikXP8b"
},
"outputs": [],
"source": [
"register_function(\n",
" elasticsearch_hybrid_search,\n",
" caller=retriever,\n",
" executor=user_proxy,\n",
" name=\"elasticsearch_hybrid_search\",\n",
" description=\"A method retrieve information from Elasticsearch using semantic search capabilities\",\n",
")\n",
"\n",
"register_function(\n",
" search_in_internet,\n",
" caller=researcher,\n",
" executor=user_proxy,\n",
" name=\"search_in_internet\",\n",
" description=\"A method for search in internet\",\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "PMFhgK9CXTze"
},
"source": [
"## Starting agents"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "MNcpUJsKXZPK",
"outputId": "e6fc07d5-16d0-4f33-c7e1-bc18373c5537"
},
"outputs": [],
"source": [
"groupchat = GroupChat(\n",
" agents=[user_proxy, researcher, retriever, matcher, critic],\n",
" messages=[],\n",
" max_round=50,\n",
")\n",
"\n",
"manager = GroupChatManager(groupchat=groupchat, llm_config=ai_endpoint_config)\n",
"\n",
"user_proxy.initiate_chat(\n",
" manager,\n",
" message=\"\"\"Compare the candidate profiles retrieved by the Retriever with the job offers\n",
" found by the Researcher on the internet.\n",
" Both candidate profiles and job offers are related to the software industry.\n",
" Ensure that each match is accurate and beneficial for both parties.\n",
" Each candidate should be matched with a single job offer.\n",
" Include the job offer link provided by the Researcher.\"\"\",\n",
")"
]
},
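{
"cell_type": "markdown",
"metadata": {},
"source": [
"Once the run finishes, the whole conversation remains available on the `GroupChat` object. The cell below is an optional sketch that prints the tail of the conversation; the exact message structure can vary between AutoGen versions."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: inspect the last few messages of the group chat.\n",
"# Message structure may vary across AutoGen versions.\n",
"for message in groupchat.messages[-3:]:\n",
"    print(f\"{message.get('name', 'unknown')}: {str(message.get('content'))[:200]}\\n\")"
]
},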
{
"cell_type": "markdown",
"metadata": {
"id": "S6WZMJayyzxh"
},
"source": [
"## Cleaning environment\n",
"\n",
"Delete the resources used to prevent them from consuming resources."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "9UcQwa41yy_x",
"outputId": "b4bbe1a1-d6ac-474e-ea34-4f96ba2c9810"
},
"outputs": [],
"source": [
"# Cleanup - Delete Index\n",
"_client.indices.delete(index=\"available-candidates\", ignore=[400, 404])\n",
"\n",
"# Cleanup - Inference endpoint\n",
"_client.inference.delete(inference_id=\"jobs-candidates-inference\", ignore=[400, 404])"
]
}
],
"metadata": {
"colab": {
"collapsed_sections": [
"9lvPHaXjPlfu"
],
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
}
},
"nbformat": 4,
"nbformat_minor": 0
}