{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [
 "import os\n",
 "\n",
 "# Set your own SerpAPI key here; keep a placeholder in shared notebooks and never commit a real key.\n",
 "os.environ[\"SERPAPI_API_KEY\"] = \"YOUR_SERPAPI_API_KEY\""
] },
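{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "# The SerpAPI search tool loaded in the next cell needs the `google-search-results` package\n",
 "# (see the ImportError in that cell's output). A minimal fix, assuming a standard pip-managed\n",
 "# environment: uncomment the line below, run it once, then restart the kernel.\n",
 "# %pip install google-search-results"
] },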
{ "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [
 "You're loading a tool from the Hub from None. Please make sure this is a source that you trust as the code within that tool will be executed on your machine. Always verify the code of the tools that you load. We recommend specifying a `revision` to ensure you're loading the code that you have checked.\n"
] }, { "ename": "ImportError", "evalue": "Could not import serpapi python package. Please install it with `pip install google-search-results`.", "output_type": "error", "traceback": [
 "ImportError: cannot import name 'GoogleSearch' from 'serpapi' (/Users/freddy/sources/components/chatbot-with-tools/.venv/lib/python3.10/site-packages/serpapi/__init__.py)\n",
 "\nDuring handling of the above exception, another exception occurred:\n",
 "ImportError: Could not import serpapi python package. Please install it with `pip install google-search-results`."
] } ], "source": [
 "from transformers import Tool, load_tool, ReactCodeAgent, HfEngine\n",
 "# Import tool from LangChain\n",
 "from langchain.agents import load_tools\n",
 "\n",
 "# Import tool from Hub\n",
 "image_generation_tool = load_tool(\"m-ric/text-to-image\")\n",
 "\n",
 "search_tool = Tool.from_langchain(load_tools([\"serpapi\"])[0])\n",
 "\n",
 "llm_engine = HfEngine(\"meta-llama/Meta-Llama-3-70B-Instruct\")\n",
 "# Initialize the agent with both tools\n",
 "agent = ReactCodeAgent(tools=[image_generation_tool, search_tool], llm_engine=llm_engine)\n",
 "\n",
 "# Run it!\n",
 "# agent.run(\"Generate me a photo of the car that James Bond drove in the latest movie.\")"
] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "import json\n",
 "\n",
 "# Persist the agent's step-by-step logs for later inspection\n",
 "with open(\"logs.json\", \"w\") as f:\n",
 "    json.dump(agent.logs, f, indent=2)"
] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "from transformers.agents import Agent\n",
 "from gradio.data_classes import GradioModel\n",
 "from typing import Literal, Generator, Optional\n",
 "from threading import Thread\n",
 "import time\n",
 "\n",
 "class OpenAIMessage(GradioModel):\n",
 "    role: Literal[\"system\", \"user\", \"assistant\", \"tool\"]\n",
 "    content: str\n",
 "    reasoning: bool = False\n",
 "    tool_name: Optional[str] = None\n"
] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "def stream_from_agent(\n",
 "    agent: Agent,\n",
 "    prompt: str\n",
 ") -> Generator[OpenAIMessage, None, None]:\n",
 "    \"\"\"Run the agent in a background thread and yield its log entries as messages in real time.\"\"\"\n",
 "\n",
 "    thread = Thread(target=agent.run, args=(prompt,))\n",
 "    num_messages = 0\n",
 "\n",
 "    # Start the thread and pull logs while it runs\n",
 "    thread.start()\n",
 "    while thread.is_alive():\n",
 "        if len(agent.logs) > num_messages:\n",
 "            new_messages = agent.logs[num_messages:]\n",
 "            for message in new_messages:\n",
 "                if not message:\n",
 "                    continue\n",
 "                if message.get(\"rationale\"):\n",
 "                    yield OpenAIMessage(\n",
 "                        role=\"assistant\",\n",
 "                        content=message[\"rationale\"],\n",
 "                        reasoning=True\n",
 "                    )\n",
 "                if message.get(\"tool_call\"):\n",
 "                    yield OpenAIMessage(\n",
 "                        role=\"tool\",\n",
 "                        tool_name=message[\"tool_call\"][\"tool_name\"],\n",
 "                        content=message[\"tool_call\"][\"tool_arguments\"],\n",
 "                        reasoning=True\n",
 "                    )\n",
 "            num_messages = len(agent.logs)\n",
 "        time.sleep(0.1)\n",
 "\n",
 "    thread.join(0.1)\n",
 "\n",
 "    # Flush any log entries that appeared after the thread finished\n",
 "    if len(agent.logs) > num_messages:\n",
 "        new_messages = agent.logs[num_messages:]\n",
 "        for message in new_messages:\n",
 "            if message.get(\"rationale\"):\n",
 "                yield OpenAIMessage(\n",
 "                    role=\"assistant\",\n",
 "                    content=message[\"rationale\"],\n",
 "                    reasoning=True\n",
 "                )\n",
 "            if message.get(\"tool_call\"):\n",
 "                yield OpenAIMessage(\n",
 "                    role=\"tool\",\n",
 "                    tool_name=message[\"tool_call\"][\"tool_name\"],\n",
 "                    content=message[\"tool_call\"][\"tool_arguments\"],\n",
 "                )"
] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "messages = []\n",
 "for msg in stream_from_agent(agent, \"Generate me a photo of a cartoon cat.\"):\n",
 "    messages.append(msg)\n",
 "    print(\"MSG\", msg)\n",
 "print(messages)"
] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.outputs" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.logs[2]" ] }, { "cell_type": "code", "execution_count":
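null, "metadata": {}, "outputs": [], "source": [
 "# Hedged sketch (not part of the original notebook): a small helper to summarize agent.logs,\n",
 "# assuming each log entry is a dict that may carry the \"rationale\" and \"tool_call\" keys\n",
 "# read by stream_from_agent above. `summarize_logs` is a hypothetical name used for illustration.\n",
 "def summarize_logs(logs):\n",
 "    for i, step in enumerate(logs):\n",
 "        if not isinstance(step, dict):\n",
 "            continue\n",
 "        if step.get(\"rationale\"):\n",
 "            print(f\"[step {i}] rationale: {str(step['rationale'])[:120]}\")\n",
 "        if step.get(\"tool_call\"):\n",
 "            call = step[\"tool_call\"]\n",
 "            print(f\"[step {i}] tool call: {call.get('tool_name')} args={call.get('tool_arguments')}\")\n",
 "\n",
 "# summarize_logs(agent.logs)"
] }, { "cell_type": "code", "execution_count":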
null, "metadata": {}, "outputs": [], "source": [ "agent.logs[3]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.run(\"What kind of car is this?\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.logs" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "agent.run??" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.13" } }, "nbformat": 4, "nbformat_minor": 2 }