{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# app\n", "\n", "> Gradio app.py" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| default_exp app" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| hide\n", "from nbdev.showdoc import *" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# | export\n", "import copy\n", "import os\n", "import gradio as gr\n", "import constants\n", "from lv_recipe_chatbot.vegan_recipe_assistant import (\n", " SYSTEM_PROMPT,\n", " vegan_recipe_edamam_search,\n", " VEGAN_RECIPE_SEARCH_TOOL_SCHEMA,\n", ")\n", "from openai import OpenAI, AssistantEventHandler\n", "from typing_extensions import override\n", "import json\n", "from functools import partial" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| hide\n", "import time\n", "from dotenv import load_dotenv" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": null, "metadata": {}, "output_type": "execute_result" } ], "source": [ "#| eval: false\n", "load_dotenv()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "[GPT4 streaming output example on hugging face 🤗](https://huggingface.co/spaces/ysharma/ChatGPT4/blob/main/app.pyhttps://huggingface.co/spaces/ysharma/ChatGPT4/blob/main/app.py) \n", "[Gradio lite let's you insert Gradio app in browser JS](https://www.gradio.app/guides/gradio-litehttps://www.gradio.app/guides/gradio-lite) \n", "[Streaming output](https://www.gradio.app/main/guides/streaming-outputshttps://www.gradio.app/main/guides/streaming-outputs)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| eval: false\n", "client = OpenAI()\n", "assistant = client.beta.assistants.create(\n", " name=\"Vegan Recipe Finder\",\n", " instructions=SYSTEM_PROMPT,\n", " # + \"\\nChoose the best single matching recipe to the user's query out of the vegan recipe search returned recipes\",\n", " model=\"gpt-4o\",\n", " tools=[VEGAN_RECIPE_SEARCH_TOOL_SCHEMA],\n", ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "class EventHandler(AssistantEventHandler):\n", " @override\n", " def on_event(self, event):\n", " # Retrieve events that are denoted with 'requires_action'\n", " # since these will have our tool_calls\n", " if event.event == \"thread.run.requires_action\":\n", " run_id = event.data.id # Retrieve the run ID from the event data\n", " self.handle_requires_action(event.data, run_id)\n", "\n", " def handle_requires_action(self, data, run_id):\n", " tool_outputs = []\n", " for tool_call in data.required_action.submit_tool_outputs.tool_calls:\n", " if tool_call.function.name == \"vegan_recipe_edamam_search\":\n", " fn_args = json.loads(tool_call.function.arguments)\n", " data = vegan_recipe_edamam_search(\n", " query=fn_args.get(\"query\"),\n", " )\n", " tool_outputs.append({\"tool_call_id\": tool_call.id, \"output\": data})\n", "\n", " self.submit_tool_outputs(tool_outputs, run_id)\n", "\n", " def submit_tool_outputs(self, tool_outputs, run_id):\n", " client.beta.threads.runs.submit_tool_outputs_stream(\n", " thread_id=self.current_run.thread_id,\n", " run_id=self.current_run.id,\n", " tool_outputs=tool_outputs,\n", " event_handler=EventHandler(),\n", " )" ] }, { "cell_type": "code", "execution_count": 
null, "metadata": {}, "outputs": [], "source": [ "#| export\n", "def handle_requires_action(data):\n", " tool_outputs = []\n", " for tool_call in data.required_action.submit_tool_outputs.tool_calls:\n", " if tool_call.function.name == \"vegan_recipe_edamam_search\":\n", " fn_args = json.loads(tool_call.function.arguments)\n", " data = vegan_recipe_edamam_search(\n", " query=fn_args.get(\"query\"),\n", " )\n", " tool_outputs.append({\"tool_call_id\": tool_call.id, \"output\": data})\n", " return tool_outputs" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def run_conversation() -> str:\n", " run = client.beta.threads.runs.create_and_poll(\n", " thread_id=thread.id,\n", " assistant_id=assistant.id,\n", " )\n", " while True:\n", " tool_outputs = []\n", " tool_calls = (\n", " []\n", " if not run.required_action\n", " else run.required_action.submit_tool_outputs.tool_calls\n", " )\n", "\n", " for tool_call in tool_calls:\n", " if tool_call.function.name == \"vegan_recipe_edamam_search\":\n", " fn_args = json.loads(tool_call.function.arguments)\n", " data = vegan_recipe_edamam_search(\n", " query=fn_args.get(\"query\"),\n", " )\n", " tool_outputs.append({\"tool_call_id\": tool_call.id, \"output\": data})\n", "\n", " if tool_outputs:\n", " try:\n", " run = client.beta.threads.runs.submit_tool_outputs_and_poll(\n", " thread_id=thread.id,\n", " run_id=run.id,\n", " tool_outputs=tool_outputs,\n", " )\n", " print(\"Tool outputs submitted successfully.\")\n", "\n", " except Exception as e:\n", " print(\"Failed to submit tool outputs:\", e)\n", " return \"Sorry failed to run tools. Try again with a different query.\"\n", "\n", " if run.status == \"completed\":\n", " messages = client.beta.threads.messages.list(thread_id=thread.id)\n", " data = messages.data\n", " content = data[0].content\n", " return content[0].text.value\n", " time.sleep(0.05)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| export\n", "def run_convo_stream(thread, content: str, client: OpenAI, assistant):\n", " message = client.beta.threads.messages.create(\n", " thread_id=thread.id,\n", " role=\"user\",\n", " content=content,\n", " )\n", " stream = client.beta.threads.runs.create(\n", " thread_id=thread.id,\n", " assistant_id=assistant.id,\n", " stream=True,\n", " )\n", " for event in stream:\n", " if event.event == \"thread.message.delta\":\n", " yield event.data.delta.content[0].text.value\n", "\n", " if event.event == \"thread.run.requires_action\":\n", " tool_outputs = handle_requires_action(event.data)\n", " stream = client.beta.threads.runs.submit_tool_outputs(\n", " run_id=event.data.id,\n", " thread_id=thread.id,\n", " tool_outputs=tool_outputs,\n", " stream=True,\n", " )\n", " for event in stream:\n", " if event.event == \"thread.message.delta\":\n", " yield event.data.delta.content[0].text.value" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "skip\n" ] } ], "source": [ "%%script echo skip\n", "thread = client.beta.threads.create()\n", "\n", "test_msgs = [\n", " \"Hello\",\n", " \"What can I make with tempeh, whole wheat bread, and lettuce?\",\n", "]\n", "for m in test_msgs:\n", " for txt in run_convo_stream(thread, m, client, assistant):\n", " print(txt, end=\"\")\n", " print()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| export\n", "def predict(message, history, client: 
"def predict(message, history, client: OpenAI, assistant, thread):\n", "    # note that history is a flat list of text messages\n", "    reply = \"\"\n", "    files = message[\"files\"]\n", "    txt = message[\"text\"]\n", "\n", "    if files:\n", "        if files[-1].split(\".\")[-1].lower() not in [\"jpg\", \"png\", \"jpeg\", \"webp\"]:\n", "            # predict is a generator, so yield the error message instead of returning it\n", "            yield \"Sorry, I can only accept image files.\"\n", "            return\n", "\n", "        file = message[\"files\"][-1]\n", "        file = client.files.create(\n", "            file=open(\n", "                file,\n", "                \"rb\",\n", "            ),\n", "            purpose=\"vision\",\n", "        )\n", "\n", "        for reply_txt in run_convo_stream(\n", "            thread,\n", "            content=[\n", "                {\n", "                    \"type\": \"text\",\n", "                    \"text\": \"What vegan ingredients do you see in this image? Also list out a few combinations of the ingredients that go well together. Lastly, suggest a recipe based on one of those combos using the vegan recipe search tool.\",\n", "                },\n", "                {\"type\": \"image_file\", \"image_file\": {\"file_id\": file.id}},\n", "            ],\n", "            client=client,\n", "            assistant=assistant,\n", "        ):\n", "            reply += reply_txt\n", "            yield reply\n", "\n", "    elif txt:\n", "        for reply_txt in run_convo_stream(thread, txt, client, assistant):\n", "            reply += reply_txt\n", "            yield reply" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| export\n", "def create_demo(client: OpenAI, assistant):\n", "    # https://www.gradio.app/main/guides/creating-a-chatbot-fast#customizing-your-chatbot\n", "    # on chatbot start / first msg after clear\n", "    thread = client.beta.threads.create()\n", "\n", "    # sample_images = []\n", "    # all_imgs = [f\"{SAMPLE_IMG_DIR}/{img}\" for img in os.listdir(SAMPLE_IMG_DIR)]\n", "    # for i, img in enumerate(all_imgs):\n", "    #     if i in [\n", "    #         1,\n", "    #         2,\n", "    #         3,\n", "    #     ]:\n", "    #         sample_images.append(img)\n", "    pred = partial(predict, client=client, assistant=assistant, thread=thread)\n", "    with gr.ChatInterface(\n", "        fn=pred,\n", "        multimodal=True,\n", "        chatbot=gr.Chatbot(\n", "            placeholder=\"Hello!\\nI am an animal advocate AI that is capable of recommending vegan recipes.\\nUpload an image or write a message below to get started!\"\n", "        ),\n", "    ) as demo:\n", "        gr.Markdown(\n", "            \"\"\"🔃 **Refresh the page to start from scratch**  \n", "\n", "Recipe search tool powered by the [Edamam API](https://www.edamam.com/)  \n", "\n", "![Edamam Logo](https://www.edamam.com/assets/img/small-logo.png)\"\"\"\n", "        )\n", "\n", "        # clear.click(lambda: None, None, chatbot, queue=False).then(bot.reset)\n", "    return demo" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "skip\n" ] } ], "source": [ "%%script echo skip\n", "if \"demo\" in globals():\n", "    demo.close()\n", "\n", "demo = create_demo(client, assistant)\n", "demo.launch()" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| hide\n", "import nbdev\n", "\n", "nbdev.nbdev_export()" ] } ], "metadata": { "kernelspec": { "display_name": "local-lv-chatbot", "language": "python", "name": "local-lv-chatbot" } }, "nbformat": 4, "nbformat_minor": 4 }