{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: llm_openai"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio openai>=1.0.0"]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# This is a simple general-purpose chatbot built on top of OpenAI API. \n", "# Before running this, make sure you have exported your OpenAI API key as an environment variable:\n", "# export OPENAI_API_KEY=\"your-openai-api-key\"\n", "\n", "from openai import OpenAI\n", "import gradio as gr\n", "\n", "client = OpenAI()\n", "\n", "def predict(message, history):\n", "    history.append({\"role\": \"user\", \"content\": message})\n", "    stream = client.chat.completions.create(messages=history, model=\"gpt-4o-mini\", stream=True)\n", "    chunks = []\n", "    for chunk in stream:\n", "        chunks.append(chunk.choices[0].delta.content or \"\")\n", "        yield \"\".join(chunks)\n", "\n", "demo = gr.ChatInterface(predict, type=\"messages\")\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}