{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "toc_visible": true }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" } }, "cells": [ { "cell_type": "markdown", "source": [ "# Installation" ], "metadata": { "id": "3BsHLvatMMBp" } }, { "cell_type": "code", "source": [ "!pip install -q openai\n", "!pip install -q gradio" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "EEuRe-IZGfcO", "outputId": "553d78e9-a5e0-4326-eba1-01e347de5a1d" }, "execution_count": 1, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.3/70.3 kB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m13.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m158.8/158.8 kB\u001b[0m \u001b[31m7.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m269.3/269.3 kB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.2/114.2 kB\u001b[0m \u001b[31m3.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m42.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m200.1/200.1 kB\u001b[0m \u001b[31m9.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m129.7/129.7 kB\u001b[0m \u001b[31m2.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.0/57.0 kB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.7/45.7 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.8/57.8 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m286.2/286.2 kB\u001b[0m \u001b[31m16.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.3/75.3 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.5/50.5 kB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m140.5/140.5 kB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.9/66.9 kB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.6/70.6 kB\u001b[0m \u001b[31m7.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Building wheel for ffmpy (setup.py) ... \u001b[?25l\u001b[?25hdone\n" ] } ] }, { "cell_type": "markdown", "source": [ "# Imports" ], "metadata": { "id": "unIGGtl_MO4u" } }, { "cell_type": "code", "source": [ "import openai\n", "import gradio as gr" ], "metadata": { "id": "dOxJoWGvLFne" }, "execution_count": 2, "outputs": [] }, { "cell_type": "markdown", "source": [ "# API Key\n", "\n", "openai.api_key = Replace this with your API key: https://beta.openai.com/docs/quickstart/add-your-api-key\n" ], "metadata": { "id": "XRwvS_F2MYCZ" } }, { "cell_type": "code", "source": [ "openai.api_key = \"sk-\" # Replace this with your API key: https://beta.openai.com/docs/quickstart/add-your-api-key" ], "metadata": { "id": "ev3uSJn_MUdv" }, "execution_count": 7, "outputs": [] }, { "cell_type": "markdown", "source": [ "# OpenAI Chat" ], "metadata": { "id": "cfpMV1W7MdBW" } }, { "cell_type": "code", "source": [ "def openai_chat(prompt):\n", " completions = openai.Completion.create(\n", " engine=\"text-davinci-003\",\n", " prompt=prompt,\n", " max_tokens=1024,\n", " n=1,\n", " temperature=0.5,\n", " )\n", "\n", " message = completions.choices[0].text\n", " return message.strip()" ], "metadata": { "id": "h8EY5yomJDC_" }, "execution_count": 8, "outputs": [] }, { "cell_type": "markdown", "source": [ "# Gradio Interface Function" ], "metadata": { "id": "Z8j5l3L1MgTo" } }, { "cell_type": "code", "source": [ "def chatbot(key, input, history=[]):\n", " openai.api_key = key\n", " output = openai_chat(input)\n", " history.append((input, output))\n", " return history, history" ], "metadata": { "id": "njt_0emtJDJp" }, "execution_count": 10, "outputs": [] }, { "cell_type": "markdown", "source": [ "# Launch Interface" ], "metadata": { "id": "IIr6DORaMjWf" } }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000 }, "id": "XuUV25fQGB0I", "outputId": "677e9563-29af-41c1-d8a2-40fddffd6a02" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. To turn off, set debug=False in launch().\n", "Note: opening Chrome Inspector may crash demo inside Colab notebooks.\n", "\n", "To create a public link, set `share=True` in `launch()`.\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "" ], "application/javascript": [ "(async (port, path, width, height, cache, element) => {\n", " if (!google.colab.kernel.accessAllowed && !cache) {\n", " return;\n", " }\n", " element.appendChild(document.createTextNode(''));\n", " const url = await google.colab.kernel.proxyPort(port, {cache});\n", "\n", " const external_link = document.createElement('div');\n", " external_link.innerHTML = `\n", "
\n", " Running on \n", " https://localhost:${port}${path}\n", " \n", "
\n", " `;\n", " element.appendChild(external_link);\n", "\n", " const iframe = document.createElement('iframe');\n", " iframe.src = new URL(path, url).toString();\n", " iframe.height = height;\n", " iframe.allow = \"autoplay; camera; microphone; clipboard-read; clipboard-write;\"\n", " iframe.width = width;\n", " iframe.style.border = 0;\n", " element.appendChild(iframe);\n", " })(7860, \"/\", \"100%\", 500, false, window.element)" ] }, "metadata": {} }, { "output_type": "stream", "name": "stderr", "text": [ "Traceback (most recent call last):\n", " File \"/usr/local/lib/python3.9/dist-packages/gradio/routes.py\", line 401, in run_predict\n", " output = await app.get_blocks().process_api(\n", " File \"/usr/local/lib/python3.9/dist-packages/gradio/blocks.py\", line 1302, in process_api\n", " result = await self.call_function(\n", " File \"/usr/local/lib/python3.9/dist-packages/gradio/blocks.py\", line 1025, in call_function\n", " prediction = await anyio.to_thread.run_sync(\n", " File \"/usr/local/lib/python3.9/dist-packages/anyio/to_thread.py\", line 31, in run_sync\n", " return await get_asynclib().run_sync_in_worker_thread(\n", " File \"/usr/local/lib/python3.9/dist-packages/anyio/_backends/_asyncio.py\", line 937, in run_sync_in_worker_thread\n", " return await future\n", " File \"/usr/local/lib/python3.9/dist-packages/anyio/_backends/_asyncio.py\", line 867, in run\n", " result = context.run(func, *args)\n", " File \"\", line 3, in chatbot\n", " output = openai_chat(input)\n", " File \"\", line 2, in openai_chat\n", " completions = openai.Completion.create(\n", " File \"/usr/local/lib/python3.9/dist-packages/openai/api_resources/completion.py\", line 25, in create\n", " return super().create(*args, **kwargs)\n", " File \"/usr/local/lib/python3.9/dist-packages/openai/api_resources/abstract/engine_api_resource.py\", line 153, in create\n", " response, _, api_key = requestor.request(\n", " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 226, in request\n", " resp, got_stream = self._interpret_response(result, stream)\n", " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 620, in _interpret_response\n", " self._interpret_response_line(\n", " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 683, in _interpret_response_line\n", " raise self.handle_error_response(\n", "openai.error.AuthenticationError: You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.\n" ] } ], "source": [ "gr.Interface(fn = chatbot,\n", " inputs = [\"text\",\"text\",'state'],\n", " outputs = [\"chatbot\",'state']).launch(debug = True)" ] }, { "cell_type": "code", "source": [], "metadata": { "id": "u54uD3Z8Ig-6" }, "execution_count": null, "outputs": [] } ] }