AutoGeneralAI committed
Commit
0d048b9
1 Parent(s): ee4b1bc

Synced repo using 'sync_with_huggingface' Github Action

Files changed (1)
  1. demo.ipynb +285 -0
demo.ipynb ADDED
@@ -0,0 +1,285 @@
+ {
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": [],
+ "toc_visible": true
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Installation"
+ ],
+ "metadata": {
+ "id": "3BsHLvatMMBp"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install -q openai\n",
+ "!pip install -q gradio"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "EEuRe-IZGfcO",
+ "outputId": "553d78e9-a5e0-4326-eba1-01e347de5a1d"
+ },
+ "execution_count": 1,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.3/70.3 kB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m13.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m158.8/158.8 kB\u001b[0m \u001b[31m7.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m269.3/269.3 kB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.2/114.2 kB\u001b[0m \u001b[31m3.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m42.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m200.1/200.1 kB\u001b[0m \u001b[31m9.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m129.7/129.7 kB\u001b[0m \u001b[31m2.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.0/57.0 kB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.7/45.7 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.8/57.8 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m286.2/286.2 kB\u001b[0m \u001b[31m16.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.3/75.3 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.5/50.5 kB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m140.5/140.5 kB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.9/66.9 kB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.6/70.6 kB\u001b[0m \u001b[31m7.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25h Building wheel for ffmpy (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Imports"
+ ],
+ "metadata": {
+ "id": "unIGGtl_MO4u"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import openai\n",
+ "import gradio as gr"
+ ],
+ "metadata": {
+ "id": "dOxJoWGvLFne"
+ },
+ "execution_count": 2,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# API Key\n",
+ "\n",
+ "Set openai.api_key to your own API key; see https://beta.openai.com/docs/quickstart/add-your-api-key\n"
+ ],
+ "metadata": {
+ "id": "XRwvS_F2MYCZ"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "openai.api_key = \"sk-\" # Replace this with your API key: https://beta.openai.com/docs/quickstart/add-your-api-key"
+ ],
+ "metadata": {
+ "id": "ev3uSJn_MUdv"
+ },
+ "execution_count": 7,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# OpenAI Chat"
+ ],
+ "metadata": {
+ "id": "cfpMV1W7MdBW"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "def openai_chat(prompt):\n",
+ " completions = openai.Completion.create(\n",
+ " engine=\"text-davinci-003\",\n",
+ " prompt=prompt,\n",
+ " max_tokens=1024,\n",
+ " n=1,\n",
+ " temperature=0.5,\n",
+ " )\n",
+ "\n",
+ " message = completions.choices[0].text\n",
+ " return message.strip()"
+ ],
+ "metadata": {
+ "id": "h8EY5yomJDC_"
+ },
+ "execution_count": 8,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Gradio Interface Function"
+ ],
+ "metadata": {
+ "id": "Z8j5l3L1MgTo"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "def chatbot(key, input, history=[]):\n",
+ " openai.api_key = key\n",
+ " output = openai_chat(input)\n",
+ " history.append((input, output))\n",
+ " return history, history"
+ ],
+ "metadata": {
+ "id": "njt_0emtJDJp"
+ },
+ "execution_count": 10,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Launch Interface"
+ ],
+ "metadata": {
+ "id": "IIr6DORaMjWf"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 1000
+ },
+ "id": "XuUV25fQGB0I",
+ "outputId": "677e9563-29af-41c1-d8a2-40fddffd6a02"
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. To turn off, set debug=False in launch().\n",
+ "Note: opening Chrome Inspector may crash demo inside Colab notebooks.\n",
+ "\n",
+ "To create a public link, set `share=True` in `launch()`.\n"
+ ]
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ "<IPython.core.display.Javascript object>"
+ ],
+ "application/javascript": [
+ "(async (port, path, width, height, cache, element) => {\n",
+ " if (!google.colab.kernel.accessAllowed && !cache) {\n",
+ " return;\n",
+ " }\n",
+ " element.appendChild(document.createTextNode(''));\n",
+ " const url = await google.colab.kernel.proxyPort(port, {cache});\n",
+ "\n",
+ " const external_link = document.createElement('div');\n",
+ " external_link.innerHTML = `\n",
+ " <div style=\"font-family: monospace; margin-bottom: 0.5rem\">\n",
+ " Running on <a href=${new URL(path, url).toString()} target=\"_blank\">\n",
+ " https://localhost:${port}${path}\n",
+ " </a>\n",
+ " </div>\n",
+ " `;\n",
+ " element.appendChild(external_link);\n",
+ "\n",
+ " const iframe = document.createElement('iframe');\n",
+ " iframe.src = new URL(path, url).toString();\n",
+ " iframe.height = height;\n",
+ " iframe.allow = \"autoplay; camera; microphone; clipboard-read; clipboard-write;\"\n",
+ " iframe.width = width;\n",
+ " iframe.style.border = 0;\n",
+ " element.appendChild(iframe);\n",
+ " })(7860, \"/\", \"100%\", 500, false, window.element)"
+ ]
+ },
+ "metadata": {}
+ },
+ {
+ "output_type": "stream",
+ "name": "stderr",
+ "text": [
+ "Traceback (most recent call last):\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/gradio/routes.py\", line 401, in run_predict\n",
+ " output = await app.get_blocks().process_api(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/gradio/blocks.py\", line 1302, in process_api\n",
+ " result = await self.call_function(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/gradio/blocks.py\", line 1025, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/anyio/to_thread.py\", line 31, in run_sync\n",
+ " return await get_asynclib().run_sync_in_worker_thread(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/anyio/_backends/_asyncio.py\", line 937, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/anyio/_backends/_asyncio.py\", line 867, in run\n",
+ " result = context.run(func, *args)\n",
+ " File \"<ipython-input-10-39e04040fe2f>\", line 3, in chatbot\n",
+ " output = openai_chat(input)\n",
+ " File \"<ipython-input-8-075d166a6c82>\", line 2, in openai_chat\n",
+ " completions = openai.Completion.create(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/openai/api_resources/completion.py\", line 25, in create\n",
+ " return super().create(*args, **kwargs)\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/openai/api_resources/abstract/engine_api_resource.py\", line 153, in create\n",
+ " response, _, api_key = requestor.request(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 226, in request\n",
+ " resp, got_stream = self._interpret_response(result, stream)\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 620, in _interpret_response\n",
+ " self._interpret_response_line(\n",
+ " File \"/usr/local/lib/python3.9/dist-packages/openai/api_requestor.py\", line 683, in _interpret_response_line\n",
+ " raise self.handle_error_response(\n",
+ "openai.error.AuthenticationError: You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.\n"
+ ]
+ }
+ ],
+ "source": [
+ "gr.Interface(fn = chatbot,\n",
+ " inputs = [\"text\",\"text\",'state'],\n",
+ " outputs = [\"chatbot\",'state']).launch(debug = True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [],
+ "metadata": {
+ "id": "u54uD3Z8Ig-6"
+ },
+ "execution_count": null,
+ "outputs": []
+ }
+ ]
+ }
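
A note on the API used in this notebook: it calls the pre-1.0 openai SDK (openai.Completion.create with engine="text-davinci-003"), an endpoint and model that the current openai package no longer serves, and the captured stderr shows the demo failing only because no real key was supplied. The sketch below is not part of the commit; it is a minimal alternative written against the openai>=1.0 client, assuming a chat model such as gpt-3.5-turbo is available, the key is read from the OPENAI_API_KEY environment variable rather than typed into a textbox, and a gradio version whose Chatbot component accepts (user, bot) tuples, as in the notebook.

import openai  # assumes openai>=1.0
import gradio as gr

# The client reads OPENAI_API_KEY from the environment, so no key is hard-coded in the notebook.
client = openai.OpenAI()

def openai_chat(prompt):
    # Chat Completions replaces the legacy Completion endpoint used in the committed cell.
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",  # assumed model; text-davinci-003 has been retired
        messages=[{"role": "user", "content": prompt}],
        max_tokens=1024,
        temperature=0.5,
    )
    return completion.choices[0].message.content.strip()

def chatbot(user_input, history):
    # history is a list of (user, bot) tuples, the format the "chatbot" output renders.
    history = history or []
    output = openai_chat(user_input)
    history.append((user_input, output))
    return history, history

gr.Interface(fn=chatbot,
             inputs=["text", "state"],
             outputs=["chatbot", "state"]).launch(debug=True)

Reading the key from the environment also removes the extra key textbox and the per-call reassignment of openai.api_key that the committed chatbot function performs; otherwise the gradio wiring mirrors the original cell.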