pxovela committed on
Commit
b3db8e9
1 Parent(s): 97cfeff

Upload prompt_helper_sterilized.ipynb

Browse files
Files changed (1) hide show
  1. prompt_helper_sterilized.ipynb +117 -0
prompt_helper_sterilized.ipynb ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from openai import ChatCompletion,Completion, api_key\n",
10
+ "import openai\n",
11
+ "import time\n",
12
+ "import random"
13
+ ]
14
+ },
15
+ {
16
+ "cell_type": "code",
17
+ "execution_count": 2,
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "openai.api_key = ''"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": 3,
27
+ "metadata": {},
28
+ "outputs": [],
29
+ "source": [
30
+ "user_prompt = \"vacuum cleaner transformer.\""
31
+ ]
32
+ },
33
+ {
34
+ "cell_type": "code",
35
+ "execution_count": 4,
36
+ "metadata": {},
37
+ "outputs": [],
38
+ "source": [
39
def crazy_prompt(prompt, models=None):
    """Rewrite a raw user idea into a polished Stable Diffusion prompt.

    A system prompt instructs a fine-tuned chat model to expand the idea
    into a ~25-word art prompt; one model is picked at random from the
    candidate list.

    Parameters
    ----------
    prompt : str
        The user's raw idea. Only the first 15 words are sent to the API.
    models : list[str] | None
        Candidate model names to choose from at random. Defaults to the
        followfox.ai fine-tune. (The original signature read
        ``models=models``, which raises NameError at definition time, and
        the parameter was unconditionally overwritten in the body.)

    Returns
    -------
    str
        The rewritten prompt text returned by the model.
    """
    if models is None:
        models = ["ft:gpt-3.5-turbo-0613:followfox-ai::85z5rcYC"]

    prompt_sys = "You are autoregressive language model that works at followfox.ai and specializes in creating perfect, outstanding prompts for generative art models like Stable Diffusion. Your job is to take user ideas, capture ALL main parts, and turn into amazing prompts. You have to capture everything from the user's prompt and then use your talent to make the prompt amazing. You are a master of art styles, terminology, pop culture, and photography across the globe. Make sure overall prompt is coherent stylistically and captures user's idea. Aim for 25 words. Respond only with the new prompt."

    # Randomly select a model from the list
    selected_model = random.choice(models)

    # Keep requests short: truncate the user idea to its first 15 words.
    words = prompt.split()
    prompt = " ".join(words[:15])

    msgs = [{"role": "system", "content": prompt_sys},
            {"role": "user", "content": prompt}]

    try:
        response = ChatCompletion.create(model=selected_model, messages=msgs)
        return response['choices'][0]['message']['content']
    except openai.error.RateLimitError as e:
        retry_after = int(e.headers.get("retry-after", 60))
        print(f"Rate limit exceeded, waiting for {retry_after} seconds...")
        time.sleep(retry_after)
        # Bug fix: the original retried via undefined `call_api`; retry
        # this function itself. NOTE(review): recursion is unbounded if the
        # rate limit never clears — consider a retry cap.
        return crazy_prompt(prompt, models)
59
+ ]
60
+ },
61
+ {
62
+ "cell_type": "code",
63
+ "execution_count": 5,
64
+ "metadata": {},
65
+ "outputs": [
66
+ {
67
+ "ename": "AuthenticationError",
68
+ "evalue": "You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.",
69
+ "output_type": "error",
70
+ "traceback": [
71
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
72
+ "\u001b[0;31mAuthenticationError\u001b[0m Traceback (most recent call last)",
73
+ "\u001b[1;32m/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb Cell 5\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=0'>1</a>\u001b[0m crazy_prompt(user_prompt)\n",
74
+ "\u001b[1;32m/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb Cell 5\u001b[0m line \u001b[0;36m1\n\u001b[1;32m <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=9'>10</a>\u001b[0m msgs \u001b[39m=\u001b[39m [{\u001b[39m\"\u001b[39m\u001b[39mrole\u001b[39m\u001b[39m\"\u001b[39m: \u001b[39m\"\u001b[39m\u001b[39msystem\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39mcontent\u001b[39m\u001b[39m\"\u001b[39m: prompt_sys}, \u001b[39m# Note: 'prompt_sys' is not defined in the given code\u001b[39;00m\n\u001b[1;32m <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=10'>11</a>\u001b[0m {\u001b[39m\"\u001b[39m\u001b[39mrole\u001b[39m\u001b[39m\"\u001b[39m: \u001b[39m\"\u001b[39m\u001b[39muser\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39mcontent\u001b[39m\u001b[39m\"\u001b[39m: prompt}]\n\u001b[1;32m <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=12'>13</a>\u001b[0m \u001b[39mtry\u001b[39;00m: \n\u001b[0;32m---> <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=13'>14</a>\u001b[0m response \u001b[39m=\u001b[39m ChatCompletion\u001b[39m.\u001b[39;49mcreate(model\u001b[39m=\u001b[39;49mselected_model, messages\u001b[39m=\u001b[39;49mmsgs)\n\u001b[1;32m <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=14'>15</a>\u001b[0m \u001b[39mreturn\u001b[39;00m 
response[\u001b[39m'\u001b[39m\u001b[39mchoices\u001b[39m\u001b[39m'\u001b[39m][\u001b[39m0\u001b[39m][\u001b[39m'\u001b[39m\u001b[39mmessage\u001b[39m\u001b[39m'\u001b[39m][\u001b[39m'\u001b[39m\u001b[39mcontent\u001b[39m\u001b[39m'\u001b[39m]\n\u001b[1;32m <a href='vscode-notebook-cell://wsl%2Bubuntu-20.04/home/irakli/prompt_fixer/prompt_helper_sterilized.ipynb#W4sdnNjb2RlLXJlbW90ZQ%3D%3D?line=15'>16</a>\u001b[0m \u001b[39mexcept\u001b[39;00m openai\u001b[39m.\u001b[39merror\u001b[39m.\u001b[39mRateLimitError \u001b[39mas\u001b[39;00m e:\n",
75
+ "File \u001b[0;32m~/anaconda3/envs/fastai/lib/python3.10/site-packages/openai/api_resources/chat_completion.py:25\u001b[0m, in \u001b[0;36mChatCompletion.create\u001b[0;34m(cls, *args, **kwargs)\u001b[0m\n\u001b[1;32m 23\u001b[0m \u001b[39mwhile\u001b[39;00m \u001b[39mTrue\u001b[39;00m:\n\u001b[1;32m 24\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> 25\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39msuper\u001b[39;49m()\u001b[39m.\u001b[39;49mcreate(\u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 26\u001b[0m \u001b[39mexcept\u001b[39;00m TryAgain \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 27\u001b[0m \u001b[39mif\u001b[39;00m timeout \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m \u001b[39mand\u001b[39;00m time\u001b[39m.\u001b[39mtime() \u001b[39m>\u001b[39m start \u001b[39m+\u001b[39m timeout:\n",
76
+ "File \u001b[0;32m~/anaconda3/envs/fastai/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py:155\u001b[0m, in \u001b[0;36mEngineAPIResource.create\u001b[0;34m(cls, api_key, api_base, api_type, request_id, api_version, organization, **params)\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[39m@classmethod\u001b[39m\n\u001b[1;32m 130\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mcreate\u001b[39m(\n\u001b[1;32m 131\u001b[0m \u001b[39mcls\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 138\u001b[0m \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mparams,\n\u001b[1;32m 139\u001b[0m ):\n\u001b[1;32m 140\u001b[0m (\n\u001b[1;32m 141\u001b[0m deployment_id,\n\u001b[1;32m 142\u001b[0m engine,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 152\u001b[0m api_key, api_base, api_type, api_version, organization, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mparams\n\u001b[1;32m 153\u001b[0m )\n\u001b[0;32m--> 155\u001b[0m response, _, api_key \u001b[39m=\u001b[39m requestor\u001b[39m.\u001b[39;49mrequest(\n\u001b[1;32m 156\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mpost\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 157\u001b[0m url,\n\u001b[1;32m 158\u001b[0m params\u001b[39m=\u001b[39;49mparams,\n\u001b[1;32m 159\u001b[0m headers\u001b[39m=\u001b[39;49mheaders,\n\u001b[1;32m 160\u001b[0m stream\u001b[39m=\u001b[39;49mstream,\n\u001b[1;32m 161\u001b[0m request_id\u001b[39m=\u001b[39;49mrequest_id,\n\u001b[1;32m 162\u001b[0m request_timeout\u001b[39m=\u001b[39;49mrequest_timeout,\n\u001b[1;32m 163\u001b[0m )\n\u001b[1;32m 165\u001b[0m \u001b[39mif\u001b[39;00m stream:\n\u001b[1;32m 166\u001b[0m \u001b[39m# must be an iterator\u001b[39;00m\n\u001b[1;32m 167\u001b[0m \u001b[39massert\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39misinstance\u001b[39m(response, OpenAIResponse)\n",
77
+ "File \u001b[0;32m~/anaconda3/envs/fastai/lib/python3.10/site-packages/openai/api_requestor.py:299\u001b[0m, in \u001b[0;36mAPIRequestor.request\u001b[0;34m(self, method, url, params, headers, files, stream, request_id, request_timeout)\u001b[0m\n\u001b[1;32m 278\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mrequest\u001b[39m(\n\u001b[1;32m 279\u001b[0m \u001b[39mself\u001b[39m,\n\u001b[1;32m 280\u001b[0m method,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 287\u001b[0m request_timeout: Optional[Union[\u001b[39mfloat\u001b[39m, Tuple[\u001b[39mfloat\u001b[39m, \u001b[39mfloat\u001b[39m]]] \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m,\n\u001b[1;32m 288\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], \u001b[39mbool\u001b[39m, \u001b[39mstr\u001b[39m]:\n\u001b[1;32m 289\u001b[0m result \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mrequest_raw(\n\u001b[1;32m 290\u001b[0m method\u001b[39m.\u001b[39mlower(),\n\u001b[1;32m 291\u001b[0m url,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 297\u001b[0m request_timeout\u001b[39m=\u001b[39mrequest_timeout,\n\u001b[1;32m 298\u001b[0m )\n\u001b[0;32m--> 299\u001b[0m resp, got_stream \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_interpret_response(result, stream)\n\u001b[1;32m 300\u001b[0m \u001b[39mreturn\u001b[39;00m resp, got_stream, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mapi_key\n",
78
+ "File \u001b[0;32m~/anaconda3/envs/fastai/lib/python3.10/site-packages/openai/api_requestor.py:710\u001b[0m, in \u001b[0;36mAPIRequestor._interpret_response\u001b[0;34m(self, result, stream)\u001b[0m\n\u001b[1;32m 702\u001b[0m \u001b[39mreturn\u001b[39;00m (\n\u001b[1;32m 703\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_interpret_response_line(\n\u001b[1;32m 704\u001b[0m line, result\u001b[39m.\u001b[39mstatus_code, result\u001b[39m.\u001b[39mheaders, stream\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 705\u001b[0m )\n\u001b[1;32m 706\u001b[0m \u001b[39mfor\u001b[39;00m line \u001b[39min\u001b[39;00m parse_stream(result\u001b[39m.\u001b[39miter_lines())\n\u001b[1;32m 707\u001b[0m ), \u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 708\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 709\u001b[0m \u001b[39mreturn\u001b[39;00m (\n\u001b[0;32m--> 710\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_interpret_response_line(\n\u001b[1;32m 711\u001b[0m result\u001b[39m.\u001b[39;49mcontent\u001b[39m.\u001b[39;49mdecode(\u001b[39m\"\u001b[39;49m\u001b[39mutf-8\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[1;32m 712\u001b[0m result\u001b[39m.\u001b[39;49mstatus_code,\n\u001b[1;32m 713\u001b[0m result\u001b[39m.\u001b[39;49mheaders,\n\u001b[1;32m 714\u001b[0m stream\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[1;32m 715\u001b[0m ),\n\u001b[1;32m 716\u001b[0m \u001b[39mFalse\u001b[39;00m,\n\u001b[1;32m 717\u001b[0m )\n",
79
+ "File \u001b[0;32m~/anaconda3/envs/fastai/lib/python3.10/site-packages/openai/api_requestor.py:775\u001b[0m, in \u001b[0;36mAPIRequestor._interpret_response_line\u001b[0;34m(self, rbody, rcode, rheaders, stream)\u001b[0m\n\u001b[1;32m 773\u001b[0m stream_error \u001b[39m=\u001b[39m stream \u001b[39mand\u001b[39;00m \u001b[39m\"\u001b[39m\u001b[39merror\u001b[39m\u001b[39m\"\u001b[39m \u001b[39min\u001b[39;00m resp\u001b[39m.\u001b[39mdata\n\u001b[1;32m 774\u001b[0m \u001b[39mif\u001b[39;00m stream_error \u001b[39mor\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39m200\u001b[39m \u001b[39m<\u001b[39m\u001b[39m=\u001b[39m rcode \u001b[39m<\u001b[39m \u001b[39m300\u001b[39m:\n\u001b[0;32m--> 775\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mhandle_error_response(\n\u001b[1;32m 776\u001b[0m rbody, rcode, resp\u001b[39m.\u001b[39mdata, rheaders, stream_error\u001b[39m=\u001b[39mstream_error\n\u001b[1;32m 777\u001b[0m )\n\u001b[1;32m 778\u001b[0m \u001b[39mreturn\u001b[39;00m resp\n",
80
+ "\u001b[0;31mAuthenticationError\u001b[0m: You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys."
81
+ ]
82
+ }
83
+ ],
84
+ "source": [
85
+ "crazy_prompt(user_prompt)"
86
+ ]
87
+ },
88
+ {
89
+ "cell_type": "code",
90
+ "execution_count": null,
91
+ "metadata": {},
92
+ "outputs": [],
93
+ "source": []
94
+ }
95
+ ],
96
+ "metadata": {
97
+ "kernelspec": {
98
+ "display_name": "fastai",
99
+ "language": "python",
100
+ "name": "python3"
101
+ },
102
+ "language_info": {
103
+ "codemirror_mode": {
104
+ "name": "ipython",
105
+ "version": 3
106
+ },
107
+ "file_extension": ".py",
108
+ "mimetype": "text/x-python",
109
+ "name": "python",
110
+ "nbconvert_exporter": "python",
111
+ "pygments_lexer": "ipython3",
112
+ "version": "3.10.10"
113
+ }
114
+ },
115
+ "nbformat": 4,
116
+ "nbformat_minor": 2
117
+ }