GianJSX committed on
Commit
d1f1735
1 Parent(s): 0344eb9

Update app.py

Files changed (1)
  app.py  +32 -5
app.py CHANGED
@@ -5,8 +5,9 @@ from langsmith_config import setup_langsmith_config
 import base64
 import os
 
-os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
-model = "gpt-3.5-turbo-1106"
+if os.getenv("OPENAI_API_KEY") is not None:
+    os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
+model = "gpt-4-1106-preview"
 model_vision = "gpt-4-vision-preview"
 setup_langsmith_config()
 
@@ -82,14 +83,40 @@ def gpt_vision_call(image_history: list = []):
 
     return stream
 
+@traceable(run_type="llm", name="api-key set")
+async def wait_for_key():
+    res = await cl.AskUserMessage(content="Send an openai api-key to start", timeout=600).send()
+    if res:
+        await cl.Message(content="setting up...", indent=1).send()
+        os.environ["OPENAI_API_KEY"] = res["content"]
+        # check if the key is valid
+        client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
+        try:
+            stream = client.chat.completions.create(
+                model=model,
+                messages=[{"role": "system", "content": "test"}],
+                max_tokens=1,
+                stream=True,
+            )
+            if stream:
+                await cl.Message(content="api-key setted, you can start chatting!", indent=1).send()
+        except Exception as e:
+            await cl.Message(content=f"{e}", indent=1).send()
+            return await wait_for_key()
+        return await cl.Message(content="api-key setted, you can start chatting!").send()
+    else:
+        return await wait_for_key()
+
 @cl.on_chat_start
-def start_chat():
+async def start_chat():
     cl.user_session.set(
         "message_history",
-        [{"role": "system", "content": "You are a helpful assistant. You are made by GPT-3.5-turbo-1106, the latest version developed by Openai. You do not have the ability to receive images, but if the user uploads an image with the message, GPT-4-vision-preview will be used. So if a user asks you if you have the ability to analyze images, you can tell them that. And tell him that at the bottom left (above the text input) he has a button to upload images, or he can drag it to the chat, or he can just copy paste the input. The main reason for this project is so that the user can test the vision functionality of gpt 4. If he asks you about yourself, you can mention it so he knows he can do it."}],
+        [{"role": "system", "content": "You are a helpful assistant. You are made by GPT-3.5-turbo-1106, the latest version developed by Openai. You do not have the ability to receive images, but if the user uploads an image with the message, GPT-4-vision-preview will be used. So if a user asks you if you have the ability to analyze images, you can tell them that. And tell him that at the bottom left (above the text input) he has a button to upload images, or he can drag it to the chat, or he can just copy paste the input"}],
     )
     cl.user_session.set("image_history", [{"role": "system", "content": "You are a helpful assistant. You are developed with GPT-4-vision-preview, if the user uploads an image, you have the ability to understand it. For normal messages GPT-3.5-turbo-1106 will be used, and for images you will use it. If the user asks about your capabilities you can tell them that."}])
-
+    if os.getenv("OPENAI_API_KEY") is None:
+        await wait_for_key()
+
 
 @cl.on_message
 @traceable(run_type="chain", name="message")
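
Note on the change: the new wait_for_key() helper validates a user-supplied key by firing a one-token streamed chat completion and treating any exception as an invalid key. Below is a minimal, standalone sketch of that same probe, assuming the openai>=1.0 Python client; the function name validate_openai_key and the command-line harness are illustrative and not part of this commit.

import os

from openai import OpenAI


def validate_openai_key(key: str, model: str = "gpt-4-1106-preview") -> bool:
    """Return True if `key` can run a minimal one-token chat completion."""
    client = OpenAI(api_key=key)
    try:
        # Cheapest practical probe: request a single streamed token.
        # An invalid or revoked key raises here (e.g. openai.AuthenticationError).
        stream = client.chat.completions.create(
            model=model,
            messages=[{"role": "system", "content": "test"}],
            max_tokens=1,
            stream=True,
        )
        for _chunk in stream:
            break  # one chunk is enough to confirm the key works
        return True
    except Exception:
        return False


if __name__ == "__main__":
    candidate = os.getenv("OPENAI_API_KEY", "")
    print("key ok" if validate_openai_key(candidate) else "key missing or invalid")

In the commit itself this check stays inline in wait_for_key(), which keeps re-prompting via cl.AskUserMessage until the probe succeeds; factoring it out as above would only matter if the key needed to be re-validated elsewhere.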