GianJSX committed on
Commit
26a7296
1 Parent(s): 0d16aa4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -8
app.py CHANGED
@@ -4,14 +4,11 @@ from langsmith.run_helpers import traceable
4
  from langsmith_config import setup_langsmith_config
5
  import base64
6
  import os
7
- import uuid
8
 
9
  os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
10
  model = "gpt-3.5-turbo-1106"
11
  model_vision = "gpt-4-vision-preview"
12
  setup_langsmith_config()
13
- # generate UUID for the user from python
14
- user_id = str(uuid.uuid4())
15
 
16
  def process_images(msg: cl.Message):
17
  # Processing images exclusively
@@ -61,7 +58,7 @@ def handle_vision_call(msg, image_history):
61
  image_history.clear()
62
  return stream
63
 
64
- @traceable(run_type="llm", name="gpt 3 turbo call", metadata={"user": user_id})
65
  async def gpt_call(message_history: list = []):
66
  client = OpenAI()
67
 
@@ -69,11 +66,10 @@ async def gpt_call(message_history: list = []):
69
  model=model,
70
  messages=message_history,
71
  stream=True,
72
- user=user_id,
73
  )
74
  return stream
75
 
76
- @traceable(run_type="llm", name="gpt 4 turbo vision call", metadata={"user": user_id})
77
  def gpt_vision_call(image_history: list = []):
78
  client = OpenAI()
79
 
@@ -82,7 +78,6 @@ def gpt_vision_call(image_history: list = []):
82
  messages=image_history,
83
  max_tokens=500,
84
  stream=True,
85
- user=user_id,
86
  )
87
 
88
  return stream
@@ -97,7 +92,7 @@ def start_chat():
97
 
98
 
99
  @cl.on_message
100
- @traceable(run_type="chain", name="message", metadata={"user": user_id})
101
  async def on_message(msg: cl.Message):
102
  message_history = cl.user_session.get("message_history")
103
  image_history = cl.user_session.get("image_history")
 
4
  from langsmith_config import setup_langsmith_config
5
  import base64
6
  import os
 
7
 
8
  os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
9
  model = "gpt-3.5-turbo-1106"
10
  model_vision = "gpt-4-vision-preview"
11
  setup_langsmith_config()
 
 
12
 
13
  def process_images(msg: cl.Message):
14
  # Processing images exclusively
 
58
  image_history.clear()
59
  return stream
60
 
61
+ @traceable(run_type="llm", name="gpt 3 turbo call")
62
  async def gpt_call(message_history: list = []):
63
  client = OpenAI()
64
 
 
66
  model=model,
67
  messages=message_history,
68
  stream=True,
 
69
  )
70
  return stream
71
 
72
+ @traceable(run_type="llm", name="gpt 4 turbo vision call")
73
  def gpt_vision_call(image_history: list = []):
74
  client = OpenAI()
75
 
 
78
  messages=image_history,
79
  max_tokens=500,
80
  stream=True,
 
81
  )
82
 
83
  return stream
 
92
 
93
 
94
  @cl.on_message
95
+ @traceable(run_type="chain", name="message")
96
  async def on_message(msg: cl.Message):
97
  message_history = cl.user_session.get("message_history")
98
  image_history = cl.user_session.get("image_history")