Update vlm.py
Browse files
vlm.py
CHANGED
@@ -97,7 +97,7 @@ def build_messages(message: dict, history: list[tuple]):
|
|
97 |
# get response
|
98 |
#
|
99 |
def get_response(messages: list[dict]):
|
100 |
-
"""
|
101 |
|
102 |
Args:
|
103 |
messages: list of messages to send to the model
|
@@ -106,3 +106,16 @@ def get_response(messages: list[dict]):
|
|
106 |
logger.info(f"{response=}")
|
107 |
return response.choices[0].message.content
|
108 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
97 |
# get response
|
98 |
#
|
99 |
def get_response(messages: list[dict]):
|
100 |
+
"""Get the model's response.
|
101 |
|
102 |
Args:
|
103 |
messages: list of messages to send to the model
|
|
|
106 |
logger.info(f"{response=}")
|
107 |
return response.choices[0].message.content
|
108 |
|
109 |
+
#
# stream response
#
def stream_response(messages: list[dict]):
    """Stream the model's response, yielding the cumulative text so far.

    Args:
        messages: list of messages to send to the model

    Yields:
        str: the response text accumulated up to and including the
        latest received chunk.
    """
    response = ""
    for chunk in client.chat.stream(model=model_id, messages=messages):
        # NOTE(review): terminal/keep-alive stream chunks can carry a None
        # delta content; guard with `or ""` so the concatenation never
        # raises TypeError mid-stream.
        response += chunk.data.choices[0].delta.content or ""
        yield response