changes required for O1
app.py CHANGED
@@ -197,7 +197,7 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,

     history_openai_format = []
     user_msg_parts = []
-    if system_prompt:
+    if system_prompt and not (model == "o1-mini" or model == "o1-preview"):
         history_openai_format.append({"role": "system", "content": system_prompt})
     for human, assi in history:
         if human is not None:
@@ -224,30 +224,42 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
     if log_to_console:
         print(f"br_prompt: {str(history_openai_format)}")

-    response = client.chat.completions.create(
-        model=model,
-        messages= history_openai_format,
-        temperature=temperature,
-        seed=seed_i,
-        max_tokens=max_tokens,
-        stream=True,
-        stream_options={"include_usage": True}
-    )
-
-    partial_response=""
-    for chunk in response:
-        if chunk.choices:
-            txt = ""
-            for choice in chunk.choices:
-                cont = choice.delta.content
-                if cont:
-                    txt += cont
-
-            partial_response += txt
-            yield partial_response
-
-        if chunk.usage and log_to_console:
-            print(f"usage: {chunk.usage}")
+    if model == "o1-preview" or model == "o1-mini":
+        response = client.chat.completions.create(
+            model=model,
+            messages= history_openai_format,
+            seed=seed_i,
+        )
+
+        yield response.choices[0].message.content
+
+        if log_to_console:
+            print(f"usage: {response.usage}")
+    else:
+        response = client.chat.completions.create(
+            model=model,
+            messages= history_openai_format,
+            temperature=temperature,
+            seed=seed_i,
+            max_tokens=max_tokens,
+            stream=True,
+            stream_options={"include_usage": True}
+        )
+
+        partial_response=""
+        for chunk in response:
+            if chunk.choices:
+                txt = ""
+                for choice in chunk.choices:
+                    cont = choice.delta.content
+                    if cont:
+                        txt += cont
+
+                partial_response += txt
+                yield partial_response
+
+            if chunk.usage and log_to_console:
+                print(f"usage: {chunk.usage}")

     if log_to_console:
         print(f"br_result: {str(history)}")
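For context: o1-preview and o1-mini launched without support for system messages, temperature/max_tokens overrides, or streaming, which is why the diff skips the system prompt for those models and adds a separate non-streaming call for them while every other model keeps the original streaming path. Below is a minimal, self-contained sketch of the same branching, assuming the OpenAI Python SDK v1 client; the helper name chat_stream, its defaults, and the example call are illustrative and not taken from app.py.

# Standalone sketch of the branching in this commit, assuming the OpenAI
# Python SDK v1 client; helper name and defaults are illustrative.
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment


def chat_stream(model, messages, seed=None, temperature=1.0, max_tokens=1024):
    """Yield the growing reply text, branching on whether `model` is an o1 model."""
    if model in ("o1-preview", "o1-mini"):
        # At launch, o1 models rejected system messages, temperature/max_tokens
        # overrides and streaming, so only the supported fields are sent and
        # the reply arrives in one piece.
        response = client.chat.completions.create(
            model=model,
            messages=[m for m in messages if m["role"] != "system"],
            seed=seed,
        )
        yield response.choices[0].message.content
    else:
        # Other chat models keep the streaming path with usage reporting.
        stream = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
            seed=seed,
            max_tokens=max_tokens,
            stream=True,
            stream_options={"include_usage": True},
        )
        partial = ""
        for chunk in stream:
            for choice in chunk.choices:
                if choice.delta.content:
                    partial += choice.delta.content
            if chunk.choices:
                yield partial


# Example: the same call works for both model families.
for text in chat_stream("o1-mini", [{"role": "user", "content": "Hello"}]):
    print(text)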