ndurner committed on
Commit
bce44b2
·
1 Parent(s): 2d2901a

changes required for O1

Browse files
Files changed (1) hide show
  1. app.py +37 -25
app.py CHANGED
@@ -197,7 +197,7 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
197
 
198
  history_openai_format = []
199
  user_msg_parts = []
200
- if system_prompt:
201
  history_openai_format.append({"role": "system", "content": system_prompt})
202
  for human, assi in history:
203
  if human is not None:
@@ -224,30 +224,42 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
224
  if log_to_console:
225
  print(f"br_prompt: {str(history_openai_format)}")
226
 
227
- response = client.chat.completions.create(
228
- model=model,
229
- messages= history_openai_format,
230
- temperature=temperature,
231
- seed=seed_i,
232
- max_tokens=max_tokens,
233
- stream=True,
234
- stream_options={"include_usage": True}
235
- )
236
-
237
- partial_response=""
238
- for chunk in response:
239
- if chunk.choices:
240
- txt = ""
241
- for choice in chunk.choices:
242
- cont = choice.delta.content
243
- if cont:
244
- txt += cont
245
-
246
- partial_response += txt
247
- yield partial_response
248
-
249
- if chunk.usage and log_to_console:
250
- print(f"usage: {chunk.usage}")
 
 
 
 
 
 
 
 
 
 
 
 
251
 
252
  if log_to_console:
253
  print(f"br_result: {str(history)}")
 
197
 
198
  history_openai_format = []
199
  user_msg_parts = []
200
+ if system_prompt and not (model == "o1-mini" or model == "o1-preview"):
201
  history_openai_format.append({"role": "system", "content": system_prompt})
202
  for human, assi in history:
203
  if human is not None:
 
224
  if log_to_console:
225
  print(f"br_prompt: {str(history_openai_format)}")
226
 
227
+ if model == "o1-preview" or model == "o1-mini":
228
+ response = client.chat.completions.create(
229
+ model=model,
230
+ messages= history_openai_format,
231
+ seed=seed_i,
232
+ )
233
+
234
+ yield response.choices[0].message.content
235
+
236
+ if log_to_console:
237
+ print(f"usage: {response.usage}")
238
+ else:
239
+ response = client.chat.completions.create(
240
+ model=model,
241
+ messages= history_openai_format,
242
+ temperature=temperature,
243
+ seed=seed_i,
244
+ max_tokens=max_tokens,
245
+ stream=True,
246
+ stream_options={"include_usage": True}
247
+ )
248
+
249
+ partial_response=""
250
+ for chunk in response:
251
+ if chunk.choices:
252
+ txt = ""
253
+ for choice in chunk.choices:
254
+ cont = choice.delta.content
255
+ if cont:
256
+ txt += cont
257
+
258
+ partial_response += txt
259
+ yield partial_response
260
+
261
+ if chunk.usage and log_to_console:
262
+ print(f"usage: {chunk.usage}")
263
 
264
  if log_to_console:
265
  print(f"br_result: {str(history)}")