Spaces: tianlong12 (Runtime error)

tianlong12 committed
Commit dd193a1 • 1 Parent(s): 029220b
Update app.py
app.py CHANGED
@@ -72,7 +72,6 @@ def chat_completions():
     def generate():
         nonlocal messages
         full_response = ""
-        final_finish_reason = None
         while True:
             conversation = "\n".join([f"{msg['role']}: {msg['content']}" for msg in messages])
             conversation += "\n请关注并回复user最近的消息并避免总结对话历史的回答"
@@ -98,19 +97,20 @@ def chat_completions():
                 elif '"final":true' in event.data:
                     final_data = json.loads(event.data)
                     finish_reason = final_data.get('responseMessage', {}).get('finish_reason', 'stop')
-                    if finish_reason
+                    if finish_reason == 'length':
                         messages.append({"role": "assistant", "content": full_response})
                         messages.append({"role": "user", "content": "请继续你的输出,不要重复之前的内容"})
-                        break
+                        break  # 跳出当前循环,继续下一次请求
                     else:
-                        final_finish_reason = finish_reason
                         yield f"data: {json.dumps(format_openai_response('', finish_reason))}\n\n"
                         yield "data: [DONE]\n\n"
-                        return
-
-
-
+                        return  # 完全结束生成
+
+            # 如果因为 length 而跳出内部循环,则继续外部循环

+        # 如果所有请求都因 length 而结束,发送一个最终的停止信号
+        yield f"data: {json.dumps(format_openai_response('', 'stop'))}\n\n"
+        yield "data: [DONE]\n\n"

     if stream:
         return Response(stream_with_context(generate()), content_type='text/event-stream')
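In short, this commit turns the streaming proxy into an auto-continuation loop: whenever the upstream reply ends with finish_reason == 'length' (output truncated), the partial answer is appended to messages together with a user turn asking the model to keep going without repeating itself (that is what the Chinese prompt "请继续你的输出,不要重复之前的内容" says), and the outer while loop fires another request; any other finish reason emits a closing chunk plus data: [DONE] and stops. The commit also adds a trailing stop signal after the loop as a safety net. The sketch below is only a minimal, self-contained illustration of that pattern under assumed names: fetch_upstream_events and the event/chunk shapes are hypothetical stand-ins, format_openai_response here is a simplified guess at the helper defined elsewhere in app.py, and the trailing safety net is folded into the normal-completion branch.

import json

def format_openai_response(content, finish_reason=None):
    # Simplified stand-in for the helper used in app.py: an OpenAI-style
    # streaming chunk carrying either a content delta or a finish_reason.
    return {"choices": [{"delta": ({"content": content} if content else {}),
                         "finish_reason": finish_reason}]}

def generate(messages, fetch_upstream_events):
    # fetch_upstream_events(conversation) is assumed to yield dicts shaped like
    # the upstream SSE payloads seen in the diff: {"content": ...} deltas and a
    # closing {"final": True, "responseMessage": {"finish_reason": ...}} event.
    full_response = ""
    while True:
        conversation = "\n".join(f"{m['role']}: {m['content']}" for m in messages)
        finish_reason = "stop"
        truncated = False
        for event in fetch_upstream_events(conversation):
            if not event.get("final"):
                chunk = event.get("content", "")
                full_response += chunk
                yield f"data: {json.dumps(format_openai_response(chunk))}\n\n"
                continue
            finish_reason = event.get("responseMessage", {}).get("finish_reason", "stop")
            if finish_reason == "length":
                # Reply was cut off: feed the partial answer back and ask the
                # model to continue without repeating itself, then re-request.
                messages.append({"role": "assistant", "content": full_response})
                messages.append({"role": "user",
                                 "content": "Please continue without repeating earlier content"})
                truncated = True
            break  # leave the event loop; the while loop decides the next step
        if truncated:
            continue  # issue the follow-up request
        # Normal completion: emit the closing chunk and end the generator.
        yield f"data: {json.dumps(format_openai_response('', finish_reason))}\n\n"
        yield "data: [DONE]\n\n"
        return

# Quick check with a fake upstream that truncates once, then finishes normally.
def fake_upstream(conversation):
    if "continue" not in conversation:
        yield {"content": "partial answer"}
        yield {"final": True, "responseMessage": {"finish_reason": "length"}}
    else:
        yield {"content": " ...rest of the answer"}
        yield {"final": True, "responseMessage": {"finish_reason": "stop"}}

for line in generate([{"role": "user", "content": "hi"}], fake_upstream):
    print(line, end="")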