tianlong12 committed
Commit 837e868
1 Parent(s): 2180807
Update app.py
app.py
CHANGED
@@ -72,6 +72,7 @@ def chat_completions():
     def generate():
         nonlocal messages
         full_response = ""
+        final_finish_reason = None
         while True:
             conversation = "\n".join([f"{msg['role']}: {msg['content']}" for msg in messages])
             conversation += "\n请关注并回复user最近的消息并避免总结对话历史的回答"
@@ -98,15 +99,19 @@ def chat_completions():
                     final_data = json.loads(event.data)
                     finish_reason = final_data.get('responseMessage', {}).get('finish_reason', 'stop')
                     if finish_reason == 'length':
-                        # 如果因为长度被截断,添加已生成的回复到消息列表,并继续生成
                         messages.append({"role": "assistant", "content": full_response})
                         messages.append({"role": "user", "content": "请继续你的输出,不要重复之前的内容"})
                         break
                     else:
+                        final_finish_reason = finish_reason
                         yield f"data: {json.dumps(format_openai_response('', finish_reason))}\n\n"
                         yield "data: [DONE]\n\n"
                         return
 
+        if final_finish_reason is None:
+            yield f"data: {json.dumps(format_openai_response('', 'stop'))}\n\n"
+            yield "data: [DONE]\n\n"
+
     if stream:
         return Response(stream_with_context(generate()), content_type='text/event-stream')
     else:
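In short, the commit adds a final_finish_reason flag to generate(). When the upstream response is cut off with finish_reason == 'length', the partial answer is appended to the history together with a user prompt asking the model to continue (the Chinese string roughly means "continue your output and do not repeat earlier content") and the loop runs another round; any other finish_reason is recorded and the stream is closed as before. The new block after the loop covers the case where the loop exits without ever seeing a finish_reason: the generator now still emits a final 'stop' chunk and a [DONE] marker so the client's SSE stream terminates instead of hanging. Below is a minimal, runnable sketch of that control flow, not the Space's actual code: query_backend is a hypothetical stand-in for the upstream SSE request made inside the real loop, format_openai_response is reduced to a bare chunk-shaped dict, and messages is passed as a parameter instead of captured with nonlocal.

import json


def format_openai_response(content, finish_reason=None):
    # Simplified stand-in for the app's helper of the same name; the real one
    # presumably builds a full OpenAI-style chat.completion.chunk payload.
    return {"choices": [{"delta": {"content": content},
                         "finish_reason": finish_reason}]}


def query_backend(conversation):
    # Hypothetical stub for the upstream SSE request made inside the real loop.
    # Returns the text generated for this round plus the finish_reason the
    # upstream reported, or None if the stream ended without reporting one.
    return "partial answer", None


def generate(messages):
    full_response = ""
    final_finish_reason = None  # added by this commit
    while True:
        conversation = "\n".join(f"{m['role']}: {m['content']}" for m in messages)
        conversation += "\n请关注并回复user最近的消息并避免总结对话历史的回答"
        text, finish_reason = query_backend(conversation)
        full_response += text
        yield f"data: {json.dumps(format_openai_response(text))}\n\n"
        if finish_reason == "length":
            # Output was truncated: feed the partial answer back and ask the
            # model to continue, then loop for another round.
            messages.append({"role": "assistant", "content": full_response})
            messages.append({"role": "user", "content": "请继续你的输出,不要重复之前的内容"})
            continue
        if finish_reason is not None:
            # Normal termination: emit the final chunk and close the stream.
            final_finish_reason = finish_reason
            yield f"data: {json.dumps(format_openai_response('', finish_reason))}\n\n"
            yield "data: [DONE]\n\n"
            return
        break  # upstream ended without a finish_reason
    # Fallback added by this commit: no finish_reason was ever reported, so
    # still send a final 'stop' chunk and [DONE] to terminate the SSE stream.
    if final_finish_reason is None:
        yield f"data: {json.dumps(format_openai_response('', 'stop'))}\n\n"
        yield "data: [DONE]\n\n"


if __name__ == "__main__":
    history = [{"role": "user", "content": "Hello"}]
    for sse_chunk in generate(history):
        print(sse_chunk, end="")

The change is defensive: OpenAI-compatible streaming clients generally keep reading until they see a finish_reason chunk followed by "data: [DONE]", so a generator that simply stops yielding would leave them waiting; the fallback guarantees the stream always terminates cleanly.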