Update predict.py
predict.py CHANGED  (+3 -3)
@@ -104,7 +104,7 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
         return result
 
 
-def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
+def predict(api, inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
         stream = True, additional_fn=None):
     """
         发送至chatGPT,流式获取输出。
@@ -127,7 +127,7 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
     chatbot.append((inputs, ""))
     yield chatbot, history, "等待响应"
 
-    headers, payload = generate_payload(inputs, top_p, temperature, history, system_prompt, stream)
+    headers, payload = generate_payload(api, inputs, top_p, temperature, history, system_prompt, stream)
     history.append(inputs); history.append(" ")
 
     retry = 0
@@ -187,7 +187,7 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
             yield chatbot, history, "Json解析不合常规" + error_msg
             return
 
-def generate_payload(inputs, top_p, temperature, history, system_prompt, stream):
+def generate_payload(api, inputs, top_p, temperature, history, system_prompt, stream):
     """
         整合所有信息,选择LLM模型,生成http请求,为发送请求做准备
     """