Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,6 +1,7 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import hf_hub_download
|
3 |
import os
|
|
|
4 |
|
5 |
# 模型下载链接
|
6 |
model_url = "https://huggingface.co/CMLL/ZhongJing-2-1_8b-GGUF/resolve/main/ZhongJing1_5-1_8b-fp16.gguf"
|
@@ -26,8 +27,16 @@ with open(os.path.join(prompts_dir, "TcmChat.txt"), "w") as f:
|
|
26 |
|
27 |
# Gradio 接口
|
28 |
def chat_with_model(user_input, history):
|
29 |
-
prompt = f"
|
30 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
31 |
history.append((user_input, response))
|
32 |
return history, history
|
33 |
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import hf_hub_download
|
3 |
import os
|
4 |
+
import subprocess
|
5 |
|
6 |
# 模型下载链接
|
7 |
model_url = "https://huggingface.co/CMLL/ZhongJing-2-1_8b-GGUF/resolve/main/ZhongJing1_5-1_8b-fp16.gguf"
|
|
|
27 |
|
28 |
# Gradio 接口
|
29 |
def chat_with_model(user_input, history):
    """Run one chat turn through the local llama.cpp binary.

    Appends the user's turn to the running prompt file, invokes the
    llama.cpp ``main`` executable on it, and extracts the model's reply
    from stdout.

    Args:
        user_input: The user's message for this turn.
        history: Gradio chat history — a list of (user, assistant) tuples;
            mutated in place.

    Returns:
        A ``(history, history)`` tuple, as expected by the Gradio
        interface wiring (chatbot display + state).
    """
    prompt = f"User: {user_input}\nAssistant:"

    # Persist the turn in the prompt file that llama.cpp reads with -f.
    # NOTE(review): the file only ever grows across turns — presumably
    # intentional so the model sees prior context; confirm.
    prompt_file = os.path.join(prompts_dir, "TcmChat.txt")
    with open(prompt_file, "a") as f:
        f.write(prompt + "\n")

    # Build the argv as a list and run with shell=False: the original
    # interpolated paths into a shell=True string, which breaks on spaces
    # in paths and is shell-injection-prone. Flags are unchanged.
    command = [
        "./llama.cpp/build/bin/main",
        "-m", model_path,
        "-n", "256",
        "--repeat_penalty", "1.0",
        "--color",
        "-i",
        "-r", "User:",
        "-f", prompt_file,
    ]
    result = subprocess.run(command, capture_output=True, text=True)

    # The model echoes the prompt; the reply is whatever follows the last
    # "User:" reverse-prompt marker in stdout.
    response = result.stdout.split("User:")[-1].strip()

    history.append((user_input, response))
    return history, history
|
42 |
|