Spaces:
Sleeping
Sleeping
File size: 4,733 Bytes
f215879 e89886d f215879 be3619c e57e9d3 f73242a fa8ab40 f73242a e57e9d3 f73242a e3c1459 376fb29 e57e9d3 376fb29 e57e9d3 be3619c 376fb29 f215879 be3619c 85ccecf f215879 be3619c f215879 85ccecf be3619c 85ccecf e57e9d3 f215879 be3619c f215879 7a25f5e f215879 7a25f5e f215879 3517e68 f215879 4c1a0f1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 |
import gradio as gr
import random
import time
import chatglm_cpp
from pathlib import Path
# Path to the quantized ChatGLM3 GGML weights, expected next to this script.
model_file_path = "chatglm3-ggml_q4_0.bin"
#model_file_path = "../../Downloads1/chatglm3-ggml_q4_0.bin"
# Load the model once at module import time; reused by every chat request.
chatglm_llm = chatglm_cpp.Pipeline(Path(model_file_path))
def predict(message, history):
    """Stream a ChatGLM3 reply to *message* given prior chat turns.

    ``history`` is a list of ``[user_text, bot_text]`` pairs. Yields the
    accumulated response string after each newly generated chunk, so a
    Gradio consumer can render the reply incrementally.
    """
    # Interleave user/assistant turns into the flat alternating list that
    # chatglm_cpp's chat() API expects, then append the new user message.
    past_turns = [turn for exchange in history for turn in exchange]
    token_stream = chatglm_llm.chat(
        history=past_turns + [message],
        do_sample=False,
        stream=True,
    )
    partial = ""
    for chunk in token_stream:
        partial += chunk
        yield partial
# Build the Gradio chat UI. The layout is: title banner, chatbot panel,
# a row with the prompt textbox plus Submit/Clear buttons, and examples.
with gr.Blocks(css="custom.css") as demo:
    title = gr.HTML(
        """<h1 align="center"> <font size="+3"> ChatGLM3 Chatbot ☔️🐼 </font> </h1>""",
        elem_id="title",
    )
    # Subtitle (Chinese): "Chat with the AI assistant ChatGLM3".
    gr.HTML(
        """<h1 align="left"> <font size="+0"> 与人工智能助手 ChatGLM3 进行对话 </font> </h1>""",
        #elem_id="title",
    )
    chatbot = gr.Chatbot()

    def user(user_message, history):
        """Append the new user turn (bot reply pending) and clear the textbox."""
        return "", history + [[user_message, None]]

    def bot(history):
        """Stream the model reply into the last (pending) chat turn.

        Yields the whole history after each chunk so the Chatbot component
        re-renders the growing answer.
        """
        history[-1][1] = ""
        user_message = history[-1][0]
        # NOTE(review): `history` here still contains the pending
        # [user_message, ""] pair, so predict() sees the message twice
        # (once in history, once appended) — preserved as-is; confirm
        # against the intended prompt format before changing.
        pred_iter = predict(user_message, history)
        for ele in pred_iter:
            history[-1][1] = ele
            yield history

    with gr.Row():
        msg = gr.Textbox(
            #label="与人工智能助手 ChatGLM3 进行对话",
            show_label=True, lines=1, max_lines=20,
            min_width=1024,
            placeholder="你好 人工智能助手 ChatGLM3,我可以问你一些问题吗?",
            elem_id="prompt",
            interactive=True,
            #info = "Generate by Click, and can edit by yourself, look up Examples below"
        )
        #msg_out = gr.Textbox(visible = False)
        sub_button = gr.Button("Submit")
        clear = gr.Button("Clear")
        #api_button = gr.Button("api", visible = False)

    # Submit: record the user turn first (outside the queue), then stream
    # the bot reply into the chatbot.
    sub_button.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)
    gr.Examples(
        [
            "哈利波特和赫敏是什么关系?",
            "请解释下面的emoji符号描述的情景👨👩🔥❄️",
            "明朝内阁制度的特点是什么?",
            "如何进行经济建设?",
            "你听说过马克思吗?",
        ],
        inputs=msg,
    )

demo.queue()
# Fix: `enable_queue` was deprecated in Gradio 3.x and removed in 4.x
# (it raised a TypeError); queuing is already enabled via demo.queue().
demo.launch()
# Commented-out example (no-op string literal): calling the /bot endpoint
# of a locally running instance via gradio_client.
'''
from gradio_client import Client
client = Client("http://localhost:7860/")
result = client.predict(
[["诸葛亮是哪个朝代的人?", "诸葛亮是三国时期的人。"],
["为什么说明朝是一个好的时代?", "因为出了王阳明。"],
["我之前问了哪些问题?", None]],
api_name="/bot"
)
print(result)
'''
# Commented-out example (no-op string literal): same call against the hosted
# Space, passing the history as a JSON file (gradio_client 0.7.0 style).
'''
#### gradio_client.__version__ : 0.7.0
from gradio_client import Client
import json
client = Client("https://svjack-chatglm3-open-chat.hf.space/--replicas/ksskt/")
with open("hist.json", "w") as f:
json.dump(
[["诸葛亮是哪个朝代的人?", "诸葛亮是三国时期的人。"],
["为什么说明朝是一个好的时代?", "因为出了王阳明。"],
["我之前问了哪些问题?", None]]
, f
)
result = client.predict(
"hist.json",
fn_index=1
)
result
'''
|