Spaces: Build error

ffreemt committed · Commit d652fb6
Parent(s): 9109205

Update requirements.txt

Changed files:
- app-org.py (+1 -1)
- app.py (+20 -21)
app-org.py CHANGED

@@ -42,7 +42,7 @@ def chat(message, history):
 
 chatbot = gr.Chatbot([], label="Bot", height=450)
 textbox = gr.Textbox('', scale=10, label='', lines=2, placeholder="Ask me anything")
-submit_btn = gr.Button(value="Send", scale=1, min_width=0, variant="primary")
+submit_btn = gr.Button(value="▶️ Send", scale=1, min_width=0, variant="primary")
 
 interf = gr.ChatInterface(
     chat,
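For context, the app-org.py hunk only relabels the Send button. Below is a minimal sketch of how these components are typically wired into gr.ChatInterface: the echo-style chat handler is a placeholder (the real one sits outside this hunk), and passing a Button instance as submit_btn assumes a Gradio 3.x-era signature, since newer releases expect a string there.

import gradio as gr


def chat(message, history):
    # Placeholder handler; the real chat() (outside this hunk) generates the model reply.
    return f"You said: {message}"


chatbot = gr.Chatbot([], label="Bot", height=450)
textbox = gr.Textbox('', scale=10, label='', lines=2, placeholder="Ask me anything")
submit_btn = gr.Button(value="▶️ Send", scale=1, min_width=0, variant="primary")

interf = gr.ChatInterface(
    chat,
    chatbot=chatbot,
    textbox=textbox,
    submit_btn=submit_btn,  # assumption: Gradio 3.x-era API that accepts a Button here
)

if __name__ == "__main__":
    interf.launch()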
app.py CHANGED

@@ -8,6 +8,7 @@ gradio
 transformers
 sentencepiece
 torch
+cpm_kernels
 
 import gradio as gr
 
@@ -30,6 +31,24 @@ import random
 import time
 import gradio as gr
 
+def respond2(message, chat_history):
+    if chat_history is None:
+        chat_history = []
+    bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
+
+    temp = ""
+    chat_history.append((message, temp))
+    for elm in range(len(bot_message)):
+        temp = bot_message[:elm+1]
+        time.sleep(0.2)
+        chat_history[-1] = message, temp
+        # yield message, chat_history
+        # chatbot.value = chat_history
+
+    chat_history[-1] = (message, "done " + bot_message)
+    time.sleep(2)
+
+    yield "", chat_history
 
 def stream_chat():
     """samples:
@@ -133,26 +152,6 @@ with gr.Blocks() as demo:
 
         yield "", chat_history
 
-    def respond2(message, chat_history):
-        if chat_history is None:
-            chat_history = []
-        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
-
-        temp = ""
-        chat_history.append((message, temp))
-        for elm in range(len(bot_message)):
-            temp = bot_message[:elm+1]
-            time.sleep(0.2)
-            chat_history[-1] = message, temp
-            # yield message, chat_history
-            # chatbot.value = chat_history
-
-        chat_history[-1] = (message, "done " + bot_message)
-        time.sleep(2)
-
-        yield "", chat_history
-
-
     msg.submit(respond2, [msg, chatbot], [msg, chatbot])
 
-demo.queue(max_size=2).launch()
+# demo.queue(max_size=2).launch()