heegyu committed
Commit 187fb55
1 Parent(s): d5b6e22

bluechat-v0

Files changed (2)
  1. app.py +50 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,50 @@
+ import gradio as gr
+ import torch
+ import random
+ import time
+ from transformers import pipeline
+
+ generator = pipeline(
+     'text-generation',
+     model="heegyu/bluechat-v0",
+     device="cuda:0" if torch.cuda.is_available() else 'cpu'
+ )
+
+ def query(message, chat_history, max_turn=4):
+     prompt = []
+     if len(chat_history) > max_turn:
+         chat_history = chat_history[-max_turn:]
+     for user, bot in chat_history:
+         prompt.append(f"<usr> {user}")
+         prompt.append(f"<bot> {bot}")
+     prompt.append(f"<usr> {message}")
+     prompt = "\n".join(prompt) + "\n<bot>"
+
+     output = generator(
+         prompt,
+         do_sample=True,
+         top_p=0.9,
+         early_stopping=True,
+         max_length=256,
+     )[0]['generated_text']
+
+     print(output)
+
+     response = output[len(prompt):]
+     return response.strip()
+
+ with gr.Blocks() as demo:
+     chatbot = gr.Chatbot()
+     msg = gr.Textbox()
+     clear = gr.Button("Clear")
+
+     def respond(message, chat_history):
+         bot_message = query(message, chat_history)  # random.choice(["How are you?", "I love you", "I'm very hungry"])
+         chat_history.append((message, bot_message))
+         # time.sleep(1)
+         return "", chat_history
+
+     msg.submit(respond, [msg, chatbot], [msg, chatbot])
+     clear.click(lambda: None, None, chatbot, queue=False)
+
+ demo.launch()
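
For reference, a minimal sketch of the prompt that query() assembles before generation. The <usr>/<bot> turn markers come straight from app.py above; the example strings are hypothetical:

    # Hypothetical one-turn history; markers follow query() in app.py.
    chat_history = [("hello", "hi there!")]
    message = "what's up?"

    prompt = []
    for user, bot in chat_history:
        prompt.append(f"<usr> {user}")
        prompt.append(f"<bot> {bot}")
    prompt.append(f"<usr> {message}")
    prompt = "\n".join(prompt) + "\n<bot>"

    print(prompt)
    # <usr> hello
    # <bot> hi there!
    # <usr> what's up?
    # <bot>

The model continues after the trailing <bot> marker, and response = output[len(prompt):] keeps only that continuation, so the echoed prompt never reaches the chat window.
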
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ gradio
+ transformers
+ torch
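
To try the Space locally, the standard Gradio workflow should apply: pip install -r requirements.txt, then python app.py. demo.launch() serves the chat UI on a local port, and the device check in app.py picks "cuda:0" automatically when a GPU is available.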