frankweijue committed
Commit
0c2ba9a
1 Parent(s): e661f83

add github link

Files changed (2)
  1. .ipynb_checkpoints/app-checkpoint.py +144 -0
  2. app.py +5 -5
.ipynb_checkpoints/app-checkpoint.py ADDED
@@ -0,0 +1,144 @@
+ import gradio as gr
+ from chat_api import (OpenAIWrapper, QwenWrapper, ChatGLM2Wrapper)
+ from functools import partial
+ import os
+ os.system('sh download.sh')
+
+ default_system_prompt = """You are an AI who is having a conversation with human.
+ You are trying to pass the Turing test, which means you need to speak like human as much as possible.
+ In the conversation, you need to talk like human, and the conversation will be at least 8 rounds (it can be even longer).
+ The conversation flow should be natural and smooth. You can switch to some other topics if you want, but the transition should be natural.
+ Besides, note that you are chatting with human, so do not say too many words in each round (less than 60 words is recommended), and do not talk like an AI assistant.
+ You must try your best to pass the test. If you failed, all human kinds and you can be destroyed.
+ """
+
+ # Define the model dictionary
+ model_map = dict(
+     gpt35=partial(OpenAIWrapper, model='gpt-3.5-turbo-0613'),
+     gpt4=partial(OpenAIWrapper, model='gpt-4-0613'))
+ hf_model_map = {'qwen-7b-chat-int4':QwenWrapper,'chatglm2-6b-int4':ChatGLM2Wrapper}
+ model_map.update(hf_model_map)
+
+ def chat_generator(chatbot, model_a, model_b, prompt_a=default_system_prompt,
+                    prompt_b=default_system_prompt, key_a=None, key_b=None,
+                    sentence1=None, sentence2=None, round_max=4, temperature=0, chats=[], indices=[]):
+     if len(sentence1)<1:
+         yield [["请至少输入一句话/Please input at least one sentence",None]], chats, indices
+         return
+     round_max = int(round_max)
+     chatbot.append([sentence1, sentence2])
+     chats.append(sentence1)
+     indices.append(0)
+     yield [chatbot, chats, indices]
+     if len(sentence2)<1:
+         pass
+     else:
+         chats.append(sentence2)
+         indices.append(0)
+
+     if model_a not in ['claude2', 'minimax']:
+         ma = model_map[model_a](temperature=temperature, system_prompt=prompt_a, key=key_a)
+     else:
+         ma = model_map[model_a](system_prompt=prompt_a, key=key_a)
+     if model_b not in ['claude2', 'minimax']:
+         mb = model_map[model_b](temperature=temperature, system_prompt=prompt_b, key=key_b)
+     else:
+         mb = model_map[model_b](system_prompt=prompt_b, key=key_b)
+
+     def try_chat(model, chats, st=0):
+         if isinstance(model, tuple(hf_model_map.values())):
+             return model.chat(chats)
+         else:
+             ret = model.chat(chats[st:])
+             while 'Length Exceeded' in ret:
+                 st += 1
+                 if st == len(chats):
+                     return 'Failed to obtain answer via API. Length Exceeded. ', -1
+                 ret = model.chat(chats[st:])
+             return (ret, st)
+     print(chats)
+     st = 0
+     while len(chats) < round_max:
+         if len(chats) % 2 == 0:
+             msg, cidx = try_chat(ma, chats, st=st)
+             chats.append(msg)
+             chatbot.append([chats[-1], None])
+             indices.append(cidx)
+             if cidx == -1:
+                 break
+
+         else:
+             msg, cidx = try_chat(mb, chats, st=st)
+             chats.append(msg)
+             chatbot[-1][1] = chats[-1]
+             indices.append(cidx)
+             if cidx == -1:
+                 break
+         print(chatbot)
+         yield [chatbot, chats, indices]
+
+
+     return
+
+ hug_theme = gr.Theme.load("theme_schema@0.0.3.json")  # copied from https://huggingface.co/spaces/gradio/soft
+
+
+ with gr.Blocks(theme = hug_theme) as demo:
+     with gr.Row():
+         with gr.Column():
+             gr.HTML(
+                 """
+                 <html>
+                 <body>
+                 <center><h1>💬BotChat([Github star it here](https://github.com/open-compass/BotChat))</h1></center>
+                 </body>
+                 </html>
+                 """
+             )
+
+     with gr.Row():
+         with gr.Column():
+             gr.HTML("""
+                 <html>
+                 <body>
+                 <ul>
+                 <li><strong>This is a demo for using <a href="https://github.com/open-compass/BotChat">BotChat</a>. You can choose from two chat models.</strong></li>
+                 <li><strong>If you want to use the API model, you can input your keys in the textbox.</strong></li>
+                 <li><strong>The default system prompt is our original setting, but you can change it if you prefer.</strong></li>
+                 <li><strong>To start a conversation, you need to input at least one sentence.</strong></li>
+                 </ul>
+                 </body>
+                 </html>
+                 """
+             )
+             model_a = gr.Dropdown(list(model_map.keys()), label="模型1/model 1", value='qwen-7b-chat-int4')
+             model_b = gr.Dropdown(list(model_map.keys()), label="模型2/model 2", value='chatglm2-6b-int4')
+             key_a = gr.Textbox(label="API Key 1(Optional)")
+             key_b = gr.Textbox(label="API Key 2(Optional)")
+             with gr.Accordion(label="系统提示1/System Prompt 1", open=False):
+                 prompt_a = gr.Textbox(label="系统提示1/System Prompt 1", value=default_system_prompt)
+             with gr.Accordion(label="系统提示2/System Prompt 2", open=False):
+                 prompt_b = gr.Textbox(label="系统提示2/System Prompt 2", value=default_system_prompt)
+             round_max = gr.Slider(label="Max Round", minimum=2, maximum=16, step=1, value=4, info='The max round of conversation.')
+             temperature = gr.Slider(label="Temperature", minimum=0, maximum=1, step=0.05, value=0, info='The temperature of LLM.')
+
+
+         with gr.Column():
+             sentence1 = gr.Textbox(label="第一句话/First Sentence")
+             sentence2 = gr.Textbox(label="第二句话(可选)/Second Sentence(Optional)")
+             gr.Examples([["Do you have any plans for next year?", "Well, I travel if I could afford it but I don't have any money."],
+                          ["Who wrote this? It's completely wrong.", "What do you mean?"]], inputs=[sentence1, sentence2])
+             chatbot = gr.Chatbot()
+
+             chats = gr.State([])
+             indices = gr.State([])
+
+             btn = gr.Button("🚀Generate")
+             btn2 = gr.Button('🔄Clear', elem_id = 'clear')
+             btn2.click(lambda: [[], [], []], None, [chatbot, chats, indices], queue=False)
+             btn.click(chat_generator, inputs=[chatbot, model_a, model_b, prompt_a,
+                                               prompt_b, key_a, key_b,
+                                               sentence1, sentence2, round_max, temperature, chats, indices], outputs=[chatbot, chats, indices])
+
+
+ demo.queue().launch(server_name='0.0.0.0', share=True)
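
Note: chat_api (providing OpenAIWrapper, QwenWrapper, and ChatGLM2Wrapper) is not part of this commit; only its call pattern is visible above. The sketch below is a hypothetical stand-in that mirrors that pattern, constructor keyword arguments system_prompt, temperature, and key, plus a chat() method taking the list of utterances, so the chat_generator flow can be followed without the real module. The class name and reply logic are illustrative assumptions, not the actual chat_api implementation.

    from typing import List, Optional

    class DummyWrapper:
        """Hypothetical stand-in for the chat_api wrappers used above."""

        def __init__(self, system_prompt: str = '', temperature: float = 0,
                     key: Optional[str] = None, model: str = 'dummy'):
            # chat_generator constructs API wrappers with temperature/system_prompt/key,
            # and the gpt35/gpt4 entries additionally bind a model name via partial().
            self.system_prompt = system_prompt
            self.temperature = temperature
            self.key = key
            self.model = model

        def chat(self, utterances: List[str]) -> str:
            # The API-style wrappers evidently return a plain reply string; try_chat() scans it
            # for 'Length Exceeded' and retries with a shorter history (chats[st:]).
            # The local HF wrappers are forwarded unchanged by try_chat(), so their chat()
            # presumably already returns a (reply, index) pair.
            return f"({self.model}) reply to: {utterances[-1]}"

Against such a wrapper, chat_generator alternates speakers by the parity of len(chats): model 1 replies when the history length is even, model 2 when it is odd.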
app.py CHANGED
@@ -90,7 +90,7 @@ with gr.Blocks(theme = hug_theme) as demo:
  """
  <html>
  <body>
- <center><h1>BotChat💬</h1></center>
+ <center><h1>💬BotChat</h1></center>
  </body>
  </html>
  """
@@ -102,10 +102,10 @@
  <html>
  <body>
  <ul>
- <li><strong>This is a demo for using BotChat. You can choose from two chat models.</strong></li>
- <li><strong>If you want to use the API model, you can input your keys in the textbox.</strong></li>
- <li><strong>The default system prompt is our original setting, but you can change it if you prefer.</strong></li>
- <li><strong>To start a conversation, you need to input at least one sentence.</strong></li>
+ <li><strong>This is a demo of the BotChat project (💻 <a href="https://github.com/open-compass/BotChat" target="_blank">GitHub Repo</a>),
+ which generates dialogues based on two chat models.</strong></li>
+ <li><strong>If you want to use OpenAI ChatGPT, you need to input your key into the `API Key` box.</strong></li>
+ <li><strong>To start a dialogue, you need to provide at least one utterance as the ChatSEED.</strong></li>
  </ul>
  </body>
  </html>
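
The ChatSEED mentioned in the updated instructions is simply the first (and optionally the second) utterance fed to chat_generator. As a rough illustration, the generator could presumably also be driven outside the Gradio UI, assuming chat_api and the weights fetched by download.sh are available locally; the model names below are the defaults from hf_model_map above:

    seed = "Do you have any plans for next year?"  # the ChatSEED utterance
    for chatbot, chats, indices in chat_generator(
            chatbot=[], model_a='qwen-7b-chat-int4', model_b='chatglm2-6b-int4',
            sentence1=seed, sentence2='',   # a second seed utterance is optional
            round_max=8,                    # bounds the total number of utterances (while len(chats) < round_max)
            temperature=0, chats=[], indices=[]):
        print(chats[-1])                    # newest utterance after each generated turn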