Metin committed
Commit 4131ea5 • 1 Parent(s): f1c975f

Initial commit

Files changed (6)
  1. README.md +8 -3
  2. app.py +300 -0
  3. assets/Mistral.svg +32 -0
  4. assets/claude-ai-icon.svg +1 -0
  5. assets/openai.svg +1 -0
  6. requirements.txt +87 -0
README.md CHANGED
@@ -1,8 +1,8 @@
 ---
 title: Multi LLM Chat
-emoji: 🐒
-colorFrom: pink
-colorTo: indigo
+emoji: 🚀
+colorFrom: yellow
+colorTo: purple
 sdk: streamlit
 sdk_version: 1.32.2
 app_file: app.py
@@ -11,3 +11,8 @@ license: mit
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+
+Assets:
+OpenAI logo: https://iconduck.com/designers/aasaam
+Anthropic logo: https://uxwing.com/claude-ai-icon/
+Mistral logo: https://github.com/gilbarbara/logos?files=1
app.py ADDED
@@ -0,0 +1,300 @@
+import traceback
+from time import sleep
+
+import anthropic
+import streamlit as st
+import streamlit.components.v1 as components
+from mistralai.client import MistralClient
+from mistralai.models.chat_completion import ChatMessage
+from openai import OpenAI
+
+
+def create_client(client_class, api_key_name):
+    if api_key_name not in st.session_state:
+        return None
+    return client_class(api_key=st.session_state[api_key_name])
+
+
+# Create clients
+openai_client = create_client(OpenAI, "openai_key")
+claude_client = create_client(anthropic.Anthropic, "claude_key")
+mistral_client = create_client(MistralClient, "mistral_key")
+
+# Initialize counter
+if "counter" not in st.session_state:
+    st.session_state["counter"] = 0
+
+
+# Increment the counter to re-trigger the JavaScript at the bottom of the page so that focus always returns to the input field.
+def increment_counter():
+    st.session_state.counter += 1
+
+
+# Create the debate text from the chat history.
+# The common system prompt comes first.
+# Then all messages are concatenated, each preceded by its speaker's title. The final title is that of the role which speaks next.
+def create_debate_text(role):
+    debate_text = ""
+    debate_text += common_system_prompt + "\n"
+
+    turn_titles = {
+        "openai": "\nChatGPT:",
+        "mistral": "\nMistral:",
+        "claude": "\nClaude:",
+        "user": "\nUser:",
+        "end": "End of debate",
+    }
+
+    if len(st.session_state.messages) == 0:
+        debate_text += "\n" + turn_titles[role]
+        return debate_text
+
+    for message in st.session_state.messages:
+        debate_text += "\n".join(
+            [turn_titles[message["role"]], message["content"], "\n"]
+        )
+
+    debate_text += "\n\n" + turn_titles[role]
+    return debate_text
+
+
+# Initialize chat history
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+# Create sidebar
+with st.sidebar:
+    st.image("assets/openai.svg", width=20)
+    openai_api_key = st.text_input("OpenAI API Key", type="password")
+    openai_system_prompt = st.text_area(
+        "OpenAI System Prompt",
+        value="You are ChatGPT. You are agreeing with the debate topic.",
+    )
+    if (
+        "openai_key" not in st.session_state
+        or openai_api_key != st.session_state.openai_key
+    ) and openai_api_key != "":
+        st.session_state.openai_key = openai_api_key
+        openai_client = OpenAI(api_key=openai_api_key)
+        st.toast("OpenAI API key is set.", icon="✔️")
+
+    st.divider()
+
+    st.image("assets/Mistral.svg", width=20)
+    mistral_api_key = st.text_input("Mistral API Key", type="password")
+    mistral_system_prompt = st.text_area(
+        "Mistral System Prompt",
+        value="You are Mistral. You are disagreeing with the debate topic.",
+    )
+    if (
+        "mistral_key" not in st.session_state
+        or mistral_api_key != st.session_state.mistral_key
+    ) and mistral_api_key != "":
+        st.session_state.mistral_key = mistral_api_key
+        mistral_client = MistralClient(api_key=mistral_api_key)
+        st.toast("Mistral API key is set.", icon="✔️")
+
+    st.divider()
+
+    st.image("assets/claude-ai-icon.svg", width=20)
+    claude_api_key = st.text_input("Claude API Key", type="password")
+    claude_system_prompt = st.text_area(
+        "Claude System Prompt",
+        value="You are Claude. You are neutral to the debate topic.",
+    )
+    if (
+        "claude_key" not in st.session_state
+        or claude_api_key != st.session_state.claude_key
+    ) and claude_api_key != "":
+        st.session_state.claude_key = claude_api_key
+        claude_client = anthropic.Anthropic(api_key=claude_api_key)
+        st.toast("Claude API key is set.", icon="✔️")
+
+    st.divider()
+
+    common_system_prompt = st.text_area(
+        "Common System Prompt",
+        value="The following is a conversation from a debate group. You will state your opinion when it is your turn. The user will relay the participants' responses to you and your response to the participants so that you can communicate.",
+        height=300,
+    )
+
+
+# Display chat messages from history on app rerun
+with st.container(border=True):
+    for message in st.session_state.messages:
+        with st.chat_message(message["name"], avatar=message["avatar"]):
+            st.markdown(message["content"])
+
+with st.container(border=True):
+
+    def is_last_message_role(role):
+        if len(st.session_state.messages) == 0:
+            return False
+        return st.session_state.messages[-1]["role"] == role
+
+    def get_chatgpt_response():
+        try:
+            debate_text = create_debate_text("openai")
+            completion = openai_client.chat.completions.create(
+                model="gpt-3.5-turbo",
+                messages=[
+                    {"role": "system", "content": openai_system_prompt},
+                    {
+                        "role": "user",
+                        "content": debate_text,
+                    },
+                ],
+            )
+
+            st.session_state.messages.append(
+                {
+                    "name": "user",
+                    "role": "openai",
+                    "content": completion.choices[0].message.content,
+                    "avatar": "assets/openai.svg",
+                }
+            )
+        except Exception as e:
+            print(e, traceback.format_exc())
+
+    def get_mistral_response():
+        try:
+            debate_text = create_debate_text("mistral")
+            message = mistral_client.chat(
+                model="mistral-large-latest",
+                messages=[
+                    ChatMessage(
+                        role="system",
+                        content=mistral_system_prompt,
+                    ),
+                    ChatMessage(
+                        role="user",
+                        content=debate_text,
+                    ),
+                ],
+            )
+
+            st.session_state.messages.append(
+                {
+                    "name": "user",
+                    "role": "mistral",
+                    "content": message.choices[0].message.content,
+                    "avatar": "assets/Mistral.svg",
+                }
+            )
+        except Exception as e:
+            print(e, traceback.format_exc())
+
+    def get_claude_response():
+        try:
+            debate_text = create_debate_text("claude")
+            message = claude_client.messages.create(
+                model="claude-3-sonnet-20240229",
+                max_tokens=1000,
+                temperature=0,
+                system=claude_system_prompt,
+                messages=[
+                    {
+                        "role": "user",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": debate_text,
+                            }
+                        ]
+                    },
+                ],
+            )
+
+            st.session_state.messages.append(
+                {
+                    "name": "user",
+                    "role": "claude",
+                    "content": message.content[0].text,
+                    "avatar": "assets/claude-ai-icon.svg",
+                }
+            )
+        except Exception as e:
+            print(e, traceback.format_exc())
+
+    # React to user input
+    with st.container():
+        if prompt := st.chat_input(
+            "Start the conversation.", on_submit=increment_counter
+        ):
+            st.session_state.messages.append(
+                {"name": "user", "role": "user", "content": prompt, "avatar": "❔"}
+            )
+
+            st.rerun()
+
+    with st.container(border=False):
+        col1, col2, col3 = st.columns(3)
+
+        with col1:
+            st.button(
+                "ChatGPT",
+                on_click=get_chatgpt_response,
+                disabled=is_last_message_role("openai"),
+                type="primary",
+            )
+
+        with col2:
+            st.button(
+                "Mistral",
+                on_click=get_mistral_response,
+                disabled=is_last_message_role("mistral"),
+                type="primary",
+            )
+
+        with col3:
+            st.button(
+                "Claude",
+                on_click=get_claude_response,
+                disabled=is_last_message_role("claude"),
+                type="primary",
+            )
+
+        col1, col2 = st.columns(2)
+        with col1:
+            st.button(
+                "Clear chat", on_click=lambda: st.session_state.pop("messages", None)
+            )
+        with col2:
+            # Save the chat history to a file.
+            st.download_button(
+                "Save chat history",
+                data=create_debate_text("end"),
+                file_name="chat_history.txt",
+                mime="text/plain",
+            )
+
+
+custom_css = """
+<style>
+    .stButton{
+        display: flex;
+        justify-content: center;
+        align-items: center;
+    }
+</style>
+"""
+
+st.markdown(custom_css, unsafe_allow_html=True)
+
+components.html(
+    f"""
+    <div>some hidden container</div>
+    <p>{st.session_state.counter}</p>
+    <script>
+        console.log("Hello from the other side");
+        var input = window.parent.document.querySelectorAll("textarea[type=textarea]");
+        console.log(input);
+        for (var i = 0; i < input.length; ++i) {{
+            console.log(input[i]);
+            input[i].focus();
+        }}
+    </script>
+    """,
+    height=0,
+)
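For orientation, the turn-taking format that `create_debate_text` builds is a single plain-text transcript: the common system prompt first, then each stored message under its speaker's title, closed with the title of whoever speaks next so the addressed model replies in character. A minimal standalone sketch of that format, where the `messages` list and `common_system_prompt` are made-up stand-ins for `st.session_state.messages` and the sidebar value:

```python
# Standalone sketch of the transcript format produced by create_debate_text.
# The messages and prompt below are illustrative, not taken from the app.
common_system_prompt = "Following is a conversation from a debate group."
messages = [
    {"role": "user", "content": "Topic: remote work is better than office work."},
    {"role": "openai", "content": "I agree: remote work improves focus."},
]
turn_titles = {
    "openai": "\nChatGPT:",
    "mistral": "\nMistral:",
    "claude": "\nClaude:",
    "user": "\nUser:",
    "end": "End of debate",
}

def build_debate_text(role):
    # Same concatenation scheme as the app: prompt, then "Title:\ncontent" blocks,
    # ending with the title of the role that should answer next.
    text = common_system_prompt + "\n"
    for message in messages:
        text += "\n".join([turn_titles[message["role"]], message["content"], "\n"])
    text += "\n\n" + turn_titles[role]
    return text

print(build_debate_text("mistral"))
# The transcript ends with "\nMistral:", cueing the Mistral model to speak next.
```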
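The `counter` in session state exists only to change the HTML passed to `components.html` on each submit: because the rendered markup differs between reruns, the hidden iframe is re-rendered and its script runs again, pulling focus back to the chat textarea. A stripped-down sketch of that pattern, assuming a Streamlit app; the textarea selector here is a guess and may need adjusting for a given Streamlit version:

```python
# Minimal sketch of the "re-run a script on every submit" focus trick.
import streamlit as st
import streamlit.components.v1 as components

if "counter" not in st.session_state:
    st.session_state.counter = 0

def bump():
    # Changing the counter changes the HTML below, forcing the iframe to re-render.
    st.session_state.counter += 1

st.chat_input("Type here", on_submit=bump)

components.html(
    f"""
    <p style="display:none">{st.session_state.counter}</p>
    <script>
        // Runs on every re-render; refocus the last textarea on the parent page.
        const areas = window.parent.document.querySelectorAll("textarea");
        if (areas.length) {{ areas[areas.length - 1].focus(); }}
    </script>
    """,
    height=0,
)
```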
assets/Mistral.svg ADDED
assets/claude-ai-icon.svg ADDED
assets/openai.svg ADDED
requirements.txt ADDED
@@ -0,0 +1,87 @@
+aiofiles==23.2.1
+altair==5.2.0
+annotated-types==0.6.0
+anthropic==0.19.2
+anyio==4.3.0
+attrs==23.2.0
+blinker==1.7.0
+cachetools==5.3.3
+certifi==2024.2.2
+charset-normalizer==3.3.2
+click==8.1.7
+colorama==0.4.6
+contourpy==1.2.0
+cycler==0.12.1
+distro==1.9.0
+fastapi==0.110.0
+ffmpy==0.3.2
+filelock==3.13.1
+fonttools==4.49.0
+fsspec==2024.2.0
+gitdb==4.0.11
+GitPython==3.1.42
+gradio==4.19.2
+gradio_client==0.10.1
+h11==0.14.0
+httpcore==1.0.4
+httpx==0.25.2
+huggingface-hub==0.21.3
+idna==3.6
+importlib-metadata==7.0.1
+importlib_resources==6.1.2
+Jinja2==3.1.3
+jsonschema==4.21.1
+jsonschema-specifications==2023.12.1
+kiwisolver==1.4.5
+markdown-it-py==3.0.0
+MarkupSafe==2.1.5
+matplotlib==3.8.3
+mdurl==0.1.2
+mistralai==0.1.6
+numpy==1.26.4
+openai==1.13.3
+orjson==3.9.15
+packaging==23.2
+pandas==2.2.1
+pillow==10.2.0
+protobuf==4.25.3
+pyarrow==15.0.0
+pydantic==2.6.3
+pydantic_core==2.16.3
+pydeck==0.8.1b0
+pydub==0.25.1
+Pygments==2.17.2
+pyparsing==3.1.1
+python-dateutil==2.9.0.post0
+python-multipart==0.0.9
+pytz==2024.1
+PyYAML==6.0.1
+referencing==0.33.0
+requests==2.31.0
+rich==13.7.1
+rpds-py==0.18.0
+ruff==0.3.0
+semantic-version==2.10.0
+shellingham==1.5.4
+six==1.16.0
+smmap==5.0.1
+sniffio==1.3.1
+starlette==0.36.3
+streamlit==1.31.1
+tenacity==8.2.3
+tokenizers==0.15.2
+toml==0.10.2
+tomlkit==0.12.0
+toolz==0.12.1
+tornado==6.4
+tqdm==4.66.2
+typer==0.9.0
+typing_extensions==4.10.0
+tzdata==2024.1
+tzlocal==5.2
+urllib3==2.2.1
+uvicorn==0.27.1
+validators==0.22.0
+watchdog==4.0.0
+websockets==11.0.3
+zipp==3.17.0