Create app.py
app.py
ADDED
@@ -0,0 +1,81 @@
from hugchat import hugchat
from hugchat.login import Login
import streamlit as st

# Persist login state and HuggingFace cookies across Streamlit reruns.
if "logged_in" not in st.session_state:
    st.session_state.logged_in = False
if "cookies" not in st.session_state:
    st.session_state.cookies = None

# Models selectable in the sidebar.
llms = [
    'meta-llama/Meta-Llama-3.1-70B-Instruct',
    'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
    'CohereForAI/c4ai-command-r-plus',
    'mistralai/Mixtral-8x7B-Instruct-v0.1',
    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
    '01-ai/Yi-1.5-34B-Chat',
    'mistralai/Mistral-7B-Instruct-v0.3',
    'microsoft/Phi-3-mini-4k-instruct'
]

st.title("HugChat LLM Chatbot")
st.markdown("Made by Vo1d_s")

# Sidebar: HuggingFace credentials.
sidebar = st.sidebar
sidebar.markdown("## HuggingFace account")
email = sidebar.text_input("Email", placeholder="Your email", disabled=st.session_state.logged_in)
password = sidebar.text_input("Password (secure)", placeholder="Your password", type="password", disabled=st.session_state.logged_in)
empty = sidebar.empty()
sidebar.warning("Note: You may get an email saying that someone logged into your account because this app signs in to HuggingFace on your behalf; it is safe.")

def change_system_prompt():
    # Reset the conversation and recreate the chatbot with the new
    # system prompt and selected model.
    st.session_state.messages = []
    st.session_state.chatbot.delete_conversation()
    st.session_state.chatbot = hugchat.ChatBot(st.session_state.cookies, system_prompt=st.session_state.sys_prompt, default_llm=st.session_state.selected_llm)
    print(st.session_state.selected_llm)

# Sidebar: model configuration (enabled only after login).
sidebar.markdown("## Model Configuration")
system_prompt = sidebar.text_input("System prompt (don't leave empty)", placeholder="Model behaviour", on_change=change_system_prompt, key="sys_prompt", disabled=not st.session_state.logged_in)
selected_llm = sidebar.selectbox("LLM (model)", placeholder="Default LLM", on_change=change_system_prompt, options=llms, key="selected_llm", disabled=not st.session_state.logged_in)

# Log in once credentials are provided, then create the chatbot.
if email and password:
    if not st.session_state.logged_in:
        try:
            credentials = Login(email=email, passwd=password)
            st.session_state.cookies = credentials.login()
            st.session_state.logged_in = True
            st.session_state.chatbot = hugchat.ChatBot(st.session_state.cookies)
        except Exception as e:
            empty.error(f"Error: {e}")

def stream(prompt):
    # Yield tokens from the chatbot as they arrive so Streamlit can
    # render the response incrementally.
    for event in st.session_state.chatbot.chat(prompt):
        if event:
            yield event["token"]


def main():
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the chat history on every rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("What is up?", disabled=not st.session_state.logged_in):
        with st.chat_message("user"):
            st.write(prompt)

        st.session_state.messages.append({"role": "user", "content": prompt})

        with st.chat_message("assistant"):
            try:
                response = st.write_stream(stream(prompt))
            except Exception as e:
                response = f"Error: {e}"
                st.write(response)

        st.session_state.messages.append({"role": "assistant", "content": response})

if __name__ == "__main__":
    main()
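For reference, app.py only needs the two libraries it imports. A minimal requirements.txt for the Space might look like the sketch below; that file is not part of this commit, so its exact contents (and any version pins) are an assumption.

# requirements.txt (hypothetical; unpinned)
streamlit
hugchat

Outside of Spaces, the same app can be started locally with streamlit run app.py once those packages are installed.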