Two column layout
app.py
CHANGED
@@ -1,33 +1,64 @@
 from openai import OpenAI
 import streamlit as st
 
-st.title("
+st.title("HiddenLayer Chat")
 
 client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
 
+col1, col2 = st.columns(2)
+
 if "openai_model" not in st.session_state:
     st.session_state["openai_model"] = "gpt-3.5-turbo"
 
-if "messages" not in st.session_state:
-    st.session_state.messages = []
+if "messages_col_1" not in st.session_state:
+    st.session_state.messages_col_1 = []
+
+if "messages_col_2" not in st.session_state:
+    st.session_state.messages_col_2 = []
+
+for message in st.session_state.messages_col_1:
+    with col1:
+        with st.chat_message(message["role"]):
+            st.markdown(message["content"])
+
+for message in st.session_state.messages_col_2:
+    with col2:
+        with st.chat_message(message["role"]):
+            st.markdown(message["content"])
 
-for message in st.session_state.messages:
-    with st.chat_message(message["role"]):
-        st.markdown(message["content"])
 
 if prompt := st.chat_input("What is up?"):
-    st.session_state.messages.append({"role": "user", "content": prompt})
-    with st.chat_message("user"):
-        st.markdown(prompt)
+    st.session_state.messages_col_1.append({"role": "user", "content": prompt})
+    st.session_state.messages_col_2.append({"role": "user", "content": prompt})
+    with col1:
+        with st.chat_message("user"):
+            st.markdown(prompt)
+
+    with col2:
+        with st.chat_message("user"):
+            st.markdown(prompt)
+
+
+    with st.chat_message("assistant"):
+        stream = client.chat.completions.create(
+            model=st.session_state["openai_model"],
+            messages=[
+                {"role": m["role"], "content": m["content"]}
+                for m in st.session_state.messages_col_1
+            ],
+            stream=True,
+        )
+        response = st.write_stream(stream)
+    st.session_state.messages_col_1.append({"role": "assistant", "content": response})
 
     with st.chat_message("assistant"):
         stream = client.chat.completions.create(
             model=st.session_state["openai_model"],
             messages=[
                 {"role": m["role"], "content": m["content"]}
-                for m in st.session_state.messages
+                for m in st.session_state.messages_col_2
            ],
             stream=True,
         )
         response = st.write_stream(stream)
-    st.session_state.messages.append({"role": "assistant", "content": response})
+    st.session_state.messages_col_2.append({"role": "assistant", "content": response})
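
Note on the new layout: the committed code renders the chat histories and the user echo inside col1/col2, but both assistant blocks run outside the column context managers, so the streamed replies land below the columns rather than inside them, and the same history/streaming logic is written out twice. The sketch below is one way to deduplicate that per-column logic and keep every message inside its column. It is a minimal illustration only, assuming the same OpenAI client and session-state keys as the commit; the render_column helper (its name and signature) is hypothetical and not part of this commit.

# Sketch: run one column's whole exchange inside that column's context manager.
from openai import OpenAI
import streamlit as st

st.title("HiddenLayer Chat")

client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-3.5-turbo"


def render_column(column, history_key, prompt):
    # Replay this column's history, then stream a new reply inside the column.
    if history_key not in st.session_state:
        st.session_state[history_key] = []
    history = st.session_state[history_key]

    with column:
        for message in history:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])

        if prompt is None:
            return

        history.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            stream = client.chat.completions.create(
                model=st.session_state["openai_model"],
                messages=[{"role": m["role"], "content": m["content"]} for m in history],
                stream=True,
            )
            response = st.write_stream(stream)
        history.append({"role": "assistant", "content": response})


prompt = st.chat_input("What is up?")
col1, col2 = st.columns(2)
render_column(col1, "messages_col_1", prompt)
render_column(col2, "messages_col_2", prompt)

As in the commit, a single st.chat_input feeds both columns, but here each column keeps its own messages_col_* history and draws its own assistant stream in place, because every chat element is created inside the column container passed to the helper.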