Spaces:
Running
Running
added sample questions
Browse files
- app.py +34 -5
- requirements.txt +2 -1
app.py
CHANGED
@@ -1,12 +1,14 @@
|
|
1 |
|
2 |
-
from omegaconf import OmegaConf
|
3 |
-
import streamlit as st
|
4 |
import os
|
5 |
from PIL import Image
|
6 |
import sys
|
7 |
import pandas as pd
|
8 |
import requests
|
9 |
|
|
|
|
|
|
|
|
|
10 |
from dotenv import load_dotenv
|
11 |
load_dotenv(override=True)
|
12 |
|
@@ -139,6 +141,15 @@ def initialize_agent(_cfg):
|
|
139 |
def toggle_logs():
|
140 |
st.session_state.show_logs = not st.session_state.show_logs
|
141 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
142 |
def launch_bot():
|
143 |
def reset():
|
144 |
st.session_state.messages = [{"role": "assistant", "content": initial_prompt, "avatar": "🦖"}]
|
@@ -146,6 +157,7 @@ def launch_bot():
|
|
146 |
st.session_state.log_messages = []
|
147 |
st.session_state.prompt = None
|
148 |
st.session_state.show_logs = False
|
|
|
149 |
|
150 |
st.set_page_config(page_title="Financial Assistant", layout="wide")
|
151 |
if 'cfg' not in st.session_state:
|
@@ -153,8 +165,12 @@ def launch_bot():
|
|
153 |
'customer_id': str(os.environ['VECTARA_CUSTOMER_ID']),
|
154 |
'corpus_id': str(os.environ['VECTARA_CORPUS_ID']),
|
155 |
'api_key': str(os.environ['VECTARA_API_KEY']),
|
|
|
156 |
})
|
157 |
st.session_state.cfg = cfg
|
|
|
|
|
|
|
158 |
reset()
|
159 |
|
160 |
cfg = st.session_state.cfg
|
@@ -193,8 +209,18 @@ def launch_bot():
|
|
193 |
with st.chat_message(message["role"], avatar=message["avatar"]):
|
194 |
st.write(message["content"])
|
195 |
|
|
|
|
|
|
|
|
|
|
|
|
|
196 |
# User-provided prompt
|
197 |
-
if
|
|
|
|
|
|
|
|
|
198 |
st.session_state.messages.append({"role": "user", "content": prompt, "avatar": '🧑‍💻'})
|
199 |
st.session_state.prompt = prompt # Save the prompt in session state
|
200 |
st.session_state.log_messages = []
|
@@ -202,6 +228,7 @@ def launch_bot():
|
|
202 |
with st.chat_message("user", avatar='🧑‍💻'):
|
203 |
print(f"Starting new question: {prompt}\n")
|
204 |
st.write(prompt)
|
|
|
205 |
|
206 |
# Generate a new response if last message is not from assistant
|
207 |
if st.session_state.prompt:
|
@@ -212,8 +239,10 @@ def launch_bot():
|
|
212 |
message = {"role": "assistant", "content": res, "avatar": '🤖'}
|
213 |
st.session_state.messages.append(message)
|
214 |
st.markdown(res)
|
215 |
-
|
216 |
-
|
|
|
|
|
217 |
log_placeholder = st.empty()
|
218 |
with log_placeholder.container():
|
219 |
if st.session_state.show_logs:
|
|
|
1 |
|
|
|
|
|
2 |
import os
|
3 |
from PIL import Image
|
4 |
import sys
|
5 |
import pandas as pd
|
6 |
import requests
|
7 |
|
8 |
+
from omegaconf import OmegaConf
|
9 |
+
import streamlit as st
|
10 |
+
from streamlit_pills import pills
|
11 |
+
|
12 |
from dotenv import load_dotenv
|
13 |
load_dotenv(override=True)
|
14 |
|
|
|
141 |
def toggle_logs():
|
142 |
st.session_state.show_logs = not st.session_state.show_logs
|
143 |
|
144 |
+
def show_example_questions():
|
145 |
+
if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
|
146 |
+
selected_example = pills("Queries to Try:", st.session_state.example_messages, index=None)
|
147 |
+
if selected_example:
|
148 |
+
st.session_state.ex_prompt = selected_example
|
149 |
+
st.session_state.first_turn = False
|
150 |
+
return True
|
151 |
+
return False
|
152 |
+
|
153 |
def launch_bot():
|
154 |
def reset():
|
155 |
st.session_state.messages = [{"role": "assistant", "content": initial_prompt, "avatar": "🦖"}]
|
|
|
157 |
st.session_state.log_messages = []
|
158 |
st.session_state.prompt = None
|
159 |
st.session_state.show_logs = False
|
160 |
+
st.session_state.first_turn = True
|
161 |
|
162 |
st.set_page_config(page_title="Financial Assistant", layout="wide")
|
163 |
if 'cfg' not in st.session_state:
|
|
|
165 |
'customer_id': str(os.environ['VECTARA_CUSTOMER_ID']),
|
166 |
'corpus_id': str(os.environ['VECTARA_CORPUS_ID']),
|
167 |
'api_key': str(os.environ['VECTARA_API_KEY']),
|
168 |
+
'examples': os.environ.get('QUERY_EXAMPLES', None)
|
169 |
})
|
170 |
st.session_state.cfg = cfg
|
171 |
+
st.session_state.ex_prompt = None
|
172 |
+
example_messages = [example.strip() for example in cfg.examples.split(",")] if cfg.examples else []
|
173 |
+
st.session_state.example_messages = [em for em in example_messages if len(em)>0]
|
174 |
reset()
|
175 |
|
176 |
cfg = st.session_state.cfg
|
|
|
209 |
with st.chat_message(message["role"], avatar=message["avatar"]):
|
210 |
st.write(message["content"])
|
211 |
|
212 |
+
example_container = st.empty()
|
213 |
+
with example_container:
|
214 |
+
if show_example_questions():
|
215 |
+
example_container.empty()
|
216 |
+
st.rerun()
|
217 |
+
|
218 |
# User-provided prompt
|
219 |
+
if st.session_state.ex_prompt:
|
220 |
+
prompt = st.session_state.ex_prompt
|
221 |
+
else:
|
222 |
+
prompt = st.chat_input()
|
223 |
+
if prompt:
|
224 |
st.session_state.messages.append({"role": "user", "content": prompt, "avatar": '🧑‍💻'})
|
225 |
st.session_state.prompt = prompt # Save the prompt in session state
|
226 |
st.session_state.log_messages = []
|
|
|
228 |
with st.chat_message("user", avatar='🧑‍💻'):
|
229 |
print(f"Starting new question: {prompt}\n")
|
230 |
st.write(prompt)
|
231 |
+
st.session_state.ex_prompt = None
|
232 |
|
233 |
# Generate a new response if last message is not from assistant
|
234 |
if st.session_state.prompt:
|
|
|
239 |
message = {"role": "assistant", "content": res, "avatar": '🤖'}
|
240 |
st.session_state.messages.append(message)
|
241 |
st.markdown(res)
|
242 |
+
st.session_state.ex_prompt = None
|
243 |
+
st.session_state.prompt = None
|
244 |
+
st.rerun()
|
245 |
+
|
246 |
log_placeholder = st.empty()
|
247 |
with log_placeholder.container():
|
248 |
if st.session_state.show_logs:
|
requirements.txt
CHANGED
@@ -1,5 +1,6 @@
|
|
1 |
omegaconf==2.3.0
|
2 |
-
streamlit==1.32.2
|
3 |
pydantic==1.10.15
|
4 |
python-dotenv==1.0.1
|
|
|
|
|
5 |
git+https://{GITHUB_TOKEN}@github.com/vectara/vectara-agent.git
|
|
|
1 |
omegaconf==2.3.0
|
|
|
2 |
pydantic==1.10.15
|
3 |
python-dotenv==1.0.1
|
4 |
+
streamlit==1.32.2
|
5 |
+
streamlit_pills==0.3.0
|
6 |
git+https://{GITHUB_TOKEN}@github.com/vectara/vectara-agent.git
|