ofermend committed on
Commit
d26ed68
1 Parent(s): 06e14df

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -28
app.py CHANGED
@@ -6,13 +6,9 @@ import os
6
 
7
  import streamlit as st
8
  from PIL import Image
9
- from functools import partial
10
-
11
- def set_query(q: str):
12
- st.session_state['query'] = q
13
 
14
  def launch_bot():
15
- def get_answer(question):
16
  response = vq.submit_query(question)
17
  return response
18
 
@@ -41,36 +37,36 @@ def launch_bot():
41
  "## How this works?\n"
42
  "This app was built with [Vectara](https://vectara.com).\n"
43
  "Vectara's [Indexing API](https://docs.vectara.com/docs/api-reference/indexing-apis/indexing) was used to ingest the data into a Vectara corpus (or index).\n\n"
44
- "This app uses Vectara API to query the corpus and present the results to you, answering your question.\n\n"
45
  )
46
  st.markdown("---")
47
  st.image(image, width=250)
48
 
49
- st.markdown(f"<center> <h2> Vectara demo app: {cfg.title} </h2> </center>", unsafe_allow_html=True)
50
  st.markdown(f"<center> <h4> {cfg.description} <h4> </center>", unsafe_allow_html=True)
51
 
52
- # Setup a split column layout
53
- main_col, questions_col = st.columns([4, 2], gap="medium")
54
- with main_col:
55
- cols = st.columns([1, 8], gap="small")
56
- cols[0].markdown("""<h5>Search</h5>""", unsafe_allow_html=True)
57
- cols[1].text_input(label="search", key='query', max_chars=256, label_visibility='collapsed', help="Enter your question here")
58
-
59
- st.markdown("<h5>Response</h5>", unsafe_allow_html=True)
60
- response_text = st.empty()
61
- response_text.text_area(f" ", placeholder="The answer will appear here.", disabled=True,
62
- key="response", height=1, label_visibility='collapsed')
63
- with questions_col:
64
- st.markdown("<h5 style='text-align:center; color: red'> Sample questions </h5>", unsafe_allow_html=True)
65
- for q in list(cfg.examples):
66
- st.button(q, on_click=partial(set_query, q), use_container_width=True)
67
-
68
 
69
- # run the main flow
70
- if st.session_state.get('query'):
71
- query = st.session_state['query']
72
- response = get_answer(query)
73
- response_text.markdown(response)
74
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  if __name__ == "__main__":
76
  launch_bot()
 
6
 
7
  import streamlit as st
8
  from PIL import Image
 
 
 
 
9
 
10
  def launch_bot():
11
+ def generate_response(question):
12
  response = vq.submit_query(question)
13
  return response
14
 
 
37
  "## How this works?\n"
38
  "This app was built with [Vectara](https://vectara.com).\n"
39
  "Vectara's [Indexing API](https://docs.vectara.com/docs/api-reference/indexing-apis/indexing) was used to ingest the data into a Vectara corpus (or index).\n\n"
40
+ "This app uses Vectara Chat API to query the corpus and present the results to you, answering your question.\n\n"
41
  )
42
  st.markdown("---")
43
  st.image(image, width=250)
44
 
45
+ st.markdown(f"<center> <h2> Vectara chat demo: {cfg.title} </h2> </center>", unsafe_allow_html=True)
46
  st.markdown(f"<center> <h4> {cfg.description} <h4> </center>", unsafe_allow_html=True)
47
 
48
+ if "messages" not in st.session_state.keys():
49
+ st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
+ # Display chat messages
52
+ for message in st.session_state.messages:
53
+ with st.chat_message(message["role"]):
54
+ st.write(message["content"])
 
55
 
56
+ # User-provided prompt
57
+ if prompt := st.chat_input():
58
+ st.session_state.messages.append({"role": "user", "content": prompt})
59
+ with st.chat_message("user"):
60
+ st.write(prompt)
61
+
62
+ # Generate a new response if last message is not from assistant
63
+ if st.session_state.messages[-1]["role"] != "assistant":
64
+ with st.chat_message("assistant"):
65
+ with st.spinner("Thinking..."):
66
+ response = generate_response(prompt)
67
+ st.write(response)
68
+ message = {"role": "assistant", "content": response}
69
+ st.session_state.messages.append(message)
70
+
71
  if __name__ == "__main__":
72
  launch_bot()