yrobel-lima committed on
Commit
7a29175
β€’
1 Parent(s): ea992cb

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +108 -144
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,13 +1,16 @@
 
 
 
1
  import openai
2
  import streamlit as st
3
- from langchain_core.messages import AIMessage, ChatMessage, HumanMessage
4
- from langchain_core.tracers.context import collect_runs
5
- from langsmith import Client
6
- from streamlit_feedback import streamlit_feedback
7
 
8
- from rag.runnable import get_runnable
9
  from utils.error_message_template import ERROR_MESSAGE
10
 
 
 
 
11
  # Streamlit page configuration
12
  st.set_page_config(
13
  page_title="ELLA AI Assistant",
@@ -21,166 +24,127 @@ with open("styles/styles.css") as css:
21
  st.markdown(f"<style>{css.read()}</style>", unsafe_allow_html=True)
22
 
23
 
24
- # Get runnable and memory
25
- @st.cache_resource(show_spinner=False)
26
- def get_runnable_and_memory():
27
- try:
28
- return get_runnable(model="gpt-4-turbo", temperature=0)
29
- except Exception:
30
- st.warning(ERROR_MESSAGE, icon="πŸ™")
31
- st.stop()
32
-
33
-
34
- chain, memory = get_runnable_and_memory()
35
-
36
-
37
- # Set up session state variables
38
- # Clean memory (important! to clean the memory at the end of each session)
39
- if "history" not in st.session_state:
40
- st.session_state["history"] = []
41
- memory.clear()
42
 
43
- if "messages" not in st.session_state:
44
- st.session_state["messages"] = []
 
45
 
46
- if "selected_location" not in st.session_state:
47
- st.session_state["selected_location"] = None
48
 
49
- if "disable_chat_input" not in st.session_state:
50
- st.session_state["disable_chat_input"] = True
51
 
52
 
53
- # Welcome message and Selectbox for location preferences
54
- def welcome_message():
55
- st.markdown(
56
- "Hello there! πŸ‘‹ Need help finding the right service or practitioner? Let our AI assistant give you a hand.\n\n"
57
- "To get started, please select your preferred location and share details about your symptoms or needs. "
58
- )
59
 
60
 
 
61
  def on_change_location():
62
  st.session_state["disable_chat_input"] = (
63
  False if st.session_state["selected_location"] else True
64
  )
65
 
66
 
67
- with st.container():
68
- welcome_message()
69
- location = st.radio(
70
- "**Our Locations**:",
71
- (
72
- "Cordova Bay - Victoria",
73
- "James Bay - Victoria",
74
- "Commercial Drive - Vancouver",
75
- ),
76
- index=None,
77
- label_visibility="visible",
78
- key="selected_location",
79
- on_change=on_change_location,
80
- )
81
- st.markdown("<br>", unsafe_allow_html=True)
82
-
83
- # Get user input only if a location is selected
84
- user_input = st.chat_input(
85
- "Ask ELLA...", disabled=st.session_state["disable_chat_input"]
86
- )
87
-
88
- if user_input:
89
- st.session_state["messages"].append(ChatMessage(role="user", content=user_input))
90
- prompt = f"{user_input}\nLocation preference: {st.session_state.selected_location}."
91
-
92
- else:
93
- prompt = None
94
-
95
- # Display previous messages
96
- user_avatar = "images/user.png"
97
- ai_avatar = "images/tall-tree-logo.png"
98
- for msg in st.session_state["messages"]:
99
- avatar = user_avatar if msg.role == "user" else ai_avatar
100
- with st.chat_message(msg.role, avatar=avatar):
101
- st.markdown(msg.content)
102
-
103
- # Chat interface
104
- if prompt:
105
- # Add all previous messages to memory
106
- for human, ai in st.session_state["history"]:
107
- memory.chat_memory.add_user_message(HumanMessage(content=human))
108
- memory.chat_memory.add_ai_message(AIMessage(content=ai))
109
-
110
- # render the assistant's response
111
- with st.chat_message("assistant", avatar=ai_avatar):
112
- message_placeholder = st.empty()
113
-
114
- try:
115
- partial_message = ""
116
- # Collect runs for feedback using Langsmith.
117
- with st.spinner(" "), collect_runs() as cb:
118
- for chunk in chain.stream({"message": prompt}):
119
- partial_message += chunk
120
- message_placeholder.markdown(partial_message + "|")
121
- st.session_state.run_id = cb.traced_runs[0].id
122
- message_placeholder.markdown(partial_message)
123
- except openai.BadRequestError:
124
- st.warning(ERROR_MESSAGE, icon="πŸ™")
125
- st.stop()
126
- except Exception:
127
- st.warning(ERROR_MESSAGE, icon="πŸ™")
128
- st.stop()
129
-
130
- # Add the full response to the history
131
- st.session_state["history"].append((prompt, partial_message))
132
-
133
- # Add AI message to memory after the response is generated
134
- memory.chat_memory.add_ai_message(AIMessage(content=partial_message))
135
 
136
- # Add the full response to the message history
137
- st.session_state["messages"].append(
138
- ChatMessage(role="assistant", content=partial_message)
 
 
 
 
 
 
 
 
 
139
  )
 
140
 
141
 
142
- # Feedback system using streamlit-feedback and Langsmith
 
 
143
 
144
- # Langsmith client for the feedback system
145
- ls_client = Client()
146
 
147
- # Feedback option
148
- feedback_option = "thumbs"
 
 
149
 
150
- if st.session_state.get("run_id"):
151
- run_id = st.session_state.run_id
152
- feedback = streamlit_feedback(
153
- feedback_type=feedback_option,
154
- optional_text_label="[Optional] Please provide an explanation",
155
- key=f"feedback_{run_id}",
156
- )
157
- score_mappings = {
158
- "thumbs": {"πŸ‘": 1, "πŸ‘Ž": 0},
159
- "faces": {"πŸ˜€": 1, "πŸ™‚": 0.75, "😐": 0.5, "πŸ™": 0.25, "😞": 0},
160
- }
161
 
162
- # Get the score mapping based on the selected feedback option
163
- scores = score_mappings[feedback_option]
 
 
164
 
165
- if feedback:
166
- # Get the score from the selected feedback option's score mapping
167
- score = scores.get(feedback["score"])
168
 
169
- if score is not None:
170
- # Formulate feedback type string incorporating the feedback option
171
- # and score value
172
- feedback_type_str = f"{feedback_option} {feedback['score']}"
173
 
174
- # Record the feedback with the formulated feedback type string
175
- feedback_record = ls_client.create_feedback(
176
- run_id,
177
- feedback_type_str,
178
- score=score,
179
- comment=feedback.get("text"),
180
- )
181
- st.session_state.feedback = {
182
- "feedback_id": str(feedback_record.id),
183
- "score": score,
184
- }
185
- else:
186
- st.warning("Invalid feedback score.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from typing import Optional
3
+
4
  import openai
5
  import streamlit as st
6
+ from langchain_core.messages import AIMessage, HumanMessage
 
 
 
7
 
8
+ from rag.runnable_and_memory import get_runnable_and_memory
9
  from utils.error_message_template import ERROR_MESSAGE
10
 
11
+ logging.basicConfig(level=logging.ERROR)
12
+
13
+
14
  # Streamlit page configuration
15
  st.set_page_config(
16
  page_title="ELLA AI Assistant",
 
24
  st.markdown(f"<style>{css.read()}</style>", unsafe_allow_html=True)
25
 
26
 
27
def initialize_session_state():
    """Set up every session-state key the app relies on.

    Builds the RAG runnable and its conversation memory once per browser
    session, clears the memory so each session starts fresh, and seeds the
    chat/UI flags. On failure, logs the cause and stops the app with the
    generic user-facing error message.
    """
    # Initialize the runnable and memory (only on the first script run
    # of a session; Streamlit reruns the whole script on every interaction).
    if "runnable" not in st.session_state:
        try:
            st.session_state["runnable"], st.session_state["memory"] = (
                get_runnable_and_memory(model="gpt-4o", temperature=0)
            )
            # Clear the memory so a new session never inherits history
            st.session_state["memory"].clear()
        except Exception:
            # Record the real cause (logging is configured at ERROR level
            # at module import) before showing the generic error and stopping.
            logging.exception("Failed to initialize runnable and memory")
            handle_errors()

    # Other session state variables — setdefault leaves existing values alone
    st.session_state.setdefault("chat_history", [])
    st.session_state.setdefault("selected_location", None)
    st.session_state.setdefault("disable_chat_input", True)
48
 
49
 
50
@st.cache_resource
def load_avatars():
    """Return the avatar image paths keyed by speaker role (cached once)."""
    avatar_paths = {}
    avatar_paths["Human"] = "images/user.png"
    avatar_paths["AI"] = "images/tall-tree-logo.png"
    return avatar_paths
56
 
57
 
58
# Disable chat input if no location is selected
def on_change_location():
    """Radio-button callback: enable the chat input once a location is chosen."""
    # `not x` is the idiomatic, equivalent form of `False if x else True`.
    st.session_state["disable_chat_input"] = not st.session_state["selected_location"]
63
 
64
 
65
def app_layout():
    """Render the static page layout: welcome text and the location picker."""
    welcome_text = (
        "Hello there! πŸ‘‹ Need help finding the right service or practitioner? Let our AI assistant give you a hand.\n\n"
        "To get started, please select your preferred location and share details about your symptoms or needs. "
    )
    clinic_locations = (
        "Cordova Bay - Victoria",
        "James Bay - Victoria",
        "Commercial Drive - Vancouver",
    )
    with st.container():
        # Greeting shown above the location picker
        st.markdown(welcome_text)

        # The radio widget stores its value in
        # st.session_state["selected_location"] and re-evaluates the
        # chat-input lock via on_change_location.
        st.radio(
            "**Our Locations**:",
            clinic_locations,
            index=None,
            label_visibility="visible",
            key="selected_location",
            on_change=on_change_location,
        )
        # Spacer between the picker and the chat input
        st.markdown("<br>", unsafe_allow_html=True)
87
 
88
 
89
def handle_errors(error: Optional[Exception] = None):
    """Warn the user (with *error* if given, else the generic message) and halt."""
    message = error or ERROR_MESSAGE
    st.warning(message, icon="πŸ™")
    st.stop()
92
 
 
 
93
 
94
# Chat app logic
if __name__ == "__main__":
    initialize_session_state()
    app_layout()

    # Render the conversation so far (history survives Streamlit reruns)
    avatars = load_avatars()
    for message in st.session_state["chat_history"]:
        if isinstance(message, AIMessage):
            with st.chat_message("AI", avatar=avatars["AI"]):
                st.write(message.content)
        elif isinstance(message, HumanMessage):
            with st.chat_message("Human", avatar=avatars["Human"]):
                st.write(message.content)

    # Get user input only if a location is selected
    user_input = st.chat_input(
        "Ask ELLA...", disabled=st.session_state["disable_chat_input"]
    )

    # Chat interface — ignore empty/whitespace-only submissions
    if user_input and user_input.strip():
        st.session_state["chat_history"].append(HumanMessage(content=user_input))

        # Append the location to the user input (important!) — the runnable
        # needs the location to pick the right clinic.
        user_query_location = (
            f"{user_input}\nLocation: {st.session_state.selected_location}."
        )

        # Render the user input
        with st.chat_message("Human", avatar=avatars["Human"]):
            st.write(user_input)

        # Render the AI response (streamed)
        with st.chat_message("AI", avatar=avatars["AI"]):
            try:
                with st.spinner(" "):
                    response = st.write_stream(
                        st.session_state["runnable"].stream(
                            {"user_query": user_query_location}
                        )
                    )
            except Exception:
                # A single handler suffices: openai.BadRequestError subclasses
                # Exception and the previous separate handler was identical.
                # Log the real cause, then show the generic error and stop.
                logging.exception("Error while streaming the AI response")
                handle_errors()

        # Add AI response to the message history
        st.session_state["chat_history"].append(AIMessage(content=response))

        # Update runnable memory (store the location-augmented query, since
        # that is what the runnable actually received).
        st.session_state["memory"].chat_memory.add_user_message(
            HumanMessage(content=user_query_location)
        )
        st.session_state["memory"].chat_memory.add_ai_message(
            AIMessage(content=response)
        )
requirements.txt CHANGED
@@ -4,4 +4,5 @@ langchain-openai==0.1.19
4
  langchain-qdrant==0.1.3
5
  fastembed==0.3.4
6
  streamlit==1.32.2
7
- streamlit-feedback==0.1.3
 
 
4
  langchain-qdrant==0.1.3
5
  fastembed==0.3.4
6
  streamlit==1.32.2
7
+ streamlit-feedback==0.1.3
8
+ flashrank==0.2.8