UnnamedUnknownx1234987789489
committed on
Update functions.py
functions.py  CHANGED  +15 -28
@@ -91,50 +91,37 @@ def create_retriever_from_chroma(vectorstore_path="docs/chroma/", search_type='m
     return retriever
 
 
-def handle_userinput(user_question, custom_graph):
+async def handle_userinput(user_question, custom_graph):
     # Add the user's question to the chat history and display it in the UI
     st.session_state.messages.append({"role": "user", "content": user_question})
     st.chat_message("user").write(user_question)
 
-    #
+    # Config setup (if required for the graph)
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}
 
    try:
-        #
-        state_dict = custom_graph.invoke(
-            {"question": user_question, "steps": []}, config
-        )
+        # Await the asynchronous invocation of the custom graph
+        state_dict = await custom_graph.ainvoke({"question": user_question, "steps": []}, config)
 
-
+        # Extract documents from the state dictionary
+        docs = state_dict.get("documents", [])
        with st.sidebar:
            st.subheader("Dokumentai, kuriuos Birutė gavo kaip kontekstą")
-            with st.spinner("
+            with st.spinner("Kraunama..."):
                for doc in docs:
-                    #
-
-
-
-
-
-                    st.write(f"Documentas: {content}")
-
-
-
-
-        # Check if a response (generation) was produced by the graph
-        if 'generation' in state_dict and state_dict['generation']:
-            response = state_dict["generation"]
-
-            # Add the assistant's response to the chat history and display it
+                    # Display each document
+                    st.write(f"Dokumentas: {doc}")
+
+        # Check for and display the assistant's response
+        response = state_dict.get("generation")
+        if response:
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)
-
-
-
+
+
    except Exception as e:
        # Display an error message in case of failure
        st.chat_message("assistant").write("Klaida: Arba per didelis kontekstas suteiktas modeliui, arba užklausų serveryje yra per daug")
-
 
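Because handle_userinput is now a coroutine, the synchronous Streamlit script that calls it has to bridge into asyncio. The sketch below shows one common way to do that with asyncio.run; the chat-input prompt text and the custom_graph object are placeholders standing in for whatever the rest of functions.py builds, not code from this commit.

# Hypothetical usage sketch: driving the now-async handler from the
# synchronous Streamlit script. custom_graph is assumed to be the compiled
# graph built elsewhere in this app; the prompt text is a placeholder.
import asyncio

import streamlit as st

user_question = st.chat_input("Your question")
if user_question:
    # asyncio.run starts an event loop for this single call; a plain
    # Streamlit script normally runs without one, so this is sufficient here.
    asyncio.run(handle_userinput(user_question, custom_graph))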