add context to prompt
app.py
CHANGED
@@ -150,11 +150,23 @@ async def main(message: cl.Message):
     The LCEL RAG chain is stored in the user session, and is unique to each user session - this is why we can access it here.
     """
     lcel_rag_chain = cl.user_session.get("lcel_rag_chain")
+
+    # Retrieve context for the user's query
+    context = hf_retriever.retrieve(query=message.content)
+
+    # Combine context into a single string
+    context_text = "\n".join([doc.page_content for doc in context])
+
+    # Prepare input for the prompt
+    input_dict = {
+        "query": message.content,
+        "context": context_text
+    }

     msg = cl.Message(content="")

     async for chunk in lcel_rag_chain.astream(
-
+        input_dict,
         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
     ):
         await msg.stream_token(chunk)
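For reference, the new input_dict only does its job if its keys ("query" and "context") match the input variables of the prompt that lcel_rag_chain was built from, and the context join only works because the retrieval step returns objects that expose page_content. Neither the prompt template nor the definition of hf_retriever appears in this hunk, so the snippet below is a minimal, self-contained sketch: the template wording and the stand-in Document objects are assumptions made purely to show how the pieces are expected to line up.

from langchain_core.documents import Document
from langchain_core.prompts import ChatPromptTemplate

# Stand-in for what the retrieval step returns: LangChain Document objects.
# The join below relies only on each result exposing .page_content, which is
# what the diff's "\n".join(...) line assumes about hf_retriever's output.
context = [
    Document(page_content="First retrieved chunk."),
    Document(page_content="Second retrieved chunk."),
]
context_text = "\n".join(doc.page_content for doc in context)

# Hypothetical prompt; the real template behind lcel_rag_chain is defined
# elsewhere in the app. What matters is that its input variables match the
# keys of the input_dict built in the diff above.
rag_prompt = ChatPromptTemplate.from_template(
    "Use the provided context to answer the user's query.\n\n"
    "Context:\n{context}\n\n"
    "Query:\n{query}"
)

input_dict = {
    "query": "What does this app do?",
    "context": context_text,
}
print(rag_prompt.invoke(input_dict).to_string())

One caveat: hf_retriever.retrieve(query=...) is whatever interface that object defines elsewhere in the app. A standard LangChain retriever is usually called with .invoke() or .get_relevant_documents() instead, but both of those also return Document objects, so the join and the input_dict construction stay the same either way.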