danicafisher committed • Commit ec93c75 • Parent(s): bc78ea3

Update app.py
app.py CHANGED
@@ -29,16 +29,81 @@ Context:
 """
 chat_prompt = ChatPromptTemplate.from_messages([("system", rag_system_prompt_template), ("human", rag_user_prompt_template)])

+# @cl.on_chat_start
+# async def on_chat_start():
+#     qdrant_client = QdrantClient(url=os.environ["QDRANT_ENDPOINT"], api_key=os.environ["QDRANT_API_KEY"])
+#     qdrant_store = Qdrant(
+#         client=qdrant_client,
+#         collection_name="kai_test_docs",
+#         embeddings=te3_small
+#     )
+#     retriever = qdrant_store.as_retriever()
+
+#     global retrieval_augmented_qa_chain
+#     retrieval_augmented_qa_chain = (
+#         {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
+#         | RunnablePassthrough.assign(context=itemgetter("context"))
+#         | chat_prompt
+#         | chat_model
+#     )
+
+#     await cl.Message(content="Ask away!").send()
+
+
 @cl.on_chat_start
 async def on_chat_start():
     qdrant_client = QdrantClient(url=os.environ["QDRANT_ENDPOINT"], api_key=os.environ["QDRANT_API_KEY"])
     qdrant_store = Qdrant(
         client=qdrant_client,
-        collection_name=
+        collection_name=collection_name,
         embeddings=te3_small
     )
-    retriever = qdrant_store.as_retriever()

+    res = await cl.AskActionMessage(
+        content="Pick an action!",
+        actions=[
+            cl.Action(name="Question", value="question", label="Ask a question"),
+            cl.Action(name="File", value="file", label="Upload a file"),
+        ],
+    ).send()
+
+    if res and res.get("value") == "file":
+        files = None
+        files = await cl.AskFileMessage(
+            content="Please upload a Text or PDF File file to begin!",
+            accept=["text/plain", "application/pdf"],
+            max_size_mb=2,
+            timeout=180,
+        ).send()
+
+        file = files[0]
+
+        msg = cl.Message(
+            content=f"Processing `{file.name}`...", disable_human_feedback=True
+        )
+        await msg.send()
+
+        # load the file
+        docs = process_file(file)
+        for i, doc in enumerate(docs):
+            doc.metadata["source"] = f"source_{i}"  # TO DO: Add metadata
+            add_to_qdrant(doc, te3_small, qdrant_client, collection_name)
+        print(f"Processing {len(docs)} text chunks")
+
+        # Add to the qdrant_store
+        splits = text_splitter.split_documents(docs)
+
+        qdrant_store.add_documents(
+            documents=splits
+        )
+
+        msg.content = f"Processing `{file.name}` done. You can now ask questions!"
+        await msg.update()
+
+    if res and res.get("value") == "question":
+        await cl.Message(content="Ask away!").send()
+
+    retriever = qdrant_store.as_retriever()
     global retrieval_augmented_qa_chain
     retrieval_augmented_qa_chain = (
         {"context": itemgetter("question") | retriever, "question": itemgetter("question")}
@@ -47,8 +112,6 @@ async def on_chat_start():
         | chat_model
     )

-    await cl.Message(content="Ask away!").send()
-
 @cl.author_rename
 def rename(orig_author: str):
     return "AI Assistant"
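
The new file-upload path calls process_file, add_to_qdrant and a module-level text_splitter, none of which appear in this diff (they are defined elsewhere in app.py). The snippet below is only a rough sketch of what those helpers could look like, assuming an older Chainlit AskFileResponse that exposes the raw bytes as file.content and a RecursiveCharacterTextSplitter-based splitter; the actual definitions in the repository may differ.

# Hypothetical sketch only; the real process_file / add_to_qdrant in app.py are not shown in this commit.
import tempfile

from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import PyMuPDFLoader, TextLoader
from langchain_community.vectorstores import Qdrant

text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)


def process_file(file):
    # Write the uploaded Chainlit file to a temporary path, then load it as LangChain Documents.
    # Assumes file.content holds the raw bytes (older Chainlit AskFileResponse).
    suffix = ".pdf" if file.name.lower().endswith(".pdf") else ".txt"
    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp:
        tmp.write(file.content)
        path = tmp.name
    loader = PyMuPDFLoader(path) if suffix == ".pdf" else TextLoader(path)
    return loader.load()


def add_to_qdrant(doc, embeddings, qdrant_client, collection_name):
    # Split one document into chunks and add them to the existing Qdrant collection.
    splits = text_splitter.split_documents([doc])
    Qdrant(
        client=qdrant_client,
        collection_name=collection_name,
        embeddings=embeddings,
    ).add_documents(splits)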