no LLM check
- app.py +4 -4
- semantic.py +1 -3
app.py
CHANGED

@@ -29,7 +29,7 @@ async def main(message: cl.Message):
         response = _semantic_rag_chain.invoke({"question": message.content})
         content += response["response"].content
     except Exception as e:
-
+        print(f"chat error: {e}")
 
     # Send a response back to the user
     await cl.Message(
@@ -39,7 +39,7 @@ async def main(message: cl.Message):
 @cl.on_chat_start
 async def start():
 
-
+    print("==> starting ...")
     global _semantic_rag_chain
     # _semantic_rag_chain = SemanticRAGChainFactory.get_semantic_rag_chain()
 
@@ -52,7 +52,7 @@ async def start():
     # url="https://media.architecturaldigest.com/photos/5f241de2c850b2a36b415024/master/w_1600%2Cc_limit/Luke-logo.png"
     # ).send()
 
-
+    print("\tsending message back: ready!!!")
 
     content = ""
     # if _semantic_rag_chain is not None:
@@ -73,4 +73,4 @@ async def start():
     await cl.Message(
         content=content + "\nHow can I help you with Meta's 2023 10K?"
    ).send()
-
+    print(f"{20 * '*'}")
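For reference, a minimal runnable sketch of the Chainlit handlers these debug prints instrument, assembled from the diff context above; the trimmed-down bodies and the None default for _semantic_rag_chain are assumptions, since the factory call is commented out in this commit:

import chainlit as cl

# Assumption: populated elsewhere at startup; the factory call is commented out here.
_semantic_rag_chain = None


@cl.on_chat_start
async def start():
    print("==> starting ...")
    await cl.Message(content="\nHow can I help you with Meta's 2023 10K?").send()
    print(f"{20 * '*'}")


@cl.on_message
async def main(message: cl.Message):
    content = ""
    try:
        response = _semantic_rag_chain.invoke({"question": message.content})
        content += response["response"].content
    except Exception as e:
        print(f"chat error: {e}")
    # Send a response back to the user
    await cl.Message(content=content).send()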
semantic.py
CHANGED

@@ -1,9 +1,7 @@
 import logging
-import shutil
 from pathlib import Path
 
-logging.
-_logger = logging.getLogger("chunking")
+_logger = logging.getLogger("semantic")
 
 from operator import itemgetter
 from langchain_core.prompts import ChatPromptTemplate
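This hunk drops the unused shutil import, removes the dangling "logging." statement, and renames the module logger from "chunking" to "semantic". A minimal sketch of how the renamed logger might be exercised; the basicConfig call is an assumption added only to make the sketch runnable standalone and is not part of this commit:

import logging

# Assumption: the host app normally configures handlers; basicConfig is only
# here so the example emits output when run by itself.
logging.basicConfig(level=logging.INFO)

_logger = logging.getLogger("semantic")
_logger.info("semantic module logger ready")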