Spaces: Runtime error
Update ChatEngine.py
ChatEngine.py CHANGED (+5 -15)
@@ -1,24 +1,20 @@
 from llama_index.llms.huggingface import HuggingFaceLLM
 from llama_index.core.base.llms.types import ChatMessage, MessageRole
 
 class ChatEngine:
-    def __init__(self,
+    def __init__(self, retriever):
         """
         Initializes the ChatEngine with a retriever and a language model.
 
         Args:
             retriever (HybridRetriever): An instance of a retriever to fetch relevant documents.
-            model_name (str): The name of the language model to be used.
-            context_window (int, optional): The maximum context window size for the language model. Defaults to 32000.
-            temperature (float, optional): The temperature setting for the language model. Defaults to 0.
         """
 
         self.retriever = retriever
 
-        self.llm = llm
         self.chat_history = []
 
-    def ask_question(self, question):
+    def ask_question(self, question, llm):
         """
         Asks a question to the language model, using the retriever to fetch relevant documents.
 
@@ -38,12 +34,6 @@ class ChatEngine:
 
         self.chat_history.append(ChatMessage(role=MessageRole.ASSISTANT, content=f"Document: {document}"))
 
-        response = self.llm.chat(self.chat_history)
-
-        response_content = response.message.content
-        if response_content.lower().startswith("assistant:"):
-            response_content = response_content[len("assistant:"):].strip()
-
-        self.chat_history.append(ChatMessage(role=MessageRole.ASSISTANT, content=response_content))
-
+        response = llm.chat(self.chat_history)
+
         return response.message.content
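For context, a minimal usage sketch of the interface after this commit: the LLM is now supplied per call to ask_question instead of being stored on the engine in __init__. The HybridRetriever import path, its constructor arguments, and the model settings below are assumptions for illustration, not values taken from this commit.

from llama_index.llms.huggingface import HuggingFaceLLM

from ChatEngine import ChatEngine            # file updated in this commit
from HybridRetriever import HybridRetriever  # assumed module/class name for the Space's retriever

# Hypothetical LLM setup; the model name and context window are placeholders.
llm = HuggingFaceLLM(
    model_name="HuggingFaceH4/zephyr-7b-beta",
    context_window=32000,
)

retriever = HybridRetriever()   # constructor arguments omitted; they depend on the rest of the Space
engine = ChatEngine(retriever)  # the engine no longer receives the LLM here

# The caller passes the LLM with each question.
answer = engine.ask_question("What does the indexed document say about pricing?", llm)
print(answer)

Because ask_question now calls llm.chat(self.chat_history) directly and returns response.message.content, the old "assistant:" prefix stripping and the extra assistant-message append are gone, and a single ChatEngine instance can be reused with different HuggingFaceLLM configurations.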