Spaces:
Sleeping
Sleeping
kausthubkannan17
committed on
Commit
•
259b1d5
1
Parent(s):
991bf37
fix: warning issues
Browse files
model.py
CHANGED
@@ -1,5 +1,6 @@
|
|
|
|
|
|
1 |
from langchain_community.chat_message_histories.in_memory import ChatMessageHistory
|
2 |
-
from langchain_community.llms.ctransformers import CTransformers
|
3 |
from langchain_community.vectorstores import DeepLake
|
4 |
from langchain_core.messages import AIMessage
|
5 |
from langchain_core.prompts import PromptTemplate, load_prompt
|
@@ -99,6 +100,7 @@ class DrakeLM:
|
|
99 |
|
100 |
Ask the LLM model a question
|
101 |
"""
|
|
|
102 |
context = self._retrieve(query, metadata_filter)
|
103 |
print("Retrieved context")
|
104 |
prompt_template, prompt_string = self._chat_prompt(query, context)
|
@@ -134,7 +136,7 @@ class DrakeLM:
|
|
134 |
- Create subheadings for each section.
|
135 |
- Use numbered bullet points for each point.
|
136 |
"""
|
137 |
-
|
138 |
notes_chunk = []
|
139 |
for doc in documents:
|
140 |
prompt = self.notes_prompt.format(content_chunk=doc.page_content, rules=rules)
|
|
|
1 |
+
import warnings
|
2 |
+
|
3 |
from langchain_community.chat_message_histories.in_memory import ChatMessageHistory
|
|
|
4 |
from langchain_community.vectorstores import DeepLake
|
5 |
from langchain_core.messages import AIMessage
|
6 |
from langchain_core.prompts import PromptTemplate, load_prompt
|
|
|
100 |
|
101 |
Ask the LLM model a question
|
102 |
"""
|
103 |
+
warnings.filterwarnings("ignore", message="Convert_system_message_to_human will be deprecated!")
|
104 |
context = self._retrieve(query, metadata_filter)
|
105 |
print("Retrieved context")
|
106 |
prompt_template, prompt_string = self._chat_prompt(query, context)
|
|
|
136 |
- Create subheadings for each section.
|
137 |
- Use numbered bullet points for each point.
|
138 |
"""
|
139 |
+
warnings.filterwarnings("ignore", message="Convert_system_message_to_human will be deprecated!")
|
140 |
notes_chunk = []
|
141 |
for doc in documents:
|
142 |
prompt = self.notes_prompt.format(content_chunk=doc.page_content, rules=rules)
|