AttributeError: 'Message' object has no attribute 'replace'

#16
by shivammehta - opened

I am getting this error: `AttributeError: 'Message' object has no attribute 'replace'`. For the following code, any idea how to solve this?

Ingest.py

DATA_PATH = 'data/'
DB_FAISS_PATH = 'vectorstore/db_faiss'

def create_vector_db():
    """Build and persist a FAISS vector store from the PDFs under DATA_PATH.

    Loads every ``*.pdf`` file, splits the pages into overlapping chunks,
    embeds the chunks with a sentence-transformers model on CPU, and saves
    the resulting index to DB_FAISS_PATH.
    """
    loader = DirectoryLoader(DATA_PATH,
                             glob='*.pdf',
                             loader_cls=PyPDFLoader)
    documents = loader.load()

    text_splitter = RecursiveCharacterTextSplitter(chunk_size=500,
                                                   chunk_overlap=50)
    texts = text_splitter.split_documents(documents)

    embeddings = HuggingFaceEmbeddings(
        model_name='sentence-transformers/all-MiniLM-L6-v2',
        model_kwargs={'device': 'cpu'})

    # FIX: split_documents() returns Document objects whose text lives in
    # `.page_content` — there is no `.text` attribute, and
    # FAISS.from_documents() expects the Document objects themselves, not a
    # list of raw strings.  Extracting strings and passing them here is what
    # triggers attribute errors such as the one reported above; pass the
    # split Documents straight through instead.
    db = FAISS.from_documents(texts, embeddings)
    db.save_local(DB_FAISS_PATH)

# FIX: the dunder names were mangled in the paste — `name`/"main" must be
# `__name__`/"__main__" or the guard never fires (and `name` is undefined).
if __name__ == "__main__":
    create_vector_db()


app.py

from langchain.document_loaders import PyPDFLoader, DirectoryLoader
from langchain import PromptTemplate
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import CTransformers
from langchain.chains import RetrievalQA
import chainlit as cl
from ctransformers import AutoModelForCausalLM,AutoConfig

DB_FAISS_PATH = 'vectorstore/db_faiss'

def set_custom_prompt():
    """Return the PromptTemplate used by the retrieval QA chain.

    NOTE(review): this reads a module-level ``custom_prompt_template``
    string that is not shown in this snippet — confirm it is defined
    before this function is called.
    """
    return PromptTemplate(template=custom_prompt_template,
                          input_variables=['context', 'question'])

def retrieval_qa_chain(llm, prompt, db):
    """Assemble a 'stuff'-type RetrievalQA chain over *db*.

    The retriever fetches the top-2 chunks per query and source
    documents are included in the chain's response.
    """
    retriever = db.as_retriever(search_kwargs={'k': 2})
    chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type='stuff',
        retriever=retriever,
        return_source_documents=True,
        chain_type_kwargs={'prompt': prompt},
    )
    return chain

# Loading the model
def load_llm():
    """Instantiate the CTransformers Llama-2 GGML model.

    NOTE(review): relies on a module-level ``config`` object (presumably
    built with ``ctransformers.AutoConfig``) that is not shown in this
    snippet — verify it is defined before calling.
    """
    return CTransformers(
        model="TheBloke/Llama-2-70B-Chat-GGML",
        model_type="llama",
        config=config,
    )

def qa_bot():
    """Wire embeddings, the saved FAISS store, the LLM and the custom
    prompt into a ready-to-use RetrievalQA chain."""
    embedder = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2",
        model_kwargs={'device': 'cpu'})
    vector_store = FAISS.load_local(DB_FAISS_PATH, embedder)
    return retrieval_qa_chain(load_llm(), set_custom_prompt(), vector_store)

def final_result(query):
    """Answer *query* with a freshly constructed QA chain and return the
    chain's raw response dict (answer plus source documents)."""
    chain = qa_bot()
    return chain({'query': query})

Sign up or log in to comment