|
# Pull environment variables (e.g. HUGGINGFACEHUB_API_TOKEN) from a local
# .env file so the HuggingFace calls below can authenticate.
from dotenv import load_dotenv

load_dotenv()
|
|
|
from langchain_community.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Fetch the RISE FAQ page, then break the document into overlapping
# chunks small enough to embed individually.
loader = WebBaseLoader("https://rise.mmu.ac.uk/what-is-rise/")
data = loader.load()

text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=150,  # overlap preserves context across chunk boundaries
)
docs = text_splitter.split_documents(data)
|
|
|
|
|
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# Embed each chunk (default sentence-transformers model) and persist the
# resulting FAISS index to disk so it can be reloaded without re-scraping.
embeddings = HuggingFaceEmbeddings()

db = FAISS.from_documents(docs, embeddings)
db.save_local("_rise_faq_db")
|
|
|
|
|
|
|
from langchain_community.llms import HuggingFaceHub
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Example question to send through the QA chain below.
question = "Who won the FIFA World Cup in the year 1994? "

# Chain-of-thought style prompt (defined here but not wired into the
# RetrievalQA chain, which uses its own default prompt).
template = """Question: {question}



Answer: Let's think step by step."""

prompt = PromptTemplate(template=template, input_variables=["question"])

# Remote model served via the HuggingFace Hub inference API.
repo_id = "google/flan-t5-xxl"
llm = HuggingFaceHub(
    repo_id=repo_id,
    model_kwargs={
        "temperature": 0.5,  # moderate sampling randomness
        "max_length": 64,    # cap on generated tokens
    },
)
|
|
|
from langchain.chains import RetrievalQA

# Reload the FAISS index persisted earlier in this script.
# BUG FIX: the original loaded "_rise_product_db", a path this script never
# creates (save_local above writes "_rise_faq_db"), so load_local failed at
# runtime. Load the index that was actually saved.
# NOTE(review): newer langchain releases require
# allow_dangerous_deserialization=True here because the index is stored via
# pickle — confirm against the installed version.
db = FAISS.load_local("_rise_faq_db", embeddings)

# "stuff" chain type: all retrieved chunks are concatenated into a single
# prompt passed to the LLM in one call.
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=db.as_retriever(),
)
result = qa.run(question)
|
|