# Web-based RAG question answering: Gradio UI + local LlamaCpp (Phi-2) + Chroma vector store.
import gradio as gr
from langchain import hub
from langchain.callbacks.manager import CallbackManager
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.llms import LlamaCpp
from langchain_community.vectorstores import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_huggingface.embeddings import HuggingFaceEmbeddings

# Stream generated tokens to stdout as the model produces them.
callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])

# Model settings: 0 GPU layers = CPU-only inference; n_batch = prompt batch size.
n_gpu_layers = 0
n_batch = 512

# Local quantized Phi-2 model served through llama.cpp.
llm = LlamaCpp(
    model_path="./models/phi-2.Q2_K.gguf",
    n_gpu_layers=n_gpu_layers,
    n_batch=n_batch,
    n_ctx=4096,  # context window size in tokens
    temperature=0.7,
    max_tokens=4096,
    top_p=1,
    callback_manager=callback_manager,
    verbose=False,
)

# Community RAG prompt template (fills {context} and {question}).
prompt = hub.pull("rlm/rag-prompt")
# Function to format documents | |
def format_docs(docs):
    """Join the page contents of *docs* into one blank-line-separated string."""
    return "\n\n".join(doc.page_content for doc in docs)
# Main function to process the question and URL | |
def get_answer(question, url):
    """Answer *question* from the content of the web page at *url*.

    Generator: yields the partial answer after every streamed token so the
    Gradio textbox updates incrementally; the final yield is the full answer.
    """
    # Fetch and parse the page into LangChain documents.
    loader = WebBaseLoader(url)
    data = loader.load()

    # Split the page into 2000-character chunks (no overlap) for embedding.
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=2000, chunk_overlap=0)
    all_splits = text_splitter.split_documents(data)

    # Embed the chunks and index them in a Chroma vector store.
    vectorstore = Chroma.from_documents(documents=all_splits, embedding=HuggingFaceEmbeddings())
    retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 3})

    # RAG pipeline: retrieve top-3 chunks -> format as context -> prompt -> LLM -> text.
    # (The chain performs retrieval itself, so no separate retriever.invoke() call
    # is needed beforehand — the original did one and discarded the result.)
    rag_chain = (
        {"context": retriever | format_docs, "question": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
    )

    answer = ""
    for chunk in rag_chain.stream(question):
        answer += chunk
        yield answer
    # Guarantee at least one yield even if the stream produced no chunks.
    yield answer
# Create the Gradio interface | |
# Build the Gradio UI: question + URL in, streamed text answer out.
iface = gr.Interface(
    fn=get_answer,
    inputs=[
        gr.Textbox(lines=1, placeholder="Enter your question here..."),
        gr.Textbox(lines=1, placeholder="Enter the website URL here..."),
    ],
    outputs="text",
    title="Web-based Question Answering System",
    description="Ask a question about the content of a webpage and get an answer.",
    examples=[
        ["Which are the top 5 companies in the world with their revenue in table format?", "https://www.investopedia.com/biggest-companies-in-the-world-by-market-cap-5212784"]
    ],
)

# Launch the app with a public share link.
iface.launch(share=True)