# Provenance (from the original hosted page, kept as comments so the file parses):
# renatomoulin's picture
# add files
# 65a23ce
from langchain.chains import RetrievalQA
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma
from data import prepare_data
# --- Module-level setup: build the retrieval-QA pipeline once at import time. ---

# Local path to the LLM weights consumed by prepare_data().
path = './llm'
# Directory where the Chroma vector database is persisted on disk.
persist_directory = "vector_db"
# prepare_data() comes from the local `data` module; it returns the LLM and,
# presumably, populates `persist_directory` with embedded documents before
# Chroma opens it below — TODO confirm against data.py.
llm = prepare_data(db_path = persist_directory, llm_path = path)
# Default HuggingFace sentence-embedding model (no model name given, so the
# library's default is used; must match the model used when the DB was built).
embeddings = HuggingFaceEmbeddings()
# Re-open the persisted vector store with the same embedding function.
vectordb = Chroma(persist_directory = persist_directory, embedding_function = embeddings)
# Expose the vector store as a LangChain retriever (default search settings).
doc_retriever = vectordb.as_retriever()
# "stuff" chain type: all retrieved documents are stuffed into a single prompt.
# `shakespeare_qa` is the entry point used by the Gradio app in __main__ below.
shakespeare_qa = RetrievalQA.from_chain_type(llm = llm, chain_type = "stuff", retriever = doc_retriever)
if __name__ == "__main__":
    # Build a minimal Gradio text-in / text-out UI around the QA chain.
    import gradio as gr

    def make_inference(query):
        """Run the retrieval-QA chain on *query* and return the answer text.

        BUG FIX: the original called shakespeare_qa.run(query) without
        returning it, so the function yielded None and the Gradio output
        box was always empty.
        """
        return shakespeare_qa.run(query)

    demo = gr.Interface(
        fn = make_inference,
        inputs = "text",
        outputs = "text",
        title = "Answer to the question about Shakespeare",
        description = "This is a demo of the LangChain library.",
    )
    # launch() blocks and serves the app locally.
    demo.launch()