"""Question-answering demo over "The Hitchhiker's Guide to the Galaxy".

Splits a local text file into chunks, embeds them into a Chroma vector
store, and serves a Gradio interface that answers queries with a "stuff"
QA chain. The OpenAI API key is captured through Gradio's login form
(see ``get_api_key``).
"""

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.text_splitter import CharacterTextSplitter
from langchain.chains.question_answering import load_qa_chain
from langchain.llms import OpenAI
import os

# Lazily-initialized QA pipeline state; populated by setup_chain() on the
# first inference so the embeddings call happens only after the API key has
# been supplied via the login form.
setup_complete = False

# Explicit encoding: the corpus is text, so don't depend on the platform
# default (which differs between OSes).
with open("guide1.txt", encoding="utf-8") as f:
    hitchhikersguide = f.read()

# Chunk the book on newlines into ~1000-character pieces with no overlap.
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0, separator="\n")
texts = text_splitter.split_text(hitchhikersguide)


def get_api_key(input_1, input_2):
    """Gradio ``auth`` callback that doubles as API-key capture.

    Gradio calls this with the login form's (username, password) pair.
    Whichever field is longer is assumed to be the OpenAI API key and is
    stored in the environment; the login always succeeds.

    NOTE(review): this exposes the key to anyone with access to the
    process environment and accepts any credentials — acceptable for a
    demo, not for production.
    """
    if len(input_1) >= len(input_2):
        os.environ['OPENAI_API_KEY'] = input_1
    else:
        os.environ['OPENAI_API_KEY'] = input_2
    return True  # always authenticate; the form exists only to collect the key


def setup_chain():
    """Build the retriever and QA chain once, after the key is available.

    Embeds every chunk (one metadata ``source`` index per chunk so answers
    can cite their chunk) and wires a zero-temperature OpenAI LLM into a
    "stuff" chain. Results are published as module globals for
    ``make_inference``.
    """
    global embeddings, docsearch, chain, setup_complete
    embeddings = OpenAIEmbeddings()
    docsearch = Chroma.from_texts(
        texts,
        embeddings,
        metadatas=[{"source": str(i)} for i in range(len(texts))],
    ).as_retriever()
    chain = load_qa_chain(OpenAI(temperature=0), chain_type="stuff")
    setup_complete = True


def make_inference(query):
    """Answer ``query`` against the book, initializing the chain on first use.

    NOTE(review): the lazy init is not thread-safe; concurrent first
    requests could build the chain twice. Harmless for a single-user demo.
    """
    if not setup_complete:
        setup_chain()
    docs = docsearch.get_relevant_documents(query)
    return chain.run(input_documents=docs, question=query)


if __name__ == "__main__":
    # Gradio is only needed when serving the UI, so import it here.
    # NOTE(review): gr.inputs / gr.outputs is the legacy (pre-3.x) Gradio
    # namespace — confirm the pinned gradio version before upgrading.
    import gradio as gr

    gr.Interface(
        make_inference,
        [
            gr.inputs.Textbox(lines=2, label="Query"),
        ],
        gr.outputs.Textbox(label="Response"),
        title="Query Hitchhiker's Guide",
        description="What would Douglas Adams say if he saw you query The Hitchhiker's Guide to the Galaxy with AI? Try it for yourself...",
    ).launch(auth=get_api_key)