"""Gradio chatbot that answers health-insurance questions.

Builds a ConversationalRetrievalChain over the markdown documents in
``beshak/``, backed by a Chroma vector store of OpenAI embeddings and a
conversation-summary memory, and serves it through a Gradio ChatInterface.
"""
import os

from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import DirectoryLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.memory import ConversationSummaryMemory
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

import gradio as gr

# Fail fast if no key is configured. (The original assigned a "<>"
# placeholder, which silently clobbered any real key already in the env.)
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError("Set the OPENAI_API_KEY environment variable before running.")


def build_qa_chain():
    """Load, split, and index the documents; return a conversational QA chain."""
    # Conversation memory summarised by a deterministic (temperature=0) LLM.
    memory = ConversationSummaryMemory(
        llm=OpenAI(temperature=0),
        memory_key="chat_history",
        return_messages=True,
    )

    # Every markdown file under beshak/, recursively. (A WebBaseLoader over
    # the insurer's online knowledge centre is an alternative data source.)
    documents = DirectoryLoader("beshak/", glob="**/*.md").load()

    # ~500-character chunks, no overlap, for embedding and retrieval.
    splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
    splits = splitter.split_documents(documents)

    # Embed the chunks and index them in an in-memory Chroma store.
    vectorstore = Chroma.from_documents(
        documents=splits, embedding=OpenAIEmbeddings()
    )

    return ConversationalRetrievalChain.from_llm(
        ChatOpenAI(),
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )


qa = build_qa_chain()


def chatbot_response(message, history):
    """Gradio callback: answer *message* via the retrieval chain.

    *history* is supplied by gr.ChatInterface but deliberately unused —
    the chain maintains its own summary memory.
    """
    return qa(message)["answer"]


if __name__ == "__main__":
    # NOTE: the original "primed" the bot by sending its system prompt as a
    # user question through the chain — that wastes an API call and pollutes
    # the conversation memory. A real system prompt belongs in the chain's
    # combine-docs prompt (combine_docs_chain_kwargs), not in a query.
    gr.ChatInterface(
        chatbot_response,
        chatbot=gr.Chatbot(height=400),
        textbox=gr.Textbox(
            placeholder="Ask me a question about health insurance",  # fixed grammar
            container=False,
            scale=7,
        ),
        title="Get Simple Health",
        description="Ask any health insurance related question",
        theme="soft",
        examples=["Hello", "What is health insurance?", "What is critical illness?"],  # fixed typo
        # cache_examples=True would run every example through the OpenAI API
        # at startup (slow, costly, and fails without network); answer lazily.
        cache_examples=False,
        retry_btn=None,
        undo_btn="Delete Previous",
        clear_btn="Clear",
    ).launch(share=True)