import gradio as gr
import os
import logging

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.document_loaders import DirectoryLoader
from langchain.memory import ConversationBufferMemory

# Log at INFO level so the user queries and chatbot responses recorded below
# actually reach ./Logs/bot.log (a WARNING threshold would filter them out).
logging.basicConfig(filename='./Logs/bot.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')

# Load every .txt, .pdf, and .docx file under ./LVE/.
txt_loader = DirectoryLoader('./LVE/', glob="**/*.txt")
pdf_loader = DirectoryLoader('./LVE/', glob="**/*.pdf")
doc_loader = DirectoryLoader('./LVE/', glob="**/*.docx")
loaders = [pdf_loader, txt_loader, doc_loader]

documents = []
for loader in loaders:
    documents.extend(loader.load())
print(f"Total # of documents: {len(documents)}")

# Split the documents into 500-character chunks and index them in a Chroma vector store.
text_splitter = CharacterTextSplitter(chunk_size=500, chunk_overlap=0)
documents = text_splitter.split_documents(documents)

embeddings = OpenAIEmbeddings()
vectorstore = Chroma.from_documents(documents, embeddings)

#memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

# Conversational retrieval chain over the indexed documents.
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), vectorstore.as_retriever())
#chat_history = []


def submit_callback(user_message):
    default_prompt = ("For answers, refer to the provided content. "
                      "If no answer is found, contact lveswim@gmail.com. ")
    prompt = default_prompt + user_message

    # Log the user's query at INFO level
    logging.info(f"User Query: {user_message}")

    # The chain expects a chat history; pass an empty list since memory is not enabled.
    input_data = {"question": prompt, "chat_history": []}
    response = qa(input_data)

    # Log the chatbot's response at INFO level
    logging.info(f"Chatbot Response: {response['answer']}")
    return response["answer"]


iface = gr.Interface(
    fn=submit_callback,
    inputs=gr.inputs.Textbox(lines=2, label="Enter your query"),
    outputs=gr.outputs.Textbox(label="Chatbot Response"),
    #outputs=gr.outputs.HTML(label="Chatbot Response"),
    title="LVE Torpedoes Chatbot",
    layout="vertical",
    description="Enter your query to chat with the LVET chatbot",
    examples=[
        ["What are the practice times for each age group?"],
        ["What are the required fields to set up a meet in Touchpad?"],
        ["Dryland workout for swimmers?"],
        ["What are the eligibility criteria for the Mini Torpedoes program?"],
        ["What is the eligibility to participate in the LVET Swim Team?"],
        ["How many volunteer hours are required per family during the swim season?"],
        ["How can I receive credit hours for the official training?"],
        ["How are swimmers grouped for practice?"],
        ["When do evaluations take place for new swimmers?"],
        ["Who are LVET's Board Members?"],
        ["What are the regular season meets start times?"],
        ["How can I contact LVET's Board Members?"],
        ["What is the penalty for not meeting the required volunteer hours?"],
        ["Volunteer Hours?"],
        ["What types of events can a swimmer enter and how many?"],
        ["How do I sign up for volunteer jobs to fulfill my volunteer hours?"],
        ["Volunteer jobs that do not require certification or prior experience"],
        ["What are the responsibilities of an Age Group Coordinator?"],
        ["How do I commit my swimmer for meets/events?"],
        ["How are timers distributed between the host and visiting teams in dual meets?"],
        ["What happens if a watch malfunctions during an event?"],
        ["What is the difference between the Divisional Meets and the All Star Meet?"],
        ["What is the ODSL Scholarship Program and what's the award?"]
    ],
    theme="default"
)

iface.launch()
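
# A minimal sketch (an assumption, not part of the original script) of how the
# imported but currently unused ConversationBufferMemory could be attached to
# the chain so it tracks chat history itself instead of receiving an empty list
# on every call. The names `memory` and `qa_with_memory` are illustrative.
#
# memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
# qa_with_memory = ConversationalRetrievalChain.from_llm(
#     OpenAI(temperature=0),
#     vectorstore.as_retriever(),
#     memory=memory,
# )
# # With memory attached, only the question needs to be passed per call:
# # response = qa_with_memory({"question": user_message})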