# Asif Islam
# Combined codes
# 1798170
from langchain.prompts import (
ChatPromptTemplate,
MessagesPlaceholder,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
)
from langchain.chains import LLMChain, ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.document_loaders import DirectoryLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
import gradio as gr
# LLM
# Chat-model client; reads the OPENAI_API_KEY environment variable and uses
# the library's default model/temperature (nothing is overridden here).
llm = ChatOpenAI()
# load data
# Load every document found under the local 'data' directory.
loader = DirectoryLoader('data')
data = loader.load()
# Split and store into vector
# Chunk the documents into ~500-character pieces with 100 characters of
# overlap so context is not lost at chunk boundaries.
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=500, chunk_overlap=100)
all_splits = text_splitter.split_documents(data)
# persist data into SQLite DB
# Embed each chunk with OpenAI embeddings and store the vectors in a Chroma
# collection persisted under './persist'.
# NOTE(review): some older Chroma wrappers require an explicit
# vectorstore.persist() call to flush to disk — confirm for the installed
# langchain/chromadb versions.
vectorstore = Chroma.from_documents(
documents=all_splits, embedding=OpenAIEmbeddings(), persist_directory='persist')
# Prompt
# Chat prompt: system instructions, then the running conversation history,
# then the current user question.
# NOTE(review): this prompt is constructed but never passed to the
# ConversationalRetrievalChain created below, so the chain runs with its
# default prompts. To actually use it, pass a prompt (including a {context}
# variable for the retrieved documents) via combine_docs_chain_kwargs, or
# remove this dead configuration.
prompt = ChatPromptTemplate(
messages=[
SystemMessagePromptTemplate.from_template(
"You are a nice chatbot having a conversation with a human about UF student government and its codes which are included as context. The codes include information about rules and the structure and responsibilities of student government. Please reference context first when trying to respond to queries and try to cite sources. If that is not possible, instead try to make your best inference using the context and come to a decision. do not say that is not mentioned in the context - attempt an answer based on the context"
),
# The `variable_name` here is what must align with memory
MessagesPlaceholder(variable_name="chat_history"),
HumanMessagePromptTemplate.from_template("{question}"),
]
)
# Notice that we `return_messages=True` to fit into the MessagesPlaceholder
# Notice that `"chat_history"` aligns with the MessagesPlaceholder name
# Default similarity-search retriever over the Chroma vector store.
retriever = vectorstore.as_retriever()
# Conversation memory keyed "chat_history"; return_messages=True stores
# message objects (not one concatenated string) so chat prompts can consume
# the history directly.
memory = ConversationBufferMemory(
memory_key="chat_history", return_messages=True)
# Retrieval-augmented QA chain: condenses the new question against the chat
# history, fetches relevant chunks via the retriever, and answers with llm.
qa = ConversationalRetrievalChain.from_llm(
llm, retriever=retriever, memory=memory)
def queryAI(query: str):
    """Run one user question through the retrieval QA chain.

    Returns the chain's answer text; conversation history is kept in the
    chain's memory, so repeated calls form a running dialogue.
    """
    result = qa({"question": query})
    return result["answer"]
# User-facing blurb shown under the app title.
# Fix: "verfied" -> "verified" (user-visible typo).
website_description = (
    "This is an experimental bot to help people navigate and make queries "
    "about UF Student Government. This is intended to be a helpful resource "
    "but its accuracy has not been verified yet!"
)
# Simple text-in/text-out web UI; the flagging options let users label
# answers as correct or incorrect for later review.
gui = gr.Interface(
    fn=queryAI,
    inputs="text",
    outputs="text",
    title="UF SG R&P, Codes, and Constitution Chat Bot - Built by AI (Asif Islam)",
    description=website_description,
    flagging_options=["Flag Correct/Helpful", "Flag Incorrect"],
)
# share=True also exposes the app on a temporary public gradio.live URL.
gui.launch(share=True)