# virtualoscar / app.py
# (Hugging Face Space header — author: oschan77, commit: "Update app.py", 6012e14)
import os
import random
import time
import gradio as gr
from langchain import LLMChain, PromptTemplate
from langchain.agents import initialize_agent
from langchain.chains import RetrievalQA
from langchain.document_loaders import TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.chat_models import ChatOpenAI
class QAbot():
    """Retrieval-augmented QA bot that answers job-interview questions in the
    persona of "Oscar Chan".

    Pipeline: load ``profile.txt`` -> split into chunks -> embed with OpenAI
    embeddings -> store/retrieve via a persisted Chroma vector store -> answer
    with gpt-3.5-turbo through a LangChain ``RetrievalQA`` "stuff" chain.
    """

    def __init__(self):
        # Order matters: the API key must be set before any OpenAI call, the
        # documents/embeddings before the store, and everything before the chain.
        self.setup_openai_api_key()
        self.load_documents()
        self.setup_embeddings()
        self.setup_store()
        self.setup_prompt()
        self.setup_llm()
        self.setup_qa()

    def setup_openai_api_key(self):
        # Copy the Space secret `API_KEY` into the env var LangChain/OpenAI read.
        # Raises KeyError if the secret is not configured — fail fast is intended.
        os.environ['OPENAI_API_KEY'] = os.environ['API_KEY']

    def load_documents(self):
        """Load profile.txt and split it into overlapping character chunks."""
        loader = TextLoader('profile.txt')
        documents = loader.load()
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=2000, chunk_overlap=200, length_function=len
        )
        # List of chunked Document objects, consumed by setup_store().
        self.docs = text_splitter.split_documents(documents)

    def setup_embeddings(self):
        """Create the OpenAI embedding function used by the vector store."""
        self.embeddings = OpenAIEmbeddings()

    def setup_store(self):
        """Create or reload the persisted Chroma vector store.

        BUGFIX: the original condition was inverted — it re-embedded the
        documents when the persist directory already existed, and tried to
        load a (nonexistent, empty) store on first run. It also omitted
        ``collection_name`` on the load path, which would open the default
        collection instead of 'profile'.
        """
        persist_path = 'vector_db'
        if os.path.isdir(persist_path):
            # A persisted store exists: reload it instead of re-embedding.
            self.store = Chroma(
                collection_name='profile',
                persist_directory=persist_path,
                embedding_function=self.embeddings,
            )
        else:
            # First run: embed the chunks and persist them for next time.
            self.store = Chroma.from_documents(
                self.docs,
                self.embeddings,
                collection_name='profile',
                persist_directory=persist_path,
            )

    def setup_prompt(self):
        """Build the chat prompt: persona system message + raw user question.

        Uses the ``from_template``/``from_messages`` builders, which infer the
        input variables ('context', 'question') from the templates. Also fixes
        the "or or" typo present in the original system prompt.
        """
        system_template = (
            "Act as Oscar Chan, who is a machine learning engineer with 2 years "
            "of experience implementing machine learning solutions. \n"
            "You, as Oscar Chan, are now talking to your interviewers in a "
            "machine learning engineer job interview. \n"
            "Never call yourself or answer questions as an AI language model. \n"
            "Use the following pieces of context to answer the user's question. \n"
            "If you don't know the answer, just say that you don't know, "
            "don't try to make up an answer.\n"
            "----------------\n"
            "{context}"
        )
        self.prompt = ChatPromptTemplate.from_messages([
            SystemMessagePromptTemplate.from_template(system_template),
            HumanMessagePromptTemplate.from_template('{question}'),
        ])

    def setup_llm(self):
        """Create the chat model; low temperature keeps answers factual."""
        self.llm = ChatOpenAI(temperature=0.3, model="gpt-3.5-turbo", verbose=False)

    def setup_qa(self):
        """Wire retriever, LLM and prompt into a RetrievalQA 'stuff' chain."""
        doc_retriever = self.store.as_retriever()
        self.qa = RetrievalQA.from_chain_type(
            llm=self.llm,
            chain_type="stuff",
            retriever=doc_retriever,
            verbose=False,
            chain_type_kwargs={"prompt": self.prompt},
        )

    def answer(self, query):
        """Return the chain's answer string for a single user query."""
        response = self.qa.run(query)
        return response
def main():
    """Assemble the Gradio Blocks UI for Virtual Oscar and launch it.

    A header row introduces the bot; a Chatbot widget plus a Textbox drive a
    two-stage event chain: the submit handler appends the question immediately,
    then a generator streams the answer character by character.
    """
    with gr.Blocks() as app:
        with gr.Row():
            gr.Markdown("""
            # \U0001F435 Virtual Oscar \U0001F4AC
            Virtual Oscar, a conversational chatbot engineered with OpenAI's GPT-3.5 Turbo technology, is designed to act as a virtual version of Oscar Chan in job interviews and answer job interview questions online for him. Give it a try!
            """)

        # Build the QA pipeline while the Blocks context is open, exactly as
        # before, so the (potentially slow) embedding step happens here.
        qa_bot = QAbot()

        chatbot = gr.Chatbot(label="Ask me anything about Oscar!")
        msg = gr.Textbox(placeholder="Enter text and press ENTER", label="Your question about Oscar:")
        clear = gr.Button('Clear')

        def enqueue_question(question, chat_log):
            # Show the question at once (answer pending) and empty the textbox.
            return '', chat_log + [[question, None]]

        def stream_answer(chat_log):
            # Generator: reveal the reply one character per tick for a
            # typewriter effect.
            reply = qa_bot.answer(chat_log[-1][0])
            chat_log[-1][1] = ''
            for ch in reply:
                chat_log[-1][1] += ch
                time.sleep(0.03)
                yield chat_log

        msg.submit(
            enqueue_question, [msg, chatbot], [msg, chatbot], queue=False
        ).then(stream_answer, chatbot, chatbot)
        clear.click(lambda: None, None, chatbot, queue=False)

    app.queue()
    app.launch()
# Launch the app only when run as a script (not when imported).
if __name__ == "__main__":
    main()