# bmw-chat / app.py
import gradio as gr
import os
from langchain import OpenAI, ConversationChain
from langchain.prompts import PromptTemplate
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.docstore.document import Document
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain.chains.conversation.memory import ConversationEntityMemory
from langchain.chains.conversation.prompt import ENTITY_MEMORY_CONVERSATION_TEMPLATE
from langchain import LLMChain
from langchain.chains import RetrievalQAWithSourcesChain
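
# Conversation memory shared with the retrieval chain; output_key="answer" stores only the
# answer text, since RetrievalQAWithSourcesChain also returns a separate "sources" output.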
memory = ConversationBufferMemory(memory_key="chat_history", output_key="answer")

persist_directory = "db"
llm = OpenAI(model_name="text-davinci-003", temperature=0)
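
# Instructor embeddings with BMW-specific instructions for indexing and querying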
model_name = "hkunlp/instructor-large"
embed_instruction = "Represent the text from the BMW website for retrieval"
query_instruction = "Query the most relevant text from the BMW website"
embeddings = HuggingFaceInstructEmbeddings(
    model_name=model_name,
    embed_instruction=embed_instruction,
    query_instruction=query_instruction,
)
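
# Build the retrieval chain; the Chroma store must be created after the embeddings it uses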
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
chain = RetrievalQAWithSourcesChain.from_chain_type(
    llm, chain_type="stuff", retriever=vectordb.as_retriever(), memory=memory
)
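
# Gradio callback: run the chain on the user message and append the exchange to the history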
def chat(message, history):
    history = history or []
    response = ""
    try:
        # RetrievalQAWithSourcesChain takes a "question" input and returns "answer" and "sources"
        result = chain({"question": message})
        response = result["answer"]
    except Exception as e:
        response = f"Error: {e}"
    history.append((message, response))
    return history, history
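
# Gradio UI: a chatbot panel, a question box, and a Run button wired to the chat callback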
with gr.Blocks() as demo:
    gr.Markdown("<h3><center>BMW Chat Bot</center></h3>")
    gr.Markdown("<p><center>Ask questions about BMW</center></p>")
    chatbot = gr.Chatbot()
    with gr.Row():
        inp = gr.Textbox(placeholder="Question", label=None)
        btn = gr.Button("Run").style(full_width=False)
    state = gr.State()
    btn.click(chat, [inp, state], [chatbot, state])

if __name__ == '__main__':
    demo.launch()