import os

import gradio as gr
from langchain.chains import ConversationalRetrievalChain
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores.faiss import FAISS
# Embedding model used to encode queries against the FAISS index.
embeddings = HuggingFaceEmbeddings()

# Read the Hugging Face Hub token from the environment instead of
# hard-coding a literal token in source.
HUGGINGFACEHUB_API_TOKEN = os.environ["HUGGINGFACEHUB_API_TOKEN"]

# Load the previously persisted FAISS index from disk.
new_vectorstore = FAISS.load_local("./faiss_docs_index", embeddings)

# Hosted LLM; a low temperature keeps answers close to the retrieved context.
llm = HuggingFaceHub(
    repo_id="declare-lab/flan-alpaca-large",
    model_kwargs={"temperature": 0.1, "max_length": 512},
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
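# For reference, a minimal sketch of how an index like "./faiss_docs_index"
# could have been built (assumed, not part of this script; the source file
# name and chunk sizes below are placeholders):
#
#   from langchain.document_loaders import TextLoader
#   from langchain.text_splitter import RecursiveCharacterTextSplitter
#
#   docs = TextLoader("docs.txt").load()
#   chunks = RecursiveCharacterTextSplitter(
#       chunk_size=500, chunk_overlap=50
#   ).split_documents(docs)
#   FAISS.from_documents(chunks, embeddings).save_local("./faiss_docs_index")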
# Front-end web app (Gradio Blocks UI).
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Ask Freddy")
    clear = gr.Button("Clear")

    # Build the chain once; the memory object carries the conversation
    # across turns, so it must not be recreated on every message.
    memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True, output_key="answer"
    )
    qa = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=new_vectorstore.as_retriever(search_kwargs={"k": 1}),
        chain_type="refine",
        memory=memory,
        return_source_documents=True,
    )

    def user(user_message, history):
        # Run the QA chain; the attached memory supplies the chat history.
        result = qa({"question": user_message})
        # Append the new (user, bot) turn and clear the textbox.
        history = (history or []) + [(user_message, result["answer"])]
        return "", history

    def clear_all():
        # Reset both the visible transcript and the chain's memory.
        memory.clear()
        return None

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(clear_all, None, chatbot, queue=False)
if __name__ == "__main__":
    demo.launch(debug=True, share=False)
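
# Example invocation (assumed; the script's file name is a placeholder,
# but the environment variable name matches the lookup above):
#
#   HUGGINGFACEHUB_API_TOKEN=hf_xxx python app.py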