import os

import gradio as gr
import openai

# Pinecone environment of the existing index
os.environ["PINECONE_ENV"] = "asia-southeast1-gcp-free"
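
# Assumes OPENAI_API_KEY and PINECONE_API_KEY are already exported in the
# environment, e.g.:
#   export OPENAI_API_KEY="sk-..."
#   export PINECONE_API_KEY="..."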

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Pinecone
from langchain.document_loaders.csv_loader import CSVLoader

# Must match the embedding model used when the index was built
embeddings = OpenAIEmbeddings(openai_api_key=os.environ["OPENAI_API_KEY"])

import pinecone

pinecone.init(
    api_key=os.getenv("PINECONE_API_KEY"),
    environment=os.getenv("PINECONE_ENV"),
)

index_name = "chatbot"

# Connect to the already-populated index instead of re-ingesting documents
vectordb = Pinecone.from_existing_index(index_name, embeddings)
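
# Optional sanity check: confirm the index exists and retrieval works before
# wiring up the chain ("refund policy" is just a placeholder query).
# assert index_name in pinecone.list_indexes()
# docs = vectordb.similarity_search("refund policy", k=2)
# print([d.page_content[:80] for d in docs])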

from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
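
# ConversationalRetrievalChain condenses the chat history and the new question
# into a standalone query, retrieves matching chunks from Pinecone, and lets
# the chat model answer from those chunks.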

openai.api_key = os.getenv("OPENAI_API_KEY")


class Conversation:
    """Wraps a retrieval chain and keeps a bounded local transcript."""

    def __init__(self, num_of_round):
        self.num_of_round = num_of_round
        self.messages = []
        # Build the chain once so ConversationBufferMemory persists across
        # turns; recreating it on every question would discard the history.
        llm = ChatOpenAI(
            model_name="gpt-3.5-turbo",
            temperature=0,
            openai_api_key=os.environ["OPENAI_API_KEY"],
        )
        memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
        self.chain = ConversationalRetrievalChain.from_llm(
            llm, retriever=vectordb.as_retriever(), memory=memory
        )

    def ask(self, question):
        try:
            self.messages.append({"role": "user", "content": question})
            message = self.chain.run({"question": question})
        except Exception as e:
            print(e)
            return str(e)

        self.messages.append({"role": "assistant", "content": message})

        # Keep only the last num_of_round question/answer pairs in the local
        # transcript (there is no system message, so trim from the front).
        if len(self.messages) > self.num_of_round * 2:
            del self.messages[:2]
        return message
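
# Note: the chain's own ConversationBufferMemory still grows without bound.
# If you want the model's context capped too, LangChain's
# ConversationBufferWindowMemory(k=num_of_round, memory_key="chat_history",
# return_messages=True) is a drop-in alternative (not used in the original).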

conv = Conversation(10)


def answer(question, history):
    # history alternates user question / bot answer; Gradio's State passes it in
    history.append(question)
    response = conv.ask(question)
    history.append(response)
    # Pair up (user, bot) turns in the format gr.Chatbot expects
    responses = [(u, b) for u, b in zip(history[::2], history[1::2])]
    return responses, history
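
# For example, history == ["hi", "hello!", "thanks", "any time"] pairs up as
# [("hi", "hello!"), ("thanks", "any time")].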


with gr.Blocks(css="#chatbot{height:300px} .overflow-y-auto{height:500px}") as demo:
    chatbot = gr.Chatbot(elem_id="chatbot")
    state = gr.State([])

    with gr.Row():
        txt = gr.Textbox(show_label=False, placeholder="Enter question and press enter")

    # On enter: send the question and running history in, get the updated
    # chat display and history back out
    txt.submit(answer, [txt, state], [chatbot, state])

demo.launch()