aheman20 committed
Commit d942eed
1 Parent(s): 7b470bc

Create app.py

Files changed (1)
  1. app.py +74 -0
app.py ADDED
import gradio as gr
import pinecone
import openai
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores import Pinecone

# API keys come from the Space's secrets.
BOOK_TOKEN = os.getenv("book")    # OpenAI API key
pine = os.getenv("pine")          # Pinecone API key
HF_TOKEN = os.getenv("HF_TOKEN")

os.environ["OPENAI_API_KEY"] = BOOK_TOKEN
openai.api_key = BOOK_TOKEN

PINECONE_API_ENV = "us-east-1-aws"
embed_model = "text-embedding-ada-002"

# Connect to the existing Pinecone index.
pinecone.init(
    api_key=pine,
    environment=PINECONE_API_ENV
)
index_n = "ibc-12"
index = pinecone.Index(index_n)
index.describe_index_stats()

limit = 3750

llm = ChatOpenAI(temperature=0, model_name="gpt-4")

embeddings = OpenAIEmbeddings(
    model=embed_model
)

# Wrap the existing Pinecone index as a LangChain vector store.
db = Pinecone.from_existing_index(index_name=index_n, embedding=embeddings)


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Talk to the Book")
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, chat_history):
        chat_history = chat_history or []

        # Gradio delivers the history as [user, assistant] pairs; convert them
        # to the (human, ai) tuples that ConversationalRetrievalChain expects.
        history_tuples = [(turn[0], turn[1]) for turn in chat_history]

        # Conversational Retrieval Chain over the Pinecone-backed vector store.
        # History is threaded through the Chatbot component instead of a
        # per-call ConversationBufferMemory, which would start empty each time.
        qa = ConversationalRetrievalChain.from_llm(
            llm,
            retriever=db.as_retriever()
        )

        # Get a response from the QA chain.
        response = qa({"question": user_message, "chat_history": history_tuples})

        # Append the user message and the response to the chat history.
        chat_history.append((user_message, response["answer"]))
        return gr.update(value=""), chat_history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch(debug=True)
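
The commit does not include a requirements.txt, which a Space needs to install these imports. A minimal sketch, assuming the pre-3.0 pinecone-client and pre-0.1 LangChain APIs that app.py calls (pinecone.init, langchain.vectorstores.Pinecone); the exact pins are an assumption, not part of this commit:

requirements.txt (assumed)
gradio
openai
pinecone-client<3.0
langchain<0.1
tiktoken  # used by LangChain's OpenAIEmbeddings for token counting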