asif4318 committed on
Commit
abb2ca9
1 Parent(s): 8028d81

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +58 -0
app.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import openai
4
+ import gradio as gr
5
+ from langchain.chains import ConversationalRetrievalChain, RetrievalQA
6
+ from langchain.chat_models import ChatOpenAI
7
+ from langchain.document_loaders import DirectoryLoader, TextLoader
8
+ from langchain.embeddings import OpenAIEmbeddings
9
+ from langchain.indexes import VectorstoreIndexCreator
10
+ from langchain.indexes.vectorstore import VectorStoreIndexWrapper
11
+ from langchain.llms import OpenAI
12
+ from langchain.vectorstores import Chroma
13
+
14
+ import constants
15
+
16
# Provide the OpenAI API key to the SDK (key lives in constants.py,
# kept out of this file). Must happen before any embeddings/LLM objects
# below are constructed.
os.environ["OPENAI_API_KEY"] = constants.APIKEY

# Enable to save to disk & reuse the model (for repeated queries on the same data)
PERSIST = False

query = None

if PERSIST and os.path.exists("persist"):
    # A persisted Chroma index already exists on disk — load it instead
    # of re-embedding all the documents.
    print("Reusing index...\n")
    vectorstore = Chroma(persist_directory="persist",
                         embedding_function=OpenAIEmbeddings())
    index = VectorStoreIndexWrapper(vectorstore=vectorstore)
else:
    # Build a fresh index from every document under data/.
    loader = DirectoryLoader("data/")
    if PERSIST:
        # Persist the new index so subsequent runs can take the branch above.
        index = VectorstoreIndexCreator(
            vectorstore_kwargs={"persist_directory": "persist"}).from_loaders([loader])
    else:
        index = VectorstoreIndexCreator().from_loaders([loader])

# Conversational retrieval chain: gpt-3.5-turbo answers each question
# using the single most relevant retrieved chunk (k=1).
chain = ConversationalRetrievalChain.from_llm(
    llm=ChatOpenAI(model="gpt-3.5-turbo"),
    retriever=index.vectorstore.as_retriever(search_kwargs={"k": 1}),
)

# (question, answer) pairs accumulated across calls — module-level shared
# state, so history is global to the process, not per-user.
chat_history = []
42
+
43
+
44
async def getQuery(query: str) -> str:
    """Answer one user question via the retrieval chain.

    Gradio handler: retrieves the most relevant document chunk, asks the
    LLM, records the exchange in the module-level ``chat_history``, and
    returns the answer text.

    Args:
        query: The user's question; may be empty.

    Returns:
        The model's answer, or a short instruction string when the query
        is empty or is a quit keyword.
    """
    # A web handler must never block on stdin — the original input() call
    # would hang the server (or raise EOFError) in deployment. Just ask
    # the user to type something instead.
    if not query:
        return "Please enter a question."
    # Likewise, never sys.exit() from a request handler: that kills the
    # whole app for every connected user. Treat quit keywords as a
    # harmless reply instead.
    if query in ['quit', 'q', 'exit']:
        return "Goodbye."
    result = chain({"question": query, "chat_history": chat_history})
    print(result['answer'])  # also echo to the server console

    chat_history.append((query, result['answer']))
    return result['answer']
55
+
56
# Expose getQuery as a minimal text-in / text-out Gradio web UI.
demo = gr.Interface(fn=getQuery, inputs="text", outputs="text")

# Start the Gradio server (blocks here and serves the interface).
demo.launch()