greenio committed on
Commit
f60789a
1 Parent(s): bad66f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -43
app.py CHANGED
@@ -1,47 +1,38 @@
 
 
1
  import gradio as gr
 
2
  import os
3
- import openai
4
- from langchain.chains import ConversationalRetrievalChain
5
- from langchain.chat_models import ChatOpenAI
6
- from langchain.document_loaders import DirectoryLoader
7
- from langchain.indexes.vectorstore import VectorStoreIndexWrapper
8
- from langchain.indexes import VectorstoreIndexCreator
9
- from langchain.llms import OpenAI
10
- from langchain.embeddings import OpenAIEmbeddings
11
- from langchain.vectorstores.chroma import Chroma
12
-
13
# SECURITY: a live OpenAI API key was hard-coded here and is now leaked in
# version control — revoke that key immediately. Supply the key via the
# OPENAI_API_KEY environment variable instead of embedding it in source.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")
14
-
15
# --- One-time setup: build (or reuse) the document index, then wire up the
# --- conversational retrieval chain that answers questions against it. ---

# Flip to True to persist the vector store between runs.
PERSIST = False

if PERSIST and os.path.exists("persist"):
    # A persisted Chroma store already exists on disk — reuse it.
    print("Reusing index...\n")
    vectorstore = Chroma(persist_directory="persist", embedding_function=OpenAIEmbeddings())
    index = VectorStoreIndexWrapper(vectorstore=vectorstore)
    index.load("persist")
else:
    # Fresh build: index every document under data/.
    loader = DirectoryLoader("data/")
    if PERSIST:
        creator = VectorstoreIndexCreator(vectorstore_kwargs={"persist_directory": "persist"})
    else:
        creator = VectorstoreIndexCreator()
    index = creator.from_loaders([loader])

# Retrieval chain: fetch the single best-matching chunk (k=1) and let the
# chat model compose the answer.
llm = ChatOpenAI(model="gpt-3.5-turbo")
retriever = index.vectorstore.as_retriever(search_kwargs={"k": 1})
chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever)

# Running transcript of (question, answer) pairs fed back into the chain.
chat_history = []
32
-
33
def answer_question(question):
    """Run *question* through the retrieval chain and return the answer text.

    The (question, answer) pair is appended to the module-level chat_history
    so that follow-up questions are answered with conversational context.
    """
    global chat_history
    result = chain({"question": question, "chat_history": chat_history})
    answer = result['answer']
    chat_history.append((question, answer))
    return answer
38
-
39
# Minimal text-in/text-out web UI around the question-answering function.
iface = gr.Interface(
    fn=answer_question,
    inputs="text",
    outputs="text",
    title="SPARCBot",
    description="Ask a question to get answers about your sparc data",
)

# share=True publishes a temporary public Gradio URL.
iface.launch(share=True)
 
1
+ from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
2
+ from langchain.chat_models import ChatOpenAI
3
  import gradio as gr
4
+ import sys
5
  import os
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
# SECURITY: a live OpenAI API key was hard-coded here and is now leaked in
# version control — revoke that key immediately. Supply the key via the
# OPENAI_API_KEY environment variable instead of embedding it in source.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")
8
+
9
def construct_index(directory_path):
    """Build a GPT vector index over every document under *directory_path*.

    The finished index is written to 'index.json' and also returned, so the
    serving path can reload it from disk.
    """
    # Prompt/chunking configuration for the index builder.
    max_input_size = 4096
    num_outputs = 512
    max_chunk_overlap = 20
    chunk_size_limit = 600

    prompt_helper = PromptHelper(
        max_input_size,
        num_outputs,
        max_chunk_overlap,
        chunk_size_limit=chunk_size_limit,
    )

    # Chat model used to synthesize answers over retrieved chunks.
    llm = ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs)
    llm_predictor = LLMPredictor(llm=llm)

    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(
        documents,
        llm_predictor=llm_predictor,
        prompt_helper=prompt_helper,
    )
    index.save_to_disk('index.json')
    return index
26
+
27
def chatbot(input_text):
    """Answer *input_text* by querying the GPT vector index.

    Fix: the original reloaded 'index.json' from disk on every single query,
    paying the full deserialization cost per request. The index is written
    once at startup (by construct_index) and never changes while the app is
    running, so it is loaded once and cached on the function itself.

    Returns the response text produced by the index query.
    """
    index = getattr(chatbot, "_index", None)
    if index is None:
        index = GPTSimpleVectorIndex.load_from_disk('index.json')
        chatbot._index = index
    response = index.query(input_text, response_mode="compact")
    return response.response
31
+
32
# Multi-line text box in, plain text out.
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.components.Textbox(lines=7, label="Enter your text"),
    outputs="text",
    title="Custom-trained AI Chatbot",
)

# Build the index up-front so the first query doesn't pay the indexing cost;
# share=True publishes a temporary public Gradio URL.
index = construct_index("data")
iface.launch(share=True)