SabariKameswaran committed
Commit 4b02ce6
1 Parent(s): 731fe8e

Update app.py

Files changed (1)
  1. app.py +62 -62
app.py CHANGED
@@ -1,62 +1,62 @@
- from flask import Flask, jsonify, request, send_file
- from gtts import gTTS
- from langchain.chains import ConversationalRetrievalChain
- from langchain.chat_models import ChatOpenAI
- from langchain.document_loaders import TextLoader
- from langchain.embeddings import OpenAIEmbeddings
- from langchain.indexes import VectorstoreIndexCreator
- from langchain.indexes.vectorstore import VectorStoreIndexWrapper
- from langchain.vectorstores import Chroma
- from langchain.memory import ConversationBufferMemory
- import os
-
- app = Flask(__name__)
- os.environ["OPENAI_API_KEY"] = "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"
- PERSIST = True
-
- query = None
-
- def main_func(message, history):
-     global query
-     chat_history = history
-     if PERSIST and os.path.exists("persist"):
-         print("Reusing index...\n")
-         vectorstore = Chroma(persist_directory="persist", embedding_function=OpenAIEmbeddings())
-         index = VectorStoreIndexWrapper(vectorstore=vectorstore)
-     else:
-         loader = TextLoader("new.txt")
-         if PERSIST:
-             index = VectorstoreIndexCreator(vectorstore_kwargs={"persist_directory": "persist"}).from_loaders([loader])
-         else:
-             index = VectorstoreIndexCreator().from_loaders([loader])
-
-     print(index)
-
-     memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
-     chain = ConversationalRetrievalChain.from_llm(llm=ChatOpenAI(), retriever=index.vectorstore.as_retriever(), memory=memory, verbose=True)
-
-     query = message
-     result = chain({"question": query, "chat_history": chat_history})
-     print(result['answer'])
-     chat_history.append((query, result['answer']))
-     return result['answer']
-
- @app.route('/generate-text/<input_text>', methods=['POST'])
- def generate_text(input_text):
-     global query
-     generated_text = main_func(input_text, [])
-
-     tts = gTTS(text=generated_text, lang='en')
-     tts.save("output.mp3")
-
-     return jsonify({
-         'generated_text': generated_text,
-         'audio_url': request.host_url + 'audio'
-     })
-
- @app.route('/audio')
- def get_audio():
-     return send_file("output.mp3", as_attachment=True)
-
- if __name__ == "__main__":
-     app.run(debug=True)
+ from flask import Flask, jsonify, request, send_file
+ from gtts import gTTS
+ from langchain.chains import ConversationalRetrievalChain
+ from langchain.chat_models import ChatOpenAI
+ from langchain.document_loaders import TextLoader
+ from langchain.embeddings import OpenAIEmbeddings
+ from langchain.indexes import VectorstoreIndexCreator
+ from langchain.indexes.vectorstore import VectorStoreIndexWrapper
+ from langchain.vectorstores import Chroma
+ from langchain.memory import ConversationBufferMemory
+ import os
+
+ app = Flask(__name__)
+ os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"
+ PERSIST = True
+
+ query = None
+
+ def main_func(message, history):
+     global query
+     chat_history = history
+     if PERSIST and os.path.exists("persist"):
+         print("Reusing index...\n")
+         vectorstore = Chroma(persist_directory="persist", embedding_function=OpenAIEmbeddings())
+         index = VectorStoreIndexWrapper(vectorstore=vectorstore)
+     else:
+         loader = TextLoader("new.txt")
+         if PERSIST:
+             index = VectorstoreIndexCreator(vectorstore_kwargs={"persist_directory": "persist"}).from_loaders([loader])
+         else:
+             index = VectorstoreIndexCreator().from_loaders([loader])
+
+     print(index)
+
+     memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+     chain = ConversationalRetrievalChain.from_llm(llm=ChatOpenAI(), retriever=index.vectorstore.as_retriever(), memory=memory, verbose=True)
+
+     query = message
+     result = chain({"question": query, "chat_history": chat_history})
+     print(result['answer'])
+     chat_history.append((query, result['answer']))
+     return result['answer']
+
+ @app.route('/generate-text/<input_text>', methods=['POST'])
+ def generate_text(input_text):
+     global query
+     generated_text = main_func(input_text, [])
+
+     tts = gTTS(text=generated_text, lang='en')
+     tts.save("output.mp3")
+
+     return jsonify({
+         'generated_text': generated_text,
+         'audio_url': request.host_url + 'audio'
+     })
+
+ @app.route('/audio')
+ def get_audio():
+     return send_file("output.mp3", as_attachment=True)
+
+ if __name__ == "__main__":
+     app.run(debug=True)
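
The only functional change in this commit is the os.environ["OPENAI_API_KEY"] assignment, where the hardcoded key is replaced with the "YOUR_API_KEY" placeholder. A minimal sketch of the usual follow-up, assuming the real key is instead supplied by the deployment environment (for example a Hugging Face Space secret or a local export), rather than being edited back into app.py:

# Hypothetical alternative to the hardcoded assignment: rely on an
# externally provided OPENAI_API_KEY (e.g. a Space secret or
# `export OPENAI_API_KEY=...`) and fail fast if it is missing.
import os

if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY is not set; configure it as a secret instead of hardcoding it in app.py")

With the key injected this way, the assignment line can be dropped entirely; note that the old key also remains visible in the parent commit 731fe8e, so it would still need to be rotated.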