momegas committed on
Commit
cf396d5
1 Parent(s): 785715b

Create app.py

Files changed (1)
  1. app.py +78 -0
app.py ADDED
@@ -0,0 +1,78 @@
+ from langchain.document_loaders import DirectoryLoader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.embeddings.openai import OpenAIEmbeddings
+ from langchain.vectorstores import Chroma
+ from langchain.chat_models import ChatOpenAI
+ from langchain.retrievers.multi_query import MultiQueryRetriever
+ import dotenv
+ from langchain.indexes import VectorstoreIndexCreator
+ from langchain.chains.question_answering import load_qa_chain
+ from langchain.llms import OpenAI
+ from langchain.prompts import PromptTemplate
+ from langchain.chat_models import ChatOpenAI
+ from langchain.schema import AIMessage, HumanMessage, SystemMessage
+ import gradio as gr
+
+ dotenv.load_dotenv()
+
+
+ system_message = """You are a helpful assistant for accountants.
+ Your answers should be in Greek.
+ If you don't know the answer, just say that you don't know, don't try to make up an answer.
+ """
+
+ prompt_template = """Use the following pieces of context to answer the question at the end.
+ If you don't know the answer, just say that you don't know, don't try to make up an answer.
+ Only answer questions that are related to the context. If it's not in the context say "Δεν γνωρίζω".
+
+ Context:
+ {context}
+
+ Question: {question}
+ Answer in Greek:
+ """
+ PROMPT = PromptTemplate(
+     template=prompt_template, input_variables=["context", "question"]
+ )
+
+ loader = DirectoryLoader("./documents", glob="**/*.txt", show_progress=True)
+ docs = loader.load()
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=1500, chunk_overlap=400)
+ texts = text_splitter.split_documents(docs)
+
+ embeddings = OpenAIEmbeddings()
+ docsearch = Chroma.from_documents(texts, embeddings).as_retriever()
+ chat = ChatOpenAI(temperature=0.1)
+
+
+ with gr.Blocks() as demo:
+     chatbot = gr.Chatbot()
+     msg = gr.Textbox()
+     clear = gr.ClearButton([msg, chatbot])
+
+     def respond(message, chat_history):
+         messages = [
+             SystemMessage(content=system_message),
+         ]
+
+         result_docs = docsearch.get_relevant_documents(message)
+
+         for doc in result_docs[:3]:
+             print("Result: ", doc, "\n\n")
+
+         human_message = None
+         human_message = HumanMessage(
+             content=PROMPT.format(context=result_docs[:3], question=message)
+         )
+         messages.append(human_message)
+
+         result = chat(messages)
+         bot_message = result.content
+         chat_history.append((message, bot_message))
+         return "", chat_history
+
+     msg.submit(respond, [msg, chatbot], [msg, chatbot])
+
+
+ if __name__ == "__main__":
+     demo.launch()
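
Usage note: a minimal sketch (not part of the commit) of exercising the same retrieval-and-chat flow without the Gradio UI. It assumes app.py is importable from the working directory, that OPENAI_API_KEY is supplied via the .env file read by dotenv.load_dotenv(), and that ./documents contains at least one .txt file; the ask() helper and the sample question are illustrative only. Importing app runs its module-level loading and indexing.

# ask.py -- illustrative sketch; mirrors the respond() logic in app.py without Gradio.
from langchain.schema import HumanMessage, SystemMessage

from app import PROMPT, chat, docsearch, system_message  # import runs app.py's indexing


def ask(question: str) -> str:
    # Retrieve context and build the same system + human message pair used in respond().
    docs = docsearch.get_relevant_documents(question)
    messages = [
        SystemMessage(content=system_message),
        HumanMessage(content=PROMPT.format(context=docs[:3], question=question)),
    ]
    return chat(messages).content  # expected to be a Greek answer, or "Δεν γνωρίζω"


if __name__ == "__main__":
    print(ask("Ποιος είναι ο συντελεστής ΦΠΑ;"))  # hypothetical example question (VAT rate)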