mckplus committed on
Commit
0c14d50
1 Parent(s): 3133187

Update DocuChat.py

Files changed (1)
  1. DocuChat.py +11 -4
DocuChat.py CHANGED
@@ -14,14 +14,13 @@ pn.config.sizing_mode = 'stretch_width'
  # Panel extension
  pn.extension()

-
-
  class LangchainConversation:
      def __init__(self):
          self.file_input = pn.widgets.FileInput(height=45)
          self.openaikey = pn.widgets.PasswordInput(value="", placeholder="Enter your OpenAI API Key here...", height=45)
          self.chatbox = pn.widgets.ChatBox(height=300, primary_name="User")
          self.chatbox.param.watch(self._chat, 'value')
+         self.chat_history = []  # Chat history to store previous queries and responses

      def _chat(self, event):
          user_message = event.new[-1]
@@ -42,15 +41,23 @@ class LangchainConversation:
          return '\n'.join([line.strip() for line in lines if line.strip()])

      def qa(self, file, query):
+         # Consider chat history when processing new queries
+         chat_history_str = "\n".join([f"User: {q}\nAI: {a}" for q, a in self.chat_history])
+
+         # Load, split, and analyze the entire document
          loader = PyPDFLoader(file)
          documents = loader.load()
-         text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
+         text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0, context_aware=True)  # Context-aware splitting
          texts = text_splitter.split_documents(documents)
          embeddings = OpenAIEmbeddings()
          db = Chroma.from_documents(texts, embeddings)
          retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": 3})
          qa = RetrievalQA.from_chain_type(llm=LangchainOpenAI(), chain_type="stuff", retriever=retriever, return_source_documents=True)
-         result = qa({"query": query})
+         result = qa({"query": query + "\n" + chat_history_str})
+
+         # Update chat history
+         self.chat_history.append((query, result['result']))
+
          return result['result']

      def view(self):
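
For reference, the updated qa flow reduces to roughly the standalone sketch below. It assumes the classic langchain package layout this app already relies on; the answer helper and the module-level chat_history are hypothetical names used only for illustration. Note that context_aware does not appear to be a documented CharacterTextSplitter argument, so the sketch leaves it out; running it requires OPENAI_API_KEY to be set.

# Minimal sketch of the chat-history-aware retrieval flow introduced in this commit (assumed imports).
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA
from langchain.llms import OpenAI as LangchainOpenAI

chat_history = []  # (query, answer) pairs carried across turns

def answer(pdf_path: str, query: str) -> str:
    # Fold earlier turns into the prompt, as the commit does
    history_str = "\n".join(f"User: {q}\nAI: {a}" for q, a in chat_history)

    # Load the PDF and split it into ~1000-character chunks
    documents = PyPDFLoader(pdf_path).load()
    texts = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(documents)

    # Embed the chunks and retrieve the 3 most similar ones for the query
    db = Chroma.from_documents(texts, OpenAIEmbeddings())
    retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": 3})
    qa = RetrievalQA.from_chain_type(llm=LangchainOpenAI(), chain_type="stuff",
                                     retriever=retriever, return_source_documents=True)

    result = qa({"query": query + "\n" + history_str})
    chat_history.append((query, result["result"]))
    return result["result"]

As in the commit, history is simply appended to the query string rather than passed through a dedicated conversational chain, and the vector store is rebuilt on every call, which keeps the sketch close to the diff above at the cost of recomputing embeddings per question.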