rubensmau committed
Commit: 33e91c3
Parent: 2743de0
chat_dov.py CHANGED
@@ -86,7 +86,6 @@ def create_chatbot(corpus, character_name, chatbot_type, retrieval_docs, summary
 ## python -m streamlit run chat_dov.py -- --corpus data/tzamir.txt --character_name Dov --chatbot_type retrieval --retrieval_docs raw --interface streamlit
 
 def main():
-    os.environ["LANGCHAIN_HANDLER"] = "langchain"
 
     # fixed parameters for Dov Tzamir; the files are already processed, except for the index, which is kept in memory
     st.title("Converse com o avatar do Dov Tzamir")
@@ -101,15 +100,10 @@ def main():
         "map_reduce",  # args.summary_type,
     )
 
-    openai_api_key = st.text_input(
-        label="Your OpenAI API KEY",
-        placeholder="Your OpenAI API KEY",
-        type="password",
-    )
     st.write(" ")
     st.write("Digite o seu diálogo aqui finalizando a linha com ENTER")
     st.write("Voce pode continuar o diálogo, apagando sua perguntanda anterior e digitando aqui novamente")
-    # openai_api_key = os.environ["OPENAI_API_KEY"]
+    openai_api_key = os.environ["OPENAI_API_KEY"]
 
 
     app = Streamlit(chatbot=chatbot)
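
With the st.text_input prompt removed, the app now requires OPENAI_API_KEY to be present in the environment, and a missing variable raises KeyError as soon as main() runs. A minimal sketch of a guarded lookup, not part of this commit and assuming the same Streamlit app context:

import os
import streamlit as st

# Hypothetical guard (not in the commit): read the key from the environment
# and stop the Streamlit app with a visible message when it is missing.
openai_api_key = os.environ.get("OPENAI_API_KEY")
if not openai_api_key:
    st.error("OPENAI_API_KEY is not set in the environment.")
    st.stop()
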
data_driven_characters/chatbots/retrieval.py CHANGED
@@ -16,6 +16,8 @@ from data_driven_characters.memory import ConversationVectorStoreRetrieverMemory
 
 from langchain.embeddings.openai import OpenAIEmbeddings
 from langchain.vectorstores import FAISS
+import pickle
+import os.path
 
 
 class RetrievalChatBot:
@@ -34,36 +36,40 @@ class RetrievalChatBot:
         conv_memory = ConversationBufferMemory(
             memory_key=self.chat_history_key, input_key=self.input_key
         )
-        embeddings = OpenAIEmbeddings()
-        saved_db = FAISS.load_local('tzamir.ifass', embeddings)
-
-        """ INSIDE THE FOLLOWING CALL
+        #embeddings = OpenAIEmbeddings()
+        #saved_db = FAISS.load_local('tzamir.ifass', embeddings)
+        context_memory = ConversationVectorStoreRetrieverMemory(
             retriever=FAISS(
                 OpenAIEmbeddings().embed_query,
                 faiss.IndexFlatL2(1536),  # Dimensions of the OpenAIEmbeddings
                 InMemoryDocstore({}),
                 {},
             ).as_retriever(search_kwargs=dict(k=self.num_context_memories)),
-        """
-
-
-        context_memory = ConversationVectorStoreRetrieverMemory(
-
-            retriever=saved_db.as_retriever(search_kwargs=dict(k=self.num_context_memories)),
+            #retriever=saved_db.as_retriever(search_kwargs=dict(k=self.num_context_memories)),
             memory_key=self.context_key,
             output_prefix=character_definition.name,
             blacklist=[self.chat_history_key],
         )
 
-
-        # add the documents to the context memory
-        for i, summary in tqdm(enumerate(self.documents)):
-            context_memory.save_context(inputs={}, outputs={f"[{i}]": summary})
-
+        # add the documents to the context memory if not saved on disk
+        memory_path = 'output/tzamir/memory.pkl'
+        if not os.path.exists(memory_path):
+            print("gerando os indices")
+            for i, summary in tqdm(enumerate(self.documents)):
+                context_memory.save_context(inputs={}, outputs={f"[{i}]": summary})
+            # save to disk
+            memory_pickle = open('output/tzamir/memory.pkl', 'wb')
+            pickle.dump(context_memory, memory_pickle)
+        else:
+            print("carregando memoria do disco")
+            memory_pickle = open('output/tzamir/memory.pkl', 'rb')
+            context_memory = pickle.load(memory_pickle)
         # Combined
         memory = CombinedMemory(memories=[conv_memory, context_memory])
+        #print("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")
+        #print(memory)
+        #print("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")
+
         prompt = PromptTemplate.from_template(
             f"""Your name is {character_definition.name}.
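
The new branch rebuilds the context memory only when output/tzamir/memory.pkl is absent and otherwise unpickles the saved ConversationVectorStoreRetrieverMemory; as written, the file handles opened with open() are never closed and the output directory is assumed to exist. A sketch of the same cache-or-build pattern with context managers, not part of the commit, assuming it runs inside the same RetrievalChatBot method as the diff above and that the memory object survives a pickle round-trip (raw FAISS indexes sometimes need their own serialization):

import os
import pickle

# Hypothetical variant of the caching added above (path taken from the commit).
memory_path = "output/tzamir/memory.pkl"

if os.path.exists(memory_path):
    # Reuse the previously built memory instead of re-embedding every document.
    with open(memory_path, "rb") as f:
        context_memory = pickle.load(f)
else:
    # Populate the retriever memory from the document summaries, then cache it.
    for i, summary in enumerate(self.documents):
        context_memory.save_context(inputs={}, outputs={f"[{i}]": summary})
    os.makedirs(os.path.dirname(memory_path), exist_ok=True)
    with open(memory_path, "wb") as f:
        pickle.dump(context_memory, f)

An alternative hinted at by the commented-out 'tzamir.ifass' lines is FAISS's own save_local / load_local, which persists only the vector store rather than the whole memory object.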
 
output/tzamir/memory.pkl ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e35b2209f9a6ecc414572f31ea5f83a450ae259f524c53681ff7b12b1b9a80a
+size 719469