Mehrdad Esmaeili committed on
Commit
1b2762e
1 Parent(s): 3a1e5b8

Update app.py

Files changed (1)
  1. app.py +19 -60
app.py CHANGED
@@ -23,9 +23,10 @@ from langchain.prompts import (
     MessagesPlaceholder,
     SystemMessagePromptTemplate,
 )
+from langchain.schema import AIMessage,HumanMessage
 documents=[]
 path='./bios/'
-# path='./augBios/'
+
 for file in os.listdir(path):
     loader = TextLoader(f'{path}{file}',encoding='unicode_escape')
     # loader.load()[0].metadata['category']='biography'
@@ -37,7 +38,6 @@ print(len(documents))
 text_splitter = CharacterTextSplitter(chunk_size=500, chunk_overlap=0)
 texts = text_splitter.split_documents(documents)
 
-# embeddings = OpenAIEmbeddings()
 embeddings = CohereEmbeddings(model='embed-english-v3.0')
 docsearch = Chroma.from_documents(texts, embeddings)
 
@@ -45,66 +45,25 @@ qa = RetrievalQA.from_chain_type(llm=Cohere(model='command'), chain_type="stuff"
     retriever=docsearch.as_retriever(search_kwargs={'k':1}),return_source_documents=True)
 
 def predict(message, history):
-    # history_langchain_format = []
-    # for human, ai in history:
-    #     history_langchain_format.append(HumanMessage(content=human))
-    #     history_langchain_format.append(AIMessage(content=ai))
-    # history_langchain_format.append(HumanMessage(content=message))
-    # gpt_response = llm(history_langchain_format)
-    # return gpt_response.content
+    history_langchain_format = []
+    for human, ai in history:
+        history_langchain_format.append(HumanMessage(content=human))
+        history_langchain_format.append(AIMessage(content=ai))
+    history_langchain_format.append(HumanMessage(content=message))
+    gpt_response = qa({'query':history_langchain_format})
+    return gpt_response['result']
 
-    message=message+'? just the book title-Author'
-    result = qa({"query": message})
-    # r1=docsearch.similarity_search_with_score(query=q,k=3)
-    # print([(item[-2].metadata,item[-1]) for item in r1],\
-    #     '\n\n',result['result'],f'|| {result["source_documents"][0].metadata}','\n*****\n')
-    if result['result'] not in ["I don't know","I don't know."]:
-        return result['result']+f'\n---\nAmazon Kindle ebook description is:\n {result["source_documents"][0].page_content}'+\
-            f'\nfrom this file: {result["source_documents"][0].metadata}'
-    else:
-        return result['result']
-'''This is code used for with memory chat'''
-# text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
-# texts = text_splitter.split_documents(documents)
-
-# # embeddings = OpenAIEmbeddings()
-# embeddings = CohereEmbeddings(model='embed-english-v3.0')
-# docsearch = Chroma.from_documents(texts, embeddings)
-# memory=ConversationSummaryMemory(
-#     llm=Cohere(model='command'), memory_key="chat_history", return_messages=True
-# )
-# qa = ConversationalRetrievalChain.from_llm(llm=Cohere(model='command'), \
-#     retriever=docsearch.as_retriever(),return_source_documents=True,memory=memory)
+    # message=message+'? just the book title-Author'
+    # result = qa({"query": message})
+    # # r1=docsearch.similarity_search_with_score(query=q,k=3)
+    # # print([(item[-2].metadata,item[-1]) for item in r1],\
+    # #     '\n\n',result['result'],f'|| {result["source_documents"][0].metadata}','\n*****\n')
+    # if result['result'] not in ["I don't know","I don't know."]:
+    #     return result['result']+f'\n---\nAmazon Kindle ebook description is:\n {result["source_documents"][0].page_content}'+\
+    #         f'\nfrom this file: {result["source_documents"][0].metadata}'
+    # else:
+    #     return result['result']
 
-# def predict(message, history):
-#     # history_langchain_format = []
-#     # for human, ai in history:
-#     #     history_langchain_format.append(HumanMessage(content=human))
-#     #     history_langchain_format.append(AIMessage(content=ai))
-#     # history_langchain_format.append(HumanMessage(content=message))
-#     # gpt_response = llm(history_langchain_format)
-#     # return gpt_response.content
-
-#     # message=message+'? just the book title-Autho'
-#     #---------
-#     # for human, ai in history:
-#     #     history_langchain_format.append(HumanMessage(content=human))
-#     #     history_langchain_format.append(AIMessage(content=ai))
-#     # history_langchain_format.append(HumanMessage(content=message))
-#     # gpt_response = llm(history_langchain_format)
-#     # return gpt_response.content
-#     #------------
-
-#     result = qa(message)
-#     # r1=docsearch.similarity_search_with_score(query=q,k=3)
-#     # print([(item[-2].metadata,item[-1]) for item in r1],\
-#     #     '\n\n',result['result'],f'|| {result["source_documents"][0].metadata}','\n*****\n')
-#     if result['result'] not in ["I don't know","I don't know."]:
-#         return result['result']+f'\n---\nAmazon Kindle ebook description is:\n {result["source_documents"][0].page_content}'+\
-#             f'\nfrom this file: {result["source_documents"][0].metadata}'
-#     else:
-#         return result['result']
-'''------'''
 gr.ChatInterface(predict,
     chatbot=gr.Chatbot(height='auto'),
     textbox=gr.Textbox(placeholder="Recommend a book on someone who..."),
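
The last hunk cuts off inside the gr.ChatInterface(...) call. For context, a minimal sketch of how such a call is typically closed and started in a Gradio app; the closing parenthesis and the launch() call are assumptions and are not part of this commit, only the arguments visible in the diff come from app.py.

# Sketch only: completes the truncated gr.ChatInterface(...) call above.
# predict is the function defined earlier in app.py; launch() starts the app.
import gradio as gr

gr.ChatInterface(
    predict,
    chatbot=gr.Chatbot(height='auto'),
    textbox=gr.Textbox(placeholder="Recommend a book on someone who..."),
).launch()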