ErikH committed
Commit 565faf3
1 Parent(s): 783a9b1

Update pages/bot.py

Files changed (1): pages/bot.py +31 -6
pages/bot.py CHANGED
@@ -7,6 +7,11 @@ import os
 from PyPDF2 import PdfReader
 from transformers import pipeline
 
+#Retriever extension
+from langchain.prompts import ChatPromptTemplate
+from langchain.schema import StrOutputParser
+from langchain.schema.runnable import RunnablePassthrough
+
 ###########
 #pip install faiss-cpu
 #pip install langchain
@@ -82,7 +87,9 @@ def main():
     if user_question:
         st.text(retrieved_docs[0].page_content)
         context=retrieved_docs[0].page_content
-        #question=user_question
+        question=user_question
+
+        ##IDEA: text generation
         generator = pipeline('text-generation', model = 'gpt2')
         answer = generator(context, max_length = 30, num_return_sequences=3)
 
@@ -90,12 +97,30 @@
         #st.text_area()
         st.text(answer)
         st.text(type(answer))
-        # for an incoming pdf
 
-        #vectorstore_DB=get_vectorstore() # when queried via the chatbot
-        #print(get_vectorstore().similarity_search_with_score("stelle")) # shows whether the vector database is populated
-
-        #print(get_conversation_chain(get_vectorstore()))
+        #IDEA: extend the retriever
+        template = """Answer the question based only on the following context:
+
+        {context}
+
+        Question: {question}
+        """
+        prompt = ChatPromptTemplate.from_template(template)
+        model = "hkunlp/instructor-base"
+
+
+        def format_docs(docs):
+            return "\n\n".join([d.page_content for d in docs])
+
+
+        chain = (
+            {"context": retriever | format_docs, "question": RunnablePassthrough()}
+            | prompt
+            | model
+            | StrOutputParser()
+        )
+
+        st.text(chain.invoke(question))
 
 
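A note on the text-generation block added above: the committed call passes only the retrieved context to gpt2, and max_length=30 also counts the prompt tokens, so the user question never reaches the model and generation stops almost immediately. Below is a minimal sketch (not part of the commit; the helper name answer_with_gpt2 is illustrative) of one way to feed both the context and the question to the same pipeline:

from transformers import pipeline

generator = pipeline('text-generation', model='gpt2')

def answer_with_gpt2(context: str, question: str) -> str:
    # Combine the retrieved context and the user question into one prompt,
    # and budget only the newly generated tokens instead of the total length.
    prompt = f"Context: {context}\n\nQuestion: {question}\nAnswer:"
    result = generator(prompt, max_new_tokens=50, num_return_sequences=1)
    return result[0]['generated_text']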
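On the new retriever chain: as committed, model is the plain string "hkunlp/instructor-base", which is not a Runnable and so cannot be piped between prompt and StrOutputParser() (INSTRUCTOR is also an embedding model rather than a text generator). A minimal sketch of how the chain could be wired to a local generation model through LangChain's HuggingFacePipeline wrapper; it assumes the FAISS retriever built elsewhere in bot.py, and the exact import path depends on the installed langchain version:

from transformers import pipeline
from langchain.llms import HuggingFacePipeline
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import RunnablePassthrough

template = """Answer the question based only on the following context:

{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

# Wrap the local gpt2 pipeline so it exposes the Runnable interface LCEL expects.
hf_pipe = pipeline('text-generation', model='gpt2', max_new_tokens=50)
llm = HuggingFacePipeline(pipeline=hf_pipe)

def format_docs(docs):
    # Join the retrieved documents into one context string for the prompt.
    return "\n\n".join(d.page_content for d in docs)

chain = (
    # retriever: assumed to be the FAISS retriever created elsewhere in bot.py
    {"context": retriever | format_docs, "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)

# answer = chain.invoke(user_question)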