mikepastor11 committed on
Commit
6da8da6
1 Parent(s): 97a488a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -8
app.py CHANGED
@@ -32,6 +32,13 @@ from langchain.chains import ConversationalRetrievalChain
32
  # from langchain.llms import HuggingFaceHub
33
  from langchain_community.llms import HuggingFaceHub
34
 
 
 
 
 
 
 
 
35
  ##################################################################################
36
  def extract_pdf_text(pdf_docs):
37
  text = ""
@@ -102,17 +109,21 @@ def prepare_conversation(vectorstore):
102
  def process_user_question(user_question):
103
 
104
  print('process_user_question called: \n')
 
 
 
 
 
 
105
  if user_question == None :
106
  print('question is null')
107
  return
108
  if user_question == '' :
109
  print('question is blank')
110
  return
111
-
112
  if st == None :
113
  print('session is null')
114
  return
115
-
116
  if st.session_state == None :
117
  print('session STATE is null')
118
  return
@@ -133,12 +144,12 @@ def process_user_question(user_question):
133
 
134
  for i, message in enumerate(st.session_state.chat_history):
135
 
136
- # Scrolling looses the last printed line, only print the last 6 lines
 
137
  #
138
- print('results_size on msg: ', results_size, i, ( results_size - 6 ) )
139
- if results_size > 6:
140
- if i < ( results_size - 6 ):
141
- print( 'skipped line', i)
142
  continue
143
 
144
  if i % 2 == 0:
@@ -205,7 +216,7 @@ def main():
205
  st.header(f"Pennwick File Analyzer")
206
 
207
  user_question = None
208
- user_question = st.text_input("Ask the Open Source - Flan-t5 Model a question about your uploaded documents:")
209
  if user_question != None:
210
  print( 'calling process question', user_question)
211
  process_user_question(user_question)
@@ -249,6 +260,8 @@ def main():
249
  # # create conversation chain
250
  st.session_state.conversation = prepare_conversation(vectorstore)
251
 
 
 
252
  # Mission Complete!
253
  global_later = datetime.now()
254
  st.write("Files Vectorized - Total EXECUTION Time =",
 
32
  # from langchain.llms import HuggingFaceHub
33
  from langchain_community.llms import HuggingFaceHub
34
 
35
+ ##################################################################################
36
+ # Admin flags
37
+ DISPLAY_DIALOG_LINES=6
38
+
39
+ SESSION_STARTED = False
40
+
41
+
42
  ##################################################################################
43
  def extract_pdf_text(pdf_docs):
44
  text = ""
 
109
  def process_user_question(user_question):
110
 
111
  print('process_user_question called: \n')
112
+
113
+ if (! SESSION_STARTED):
114
+ print('No Session')
115
+ st.write( 'Please upload and analyze your PDF files first!')
116
+ return
117
+
118
  if user_question == None :
119
  print('question is null')
120
  return
121
  if user_question == '' :
122
  print('question is blank')
123
  return
 
124
  if st == None :
125
  print('session is null')
126
  return
 
127
  if st.session_state == None :
128
  print('session STATE is null')
129
  return
 
144
 
145
  for i, message in enumerate(st.session_state.chat_history):
146
 
147
+ # Scrolling does not display the last printed line,
148
+ # so only print the last 6 lines
149
  #
150
+ print('results_size on msg: ', results_size, i, ( results_size - DISPLAY_DIALOG_LINES ) )
151
+ if results_size > DISPLAY_DIALOG_LINES:
152
+ if i < ( results_size - DISPLAY_DIALOG_LINES ):
 
153
  continue
154
 
155
  if i % 2 == 0:
 
216
  st.header(f"Pennwick File Analyzer")
217
 
218
  user_question = None
219
+ user_question = st.text_input("Ask the Open Source - Flan-T5 Model a question about your uploaded documents:")
220
  if user_question != None:
221
  print( 'calling process question', user_question)
222
  process_user_question(user_question)
 
260
  # # create conversation chain
261
  st.session_state.conversation = prepare_conversation(vectorstore)
262
 
263
+ SESSION_STARTED = True
264
+
265
  # Mission Complete!
266
  global_later = datetime.now()
267
  st.write("Files Vectorized - Total EXECUTION Time =",