WebashalarForML committed on
Commit
0edc3f0
·
verified ·
1 Parent(s): bdb28de

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -14
app.py CHANGED
@@ -17,6 +17,10 @@ logging.basicConfig(
17
  # Flask App
18
  app = Flask(__name__)
19
  app.secret_key = 'your_secret_key'
 
 
 
 
20
  app.config['UPLOAD_FOLDER'] = 'uploads/'
21
  app.config['RESULT_FOLDER'] = 'results/'
22
 
@@ -188,20 +192,19 @@ def process_file():
188
  except Exception as e:
189
  logging.exception(f"Error during primary processing: {e}")
190
  flash('Primary processing failed, attempting backup model...')
191
-
192
- # Correct unpacking (3 values)
193
- LLMdata_v1, extracted_text, processed_Img = extract_text_from_images(file_paths)
194
- logging.info(f"Extracted text(Backup): {extracted_text}")
195
- logging.info(f"Processed images(Backup): {processed_Img}")
 
 
196
 
197
  LLMdata = {}
198
  try:
199
- if extracted_text:
200
- text = json_to_llm_str(extracted_text)
201
- LLMdata = NER_Model(text)
202
- logging.info(f"NER model data: {LLMdata}")
203
- else:
204
- logging.warning("No extracted text available for backup model")
205
  except Exception as backup_e:
206
  logging.exception(f"Error during backup processing: {backup_e}")
207
  flash('Backup processing also failed')
@@ -209,10 +212,8 @@ def process_file():
209
 
210
  # Final merge using backup data if we reached here
211
  cont_data = process_extracted_text(extracted_text)
212
- logging.info(f"Contextual data: {cont_data}")
213
-
214
  processed_data = process_resume_data(LLMdata, cont_data, extracted_text)
215
- logging.info(f"Processed data: {processed_data}")
216
 
217
  session['processed_data'] = processed_data
218
  session['processed_Img'] = processed_Img
 
17
  # Flask App
18
  app = Flask(__name__)
19
  app.secret_key = 'your_secret_key'
20
+
21
+ @app.template_filter('basename')
22
+ def basename_filter(path):
23
+ return os.path.basename(path)
24
  app.config['UPLOAD_FOLDER'] = 'uploads/'
25
  app.config['RESULT_FOLDER'] = 'results/'
26
 
 
192
  except Exception as e:
193
  logging.exception(f"Error during primary processing: {e}")
194
  flash('Primary processing failed, attempting backup model...')
195
+
196
+ # We don't call extract_text_from_images AGAIN because it already ran and produced its results
197
+ # in the variables assigned at line 162. We just need to ensure they are available here.
198
+ # If extraction completely failed (raised before return), then we have nothing to do.
199
+ if 'extracted_text' not in locals() or not extracted_text:
200
+ flash('Critical failure: Could not extract text from image.')
201
+ return redirect(url_for('index'))
202
 
203
  LLMdata = {}
204
  try:
205
+ text = json_to_llm_str(extracted_text)
206
+ LLMdata = NER_Model(text)
207
+ logging.info(f"NER model data: {LLMdata}")
 
 
 
208
  except Exception as backup_e:
209
  logging.exception(f"Error during backup processing: {backup_e}")
210
  flash('Backup processing also failed')
 
212
 
213
  # Final merge using backup data if we reached here
214
  cont_data = process_extracted_text(extracted_text)
 
 
215
  processed_data = process_resume_data(LLMdata, cont_data, extracted_text)
216
+ logging.info(f"Final merged data: {processed_data}")
217
 
218
  session['processed_data'] = processed_data
219
  session['processed_Img'] = processed_Img