Update app.py
app.py CHANGED
@@ -229,59 +229,7 @@ def process_documents(temp_dir):
 
     return pd.DataFrame(d)
 
-
-    """Handle file uploads and process requirements with enhanced error handling."""
-    try:
-        # Create temporary directory
-        temp_dir = tempfile.mkdtemp()
-        print(f"Created temporary directory: {temp_dir}")
-
-        try:
-            # Extract zip file
-            print(f"Extracting ZIP file: {zip_file.name}")
-            with zipfile.ZipFile(zip_file.name, 'r') as zip_ref:
-                zip_ref.extractall(temp_dir)
-                print(f"ZIP contents: {zip_ref.namelist()}")
-
-            # Preprocess and read requirements CSV
-            print("Processing CSV file...")
-            requirements_df = preprocess_csv(csv_file)
-            print(f"Found {len(requirements_df)} requirements")
-
-            # Setup RAG system
-            print("Setting up RAG system...")
-            vector_store = setup_rag_system(temp_dir)
-            rag_chain = create_workflow(vector_store)
-
-            # Process requirements
-            results = []
-            for idx, req in enumerate(requirements_df['requirement'], 1):
-                print(f"Processing requirement {idx}/{len(requirements_df)}")
-                try:
-                    response = rag_chain.invoke(req)
-                    results.append({
-                        'requirement': req,
-                        'response': response
-                    })
-                except Exception as e:
-                    error_msg = f"Error processing requirement: {str(e)}"
-                    print(error_msg)
-                    results.append({
-                        'requirement': req,
-                        'response': error_msg
-                    })
-
-            return pd.DataFrame(results)
-
-        finally:
-            # Cleanup
-            print(f"Cleaning up temporary directory: {temp_dir}")
-            shutil.rmtree(temp_dir)
-
-    except Exception as e:
-        error_msg = f"Processing error: {str(e)}"
-        print(error_msg)
-        return pd.DataFrame([{'error': error_msg}])
+
 
 # The rest of the code remains the same...
 
@@ -387,26 +335,33 @@ def preprocess_csv(csv_file):
         raise ValueError(f"Could not process CSV file: {str(e2)}")
 
 def handle_upload(zip_file, csv_file):
-    """Handle file uploads and process requirements."""
+    """Handle file uploads and process requirements with enhanced error handling."""
     try:
         # Create temporary directory
         temp_dir = tempfile.mkdtemp()
+        print(f"Created temporary directory: {temp_dir}")
 
         try:
             # Extract zip file
+            print(f"Extracting ZIP file: {zip_file.name}")
             with zipfile.ZipFile(zip_file.name, 'r') as zip_ref:
                 zip_ref.extractall(temp_dir)
+                print(f"ZIP contents: {zip_ref.namelist()}")
 
             # Preprocess and read requirements CSV
+            print("Processing CSV file...")
             requirements_df = preprocess_csv(csv_file)
+            print(f"Found {len(requirements_df)} requirements")
 
             # Setup RAG system
+            print("Setting up RAG system...")
             vector_store = setup_rag_system(temp_dir)
             rag_chain = create_workflow(vector_store)
 
             # Process requirements
             results = []
-            for req in requirements_df['requirement']:
+            for idx, req in enumerate(requirements_df['requirement'], 1):
+                print(f"Processing requirement {idx}/{len(requirements_df)}")
                 try:
                     response = rag_chain.invoke(req)
                     results.append({
@@ -414,20 +369,24 @@ def handle_upload(zip_file, csv_file):
                         'response': response
                     })
                 except Exception as e:
+                    error_msg = f"Error processing requirement: {str(e)}"
+                    print(error_msg)
                     results.append({
                         'requirement': req,
-                        'response':
+                        'response': error_msg
                     })
 
             return pd.DataFrame(results)
 
         finally:
             # Cleanup
+            print(f"Cleaning up temporary directory: {temp_dir}")
             shutil.rmtree(temp_dir)
 
     except Exception as e:
-
-
+        error_msg = f"Processing error: {str(e)}"
+        print(error_msg)
+        return pd.DataFrame([{'error': error_msg}])
 def main():
     """Main function to run the Gradio interface."""
     iface = gr.Interface(
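A side effect of the new error handling is that results come in two shapes: per-requirement failures are written into the 'response' column of the normal results DataFrame, while a failure of the whole run returns a one-row DataFrame with an 'error' column instead. Below is a minimal sketch of how a caller could tell the two apart; only the column names and error-message prefixes come from the diff above, and the run_and_report wrapper and the import are hypothetical.

from app import handle_upload  # assumption: handle_upload is defined in app.py as shown above

def run_and_report(zip_file, csv_file):
    """Hypothetical wrapper illustrating the two result shapes of handle_upload."""
    df = handle_upload(zip_file, csv_file)
    if 'error' in df.columns:
        # Whole-run failure: handle_upload returned pd.DataFrame([{'error': ...}])
        print(f"Run failed: {df.iloc[0]['error']}")
    else:
        # Per-requirement results: failures are embedded as strings in 'response'
        failed = df['response'].astype(str).str.startswith('Error processing requirement')
        print(f"{len(df) - failed.sum()} of {len(df)} requirements answered without errors")
    return df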
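The final hunk stops inside the gr.Interface( call in main(), so the interface arguments are not part of this diff. For orientation only, here is a minimal sketch of how handle_upload might be wired into a Gradio interface; the component types, labels, and title are assumptions, not the app's actual configuration.

import gradio as gr

from app import handle_upload  # assumption: handle_upload is defined in app.py as shown above

def main():
    """Main function to run the Gradio interface."""
    # Sketch only: the real argument list is truncated in the diff above,
    # so every component, label, and title below is an assumption.
    iface = gr.Interface(
        fn=handle_upload,
        inputs=[
            gr.File(label="Documents (ZIP)"),      # assumed: ZIP of source documents
            gr.File(label="Requirements (CSV)"),   # assumed: CSV with a 'requirement' column
        ],
        outputs=gr.Dataframe(label="Results"),     # handle_upload returns a pandas DataFrame
        title="Requirements RAG Checker",          # assumed title
    )
    iface.launch()

if __name__ == "__main__":
    main()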