|
|
|
|
|
|
|
import sys |
|
import os |
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) |
|
|
|
import time |
|
import json |
|
from PIL import Image |
|
|
|
from config import TEST_IMAGE_URL, TEST_PRODUCT_TYPE |
|
|
|
|
|
|
|
|
|
def test_process_image_through_entire_pipeline():
    """Run one test image through every pipeline step and print a report.

    Builds a ProcessingContext for TEST_IMAGE_URL / TEST_PRODUCT_TYPE,
    executes all PIPELINE_STEPS in sequence, then prints per-step
    diagnostics (download, background removal, detection, crop/pad,
    base64 encoding), batch and processing logs, and total wall time.

    Returns nothing; all output goes to stdout. Exceptions during model
    loading are reported and swallowed so the pipeline test still runs.
    """
    print(f"\n{'='*60}")
    print(f"TESTING FULL PIPELINE WITH: {TEST_IMAGE_URL}")
    print(f"Product Type: {TEST_PRODUCT_TYPE}")
    print(f"{'='*60}\n")

    # Imported lazily so the sys.path tweak at the top of this file has
    # already taken effect before the src package is resolved.
    from src.utils import ProcessingContext
    from src.pipeline import run_functions_in_sequence, PIPELINE_STEPS
    from src.models import model_loader

    print("\n🔍 CHECKING MODEL LOADING STATE...")

    if os.getenv("SPACE_ID"):
        # Hugging Face Zero GPU space: models load on-demand per request,
        # so we only report state here instead of forcing a load.
        print("✅ Running in Zero GPU environment - models will load on-demand")
        print(f" MODELS_LOADED: {model_loader.MODELS_LOADED}")
        print(f" LOAD_ERROR: {model_loader.LOAD_ERROR}")
    else:
        print("📥 LOADING ALL MODELS...")
        try:
            model_loader.ensure_models_loaded()
            if model_loader.MODELS_LOADED:
                print("✅ Models loaded successfully!")
            else:
                print("⚠️ Models not fully loaded but continuing...")
        except Exception as e:
            # Best-effort: later steps may still partially run without models.
            print(f"⚠️ Model loading encountered issues: {e}")
            print("Continuing with test anyway...")

    print("\n📦 PREPARING TEST DATA...")

    contexts = [ProcessingContext(url=TEST_IMAGE_URL, product_type=TEST_PRODUCT_TYPE, keywords=[])]

    print("\n🚀 RUNNING FULL PIPELINE...")
    print(f" Pipeline steps: {[step.__name__ for step in PIPELINE_STEPS]}")

    start_time = time.time()
    batch_logs = run_functions_in_sequence(contexts, PIPELINE_STEPS)
    end_time = time.time()
    processing_time = end_time - start_time

    # Single-image batch: all per-step results live on the first context.
    ctx = contexts[0]

    print("\n📊 PROCESSING RESULTS:")
    print(f" Total processing time: {processing_time:.2f} seconds")

    if hasattr(ctx, 'skip_processing') and ctx.skip_processing:
        print("❌ Processing was skipped")
        if hasattr(ctx, 'error') and ctx.error:
            print(f" Error: {ctx.error}")

    print("\n📋 STEP-BY-STEP RESULTS:")

    # Step 1: download. Presence of the "original" key indicates success.
    if "original" in ctx.pil_img:
        print(f"✅ Step 1: Image downloaded - Size: {ctx.pil_img['original'].size}")
    else:
        print("❌ Step 1: Image download failed")

    # Step 2: background removal (skipping is expected in Zero GPU).
    if "background_removed" in ctx.pil_img:
        print("✅ Step 2: Background removed successfully")
    else:
        print("⚠️ Step 2: Background removal skipped")
        if os.getenv("SPACE_ID"):
            print(" (Expected in Zero GPU - models load on-demand)")

    # Step 3: object detection; detection_result maps type -> detections list.
    if hasattr(ctx, 'detection_result') and ctx.detection_result:
        print(f"✅ Step 3: Objects detected - {len(ctx.detection_result)} detections")
        for det_type, detections in ctx.detection_result.items():
            if detections:
                print(f" - {det_type}: {len(detections)} objects")
    else:
        print("⚠️ Step 3: Object detection skipped")

    # Step 4: crop/pad. "cropped" takes precedence over "final" for reporting.
    if "cropped" in ctx.pil_img:
        print(f"✅ Step 4: Image cropped - Size: {ctx.pil_img['cropped'].size}")
    elif "final" in ctx.pil_img:
        print(f"✅ Step 4: Final image created - Size: {ctx.pil_img['final'].size}")
    else:
        print("⚠️ Step 4: Cropping/padding skipped")

    # Step 5: base64 encoding of the result image.
    if hasattr(ctx, 'result_image') and ctx.result_image:
        print(f"✅ Step 5: Image encoded to base64 - Length: {len(ctx.result_image)}")
    else:
        print("❌ Step 5: Base64 encoding failed")

    if batch_logs:
        print(f"\n📋 BATCH LOGS ({len(batch_logs)} entries):")
        for i, log in enumerate(batch_logs):
            print(f"\nLog {i+1}:")
            print(json.dumps(log, indent=2))

    # Only the last 10 processing-log entries to keep the report short.
    if hasattr(ctx, 'processing_logs') and ctx.processing_logs:
        print(f"\n📋 PROCESSING LOGS ({len(ctx.processing_logs)} entries):")
        for i, log in enumerate(ctx.processing_logs[-10:]):
            print(f" {i+1}. {log}")

    print(f"\n{'='*60}")

    # Overall verdict: success is defined as having a base64 result image.
    if hasattr(ctx, 'result_image') and ctx.result_image:
        print("✅ PIPELINE TEST COMPLETED SUCCESSFULLY")
        print(f" Processing time: {processing_time:.2f}s")
        if "final" in ctx.pil_img:
            print(f" Output image size: {ctx.pil_img['final'].size}")
    else:
        print("⚠️ PIPELINE TEST COMPLETED WITH WARNINGS")
        if os.getenv("SPACE_ID"):
            print(" Note: Limited processing expected in Zero GPU environment")

    print(f"{'='*60}\n")
|
|
|
|
|
|
|
|
|
|
|
# Allow running this test file directly as a script (outside pytest).
if __name__ == "__main__":

    test_process_image_through_entire_pipeline()
|
|
|
|