""" |
|
|
Pytest configuration and fixtures for BackgroundFX Pro tests. |
|
|
""" |
|
|
|
|
|
import pytest |
|
|
import numpy as np |
|
|
import torch |
|
|
import cv2 |
|
|
import tempfile |
|
|
import shutil |
|
|
from pathlib import Path |
|
|
from unittest.mock import Mock, MagicMock |
|
|
import os |
|
|
import sys |
|
|
|
|
|
|
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session") |
|
|
def test_config(): |
|
|
"""Test configuration.""" |
|
|
return { |
|
|
'device': 'cpu', |
|
|
'test_data_dir': Path(__file__).parent / 'data', |
|
|
'temp_dir': tempfile.mkdtemp(prefix='bgfx_test_'), |
|
|
'max_test_duration': 30, |
|
|
'use_gpu': torch.cuda.is_available() |
|
|
} |
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session", autouse=True) |
|
|
def cleanup(test_config): |
|
|
"""Cleanup after all tests.""" |
|
|
yield |
|
|
|
|
|
if os.path.exists(test_config['temp_dir']): |
|
|
shutil.rmtree(test_config['temp_dir']) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def sample_image():
    """Create a sample image for testing."""
    image = np.zeros((512, 512, 3), dtype=np.uint8)

    # Flat background colour (BGR).
    image[:, :] = [100, 150, 200]

    # Foreground subject as a filled rectangle.
    cv2.rectangle(image, (150, 100), (350, 450), (50, 100, 50), -1)

    # Light random noise so the image is not perfectly uniform.
    noise = np.random.randint(0, 20, (512, 512, 3), dtype=np.uint8)
    image = cv2.add(image, noise)

    return image


@pytest.fixture
def sample_mask():
    """Create a sample mask for testing."""
    mask = np.zeros((512, 512), dtype=np.uint8)

    # Foreground region matching the rectangle in `sample_image`.
    cv2.rectangle(mask, (150, 100), (350, 450), 255, -1)

    # Soften the mask edges slightly.
    mask = cv2.GaussianBlur(mask, (5, 5), 2)
    return mask


@pytest.fixture
def sample_background():
    """Create a sample background image."""
    background = np.zeros((512, 512, 3), dtype=np.uint8)

    # Vertical gradient: blue increases and red decreases from top to bottom (BGR).
    for i in range(512):
        background[i, :] = [
            int(255 * (i / 512)),
            100,
            int(255 * (1 - i / 512)),
        ]
    return background


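# Illustrative sketch (not part of the test suite): one way a test could use
# the fixtures above, alpha-blending `sample_image` over `sample_background`
# with `sample_mask` as a soft alpha. `_example_composite` is a hypothetical
# helper, not a BackgroundFX Pro function.
def _example_composite(image, mask, background):
    alpha = mask.astype(np.float32)[..., None] / 255.0
    blended = alpha * image.astype(np.float32) + (1.0 - alpha) * background.astype(np.float32)
    return blended.astype(np.uint8)

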
@pytest.fixture
def sample_video(test_config):
    """Create a sample video file for testing."""
    video_path = Path(test_config['temp_dir']) / 'test_video.mp4'

    # 512x512 clip at 30 fps using the mp4v codec.
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    out = cv2.VideoWriter(str(video_path), fourcc, 30.0, (512, 512))

    # 30 frames (one second) of a green rectangle moving left to right.
    for i in range(30):
        frame = np.zeros((512, 512, 3), dtype=np.uint8)
        x = 100 + i * 5
        cv2.rectangle(frame, (x, 200), (x + 100, 400), (0, 255, 0), -1)
        out.write(frame)

    out.release()
    return str(video_path)


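# Illustrative sketch (hypothetical helper, not used by the suite): reading the
# generated clip back with OpenCV to check that it decodes frame by frame.
def _example_count_frames(video_path):
    cap = cv2.VideoCapture(video_path)
    frames = 0
    while True:
        ok, _ = cap.read()
        if not ok:
            break
        frames += 1
    cap.release()
    return frames

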
@pytest.fixture
def mock_model():
    """Create a mock ML model for testing."""
    model = MagicMock()
    model.eval = MagicMock(return_value=model)  # mirror torch, where eval() returns self
    model.to = MagicMock(return_value=model)

    def forward(x):
        batch_size = x.shape[0] if hasattr(x, 'shape') else 1
        return torch.randn(batch_size, 1, 512, 512)

    # Route both model(x) and model.forward(x) through the fake forward pass.
    # (Assigning model.__call__ does not affect calls on a MagicMock instance,
    # so the side effect is set on the mock itself instead.)
    model.side_effect = forward
    model.forward = MagicMock(side_effect=forward)

    return model


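# Illustrative sketch (hypothetical, not collected as a test): exercising the
# mock model above; the output batch dimension follows the input batch size.
def _example_mock_model_usage(mock_model):
    batch = torch.zeros(2, 3, 512, 512)
    output = mock_model(batch)
    assert output.shape == (2, 1, 512, 512)

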
@pytest.fixture
def mock_sam2_predictor():
    """Create a mock SAM2 predictor."""
    predictor = MagicMock()

    def predict(image):
        # Return a random binary mask matching the input resolution.
        h, w = image.shape[:2] if hasattr(image, 'shape') else (512, 512)
        return np.random.randint(0, 2, (h, w), dtype=np.uint8) * 255

    predictor.predict = MagicMock(side_effect=predict)
    predictor.set_image = MagicMock(return_value=None)

    return predictor


@pytest.fixture
def mock_matanyone_model():
    """Create a mock MatAnyone model."""
    model = MagicMock()

    def refine(image, mask):
        # Stand-in for matting refinement: just soften the mask edges.
        return cv2.GaussianBlur(mask, (5, 5), 2)

    model.refine = MagicMock(side_effect=refine)

    return model


@pytest.fixture
def pipeline_config():
    """Create pipeline configuration for testing."""
    from api.pipeline import PipelineConfig

    return PipelineConfig(
        use_gpu=False,
        quality_preset='medium',
        enable_cache=False,
        batch_size=1,
        max_workers=2
    )


@pytest.fixture
def mock_pipeline(pipeline_config):
    """Create a mock processing pipeline."""
    from api.pipeline import ProcessingPipeline

    # Stub out model loading so constructing the pipeline never touches real weights.
    with pytest.MonkeyPatch().context() as m:
        m.setattr('api.pipeline.ModelFactory.load_model',
                  lambda self, *args, **kwargs: Mock())
        pipeline = ProcessingPipeline(pipeline_config)

    return pipeline


@pytest.fixture
def api_client():
    """Create a test client for the API."""
    from fastapi.testclient import TestClient
    from api.api_server import app

    return TestClient(app)


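# Illustrative sketch (hypothetical): a smoke check against the test client.
# "/health" is an assumed route name, not confirmed by api_server.
def _example_health_check(api_client):
    response = api_client.get("/health")  # hypothetical endpoint
    assert response.status_code == 200

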
@pytest.fixture
def mock_job_manager():
    """Create a mock job manager."""
    manager = MagicMock()
    manager.create_job = MagicMock(return_value='test-job-123')
    manager.get_job = MagicMock(return_value={'status': 'processing'})
    manager.update_job = MagicMock(return_value=None)

    return manager


@pytest.fixture
def temp_dir(test_config):
    """Create a temporary directory for test files."""
    temp_path = Path(test_config['temp_dir']) / 'test_run'
    temp_path.mkdir(parents=True, exist_ok=True)
    yield temp_path

    if temp_path.exists():
        shutil.rmtree(temp_path)


@pytest.fixture
def sample_files(temp_dir, sample_image):
    """Create sample files in temp directory."""
    files = {}

    # Single sample image.
    image_path = temp_dir / 'sample.jpg'
    cv2.imwrite(str(image_path), sample_image)
    files['image'] = image_path

    # A few extra copies for batch-style tests.
    for i in range(3):
        path = temp_dir / f'image_{i}.jpg'
        cv2.imwrite(str(path), sample_image)
        files[f'image_{i}'] = path

    return files


@pytest.fixture
def mock_registry():
    """Create a mock model registry."""
    from models.registry import ModelRegistry, ModelInfo, ModelTask, ModelFramework

    registry = ModelRegistry(models_dir=Path(tempfile.mkdtemp()))

    # Register a single dummy model entry for the tests to look up.
    test_model = ModelInfo(
        model_id='test-model',
        name='Test Model',
        version='1.0',
        task=ModelTask.SEGMENTATION,
        framework=ModelFramework.PYTORCH,
        url='http://example.com/model.pth',
        filename='test_model.pth',
        file_size=1000000
    )

    registry.register_model(test_model)

    return registry


@pytest.fixture
def mock_websocket():
    """Create a mock WebSocket connection."""
    ws = MagicMock()
    # Note: FastAPI's WebSocket methods are coroutines; switch these to
    # unittest.mock.AsyncMock if the code under test awaits them.
    ws.accept = MagicMock(return_value=None)
    ws.send_json = MagicMock(return_value=None)
    ws.receive_text = MagicMock(return_value='{"type": "ping", "data": {}}')

    return ws


@pytest.fixture
def mock_progress_callback():
    """Create a mock progress callback."""
    callback = MagicMock()
    return callback


@pytest.fixture
def device():
    """Get device for testing."""
    return 'cuda' if torch.cuda.is_available() else 'cpu'


@pytest.fixture
def performance_timer():
    """Timer for performance testing."""
    import time

    class Timer:
        def __init__(self):
            self.start_time = None
            self.elapsed = 0

        def __enter__(self):
            # perf_counter is monotonic and better suited to timing than time().
            self.start_time = time.perf_counter()
            return self

        def __exit__(self, *args):
            self.elapsed = time.perf_counter() - self.start_time

    return Timer


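# Illustrative sketch (hypothetical, not collected as a test): timing a block
# with the `performance_timer` fixture, which returns the Timer class above.
def _example_timed_block(performance_timer):
    with performance_timer() as timer:
        np.zeros((2048, 2048), dtype=np.float32).sum()
    assert timer.elapsed < 5.0  # generous bound; tune per environment

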
def pytest_configure(config):
    """Register custom markers."""
    config.addinivalue_line(
        "markers", "slow: marks tests as slow (deselect with '-m \"not slow\"')"
    )
    config.addinivalue_line(
        "markers", "gpu: marks tests that require GPU"
    )
    config.addinivalue_line(
        "markers", "integration: marks integration tests"
    )
    config.addinivalue_line(
        "markers", "unit: marks unit tests"
    )
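

# How test modules are expected to use the markers registered above
# (illustrative; actual tests live in the test modules, not in conftest):
#
#     @pytest.mark.slow
#     @pytest.mark.gpu
#     def test_full_pipeline_on_gpu(device):
#         ...
#
# Fast CPU-only runs can then deselect them with:
#     pytest -m "not slow and not gpu"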