"""
pytest配置文件
定义测试fixtures和全局配置
"""

import pytest
import asyncio
import os
import tempfile
import shutil
from typing import Generator, AsyncGenerator
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from fastapi.testclient import TestClient

from main import app
from core.database import Base, get_db
from core.config import settings


# Test database configuration
TEST_DATABASE_URL = "sqlite:///./test_campus_video_twin.db"


@pytest.fixture(scope="session")
def test_engine():
    """创建测试数据库引擎"""
    engine = create_engine(
        TEST_DATABASE_URL,
        connect_args={"check_same_thread": False}
    )
    
    # 创建所有表
    Base.metadata.create_all(bind=engine)
    
    yield engine
    
    # 清理
    Base.metadata.drop_all(bind=engine)
    if os.path.exists("./test_campus_video_twin.db"):
        os.remove("./test_campus_video_twin.db")


@pytest.fixture(scope="function")
def test_db(test_engine) -> Generator[Session, None, None]:
    """创建测试数据库会话"""
    TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=test_engine)
    
    connection = test_engine.connect()
    transaction = connection.begin()
    session = TestingSessionLocal(bind=connection)
    
    yield session
    
    session.close()
    transaction.rollback()
    connection.close()


@pytest.fixture(scope="function")
def test_client(test_db: Session) -> Generator[TestClient, None, None]:
    """创建测试客户端"""
    
    def override_get_db():
        try:
            yield test_db
        finally:
            pass
    
    app.dependency_overrides[get_db] = override_get_db
    
    with TestClient(app) as client:
        yield client
    
    app.dependency_overrides.clear()


@pytest.fixture(scope="session")
def event_loop():
    """创建事件循环"""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="function")
def temp_dir():
    """创建临时目录"""
    temp_dir = tempfile.mkdtemp()
    yield temp_dir
    shutil.rmtree(temp_dir)


@pytest.fixture(scope="function")
def mock_gpu_available():
    """模拟GPU可用"""
    original_value = os.environ.get("CUDA_VISIBLE_DEVICES")
    os.environ["CUDA_VISIBLE_DEVICES"] = "0"
    yield True
    if original_value is not None:
        os.environ["CUDA_VISIBLE_DEVICES"] = original_value
    else:
        os.environ.pop("CUDA_VISIBLE_DEVICES", None)


@pytest.fixture(scope="function")
def mock_gpu_unavailable():
    """模拟GPU不可用"""
    original_value = os.environ.get("CUDA_VISIBLE_DEVICES")
    os.environ["CUDA_VISIBLE_DEVICES"] = ""
    yield False
    if original_value is not None:
        os.environ["CUDA_VISIBLE_DEVICES"] = original_value
    else:
        os.environ.pop("CUDA_VISIBLE_DEVICES", None)


@pytest.fixture(scope="function")
def sample_camera_data():
    """示例摄像头数据"""
    return {
        "name": "测试摄像头",
        "location": "测试位置",
        "ip_address": "192.168.1.100",
        "port": 554,
        "username": "admin",
        "password": "123456",
        "stream_url": "rtsp://192.168.1.100:554/stream1",
        "status": "online",
        "position": {"x": 100.0, "y": 200.0, "z": 10.0}
    }


@pytest.fixture(scope="function")
def sample_alert_data():
    """示例告警数据"""
    return {
        "title": "测试告警",
        "message": "这是一个测试告警",
        "level": "medium",
        "location": "测试位置",
        "alert_type": "test",
        "metadata": {"test": True}
    }


# Performance-testing fixtures
@pytest.fixture(scope="session")
def performance_test_config():
    """性能测试配置"""
    return {
        "max_response_time": 2.0,
        "max_concurrent_users": 10,
        "test_duration": 30,
        "acceptable_error_rate": 0.05
    }


@pytest.fixture(scope="function")
def performance_metrics():
    """性能指标收集器"""
    from tests.performance.benchmark_config import PerformanceMetrics
    return PerformanceMetrics()


# Integration-testing fixtures
@pytest.fixture(scope="function")
def integration_test_data():
    """集成测试数据"""
    return {
        "cameras": [
            {
                "name": f"集成测试摄像头{i}",
                "location": f"测试位置{i}",
                "ip_address": f"192.168.1.{100+i}",
                "port": 554,
                "username": "admin",
                "password": "123456",
                "stream_url": f"rtsp://192.168.1.{100+i}:554/stream1",
                "status": "online",
                "position": {"x": float(i*50), "y": float(i*30), "z": 10.0}
            }
            for i in range(5)
        ],
        "alerts": [
            {
                "title": f"集成测试告警{i}",
                "message": f"集成测试告警消息{i}",
                "level": ["low", "medium", "high", "critical"][i % 4],
                "location": f"测试位置{i}",
                "alert_type": "integration_test",
                "metadata": {"test_id": i}
            }
            for i in range(10)
        ]
    }


# Test-environment configuration
def pytest_configure(config):
    """Pytest hook: set environment variables for the test run.

    NOTE(review): this function is SHADOWED by a second ``pytest_configure``
    definition later in this module, so as written it never runs — the env
    vars below are never set. The two definitions should be merged into one.
    """
    # Mark the process as running under tests and point it at the test DB.
    os.environ["TESTING"] = "1"
    os.environ["DATABASE_URL"] = TEST_DATABASE_URL
    
    # Disable GPU tasks and external services in the test environment.
    os.environ["DISABLE_GPU_TASKS"] = "1"
    os.environ["DISABLE_EXTERNAL_SERVICES"] = "1"


def pytest_unconfigure(config):
    """Pytest hook: remove the environment variables set for the test run."""
    for var in ("TESTING", "DATABASE_URL", "DISABLE_GPU_TASKS", "DISABLE_EXTERNAL_SERVICES"):
        os.environ.pop(var, None)


# Test markers
def pytest_collection_modifyitems(config, items):
    """Pytest hook: auto-apply markers based on each collected test's node id."""
    marker_rules = [
        (("integration", "e2e"), pytest.mark.slow),
        (("performance", "benchmark"), pytest.mark.performance),
        (("gpu",), pytest.mark.gpu),
    ]
    for item in items:
        for keywords, marker in marker_rules:
            if any(keyword in item.nodeid for keyword in keywords):
                item.add_marker(marker)


# Custom markers / plugins.
# No extra plugins are loaded for the test session.
pytest_plugins = []


def pytest_configure(config):
    """Pytest hook: set up the test environment and register custom markers.

    This module previously defined ``pytest_configure`` twice; the later
    definition shadowed the earlier one, so the environment variables were
    never set. Both responsibilities are merged here.
    """
    # Mark the process as running under tests and point it at the test DB.
    os.environ["TESTING"] = "1"
    os.environ["DATABASE_URL"] = TEST_DATABASE_URL

    # Disable GPU tasks and external services in the test environment.
    os.environ["DISABLE_GPU_TASKS"] = "1"
    os.environ["DISABLE_EXTERNAL_SERVICES"] = "1"

    # Register custom markers so `pytest --strict-markers` accepts them.
    config.addinivalue_line("markers", "slow: marks tests as slow")
    config.addinivalue_line("markers", "performance: marks tests as performance tests")
    config.addinivalue_line("markers", "gpu: marks tests as requiring GPU")
    config.addinivalue_line("markers", "integration: marks tests as integration tests")
    config.addinivalue_line("markers", "e2e: marks tests as end-to-end tests")