"""
态势对象数据同步业务逻辑服务层
"""
import asyncio
import uuid
import json
import secrets
import hashlib
from typing import List, Optional, Dict, Any, Tuple
from datetime import datetime, timedelta
from urllib.parse import urlparse

# Database drivers are loaded lazily so this module imports cleanly even
# when an optional backend driver is not installed.
asyncpg = None
aiomysql = None

def get_asyncpg():
    """Return the asyncpg module, importing it on first use."""
    global asyncpg
    if asyncpg is not None:
        return asyncpg
    try:
        import asyncpg
    except ImportError:
        raise ImportError("asyncpg is required for PostgreSQL support. Please install it with: pip install asyncpg")
    return asyncpg

def get_aiomysql():
    """Return the aiomysql module, importing it on first use."""
    global aiomysql
    if aiomysql is not None:
        return aiomysql
    try:
        import aiomysql
    except ImportError:
        raise ImportError("aiomysql is required for MySQL support. Please install it with: pip install aiomysql")
    return aiomysql

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy import select, func, and_, or_, update, delete, text
from sqlalchemy.orm import selectinload

from src.situation.sync_models import DataSource, SyncTask, FieldMapping, SyncLog
from src.situation.sync_schemas import (
    DataSourceCreate, DataSourceUpdate, SyncTaskCreate, SyncTaskUpdate,
    FieldMappingCreate, FieldMappingUpdate, TestConnectionResponse,
    TableStructureResponse, SyncExecutionResponse, SyncStatisticsResponse,
    SyncStatus, ExecutionStatus, DatabaseType, SyncMode
)
from src.situation.models import SituationObject
from src.situation.schemas import SituationObjectCreate
from src.utils import logger


class DataSourceEncryption:
    """Obfuscation helpers for stored data-source passwords.

    NOTE(review): base64 is encoding, not encryption — it only prevents
    casual plaintext exposure. Real secrecy would need a keyed cipher
    (e.g. Fernet) plus key management; changing the format would require
    migrating stored rows, so it is only flagged here.
    """

    @staticmethod
    def encrypt_password(password: str) -> str:
        """Encode *password* with base64 so it is not stored as plaintext."""
        import base64
        encoded_bytes = base64.b64encode(password.encode('utf-8'))
        return encoded_bytes.decode('utf-8')

    @staticmethod
    def decrypt_password(encrypted_password: str) -> str:
        """Decode a stored password.

        Returns:
            "" for empty input; the decoded password for the current base64
            format; the sentinel "LEGACY_HASH_NEEDS_RESET" for irreversible
            legacy "salt:hash" PBKDF2 entries; otherwise the input unchanged.
        """
        if not encrypted_password:
            return ""

        import base64

        # Try the current base64 format first.
        # BUGFIX: validate=True makes b64decode reject strings containing
        # non-alphabet characters (e.g. the ':' of the legacy salt:hash
        # format) instead of silently skipping them and decoding garbage,
        # which previously could bypass the legacy-format detection below.
        try:
            decoded_bytes = base64.b64decode(encrypted_password.encode('utf-8'), validate=True)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            logger.debug(f"Base64 解码失败，尝试旧格式: {e}")

        # Fall back: detect the old "salt:hash_hex" PBKDF2 format, which is
        # a one-way hash and therefore cannot be decrypted.
        try:
            if ':' in encrypted_password:
                if len(encrypted_password.split(':')) == 2:
                    logger.warning("检测到旧格式的密码哈希，无法解密获取原始密码。需要重新设置密码。")
                    # Sentinel telling callers the password must be re-entered.
                    return "LEGACY_HASH_NEEDS_RESET"
                # Unknown colon-separated format — return as-is.
                return encrypted_password
            # No separator — unknown format, return unchanged.
            return encrypted_password
        except Exception as e:
            logger.error(f"处理旧格式密码失败: {e}")
            return encrypted_password

class DatabaseConnector:
    """Async helpers that probe external MySQL/PostgreSQL databases.

    Every method opens a short-lived connection, runs metadata queries and
    closes the connection in a ``finally`` block so sockets are not leaked
    when a query fails between connect and close.
    """

    @staticmethod
    async def test_mysql_connection(host: str, port: int, database: str,
                                   username: str, password: str,
                                   connection_params: Dict[str, Any] = None) -> Tuple[bool, str, List[str]]:
        """Try to connect to MySQL and list its tables.

        Returns:
            (success, message, table_names); never raises — failures are
            reported through the returned tuple.
        """
        try:
            aiomysql = get_aiomysql()
            conn_params = {
                'host': host,
                'port': port,
                'db': database,
                'user': username,
                'password': password,
                'charset': 'utf8mb4'
            }

            # Caller-supplied extras (e.g. ssl, connect_timeout) win over defaults.
            if connection_params:
                conn_params.update(connection_params)

            conn = await aiomysql.connect(**conn_params)
            try:
                tables = []
                async with conn.cursor() as cursor:
                    await cursor.execute("SHOW TABLES")
                    result = await cursor.fetchall()
                    tables = [row[0] for row in result]
            finally:
                # BUGFIX: close even when SHOW TABLES fails, so the socket
                # is not leaked (the original only closed on success).
                conn.close()

            return True, "连接成功", tables

        except Exception as e:
            logger.error(f"MySQL连接测试失败: {e}")
            return False, f"连接失败: {str(e)}", []

    @staticmethod
    async def test_postgresql_connection(host: str, port: int, database: str,
                                       username: str, password: str,
                                       connection_params: Dict[str, Any] = None) -> Tuple[bool, str, List[str]]:
        """Try to connect to PostgreSQL and list public-schema tables.

        Returns:
            (success, message, table_names); never raises — failures are
            reported through the returned tuple.
        """
        try:
            logger.info(f"PostgreSQL连接测试 - 主机: {host}, 端口: {port}, 数据库: {database}, 用户: {username}")
            logger.debug(f"PostgreSQL连接测试 - 密码长度: {len(password) if password else 0}")

            asyncpg = get_asyncpg()
            conn_params = {
                'host': host,
                'port': port,
                'database': database,
                'user': username,
                'password': password,
                'server_settings': {
                    'application_name': 'yuxi-know-sync-test'
                }
            }

            if connection_params:
                conn_params.update(connection_params)
                logger.debug(f"PostgreSQL连接测试 - 额外连接参数: {connection_params}")

            conn = await asyncpg.connect(**conn_params)
            try:
                # Only ordinary tables in the public schema.
                query = """
                    SELECT table_name
                    FROM information_schema.tables
                    WHERE table_schema = 'public'
                    AND table_type = 'BASE TABLE'
                    ORDER BY table_name
                """
                rows = await conn.fetch(query)
                tables = [row['table_name'] for row in rows]
            finally:
                # BUGFIX: close even when the query fails, so the
                # connection is not leaked.
                await conn.close()

            return True, "连接成功", tables

        except Exception as e:
            logger.error(f"PostgreSQL连接测试失败: {e}")
            return False, f"连接失败: {str(e)}", []

    @staticmethod
    async def get_mysql_table_structure(host: str, port: int, database: str,
                                       username: str, password: str,
                                       table_name: str,
                                       connection_params: Dict[str, Any] = None) -> List[Dict[str, Any]]:
        """Describe the columns of a MySQL table.

        Returns:
            A list of dicts with name/type/nullable/primary_key/default/comment.

        Raises:
            Exception: driver errors are logged and re-raised.
        """
        try:
            aiomysql = get_aiomysql()
            conn_params = {
                'host': host,
                'port': port,
                'db': database,
                'user': username,
                'password': password,
                'charset': 'utf8mb4'
            }

            if connection_params:
                conn_params.update(connection_params)

            conn = await aiomysql.connect(**conn_params)
            try:
                columns = []
                async with conn.cursor() as cursor:
                    # SHOW FULL COLUMNS cannot take bind parameters, so quote
                    # the identifier with backticks (doubling embedded
                    # backticks) to prevent SQL injection via table_name.
                    safe_table = table_name.replace('`', '``')
                    await cursor.execute(f"SHOW FULL COLUMNS FROM `{safe_table}`")
                    result = await cursor.fetchall()

                    # SHOW FULL COLUMNS row layout:
                    # Field, Type, Collation, Null, Key, Default, Extra, Privileges, Comment
                    for row in result:
                        columns.append({
                            'name': row[0],          # Field
                            'type': row[1],          # Type
                            'nullable': row[3] == 'YES',  # Null
                            'primary_key': row[4] == 'PRI',  # Key
                            'default': row[5],       # Default
                            'comment': row[8] if len(row) > 8 else ''  # Comment
                        })
            finally:
                # BUGFIX: close the connection on error paths too.
                conn.close()

            return columns

        except Exception as e:
            logger.error(f"获取MySQL表结构失败: {e}")
            raise

    @staticmethod
    async def get_postgresql_table_structure(host: str, port: int, database: str,
                                           username: str, password: str,
                                           table_name: str,
                                           connection_params: Dict[str, Any] = None) -> List[Dict[str, Any]]:
        """Describe the columns of a PostgreSQL (public schema) table.

        Returns:
            A list of dicts with name/type/nullable/primary_key/default/
            max_length/comment.

        Raises:
            Exception: driver errors are logged and re-raised.
        """
        try:
            asyncpg = get_asyncpg()
            conn_params = {
                'host': host,
                'port': port,
                'database': database,
                'user': username,
                'password': password
            }

            if connection_params:
                conn_params.update(connection_params)

            conn = await asyncpg.connect(**conn_params)
            try:
                # Column metadata plus pg_description comments ($1 is a bind
                # parameter — safe against injection).
                query = """
                    SELECT
                        c.column_name,
                        c.data_type,
                        c.is_nullable,
                        c.column_default,
                        c.character_maximum_length,
                        pgd.description as column_comment
                    FROM information_schema.columns c
                    LEFT JOIN pg_catalog.pg_statio_all_tables st
                        ON c.table_schema = st.schemaname
                        AND c.table_name = st.relname
                    LEFT JOIN pg_catalog.pg_description pgd
                        ON pgd.objoid = st.relid
                        AND pgd.objsubid = c.ordinal_position
                    WHERE c.table_name = $1
                    AND c.table_schema = 'public'
                    ORDER BY c.ordinal_position
                """

                rows = await conn.fetch(query, table_name)

                columns = []
                for row in rows:
                    columns.append({
                        'name': row['column_name'],
                        'type': row['data_type'],
                        'nullable': row['is_nullable'] == 'YES',
                        'primary_key': False,  # filled in below from the PK query
                        'default': row['column_default'],
                        'max_length': row['character_maximum_length'],
                        'comment': row['column_comment'] or ''
                    })

                # Separate query for the primary-key constraint columns.
                pk_query = """
                    SELECT column_name
                    FROM information_schema.table_constraints tc
                    JOIN information_schema.key_column_usage kcu
                        ON tc.constraint_name = kcu.constraint_name
                    WHERE tc.table_name = $1
                    AND tc.constraint_type = 'PRIMARY KEY'
                    AND tc.table_schema = 'public'
                """

                pk_rows = await conn.fetch(pk_query, table_name)
                pk_columns = {row['column_name'] for row in pk_rows}

                # Mark primary-key columns in the result.
                for col in columns:
                    if col['name'] in pk_columns:
                        col['primary_key'] = True
            finally:
                # BUGFIX: close the connection on error paths too.
                await conn.close()

            return columns

        except Exception as e:
            logger.error(f"获取PostgreSQL表结构失败: {e}")
            raise


class DataSyncService:
    """数据同步服务"""

    @staticmethod
    async def create_data_source(db: AsyncSession, data_source: DataSourceCreate, user: str = None) -> DataSource:
        """Create a new data source.

        Rejects duplicate names among non-deleted sources, stores the
        password in obfuscated form, and records the creating user.
        """
        # Reject duplicate names among non-deleted sources.
        dup_result = await db.execute(
            select(DataSource).where(
                and_(
                    DataSource.name == data_source.name,
                    DataSource.is_deleted == False
                )
            )
        )
        if dup_result.scalar_one_or_none():
            raise ValueError("数据源名称已存在")

        new_source = DataSource(
            name=data_source.name,
            description=data_source.description,
            db_type=data_source.db_type,
            host=data_source.host,
            port=data_source.port,
            database_name=data_source.database_name,
            username=data_source.username,
            # Never persist the plaintext password.
            password=DataSourceEncryption.encrypt_password(data_source.password),
            connection_params=data_source.connection_params or {},
            created_by=user
        )

        db.add(new_source)
        await db.commit()
        await db.refresh(new_source)

        logger.info(f"创建数据源成功: {new_source.name} (ID: {new_source.id})")
        return new_source

    @staticmethod
    async def get_data_sources(db: AsyncSession, page: int = 1, page_size: int = 20,
                              db_type: DatabaseType = None) -> Tuple[List[DataSource], int]:
        """Return one page of non-deleted data sources plus the total count."""
        base = select(DataSource).where(DataSource.is_deleted == False)
        if db_type:
            base = base.where(DataSource.db_type == db_type)

        # Total count before pagination is applied.
        total = await db.scalar(select(func.count()).select_from(base.subquery()))

        page_query = (
            base
            .offset((page - 1) * page_size)
            .limit(page_size)
            .order_by(DataSource.created_at.desc())
        )
        rows = await db.execute(page_query)
        return list(rows.scalars().all()), total

    @staticmethod
    async def get_data_source_by_id(db: AsyncSession, source_id: uuid.UUID) -> Optional[DataSource]:
        """Fetch a single non-deleted data source by primary key, or None."""
        query = select(DataSource).where(
            and_(
                DataSource.id == source_id,
                DataSource.is_deleted == False
            )
        )
        return (await db.execute(query)).scalar_one_or_none()

    @staticmethod
    async def update_data_source(db: AsyncSession, source_id: uuid.UUID,
                                data_source: DataSourceUpdate, user: str = None) -> DataSource:
        """Apply a partial update to a data source; re-encrypts the password when provided."""
        target = await DataSyncService.get_data_source_by_id(db, source_id)
        if not target:
            raise ValueError("数据源不存在")

        # Guard against renaming onto another live source's name.
        if data_source.name and data_source.name != target.name:
            clash = await db.execute(
                select(DataSource).where(
                    and_(
                        DataSource.name == data_source.name,
                        DataSource.id != source_id,
                        DataSource.is_deleted == False
                    )
                )
            )
            if clash.scalar_one_or_none():
                raise ValueError("数据源名称已存在")

        # Only fields that were explicitly set by the caller.
        changes = data_source.dict(exclude_unset=True)

        if 'password' in changes:
            # Passwords are stored obfuscated, never as plaintext.
            changes['password'] = DataSourceEncryption.encrypt_password(changes['password'])

        for attr, new_value in changes.items():
            setattr(target, attr, new_value)

        target.updated_by = user
        target.updated_at = datetime.utcnow()

        await db.commit()
        await db.refresh(target)

        logger.info(f"更新数据源成功: {target.name} (ID: {target.id})")
        return target

    @staticmethod
    async def delete_data_source(db: AsyncSession, source_id: uuid.UUID) -> bool:
        """Soft-delete a data source; refuses while sync tasks still reference it."""
        target = await DataSyncService.get_data_source_by_id(db, source_id)
        if not target:
            raise ValueError("数据源不存在")

        # Block deletion while live sync tasks still point at this source.
        task_count = await db.scalar(
            select(func.count()).select_from(
                select(SyncTask).where(
                    and_(
                        SyncTask.data_source_id == source_id,
                        SyncTask.is_deleted == False
                    )
                ).subquery()
            )
        )
        if task_count > 0:
            raise ValueError(f"该数据源下还有 {task_count} 个同步任务，无法删除")

        # Soft delete: mark instead of removing the row.
        target.is_deleted = True
        target.deleted_at = datetime.utcnow()
        await db.commit()

        logger.info(f"删除数据源成功: {target.name} (ID: {target.id})")
        return True

    @staticmethod
    async def test_connection(db: AsyncSession, source_id: uuid.UUID) -> TestConnectionResponse:
        """Test connectivity of a stored data source.

        Decrypts the stored password, dispatches to the driver-specific
        probe, persists the outcome (is_connected / last_test_time /
        test_error) on the source row, and returns a TestConnectionResponse.
        Connection failures — including an unsupported db_type, which is
        caught by the except below — are reported in the response rather
        than raised.

        Raises:
            ValueError: only when the data source id does not exist.
        """
        db_source = await DataSyncService.get_data_source_by_id(db, source_id)
        if not db_source:
            raise ValueError("数据源不存在")

        # Recover the plaintext password for the live connection attempt.
        password = DataSourceEncryption.decrypt_password(db_source.password)

        # Sentinel emitted for irreversible legacy PBKDF2 hashes: the original
        # password cannot be recovered, so a connection test is impossible.
        if password == "LEGACY_HASH_NEEDS_RESET":
            logger.warning(f"数据源 {db_source.name} 使用旧格式密码，无法解密")
            return TestConnectionResponse(
                success=False,
                message="此数据源使用了旧格式的密码加密，无法进行连接测试。请编辑数据源并重新设置密码。",
                available_tables=[]
            )

        try:
            if db_source.db_type == DatabaseType.MYSQL:
                success, message, tables = await DatabaseConnector.test_mysql_connection(
                    db_source.host, db_source.port, db_source.database_name,
                    db_source.username, password, db_source.connection_params
                )
            elif db_source.db_type == DatabaseType.POSTGRESQL:
                success, message, tables = await DatabaseConnector.test_postgresql_connection(
                    db_source.host, db_source.port, db_source.database_name,
                    db_source.username, password, db_source.connection_params
                )
            else:
                raise ValueError(f"不支持的数据库类型: {db_source.db_type}")

            # Persist the test outcome on the data-source row.
            db_source.is_connected = success
            db_source.last_test_time = datetime.utcnow()
            db_source.test_error = message if not success else None

            await db.commit()

            return TestConnectionResponse(
                success=success,
                message=message,
                available_tables=tables
            )

        except Exception as e:
            logger.error(f"测试数据源连接失败: {e}")

            # Record the failure so callers can see why the source is offline.
            db_source.is_connected = False
            db_source.last_test_time = datetime.utcnow()
            db_source.test_error = str(e)

            await db.commit()

            return TestConnectionResponse(
                success=False,
                message=f"连接测试失败: {str(e)}",
                available_tables=[]
            )

    @staticmethod
    async def get_table_structure(db: AsyncSession, source_id: uuid.UUID,
                                 table_name: str) -> TableStructureResponse:
        """Return the column metadata of *table_name* on the given data source.

        Any failure inside the try — including an unsupported db_type — is
        logged and re-raised as a wrapped ValueError.
        """
        source = await DataSyncService.get_data_source_by_id(db, source_id)
        if not source:
            raise ValueError("数据源不存在")
        if not source.is_connected:
            raise ValueError("数据源未连接，请先测试连接")

        plain_password = DataSourceEncryption.decrypt_password(source.password)

        try:
            # Pick the driver-specific structure reader.
            if source.db_type == DatabaseType.MYSQL:
                fetcher = DatabaseConnector.get_mysql_table_structure
            elif source.db_type == DatabaseType.POSTGRESQL:
                fetcher = DatabaseConnector.get_postgresql_table_structure
            else:
                raise ValueError(f"不支持的数据库类型: {source.db_type}")

            columns = await fetcher(
                source.host, source.port, source.database_name,
                source.username, plain_password, table_name, source.connection_params
            )

            return TableStructureResponse(
                table_name=table_name,
                columns=columns
            )

        except Exception as e:
            logger.error(f"获取表结构失败: {e}")
            raise ValueError(f"获取表结构失败: {str(e)}")

    @staticmethod
    async def create_sync_task(db: AsyncSession, sync_task: SyncTaskCreate, user: str = None) -> SyncTask:
        """Create a sync task together with its field mappings."""
        # The referenced data source must exist (and not be soft-deleted).
        if not await DataSyncService.get_data_source_by_id(db, sync_task.data_source_id):
            raise ValueError("数据源不存在")

        # Task names must be unique among non-deleted tasks.
        dup = await db.execute(
            select(SyncTask).where(
                and_(
                    SyncTask.task_name == sync_task.task_name,
                    SyncTask.is_deleted == False
                )
            )
        )
        if dup.scalar_one_or_none():
            raise ValueError("同步任务名称已存在")

        new_task = SyncTask(
            task_name=sync_task.task_name,
            description=sync_task.description,
            data_source_id=sync_task.data_source_id,
            source_table=sync_task.source_table,
            sync_mode=sync_task.sync_mode,
            schedule_expression=sync_task.schedule_expression,
            batch_size=sync_task.batch_size,
            sync_filter=sync_task.sync_filter,
            is_active=sync_task.is_active,
            created_by=user
        )
        db.add(new_task)
        # Flush (not commit) so new_task.id is available for the mappings below.
        await db.flush()

        db.add_all([
            FieldMapping(
                sync_task_id=new_task.id,
                source_field=m.source_field,
                target_field=m.target_field,
                field_type=m.field_type,
                is_required=m.is_required,
                default_value=m.default_value,
                transform_rule=m.transform_rule
            )
            for m in sync_task.field_mappings
        ])

        await db.commit()
        await db.refresh(new_task)

        logger.info(f"创建同步任务成功: {new_task.task_name} (ID: {new_task.id})")
        return new_task

    @staticmethod
    async def get_sync_tasks(db: AsyncSession, page: int = 1, page_size: int = 20,
                           filters: Dict[str, Any] = None) -> Tuple[List[SyncTask], int]:
        """Return one page of non-deleted sync tasks (optionally filtered) plus the total count."""
        base = select(SyncTask).where(SyncTask.is_deleted == False)

        # Apply each optional filter that the caller provided.
        filters = filters or {}
        if filters.get('task_name'):
            base = base.where(SyncTask.task_name.ilike(f"%{filters['task_name']}%"))
        if filters.get('sync_status'):
            base = base.where(SyncTask.sync_status == filters['sync_status'])
        if filters.get('sync_mode'):
            base = base.where(SyncTask.sync_mode == filters['sync_mode'])
        if filters.get('data_source_id'):
            base = base.where(SyncTask.data_source_id == filters['data_source_id'])
        if filters.get('is_active') is not None:
            base = base.where(SyncTask.is_active == filters['is_active'])

        # Total count before pagination is applied.
        total = await db.scalar(select(func.count()).select_from(base.subquery()))

        rows = await db.execute(
            base
            .offset((page - 1) * page_size)
            .limit(page_size)
            .order_by(SyncTask.created_at.desc())
        )
        return list(rows.scalars().all()), total

    @staticmethod
    async def get_sync_task_by_id(db: AsyncSession, task_id: uuid.UUID) -> Optional[SyncTask]:
        """Fetch a non-deleted sync task by id (field mappings eagerly loaded), or None."""
        query = (
            select(SyncTask)
            # Eager-load mappings to avoid lazy loading in an async context.
            .options(selectinload(SyncTask.field_mappings))
            .where(
                and_(
                    SyncTask.id == task_id,
                    SyncTask.is_deleted == False
                )
            )
        )
        return (await db.execute(query)).scalar_one_or_none()

    @staticmethod
    async def execute_sync_task(db: AsyncSession, task_id: uuid.UUID, user: str = None) -> SyncExecutionResponse:
        """Kick off an asynchronous run of a sync task.

        Validates the task (exists, active, not already running), marks it
        RUNNING, writes a SyncLog row, schedules the actual sync work in the
        background and returns immediately.

        Raises:
            ValueError: when the task is missing, disabled or already running.
        """
        db_task = await DataSyncService.get_sync_task_by_id(db, task_id)
        if not db_task:
            raise ValueError("同步任务不存在")

        if not db_task.is_active:
            raise ValueError("同步任务已禁用")

        # Reject concurrent runs of the same task.
        if db_task.sync_status == SyncStatus.RUNNING:
            raise ValueError("同步任务正在执行中")

        # Unique execution id: timestamp plus a random suffix.
        execution_id = f"exec_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"

        db_task.sync_status = SyncStatus.RUNNING
        db_task.last_sync_time = datetime.utcnow()

        sync_log = SyncLog(
            sync_task_id=task_id,
            execution_id=execution_id,
            start_time=datetime.utcnow(),
            status=ExecutionStatus.RUNNING
        )
        db.add(sync_log)

        await db.commit()

        # BUGFIX: keep a strong reference to the background task. The event
        # loop holds only a weak reference to tasks, so a bare
        # asyncio.create_task(...) whose result is discarded may be
        # garbage-collected before it finishes.
        background = asyncio.create_task(DataSyncService._perform_sync(db_task.id, execution_id))
        if not hasattr(DataSyncService, "_background_tasks"):
            DataSyncService._background_tasks = set()
        DataSyncService._background_tasks.add(background)
        background.add_done_callback(DataSyncService._background_tasks.discard)

        logger.info(f"启动同步任务: {db_task.task_name} (执行ID: {execution_id})")

        return SyncExecutionResponse(
            execution_id=execution_id,
            task_name=db_task.task_name,
            status=ExecutionStatus.RUNNING,
            message="同步任务已启动",
            started_at=datetime.utcnow()
        )

    @staticmethod
    async def _perform_sync(task_id: uuid.UUID, execution_id: str):
        """Background worker that runs one sync execution.

        Opens its own DB session (it outlives the request that started it),
        performs the sync, and records the outcome on both the task and its
        SyncLog row. Any failure — including a missing task or data source —
        is routed through the except block so the SyncLog never stays RUNNING.
        """
        from src.situation.database import get_db_session

        async with get_db_session() as db:
            # BUGFIX: pre-bind so the except block cannot hit a NameError when
            # the failure happens before (or during) the task lookup.
            task = None
            try:
                task = await DataSyncService.get_sync_task_by_id(db, task_id)
                if not task:
                    # BUGFIX: raise instead of returning — the original early
                    # return left the SyncLog row stuck in RUNNING forever.
                    raise ValueError(f"同步任务不存在: {task_id}")

                data_source = await DataSyncService.get_data_source_by_id(db, task.data_source_id)
                if not data_source:
                    raise ValueError(f"数据源不存在: {task.data_source_id}")

                # Field mappings drive the source->target transformation.
                mappings_result = await db.execute(
                    select(FieldMapping).where(FieldMapping.sync_task_id == task_id)
                )
                mappings = mappings_result.scalars().all()

                success_count, failed_count = await DataSyncService._sync_data_from_source(
                    data_source, task, mappings
                )

                # Record cumulative counters and final status on the task.
                task.sync_status = SyncStatus.SUCCESS if failed_count == 0 else SyncStatus.FAILED
                task.synced_records = task.synced_records + success_count
                task.failed_records = task.failed_records + failed_count

                # Close out the SyncLog row for this execution.
                log_result = await db.execute(
                    select(SyncLog).where(SyncLog.execution_id == execution_id)
                )
                sync_log = log_result.scalar_one_or_none()

                if sync_log:
                    sync_log.end_time = datetime.utcnow()
                    sync_log.duration = int((sync_log.end_time - sync_log.start_time).total_seconds())
                    sync_log.status = ExecutionStatus.SUCCESS if failed_count == 0 else ExecutionStatus.FAILED
                    sync_log.success_records = success_count
                    sync_log.failed_records = failed_count

                await db.commit()

                logger.info(f"同步任务完成: {task.task_name}, 成功: {success_count}, 失败: {failed_count}")

            except Exception as e:
                logger.error(f"同步任务执行失败: {e}")

                # BUGFIX: roll back first — after a DB error the session is in
                # a failed state and further queries/commits would raise again.
                await db.rollback()

                if task is not None:
                    task.sync_status = SyncStatus.FAILED
                    task.last_error = str(e)

                log_result = await db.execute(
                    select(SyncLog).where(SyncLog.execution_id == execution_id)
                )
                sync_log = log_result.scalar_one_or_none()

                if sync_log:
                    sync_log.end_time = datetime.utcnow()
                    sync_log.duration = int((sync_log.end_time - sync_log.start_time).total_seconds())
                    sync_log.status = ExecutionStatus.FAILED
                    sync_log.error_message = str(e)

                await db.commit()
    @staticmethod
    async def _sync_data_from_source(data_source: DataSource, task: SyncTask,
                                   mappings: List[FieldMapping]) -> Tuple[int, int]:
        """Pull rows from the source database into situation objects.

        Placeholder: the real pipeline (connect to the source, page through
        the source table honoring the task's filter, apply the field
        mappings, bulk-insert situation objects, handle conflicts/errors)
        is not implemented yet, so this currently reports zero successes
        and zero failures.

        Returns:
            (success_count, failed_count) tuple.
        """
        # TODO: implement the full sync pipeline described above.
        logger.info(f"数据同步逻辑待实现 - 任务: {task.task_name}")
        return 0, 0

    @staticmethod
    async def get_sync_statistics(db: AsyncSession) -> SyncStatisticsResponse:
        """Aggregate dashboard numbers: task counts, today's outcomes and sync times."""
        now = datetime.utcnow()
        today_start = datetime.combine(now.date(), datetime.min.time())

        async def count_of(stmt):
            # Wrap an entity select in COUNT(*) over its subquery.
            return await db.scalar(select(func.count()).select_from(stmt.subquery()))

        total_tasks = await count_of(
            select(SyncTask).where(SyncTask.is_deleted == False)
        )
        active_tasks = await count_of(
            select(SyncTask).where(
                and_(
                    SyncTask.is_deleted == False,
                    SyncTask.is_active == True
                )
            )
        )
        successful_syncs_today = await count_of(
            select(SyncLog).where(
                and_(
                    SyncLog.start_time >= today_start,
                    SyncLog.status == ExecutionStatus.SUCCESS
                )
            )
        )
        failed_syncs_today = await count_of(
            select(SyncLog).where(
                and_(
                    SyncLog.start_time >= today_start,
                    SyncLog.status == ExecutionStatus.FAILED
                )
            )
        )

        # Most recent successful execution, if any.
        last_sync_time = (await db.execute(
            select(SyncLog.start_time)
            .where(SyncLog.status == ExecutionStatus.SUCCESS)
            .order_by(SyncLog.start_time.desc())
            .limit(1)
        )).scalar()

        # Earliest upcoming scheduled run among active scheduled tasks.
        next_scheduled_sync = (await db.execute(
            select(SyncTask.next_sync_time)
            .where(
                and_(
                    SyncTask.is_active == True,
                    SyncTask.sync_mode == SyncMode.SCHEDULED,
                    SyncTask.next_sync_time > datetime.utcnow()
                )
            )
            .order_by(SyncTask.next_sync_time)
            .limit(1)
        )).scalar()

        return SyncStatisticsResponse(
            total_tasks=total_tasks or 0,
            active_tasks=active_tasks or 0,
            successful_syncs_today=successful_syncs_today or 0,
            failed_syncs_today=failed_syncs_today or 0,
            last_sync_time=last_sync_time,
            next_scheduled_sync=next_scheduled_sync
        )