#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Redis监控系统API模块
提供Redis任务监控和队列监控的完整CRUD API
包含两个监控系统：
1. Redis任务监控系统（redis_task_monitor）
2. Redis队列监控系统（redis_queue_monitor）
"""

import json
import logging
from datetime import datetime, timedelta
from flask import Blueprint, request, jsonify, current_app
from app.utils.mysql_db import get_db_connection, get_db_cursor
from app.utils.logger import log_api_call, FlinkLogger

# 创建Blueprint
# 注意：路由前缀在注册时设置，这里不设置，以便支持多个前缀
bp = Blueprint('redis_task_monitor', __name__)
logger = logging.getLogger(__name__)


# ============================================================================
# 工具函数
# ============================================================================

def format_datetime_field(value):
    """Render a datetime-like value as 'YYYY-MM-DD HH:MM:SS'; pass anything else through unchanged."""
    is_datetime_like = bool(value) and hasattr(value, 'strftime')
    if not is_datetime_like:
        return value
    return value.strftime('%Y-%m-%d %H:%M:%S')


def format_row_datetime_fields(row):
    """Stringify every known datetime column present in *row*, in place, and return it."""
    # Union of the timestamp columns used across task and queue monitor tables.
    known_timestamp_columns = (
        'created_at', 'updated_at', 'last_scan_time', 'last_alert_time',
        'latest_data_timestamp', 'batch_time', 'collection_time',
        'expire_time', 'last_seen_time', 'heartbeat_time', 'last_check_time',
    )
    for column in known_timestamp_columns:
        if column in row:
            row[column] = format_datetime_field(row[column])
    return row


def format_decimal_field(value):
    """Coerce a DECIMAL column value to float for JSON output; None passes through."""
    return value if value is None else float(value)


# 环境配置相关API已按需求移除


# ============================================================================
# 任务监控配置表 API (redis_task_config) - 任务监控系统
# ============================================================================

@bp.route('/api/redis-task-monitor/configs', methods=['GET'])
@log_api_call("redis_task_monitor.list_task_configs")
def list_task_configs():
    """List task monitor configs with pagination, sorting, filtering and aggregate stats.

    Query params: env_id, category, key_prefix, status, alert_status, search
    (fuzzy over key_prefix/category/description), is_monitored / is_alert_enabled
    (integer flags), page, pageSize, sort, order.

    Returns the JSON envelope {success, message, data: {items, total, stats,
    page, pageSize}} where `stats` aggregates over the whole filtered set,
    not just the current page.
    """
    try:
        env_id = request.args.get('env_id', type=int)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)
        category = request.args.get('category', '').strip()
        key_prefix = request.args.get('key_prefix', '').strip()
        status = request.args.get('status', '').strip()
        is_monitored = request.args.get('is_monitored', '').strip()
        is_alert_enabled = request.args.get('is_alert_enabled', '').strip()
        alert_status = request.args.get('alert_status', '').strip()
        search = request.args.get('search', '').strip()
        sort = request.args.get('sort', 'last_alert_time').strip()
        order = request.args.get('order', 'desc').strip().lower()

        # Sort column and direction are interpolated into SQL, so both are
        # strictly whitelisted.
        allowed_sort_fields = [
            'id', 'env_id', 'category', 'key_prefix', 'is_monitored',
            'is_alert_enabled', 'status', 'alert_status', 'last_scan_time',
            'latest_key_count', 'last_alert_time', 'created_at', 'updated_at'
        ]
        if sort not in allowed_sort_fields:
            sort = 'last_alert_time'
        if order not in ('asc', 'desc'):
            order = 'desc'

        # Build the WHERE clause; every user value goes through a %s placeholder.
        where_conditions = []
        params = []

        if env_id:
            where_conditions.append("env_id = %s")
            params.append(env_id)
        if category:
            where_conditions.append("category = %s")
            params.append(category)
        if key_prefix:
            where_conditions.append("key_prefix = %s")
            params.append(key_prefix)
        if status:
            where_conditions.append("status = %s")
            params.append(status)

        # BUGFIX: int() on arbitrary query input used to raise ValueError and
        # surface as an HTTP 500; reject non-integer flag values with a 400.
        # (flag_name comes from a fixed tuple, so the f-string below is safe.)
        for flag_name, flag_value in (('is_monitored', is_monitored),
                                      ('is_alert_enabled', is_alert_enabled)):
            if flag_value:
                try:
                    params.append(int(flag_value))
                except ValueError:
                    return jsonify({
                        "success": False,
                        "error": f"invalid {flag_name}: {flag_value}"
                    }), 400
                where_conditions.append(f"{flag_name} = %s")

        if alert_status:
            where_conditions.append("alert_status = %s")
            params.append(alert_status)
        if search:
            where_conditions.append("(key_prefix LIKE %s OR category LIKE %s OR description LIKE %s)")
            like = f'%{search}%'
            params.extend([like, like, like])

        where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Total row count of the filtered set.
                cur.execute(
                    f"SELECT COUNT(*) as total FROM redis_task_config WHERE {where_clause}",
                    params)
                total = cur.fetchone()['total']

                # Aggregate stats over the whole filtered set (ignores pagination).
                # NOTE: the previous version rebuilt an identical `stats_where`;
                # where_clause is reused directly.
                stats_sql = f"""
                    SELECT 
                        COUNT(*) as total_count,
                        SUM(CASE WHEN is_monitored = 1 THEN 1 ELSE 0 END) as monitored_count,
                        SUM(CASE WHEN is_alert_enabled = 1 THEN 1 ELSE 0 END) as alert_enabled_count,
                        SUM(CASE WHEN alert_status = 'alerting' THEN 1 ELSE 0 END) as alerting_count,
                        SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active_count,
                        SUM(CASE WHEN status = 'inactive' THEN 1 ELSE 0 END) as inactive_count,
                        SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused_count
                    FROM redis_task_config
                    WHERE {where_clause}
                """
                cur.execute(stats_sql, params)
                stats = cur.fetchone()
                # SUM()/COUNT() over an empty set yields NULLs; report zeros.
                for key in stats:
                    if stats[key] is None:
                        stats[key] = 0

                # For nullable timestamp columns, push NULLs to the end
                # regardless of sort direction.
                offset = (page - 1) * page_size
                nullable_timestamp_fields = ['last_scan_time', 'last_alert_time',
                                             'latest_data_timestamp']
                if sort in nullable_timestamp_fields:
                    order_clause = f"({sort} IS NULL), {sort} {order.upper()}"
                else:
                    order_clause = f"{sort} {order.upper()}"

                sql = f"""
                    SELECT 
                        id, env_id, category, key_prefix, description,
                        is_monitored, is_alert_enabled, scan_count, sample_size,
                        task_type_filter, task_type_mode,
                        webhook_url, alert_on_early_delete, alert_on_expire, status,
                        alert_status, last_alert_time,
                        last_scan_time, latest_key_count, latest_data_timestamp, average_ttl,
                        created_at, updated_at
                    FROM redis_task_config
                    WHERE {where_clause}
                    ORDER BY {order_clause}
                    LIMIT %s OFFSET %s
                """
                cur.execute(sql, params + [page_size, offset])
                rows = cur.fetchall()

                for row in rows:
                    format_row_datetime_fields(row)

                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": {
                        "items": rows,
                        "total": total,
                        "stats": stats,
                        "page": page,
                        "pageSize": page_size
                    }
                })
        finally:
            conn.close()
    except Exception as e:
        # BUGFIX: logger.exception() logs message + traceback in one record;
        # the previous error()+exception() pair double-logged every failure.
        logger.exception(f"获取任务配置列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs/categories', methods=['GET'])
@log_api_call("redis_task_monitor.get_task_config_categories")
def get_task_config_categories():
    """Return every category with its config count, optionally scoped to one env_id.

    GROUP BY category already yields one row per category, so the redundant
    DISTINCT of the previous version was dropped; the duplicated query text
    is now built once with an optional WHERE clause.
    """
    try:
        env_id = request.args.get('env_id', type=int)

        where_clause = "WHERE env_id = %s" if env_id else ""
        args = (env_id,) if env_id else None
        sql = f"""
            SELECT category, COUNT(*) as count
            FROM redis_task_config
            {where_clause}
            GROUP BY category
            ORDER BY category
        """

        with get_db_cursor() as cur:
            cur.execute(sql, args)
            categories = cur.fetchall()
            return jsonify({
                "success": True,
                "data": categories
            })
    except Exception as e:
        logger.error(f"获取分类列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs/key-prefixes', methods=['GET'])
@log_api_call("redis_task_monitor.get_task_config_key_prefixes")
def get_task_config_key_prefixes():
    """Return all distinct key prefixes as a flat string list, optionally scoped to an env."""
    try:
        env_id = request.args.get('env_id', type=int)

        if env_id:
            query = """
                    SELECT DISTINCT key_prefix
                    FROM redis_task_config
                    WHERE env_id = %s
                    ORDER BY key_prefix
                """
        else:
            query = """
                    SELECT DISTINCT key_prefix
                    FROM redis_task_config
                    ORDER BY key_prefix
                """

        with get_db_cursor() as cur:
            cur.execute(query, (env_id,) if env_id else None)
            rows = cur.fetchall()
            # Collapse the dict rows into a plain list of prefix strings.
            return jsonify({
                "success": True,
                "data": [row['key_prefix'] for row in rows]
            })
    except Exception as e:
        logger.error(f"获取key前缀列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs/<int:config_id>', methods=['GET'])
@log_api_call("redis_task_monitor.get_task_config")
def get_task_config(config_id):
    """Fetch one task config row by primary key; 404 when it does not exist."""
    try:
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT 
                        id, env_id, category, key_prefix, description,
                        is_monitored, is_alert_enabled, scan_count, sample_size,
                        task_type_filter, task_type_mode,
                        webhook_url, alert_on_early_delete, alert_on_expire, status,
                        alert_status, last_alert_time,
                        last_scan_time, latest_key_count, latest_data_timestamp, average_ttl,
                        created_at, updated_at
                    FROM redis_task_config
                    WHERE id = %s
                """, (config_id,))
                config_row = cur.fetchone()
        finally:
            conn.close()

        if config_row is None:
            return jsonify({
                "success": False,
                "message": "任务配置不存在",
                "data": None
            }), 404

        return jsonify({
            "success": True,
            "message": "ok",
            "data": format_row_datetime_fields(config_row)
        })
    except Exception as e:
        logger.error(f"获取任务配置详情失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs', methods=['POST'])
@log_api_call("redis_task_monitor.create_task_config")
def create_task_config():
    """Create a task config row.

    Validates required fields, the referenced env_id, and the uniqueness of
    (env_id, key_prefix) before inserting. Returns the new row id on success.
    """
    try:
        # BUGFIX: get_json() yields None for a missing/invalid JSON body, which
        # previously crashed with AttributeError (HTTP 500); answer 400 instead.
        data = request.get_json(silent=True)
        if not isinstance(data, dict):
            return jsonify({
                "success": False,
                "message": "请求体必须是有效的JSON对象",
                "data": None
            }), 400

        current_app.logger.info(f"创建任务配置: {data.get('key_prefix')}")

        # Required fields; falsy values (None/''/0) are treated as missing.
        for field in ('env_id', 'category', 'key_prefix'):
            if not data.get(field):
                return jsonify({
                    "success": False,
                    "message": f"缺少必填字段: {field}",
                    "data": None
                }), 400

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # The referenced environment must exist.
                cur.execute("SELECT id FROM redis_env_config WHERE id = %s", (data['env_id'],))
                if not cur.fetchone():
                    return jsonify({
                        "success": False,
                        "message": f"环境ID {data['env_id']} 不存在",
                        "data": None
                    }), 400

                # (env_id, key_prefix) must be unique.
                cur.execute("""
                    SELECT id FROM redis_task_config 
                    WHERE env_id = %s AND key_prefix = %s
                """, (data['env_id'], data['key_prefix']))
                if cur.fetchone():
                    return jsonify({
                        "success": False,
                        "message": f"环境 {data['env_id']} 下的key前缀 {data['key_prefix']} 已存在",
                        "data": None
                    }), 400

                # Insert, applying the documented defaults for optional fields.
                cur.execute("""
                    INSERT INTO redis_task_config (
                        env_id, category, key_prefix, description,
                        is_monitored, is_alert_enabled, scan_count, sample_size,
                        task_type_filter, task_type_mode,
                        webhook_url, alert_on_early_delete, alert_on_expire, status
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, (
                    data['env_id'],
                    data['category'],
                    data['key_prefix'],
                    data.get('description'),
                    data.get('is_monitored', 1),
                    data.get('is_alert_enabled', 0),
                    data.get('scan_count', 500),
                    data.get('sample_size', 1000),
                    data.get('task_type_filter'),
                    data.get('task_type_mode', 'all'),
                    data.get('webhook_url'),
                    data.get('alert_on_early_delete', 1),
                    data.get('alert_on_expire', 0),
                    data.get('status', 'active')
                ))
                conn.commit()

                new_id = cur.lastrowid
                current_app.logger.info(f"任务配置创建成功 | id: {new_id}")

                return jsonify({
                    "success": True,
                    "message": "任务配置创建成功",
                    "data": {"id": new_id}
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"创建任务配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"创建任务配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"创建任务配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs/<int:config_id>', methods=['PUT'])
@log_api_call("redis_task_monitor.update_task_config")
def update_task_config(config_id):
    """Partially update a task config; only whitelisted fields are applied.

    Re-validates (env_id, key_prefix) uniqueness, and env existence when
    env_id itself is being changed.
    """
    try:
        # BUGFIX: guard against a missing/invalid JSON body (previously an
        # AttributeError -> HTTP 500 on the first data access).
        data = request.get_json(silent=True)
        if not isinstance(data, dict):
            return jsonify({
                "success": False,
                "message": "请求体必须是有效的JSON对象",
                "data": None
            }), 400

        current_app.logger.info(f"更新任务配置 | id: {config_id}")

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # The existence check doubles as the source of the current
                # (env_id, key_prefix) pair, removing the second SELECT of the
                # same row the previous version issued.
                cur.execute("SELECT env_id, key_prefix FROM redis_task_config WHERE id = %s",
                            (config_id,))
                current_config = cur.fetchone()
                if not current_config:
                    return jsonify({
                        "success": False,
                        "message": "任务配置不存在",
                        "data": None
                    }), 404

                if 'env_id' in data or 'key_prefix' in data:
                    # Effective pair after the update = payload value or current value.
                    check_env_id = data.get('env_id', current_config['env_id'])
                    check_key_prefix = data.get('key_prefix', current_config['key_prefix'])

                    cur.execute("""
                        SELECT id FROM redis_task_config 
                        WHERE env_id = %s AND key_prefix = %s AND id != %s
                    """, (check_env_id, check_key_prefix, config_id))
                    if cur.fetchone():
                        return jsonify({
                            "success": False,
                            "message": f"环境 {check_env_id} 下的key前缀 {check_key_prefix} 已存在",
                            "data": None
                        }), 400

                    # When env_id itself changes, the target environment must exist.
                    if 'env_id' in data:
                        cur.execute("SELECT id FROM redis_env_config WHERE id = %s", (data['env_id'],))
                        if not cur.fetchone():
                            return jsonify({
                                "success": False,
                                "message": f"环境ID {data['env_id']} 不存在",
                                "data": None
                            }), 400

                # Build the SET clause from whitelisted columns only (keeps the
                # string-interpolated UPDATE safe).
                allowed_fields = {
                    'env_id', 'category', 'key_prefix', 'description', 'is_monitored', 'is_alert_enabled',
                    'scan_count', 'sample_size', 'task_type_filter', 'task_type_mode',
                    'webhook_url', 'alert_on_early_delete', 'alert_on_expire', 'status'
                }
                update_fields = []
                params = []
                for field in allowed_fields:
                    if field in data:
                        update_fields.append(f"{field} = %s")
                        params.append(data[field])

                if not update_fields:
                    return jsonify({
                        "success": False,
                        "message": "没有需要更新的字段",
                        "data": None
                    }), 400

                params.append(config_id)
                cur.execute(
                    f"UPDATE redis_task_config SET {', '.join(update_fields)} WHERE id = %s",
                    params
                )
                conn.commit()

                current_app.logger.info(f"任务配置更新成功 | id: {config_id}")

                return jsonify({
                    "success": True,
                    "message": "任务配置更新成功",
                    "data": {"id": config_id}
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"更新任务配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"更新任务配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"更新任务配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/configs/<int:config_id>', methods=['DELETE'])
@log_api_call("redis_task_monitor.delete_task_config")
def delete_task_config(config_id):
    """Delete a task config by id; 404 when it does not exist."""
    try:
        current_app.logger.info(f"删除任务配置 | id: {config_id}")

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("SELECT key_prefix FROM redis_task_config WHERE id = %s", (config_id,))
                existing = cur.fetchone()
                if existing is None:
                    return jsonify({
                        "success": False,
                        "message": "任务配置不存在",
                        "data": None
                    }), 404

                key_prefix = existing['key_prefix']

                # Cascading FK removes the associated task data rows as well.
                cur.execute("DELETE FROM redis_task_config WHERE id = %s", (config_id,))
                conn.commit()

                current_app.logger.info(f"任务配置删除成功 | id: {config_id} | key_prefix: {key_prefix}")

                return jsonify({
                    "success": True,
                    "message": "任务配置删除成功",
                    "data": None
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"删除任务配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"删除任务配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"删除任务配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


# ============================================================================
# 任务数据表 API (redis_task_data) - 任务监控系统
# ============================================================================

@bp.route('/api/redis-task-monitor/task-data', methods=['GET'])
@log_api_call("redis_task_monitor.list_task_data")
def list_task_data():
    """List collected task rows with pagination, sorting and filtering.

    Filters: config_id, key_prefix, task_id, task_type, user_id,
    deletion_status, search (fuzzy over redis_key/task_id/user_id).
    sort/order are whitelisted before being interpolated into the query.
    task_data is JSON-decoded per row when it is a valid JSON string.
    """
    try:
        config_id = request.args.get('config_id', type=int)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)
        key_prefix = request.args.get('key_prefix', '').strip()
        task_id = request.args.get('task_id', '').strip()
        task_type = request.args.get('task_type', '').strip()
        user_id = request.args.get('user_id', '').strip()
        deletion_status = request.args.get('deletion_status', '').strip()
        search = request.args.get('search', '').strip()
        sort = request.args.get('sort', 'collection_time').strip()
        order = request.args.get('order', 'desc').strip().lower()

        # Whitelisted sort column / direction (ORDER BY is string-interpolated).
        allowed_sort_fields = [
            'id', 'config_id', 'redis_key', 'key_prefix', 'task_id', 'task_type',
            'user_id', 'ttl', 'expire_time', 'data_size', 'data_type',
            'deletion_status', 'last_seen_time', 'collection_time', 'created_at', 'updated_at'
        ]
        if sort not in allowed_sort_fields:
            sort = 'collection_time'
        if order not in ('asc', 'desc'):
            order = 'desc'

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Equality filters; column names come from a fixed tuple, values
                # always go through %s placeholders.
                where_conditions = []
                params = []
                for column, value in (
                        ('config_id', config_id),
                        ('key_prefix', key_prefix),
                        ('task_id', task_id),
                        ('task_type', task_type),
                        ('user_id', user_id),
                        ('deletion_status', deletion_status)):
                    if value:
                        where_conditions.append(f"{column} = %s")
                        params.append(value)

                if search:
                    where_conditions.append("(redis_key LIKE %s OR task_id LIKE %s OR user_id LIKE %s)")
                    like = f'%{search}%'
                    params.extend([like, like, like])

                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"

                cur.execute(f"SELECT COUNT(*) as total FROM redis_task_data WHERE {where_clause}", params)
                total = cur.fetchone()['total']

                offset = (page - 1) * page_size

                # NULL timestamps sort last regardless of direction.
                nullable_timestamp_fields = ['expire_time', 'last_seen_time', 'collection_time']
                if sort in nullable_timestamp_fields:
                    order_clause = f"({sort} IS NULL), {sort} {order.upper()}"
                else:
                    order_clause = f"{sort} {order.upper()}"

                sql = f"""
                    SELECT 
                        id, config_id, redis_key, key_prefix,
                        task_id, task_type, user_id,
                        task_data, ttl, expire_time, data_size, data_type,
                        deletion_status, last_seen_time,
                        collection_time, created_at, updated_at
                    FROM redis_task_data
                    WHERE {where_clause}
                    ORDER BY {order_clause}
                    LIMIT %s OFFSET %s
                """
                cur.execute(sql, params + [page_size, offset])
                rows = cur.fetchall()

                for row in rows:
                    format_row_datetime_fields(row)
                    # BUGFIX: the previous bare `except: pass` also swallowed
                    # SystemExit/KeyboardInterrupt; only JSON decode failures
                    # should leave task_data as the raw string.
                    if isinstance(row.get('task_data'), str):
                        try:
                            row['task_data'] = json.loads(row['task_data'])
                        except (ValueError, TypeError):
                            pass

                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": {
                        "items": rows,
                        "total": total,
                        "page": page,
                        "pageSize": page_size
                    }
                })
        finally:
            conn.close()
    except Exception as e:
        # BUGFIX: one exception() call records message + traceback (the
        # previous error()+exception() pair double-logged every failure).
        logger.exception(f"获取任务数据列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/task-data/<int:data_id>', methods=['GET'])
@log_api_call("redis_task_monitor.get_task_data")
def get_task_data(data_id):
    """Fetch one collected task row by id; 404 when absent.

    task_data is JSON-decoded for the response when possible; payloads that
    fail to decode are returned verbatim.
    """
    try:
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT 
                        id, config_id, redis_key, key_prefix,
                        task_id, task_type, user_id,
                        task_data, ttl, expire_time, data_size, data_type,
                        deletion_status, last_seen_time,
                        collection_time, created_at, updated_at
                    FROM redis_task_data
                    WHERE id = %s
                """, (data_id,))
                row = cur.fetchone()

                if not row:
                    return jsonify({
                        "success": False,
                        "message": "任务数据不存在",
                        "data": None
                    }), 404

                format_row_datetime_fields(row)

                # BUGFIX: bare `except:` narrowed to JSON decode errors only
                # (the bare form also swallowed SystemExit/KeyboardInterrupt).
                if isinstance(row.get('task_data'), str):
                    try:
                        row['task_data'] = json.loads(row['task_data'])
                    except (ValueError, TypeError):
                        pass

                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": row
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取任务数据详情失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-task-monitor/task-data/stats', methods=['GET'])
@log_api_call("redis_task_monitor.get_task_data_stats")
def get_task_data_stats():
    """Aggregate counts/averages over redis_task_data, optionally filtered by config_id/key_prefix."""
    try:
        config_id = request.args.get('config_id', type=int)
        key_prefix = request.args.get('key_prefix', '').strip()

        # Parameterized equality filters.
        clauses = []
        params = []
        if config_id:
            clauses.append("config_id = %s")
            params.append(config_id)
        if key_prefix:
            clauses.append("key_prefix = %s")
            params.append(key_prefix)
        where_clause = " AND ".join(clauses) or "1=1"

        stats_sql = f"""
                    SELECT 
                        COUNT(*) as total_count,
                        SUM(CASE WHEN deletion_status = 'active' THEN 1 ELSE 0 END) as active_count,
                        SUM(CASE WHEN deletion_status = 'normal_expire' THEN 1 ELSE 0 END) as normal_expire_count,
                        SUM(CASE WHEN deletion_status = 'early_deleted' THEN 1 ELSE 0 END) as early_deleted_count,
                        AVG(ttl) as avg_ttl,
                        SUM(data_size) as total_data_size
                    FROM redis_task_data
                    WHERE {where_clause}
                """

        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute(stats_sql, params)
                stats = cur.fetchone()
        finally:
            conn.close()

        # Aggregates come back NULL on an empty set; report zeros instead.
        for key in stats:
            if stats[key] is None:
                stats[key] = 0

        return jsonify({
            "success": True,
            "message": "ok",
            "data": stats
        })
    except Exception as e:
        logger.error(f"获取任务数据统计失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


# ============================================================================
# 队列监控配置表 API (redis_queue_config) - 队列监控系统
# ============================================================================

@bp.route('/api/redis-queue-monitor/configs', methods=['GET'])
@log_api_call("redis_queue_monitor.list_queue_configs")
def list_queue_configs():
    """List queue-monitor configs with pagination, sorting and filtering.

    Also returns dataset-wide statistics (monitored / alert-enabled /
    alerting / status counts and total queue length) computed over the
    whole filtered set, independent of pagination.

    Query params:
        env_id, category, queue_key, queue_type, status, alert_status,
        is_monitored (0/1), is_alert_enabled (0/1),
        search (fuzzy match on queue_key/category/description),
        page, pageSize, sort, order.
    """
    try:
        env_id = request.args.get('env_id', type=int)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)
        category = request.args.get('category', '').strip()
        queue_key = request.args.get('queue_key', '').strip()
        queue_type = request.args.get('queue_type', '').strip()
        status = request.args.get('status', '').strip()
        is_monitored = request.args.get('is_monitored', '').strip()
        is_alert_enabled = request.args.get('is_alert_enabled', '').strip()
        alert_status = request.args.get('alert_status', '').strip()
        search = request.args.get('search', '').strip()
        sort = request.args.get('sort', 'last_scan_time').strip()
        order = request.args.get('order', 'desc').strip()
        
        # Whitelist the sort column so user input can never be injected
        # into the ORDER BY clause.
        allowed_sort_fields = [
            'id', 'env_id', 'queue_key', 'queue_type', 'category',
            'is_monitored', 'is_alert_enabled', 'status', 'alert_status',
            'last_scan_time', 'latest_queue_length', 'last_alert_time',
            'created_at', 'updated_at'
        ]
        if sort not in allowed_sort_fields:
            sort = 'last_scan_time'
        
        # Normalize sort direction.
        order = order.lower()
        if order not in ['asc', 'desc']:
            order = 'desc'
        
        # Validate the boolean-ish filters up front: a non-numeric value
        # previously raised an unhandled ValueError at int() and surfaced
        # to the client as a 500 instead of a 400.
        bool_filters = {}
        for name, raw in (('is_monitored', is_monitored),
                          ('is_alert_enabled', is_alert_enabled)):
            if raw != '':
                try:
                    bool_filters[name] = int(raw)
                except ValueError:
                    return jsonify({
                        "success": False,
                        "message": f"参数 {name} 必须是整数",
                        "data": None
                    }), 400
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Build the WHERE clause; values always go through %s
                # placeholders (no SQL injection).
                where_conditions = []
                params = []
                
                if env_id:
                    where_conditions.append("env_id = %s")
                    params.append(env_id)
                
                for column, value in (('category', category),
                                      ('queue_key', queue_key),
                                      ('queue_type', queue_type),
                                      ('status', status)):
                    if value:
                        where_conditions.append(f"{column} = %s")
                        params.append(value)
                
                for column in ('is_monitored', 'is_alert_enabled'):
                    if column in bool_filters:
                        where_conditions.append(f"{column} = %s")
                        params.append(bool_filters[column])
                
                if alert_status:
                    where_conditions.append("alert_status = %s")
                    params.append(alert_status)
                
                if search:
                    where_conditions.append("(queue_key LIKE %s OR category LIKE %s OR description LIKE %s)")
                    params.extend([f'%{search}%'] * 3)
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # Total row count for pagination.
                count_sql = f"SELECT COUNT(*) as total FROM redis_queue_config WHERE {where_clause}"
                cur.execute(count_sql, params)
                total = cur.fetchone()['total']
                
                # Statistics over the whole filtered set (not just one page).
                # Uses the same WHERE clause and params as the count query.
                stats_sql = f"""
                    SELECT 
                        COUNT(*) as total_count,
                        SUM(CASE WHEN is_monitored = 1 THEN 1 ELSE 0 END) as monitored_count,
                        SUM(CASE WHEN is_alert_enabled = 1 THEN 1 ELSE 0 END) as alert_enabled_count,
                        SUM(CASE WHEN alert_status = 'alerting' THEN 1 ELSE 0 END) as alerting_count,
                        SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active_count,
                        SUM(CASE WHEN status = 'inactive' THEN 1 ELSE 0 END) as inactive_count,
                        SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused_count,
                        SUM(latest_queue_length) as total_queue_length
                    FROM redis_queue_config
                    WHERE {where_clause}
                """
                cur.execute(stats_sql, params)
                stats = cur.fetchone()
                
                # SUM()/aggregates over an empty set yield NULL; normalize
                # to 0 and convert floats for JSON serialization.
                for key in stats:
                    if stats[key] is None:
                        stats[key] = 0
                    elif isinstance(stats[key], float):
                        stats[key] = format_decimal_field(stats[key])
                
                # Sorting and pagination.
                offset = (page - 1) * page_size
                
                # For nullable timestamp columns, push NULL values to the
                # end regardless of sort direction.
                nullable_timestamp_fields = ['last_scan_time', 'last_alert_time']
                if sort in nullable_timestamp_fields:
                    order_clause = f"({sort} IS NULL), {sort} {order.upper()}"
                else:
                    order_clause = f"{sort} {order.upper()}"
                
                # Fetch the requested page.
                sql = f"""
                    SELECT 
                        id, env_id, queue_key, queue_type, category, description,
                        is_monitored, is_alert_enabled,
                        webhook_url, alert_on_length_high, alert_on_growth_surge,
                        alert_on_growth_drop, alert_on_empty, alert_on_failed_rate_high,
                        length_threshold, growth_rate_threshold, drop_rate_threshold,
                        failed_rate_threshold, heartbeat_timeout,
                        alert_status, last_alert_time,
                        last_scan_time, latest_queue_length, status,
                        created_at, updated_at
                    FROM redis_queue_config
                    WHERE {where_clause}
                    ORDER BY {order_clause}
                    LIMIT %s OFFSET %s
                """
                cur.execute(sql, params + [page_size, offset])
                rows = cur.fetchall()
                
                # Normalize datetime and Decimal columns for JSON output.
                for row in rows:
                    format_row_datetime_fields(row)
                    for field in ['growth_rate_threshold', 'drop_rate_threshold', 'failed_rate_threshold']:
                        if field in row:
                            row[field] = format_decimal_field(row[field])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": {
                        "items": rows,
                        "total": total,
                        "stats": stats,
                        "page": page,
                        "pageSize": page_size
                    }
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列配置列表失败: {e}")
        logger.exception(e)
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs/categories', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_config_categories")
def get_queue_config_categories():
    """Return all categories with per-category config counts.

    Optional query param env_id restricts the result to one environment.
    """
    try:
        env_id = request.args.get('env_id', type=int)
        
        # Build the query once instead of maintaining two near-identical
        # branches. DISTINCT was dropped: it is redundant with
        # GROUP BY category, which already yields one row per category.
        where = "WHERE env_id = %s" if env_id else ""
        args = (env_id,) if env_id else ()
        with get_db_cursor() as cur:
            cur.execute(f"""
                SELECT category, COUNT(*) as count
                FROM redis_queue_config
                {where}
                GROUP BY category
                ORDER BY category
            """, args)
            categories = cur.fetchall()
            return jsonify({
                "success": True,
                "data": categories
            })
    except Exception as e:
        logger.error(f"获取分类列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs/queue-keys', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_config_queue_keys")
def get_queue_config_queue_keys():
    """Return all distinct queue keys as a flat list of strings.

    Optional query param env_id restricts the result to one environment.
    """
    try:
        env_id = request.args.get('env_id', type=int)
        
        # Single parameterized query instead of two duplicated branches;
        # only the optional WHERE clause depends on env_id.
        where = "WHERE env_id = %s" if env_id else ""
        args = (env_id,) if env_id else ()
        with get_db_cursor() as cur:
            cur.execute(f"""
                SELECT DISTINCT queue_key
                FROM redis_queue_config
                {where}
                ORDER BY queue_key
            """, args)
            queue_keys = cur.fetchall()
            # Flatten the dict rows into a plain string array for the client.
            queue_key_list = [q['queue_key'] for q in queue_keys]
            return jsonify({
                "success": True,
                "data": queue_key_list
            })
    except Exception as e:
        logger.error(f"获取队列key列表失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs/<int:config_id>', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_config")
def get_queue_config(config_id):
    """Fetch a single queue-monitor config row by primary key.

    Responds 404 when no row matches, 500 on database errors.
    """
    try:
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT 
                        id, env_id, queue_key, queue_type, category, description,
                        is_monitored, is_alert_enabled,
                        webhook_url, alert_on_length_high, alert_on_growth_surge,
                        alert_on_growth_drop, alert_on_empty, alert_on_failed_rate_high,
                        length_threshold, growth_rate_threshold, drop_rate_threshold,
                        failed_rate_threshold, heartbeat_timeout,
                        alert_status, last_alert_time,
                        last_scan_time, latest_queue_length, status,
                        created_at, updated_at
                    FROM redis_queue_config
                    WHERE id = %s
                """, (config_id,))
                record = cur.fetchone()
                
                if record is None:
                    return jsonify({
                        "success": False,
                        "message": "队列配置不存在",
                        "data": None
                    }), 404
                
                # Normalize datetime columns, then convert Decimal
                # thresholds to plain floats for JSON serialization.
                format_row_datetime_fields(record)
                decimal_cols = ('growth_rate_threshold', 'drop_rate_threshold', 'failed_rate_threshold')
                for col in decimal_cols:
                    if col in record:
                        record[col] = format_decimal_field(record[col])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": record
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列配置详情失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs', methods=['POST'])
@log_api_call("redis_queue_monitor.create_queue_config")
def create_queue_config():
    """Create a queue-monitor config.

    JSON body: env_id, queue_key, queue_type, category are required;
    alert switches and thresholds are optional with server-side defaults.
    Rejects an unknown env_id and a duplicate (env_id, queue_key) pair.
    """
    try:
        # get_json(silent=True) returns None for a missing or malformed
        # JSON body instead of raising. Previously data.get(...) crashed
        # with AttributeError and the client got a 500 instead of a 400.
        data = request.get_json(silent=True)
        if not isinstance(data, dict):
            return jsonify({
                "success": False,
                "message": "请求体必须是JSON对象",
                "data": None
            }), 400
        
        current_app.logger.info(f"创建队列配置: {data.get('queue_key')}")
        
        # Required-field validation.
        required_fields = ['env_id', 'queue_key', 'queue_type', 'category']
        for field in required_fields:
            if not data.get(field):
                return jsonify({
                    "success": False,
                    "message": f"缺少必填字段: {field}",
                    "data": None
                }), 400
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # The referenced environment must exist.
                cur.execute("SELECT id FROM redis_env_config WHERE id = %s", (data['env_id'],))
                if not cur.fetchone():
                    return jsonify({
                        "success": False,
                        "message": f"环境ID {data['env_id']} 不存在",
                        "data": None
                    }), 400
                
                # The (env_id, queue_key) pair must be unique.
                cur.execute("""
                    SELECT id FROM redis_queue_config 
                    WHERE env_id = %s AND queue_key = %s
                """, (data['env_id'], data['queue_key']))
                if cur.fetchone():
                    return jsonify({
                        "success": False,
                        "message": f"环境 {data['env_id']} 下的队列key {data['queue_key']} 已存在",
                        "data": None
                    }), 400
                
                # Insert the new config; optional fields fall back to the
                # API-level defaults below.
                cur.execute("""
                    INSERT INTO redis_queue_config (
                        env_id, queue_key, queue_type, category, description,
                        is_monitored, is_alert_enabled,
                        webhook_url, alert_on_length_high, alert_on_growth_surge,
                        alert_on_growth_drop, alert_on_empty, alert_on_failed_rate_high,
                        length_threshold, growth_rate_threshold, drop_rate_threshold,
                        failed_rate_threshold, heartbeat_timeout, status
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, (
                    data['env_id'],
                    data['queue_key'],
                    data['queue_type'],
                    data['category'],
                    data.get('description'),
                    data.get('is_monitored', 1),
                    data.get('is_alert_enabled', 0),
                    data.get('webhook_url'),
                    data.get('alert_on_length_high', 1),
                    data.get('alert_on_growth_surge', 1),
                    data.get('alert_on_growth_drop', 1),
                    data.get('alert_on_empty', 0),
                    data.get('alert_on_failed_rate_high', 1),
                    data.get('length_threshold', 1000000),
                    data.get('growth_rate_threshold', 50.00),
                    data.get('drop_rate_threshold', 50.00),
                    data.get('failed_rate_threshold', 10.00),
                    data.get('heartbeat_timeout', 600),
                    data.get('status', 'active')
                ))
                conn.commit()
                
                new_id = cur.lastrowid
                current_app.logger.info(f"队列配置创建成功 | id: {new_id}")
                
                return jsonify({
                    "success": True,
                    "message": "队列配置创建成功",
                    "data": {"id": new_id}
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"创建队列配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"创建队列配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"创建队列配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs/<int:config_id>', methods=['PUT'])
@log_api_call("redis_queue_monitor.update_queue_config")
def update_queue_config(config_id):
    """Partially update a queue-monitor config.

    Only whitelisted fields from the JSON body are applied. Enforces
    (env_id, queue_key) uniqueness and verifies a new env_id exists.
    """
    try:
        # Guard against a missing or malformed JSON body: previously
        # request.get_json() could return None and `'env_id' in data`
        # raised TypeError, surfacing as a 500 instead of a 400.
        data = request.get_json(silent=True)
        if not isinstance(data, dict):
            return jsonify({
                "success": False,
                "message": "请求体必须是JSON对象",
                "data": None
            }), 400
        
        current_app.logger.info(f"更新队列配置 | id: {config_id}")
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # The target config must exist.
                cur.execute("SELECT id, env_id, queue_key FROM redis_queue_config WHERE id = %s", (config_id,))
                current_config = cur.fetchone()
                if not current_config:
                    return jsonify({
                        "success": False,
                        "message": "队列配置不存在",
                        "data": None
                    }), 404
                
                # When env_id or queue_key changes, re-check the
                # (env_id, queue_key) uniqueness (excluding this record).
                if 'env_id' in data or 'queue_key' in data:
                    check_env_id = data.get('env_id', current_config['env_id'])
                    check_queue_key = data.get('queue_key', current_config['queue_key'])
                    
                    cur.execute("""
                        SELECT id FROM redis_queue_config 
                        WHERE env_id = %s AND queue_key = %s AND id != %s
                    """, (check_env_id, check_queue_key, config_id))
                    if cur.fetchone():
                        return jsonify({
                            "success": False,
                            "message": f"环境 {check_env_id} 下的队列key {check_queue_key} 已存在",
                            "data": None
                        }), 400
                    
                    # A new env_id must reference an existing environment.
                    if 'env_id' in data:
                        cur.execute("SELECT id FROM redis_env_config WHERE id = %s", (data['env_id'],))
                        if not cur.fetchone():
                            return jsonify({
                                "success": False,
                                "message": f"环境ID {data['env_id']} 不存在",
                                "data": None
                            }), 400
                
                # Build the SET clause from whitelisted fields only, so a
                # client cannot update arbitrary columns.
                update_fields = []
                params = []
                
                allowed_fields = {
                    'env_id', 'queue_key', 'queue_type', 'category', 'description',
                    'is_monitored', 'is_alert_enabled',
                    'webhook_url', 'alert_on_length_high', 'alert_on_growth_surge',
                    'alert_on_growth_drop', 'alert_on_empty', 'alert_on_failed_rate_high',
                    'length_threshold', 'growth_rate_threshold', 'drop_rate_threshold',
                    'failed_rate_threshold', 'heartbeat_timeout', 'status'
                }
                
                for field in allowed_fields:
                    if field in data:
                        update_fields.append(f"{field} = %s")
                        params.append(data[field])
                
                if not update_fields:
                    return jsonify({
                        "success": False,
                        "message": "没有需要更新的字段",
                        "data": None
                    }), 400
                
                params.append(config_id)
                
                cur.execute(
                    f"UPDATE redis_queue_config SET {', '.join(update_fields)} WHERE id = %s",
                    params
                )
                conn.commit()
                
                current_app.logger.info(f"队列配置更新成功 | id: {config_id}")
                
                return jsonify({
                    "success": True,
                    "message": "队列配置更新成功",
                    "data": {"id": config_id}
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"更新队列配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"更新队列配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"更新队列配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/configs/<int:config_id>', methods=['DELETE'])
@log_api_call("redis_queue_monitor.delete_queue_config")
def delete_queue_config(config_id):
    """Delete one queue-monitor config by id.

    Responds 404 when the id does not exist; rolls back and responds 500
    on database errors.
    """
    try:
        current_app.logger.info(f"删除队列配置 | id: {config_id}")
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Look the row up first so a missing id yields a 404.
                cur.execute("SELECT queue_key FROM redis_queue_config WHERE id = %s", (config_id,))
                existing = cur.fetchone()
                if existing is None:
                    return jsonify({
                        "success": False,
                        "message": "队列配置不存在",
                        "data": None
                    }), 404
                
                deleted_key = existing['queue_key']
                
                # Deleting the config also removes its associated stats
                # rows via cascading delete.
                cur.execute("DELETE FROM redis_queue_config WHERE id = %s", (config_id,))
                conn.commit()
                
                current_app.logger.info(f"队列配置删除成功 | id: {config_id} | queue_key: {deleted_key}")
                
                return jsonify({
                    "success": True,
                    "message": "队列配置删除成功",
                    "data": None
                })
        except Exception as e:
            conn.rollback()
            current_app.logger.error(f"删除队列配置失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"删除队列配置失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"删除队列配置失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


# ============================================================================
# 队列统计数据表 API (redis_queue_stats) - 队列监控系统
# ============================================================================

@bp.route('/api/redis-queue-monitor/stats', methods=['GET'])
@log_api_call("redis_queue_monitor.list_queue_stats")
def list_queue_stats():
    """List queue statistics rows with pagination, sorting and filtering.

    Filters: config_id, queue_key, and an optional collection_time window
    (start_time / end_time).
    """
    try:
        config_id = request.args.get('config_id', type=int)
        queue_key = request.args.get('queue_key', '').strip()
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)
        start_time = request.args.get('start_time', '').strip()
        end_time = request.args.get('end_time', '').strip()
        sort = request.args.get('sort', 'collection_time').strip()
        order = request.args.get('order', 'desc').strip()
        
        # Only whitelisted columns may appear in ORDER BY.
        sortable_columns = {
            'id', 'config_id', 'queue_key', 'length', 'previous_length',
            'delta', 'growth_rate', 'failed_count', 'total_count',
            'failed_rate', 'collection_time', 'created_at'
        }
        if sort not in sortable_columns:
            sort = 'collection_time'
        
        # Normalize sort direction.
        order = order.lower()
        if order not in ('asc', 'desc'):
            order = 'desc'
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Collect a (clause, value) pair for each active filter;
                # values stay parameterized via %s placeholders.
                candidate_filters = (
                    ("config_id = %s", config_id),
                    ("queue_key = %s", queue_key),
                    ("collection_time >= %s", start_time),
                    ("collection_time <= %s", end_time),
                )
                where_conditions = []
                params = []
                for clause, value in candidate_filters:
                    if value:
                        where_conditions.append(clause)
                        params.append(value)
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # Total row count for pagination.
                count_sql = f"SELECT COUNT(*) as total FROM redis_queue_stats WHERE {where_clause}"
                cur.execute(count_sql, params)
                total = cur.fetchone()['total']
                
                # Page window and ORDER BY (both built from validated input).
                offset = (page - 1) * page_size
                order_clause = f"{sort} {order.upper()}"
                
                sql = f"""
                    SELECT 
                        id, config_id, queue_key,
                        length, previous_length, delta, growth_rate,
                        failed_count, total_count, failed_rate,
                        collection_time, created_at
                    FROM redis_queue_stats
                    WHERE {where_clause}
                    ORDER BY {order_clause}
                    LIMIT %s OFFSET %s
                """
                cur.execute(sql, params + [page_size, offset])
                rows = cur.fetchall()
                
                # Normalize datetime and Decimal columns for JSON output.
                for row in rows:
                    format_row_datetime_fields(row)
                    for decimal_col in ('growth_rate', 'failed_rate'):
                        if decimal_col in row:
                            row[decimal_col] = format_decimal_field(row[decimal_col])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": {
                        "items": rows,
                        "total": total,
                        "page": page,
                        "pageSize": page_size
                    }
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列统计数据列表失败: {e}")
        logger.exception(e)
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/stats/<int:stats_id>', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_stats")
def get_queue_stats(stats_id):
    """Fetch a single queue-stats row by primary key (404 when absent)."""
    try:
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT 
                        id, config_id, queue_key,
                        length, previous_length, delta, growth_rate,
                        failed_count, total_count, failed_rate,
                        collection_time, created_at
                    FROM redis_queue_stats
                    WHERE id = %s
                """, (stats_id,))
                record = cur.fetchone()
                
                if record is None:
                    return jsonify({
                        "success": False,
                        "message": "队列统计数据不存在",
                        "data": None
                    }), 404
                
                # Normalize datetime columns, then convert Decimal rates
                # to plain floats for JSON serialization.
                format_row_datetime_fields(record)
                for decimal_col in ('growth_rate', 'failed_rate'):
                    if decimal_col in record:
                        record[decimal_col] = format_decimal_field(record[decimal_col])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": record
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列统计数据详情失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/stats/trend', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_stats_trend")
def get_queue_stats_trend():
    """Return time-ordered queue trend points for chart rendering.

    Requires at least one of config_id / queue_key. Optional start_time,
    end_time and limit (default 100) narrow the window.
    """
    try:
        config_id = request.args.get('config_id', type=int)
        queue_key = request.args.get('queue_key', '').strip()
        start_time = request.args.get('start_time', '').strip()
        end_time = request.args.get('end_time', '').strip()
        limit = request.args.get('limit', 100, type=int)  # default 100 points
        
        # Clamp limit to a sane range: a zero/negative value previously
        # produced an invalid LIMIT clause (HTTP 500), and an unbounded
        # value could dump the whole table into one response.
        limit = max(1, min(limit, 10000))
        
        if not config_id and not queue_key:
            return jsonify({
                "success": False,
                "message": "缺少参数: config_id 或 queue_key",
                "data": None
            }), 400
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Build the WHERE clause; values always go through %s
                # placeholders (no SQL injection).
                where_conditions = []
                params = []
                
                if config_id:
                    where_conditions.append("config_id = %s")
                    params.append(config_id)
                
                if queue_key:
                    where_conditions.append("queue_key = %s")
                    params.append(queue_key)
                
                if start_time:
                    where_conditions.append("collection_time >= %s")
                    params.append(start_time)
                
                if end_time:
                    where_conditions.append("collection_time <= %s")
                    params.append(end_time)
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # Fetch trend points in chronological order for charting.
                sql = f"""
                    SELECT 
                        collection_time,
                        length,
                        delta,
                        growth_rate,
                        failed_count,
                        failed_rate
                    FROM redis_queue_stats
                    WHERE {where_clause}
                    ORDER BY collection_time ASC
                    LIMIT %s
                """
                cur.execute(sql, params + [limit])
                rows = cur.fetchall()
                
                # Shape the rows for the chart API: formatted timestamps
                # and plain floats instead of Decimal.
                trend_data = []
                for row in rows:
                    trend_data.append({
                        'time': format_datetime_field(row['collection_time']),
                        'length': row['length'],
                        'delta': row['delta'],
                        'growth_rate': format_decimal_field(row['growth_rate']),
                        'failed_count': row['failed_count'],
                        'failed_rate': format_decimal_field(row['failed_rate'])
                    })
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": trend_data
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列趋势数据失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/stats/summary', methods=['GET'])
@log_api_call("redis_queue_monitor.get_queue_stats_summary")
def get_queue_stats_summary():
    """Return aggregate statistics over redis_queue_stats samples.

    Query params (all optional):
        config_id  -- filter by monitoring config id
        queue_key  -- exact-match filter on queue key
        start_time -- inclusive lower bound on collection_time
        end_time   -- inclusive upper bound on collection_time

    Returns a JSON object with count/avg/max/min/sum aggregates; 500 on error.
    """
    # Local import: PyMySQL returns DECIMAL aggregates as decimal.Decimal.
    from decimal import Decimal

    try:
        config_id = request.args.get('config_id', type=int)
        queue_key = request.args.get('queue_key', '').strip()
        start_time = request.args.get('start_time', '').strip()
        end_time = request.args.get('end_time', '').strip()
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Build the WHERE clause from the optional filters; every value
                # is passed as a bind parameter (no string interpolation of data).
                where_conditions = []
                params = []
                
                if config_id:
                    where_conditions.append("config_id = %s")
                    params.append(config_id)
                
                if queue_key:
                    where_conditions.append("queue_key = %s")
                    params.append(queue_key)
                
                if start_time:
                    where_conditions.append("collection_time >= %s")
                    params.append(start_time)
                
                if end_time:
                    where_conditions.append("collection_time <= %s")
                    params.append(end_time)
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # 查询统计摘要
                summary_sql = f"""
                    SELECT 
                        COUNT(*) as total_count,
                        AVG(length) as avg_length,
                        MAX(length) as max_length,
                        MIN(length) as min_length,
                        AVG(growth_rate) as avg_growth_rate,
                        AVG(failed_rate) as avg_failed_rate,
                        SUM(failed_count) as total_failed_count,
                        SUM(total_count) as total_request_count
                    FROM redis_queue_stats
                    WHERE {where_clause}
                """
                cur.execute(summary_sql, params)
                summary = cur.fetchone()
                
                # Normalize the row: NULL aggregates (empty result set) become 0.
                # BUG FIX: AVG()/SUM() come back as decimal.Decimal, which the old
                # isinstance(..., float) check missed, so those values were
                # serialized as JSON strings instead of numbers.
                for key in summary:
                    if summary[key] is None:
                        summary[key] = 0
                    elif isinstance(summary[key], (float, Decimal)):
                        summary[key] = format_decimal_field(summary[key])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": summary
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取队列统计摘要失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


# ============================================================================
# 爬虫状态表 API (redis_spider_status) - 队列监控系统
# ============================================================================

@bp.route('/api/redis-queue-monitor/spiders', methods=['GET'])
@log_api_call("redis_queue_monitor.list_spider_status")
def list_spider_status():
    """List spider-status rows with paging, sorting and filtering.

    Query params:
        env_id      -- optional environment id filter
        page        -- 1-based page number (default 1, clamped to >= 1)
        pageSize    -- rows per page (default 20, clamped to >= 1)
        spider_name -- exact-match filter
        is_alive    -- integer liveness filter (0/1); invalid values -> HTTP 400
        search      -- substring match on spider_name
        sort, order -- whitelisted column and asc/desc direction
                       (defaults: last_check_time desc, NULLs last)
    """
    try:
        env_id = request.args.get('env_id', type=int)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)
        spider_name = request.args.get('spider_name', '').strip()
        is_alive = request.args.get('is_alive', '').strip()
        search = request.args.get('search', '').strip()
        sort = request.args.get('sort', 'last_check_time').strip()
        order = request.args.get('order', 'desc').strip()
        
        # BUG FIX: clamp paging values. A non-positive page previously produced
        # a negative OFFSET and a MySQL syntax error (surfaced as HTTP 500).
        page = max(page, 1)
        page_size = max(page_size, 1)
        
        # BUG FIX: validate is_alive up front. Previously int(is_alive) raised
        # ValueError for non-numeric input and surfaced as HTTP 500.
        is_alive_value = None
        if is_alive != '':
            try:
                is_alive_value = int(is_alive)
            except ValueError:
                return jsonify({"success": False, "error": "is_alive 参数无效"}), 400
        
        # Whitelist the sort column so ORDER BY stays injection-safe.
        allowed_sort_fields = [
            'id', 'env_id', 'spider_name', 'heartbeat_time',
            'request_queue_length', 'failed_queue_length', 'failed_rate',
            'is_alive', 'heartbeat_timeout_seconds', 'last_check_time',
            'created_at', 'updated_at'
        ]
        if sort not in allowed_sort_fields:
            sort = 'last_check_time'
        
        # Whitelist the sort direction.
        order = order.lower()
        if order not in ['asc', 'desc']:
            order = 'desc'
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Build the WHERE clause; all data values go through bind params.
                where_conditions = []
                params = []
                
                if env_id:
                    where_conditions.append("env_id = %s")
                    params.append(env_id)
                
                if spider_name:
                    where_conditions.append("spider_name = %s")
                    params.append(spider_name)
                
                if is_alive_value is not None:
                    where_conditions.append("is_alive = %s")
                    params.append(is_alive_value)
                
                if search:
                    where_conditions.append("spider_name LIKE %s")
                    params.append(f'%{search}%')
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # Total row count for the pagination envelope.
                count_sql = f"SELECT COUNT(*) as total FROM redis_spider_status WHERE {where_clause}"
                cur.execute(count_sql, params)
                total = cur.fetchone()['total']
                
                offset = (page - 1) * page_size
                
                # For nullable timestamp columns, sort NULL values last regardless
                # of direction ("(col IS NULL)" evaluates to 0/1).
                nullable_timestamp_fields = ['heartbeat_time', 'last_check_time']
                if sort in nullable_timestamp_fields:
                    order_clause = f"({sort} IS NULL), {sort} {order.upper()}"
                else:
                    order_clause = f"{sort} {order.upper()}"
                
                # Fetch the requested page.
                sql = f"""
                    SELECT 
                        id, env_id, spider_name,
                        heartbeat_time, request_queue_length, failed_queue_length, failed_rate,
                        is_alive, heartbeat_timeout_seconds,
                        last_check_time, created_at, updated_at
                    FROM redis_spider_status
                    WHERE {where_clause}
                    ORDER BY {order_clause}
                    LIMIT %s OFFSET %s
                """
                cur.execute(sql, params + [page_size, offset])
                rows = cur.fetchall()
                
                # Normalize datetimes to strings and failed_rate to a float.
                for row in rows:
                    format_row_datetime_fields(row)
                    if 'failed_rate' in row:
                        row['failed_rate'] = format_decimal_field(row['failed_rate'])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": {
                        "items": rows,
                        "total": total,
                        "page": page,
                        "pageSize": page_size
                    }
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取爬虫状态列表失败: {e}")
        logger.exception(e)
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/spiders/<int:spider_id>', methods=['GET'])
@log_api_call("redis_queue_monitor.get_spider_status")
def get_spider_status(spider_id):
    """Fetch a single spider-status record by its primary key.

    Returns the row as JSON, 404 when no row matches, 500 on unexpected errors.
    """
    try:
        connection = get_db_connection()
        try:
            with connection.cursor() as cursor:
                cursor.execute("""
                    SELECT 
                        id, env_id, spider_name,
                        heartbeat_time, request_queue_length, failed_queue_length, failed_rate,
                        is_alive, heartbeat_timeout_seconds,
                        last_check_time, created_at, updated_at
                    FROM redis_spider_status
                    WHERE id = %s
                """, (spider_id,))
                record = cursor.fetchone()
                
                # Guard clause: unknown id -> standard 404 envelope.
                if not record:
                    return jsonify({
                        "success": False,
                        "message": "爬虫状态不存在",
                        "data": None
                    }), 404
                
                # Normalize datetime columns to strings and the decimal
                # failed_rate column to a plain float for JSON output.
                format_row_datetime_fields(record)
                if 'failed_rate' in record:
                    record['failed_rate'] = format_decimal_field(record['failed_rate'])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": record
                })
        finally:
            connection.close()
    except Exception as e:
        logger.error(f"获取爬虫状态详情失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/spiders/stats', methods=['GET'])
@log_api_call("redis_queue_monitor.get_spider_status_stats")
def get_spider_status_stats():
    """Return aggregate statistics over redis_spider_status rows.

    Query params:
        env_id -- optional environment id filter

    Returns counts of alive/dead spiders plus queue-length and failed-rate
    aggregates; 500 on error.
    """
    # Local import: PyMySQL returns DECIMAL aggregates as decimal.Decimal.
    from decimal import Decimal

    try:
        env_id = request.args.get('env_id', type=int)
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Build the WHERE clause; the env_id value is a bind parameter.
                where_conditions = []
                params = []
                
                if env_id:
                    where_conditions.append("env_id = %s")
                    params.append(env_id)
                
                where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
                
                # 查询统计信息
                stats_sql = f"""
                    SELECT 
                        COUNT(*) as total_count,
                        SUM(CASE WHEN is_alive = 1 THEN 1 ELSE 0 END) as alive_count,
                        SUM(CASE WHEN is_alive = 0 THEN 1 ELSE 0 END) as dead_count,
                        AVG(request_queue_length) as avg_request_queue_length,
                        AVG(failed_queue_length) as avg_failed_queue_length,
                        AVG(failed_rate) as avg_failed_rate,
                        SUM(request_queue_length) as total_request_queue_length,
                        SUM(failed_queue_length) as total_failed_queue_length
                    FROM redis_spider_status
                    WHERE {where_clause}
                """
                cur.execute(stats_sql, params)
                stats = cur.fetchone()
                
                # Normalize the row: NULL aggregates (no rows) become 0.
                # BUG FIX: AVG()/SUM() come back as decimal.Decimal, which the old
                # isinstance(..., float) check missed, so those values were
                # serialized as JSON strings instead of numbers.
                for key in stats:
                    if stats[key] is None:
                        stats[key] = 0
                    elif isinstance(stats[key], (float, Decimal)):
                        stats[key] = format_decimal_field(stats[key])
                
                return jsonify({
                    "success": True,
                    "message": "ok",
                    "data": stats
                })
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"获取爬虫状态统计失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


@bp.route('/api/redis-queue-monitor/spiders/<int:spider_id>/heartbeat', methods=['PUT'])
@log_api_call("redis_queue_monitor.update_spider_heartbeat")
def update_spider_heartbeat(spider_id):
    """Update a spider's heartbeat row (mock endpoint; the real updates
    come from the monitoring script).

    JSON body may contain any subset of the whitelisted columns below.
    heartbeat_time defaults to NOW() when omitted; last_check_time is
    always refreshed. Returns 404 if the row does not exist, 500 on error.
    """
    try:
        data = request.get_json() or {}
        current_app.logger.info(f"更新爬虫心跳 | id: {spider_id}")
        
        conn = get_db_connection()
        try:
            with conn.cursor() as cur:
                # Guard: 404 if the spider-status row does not exist.
                cur.execute("SELECT id FROM redis_spider_status WHERE id = %s", (spider_id,))
                if not cur.fetchone():
                    return jsonify({
                        "success": False,
                        "message": "爬虫状态不存在",
                        "data": None
                    }), 404
                
                update_fields = []
                params = []
                
                # Whitelist of client-updatable columns, listed in sorted order.
                # BUG FIX: the original iterated a set, so the generated SET
                # clause column order differed per process (hash randomization);
                # a fixed tuple makes the SQL deterministic.
                allowed_fields = (
                    'failed_queue_length', 'failed_rate', 'heartbeat_time',
                    'heartbeat_timeout_seconds', 'is_alive', 'request_queue_length'
                )
                
                for field in allowed_fields:
                    if field in data:
                        update_fields.append(f"{field} = %s")
                        params.append(data[field])
                
                # Default heartbeat_time to the server clock when omitted.
                if 'heartbeat_time' not in data:
                    update_fields.append("heartbeat_time = NOW()")
                
                # Always record when this check ran. Because this is appended
                # unconditionally, update_fields can never be empty — the old
                # "没有需要更新的字段" 400 branch was unreachable dead code and
                # has been removed.
                update_fields.append("last_check_time = NOW()")
                
                params.append(spider_id)
                
                cur.execute(
                    f"UPDATE redis_spider_status SET {', '.join(update_fields)} WHERE id = %s",
                    params
                )
                conn.commit()
                
                current_app.logger.info(f"爬虫心跳更新成功 | id: {spider_id}")
                
                return jsonify({
                    "success": True,
                    "message": "爬虫心跳更新成功",
                    "data": {"id": spider_id}
                })
        except Exception as e:
            # Roll back the transaction before reporting the failure.
            conn.rollback()
            current_app.logger.error(f"更新爬虫心跳失败: {str(e)}")
            return jsonify({
                "success": False,
                "message": f"更新爬虫心跳失败: {str(e)}",
                "data": None
            }), 500
        finally:
            conn.close()
    except Exception as e:
        logger.error(f"更新爬虫心跳失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500


# ============================================================================
# 批次记录表 API (redis_monitor_batch) - 两个系统共用
# ============================================================================

# 批次记录相关API已按需求移除
