"""
定时任务调度服务
基于 APScheduler 实现定时任务管理
"""
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime
import logging
from typing import Optional

logger = logging.getLogger(__name__)

# 全局调度器实例
scheduler: Optional[BackgroundScheduler] = None


def get_scheduler() -> BackgroundScheduler:
    """Return the process-wide scheduler, creating it lazily (singleton)."""
    global scheduler
    if scheduler is not None:
        return scheduler
    scheduler = BackgroundScheduler()
    return scheduler


def init_scheduler():
    """Initialize the scheduler: register the default jobs and start it.

    Safe to call more than once — a scheduler that is already running is
    not started again (``BackgroundScheduler.start()`` raises
    ``SchedulerAlreadyRunningError`` on a running instance).

    Returns:
        BackgroundScheduler: the shared, running scheduler instance.
    """
    sched = get_scheduler()

    # Register (or refresh) the default job set; every job is added with
    # replace_existing=True, so re-registration is idempotent.
    add_default_jobs(sched)

    # Only start if not already running to avoid SchedulerAlreadyRunningError.
    if not sched.running:
        sched.start()
        logger.info("定时任务调度器已启动")

    return sched


def add_default_jobs(sched: BackgroundScheduler):
    """Register the default periodic jobs on *sched*.

    Every job uses ``replace_existing=True``, so calling this repeatedly
    merely refreshes the existing job definitions instead of failing on
    duplicate ids.
    """
    job_specs = [
        # Job 1: purge expired cache entries (every 30 minutes).
        dict(func=cleanup_expired_cache, trigger="interval", minutes=30,
             id="cleanup_cache"),
        # Job 2: purge expired SMS verification codes (every 5 minutes).
        dict(func=cleanup_expired_sms_codes, trigger="interval", minutes=5,
             id="cleanup_sms_codes"),
        # Job 3: refresh aggregate statistics (daily at 02:00).
        dict(func=update_statistics, trigger="cron", hour=2, minute=0,
             id="update_statistics"),
        # Job 4: sync the Elasticsearch index (hourly).
        dict(func=sync_es_index, trigger="interval", hours=1,
             id="sync_es_index"),
        # Job 5: remove orders left unpaid past the deadline (every minute).
        dict(func=cleanup_unpaid_orders, trigger="interval", minutes=1,
             id="cleanup_unpaid_orders"),
    ]

    for spec in job_specs:
        sched.add_job(replace_existing=True, **spec)

    logger.info("已添加所有默认定时任务")


def shutdown_scheduler():
    """Stop the scheduler if it is running and discard the singleton.

    Resetting the module-level ``scheduler`` to None lets a later call to
    ``get_scheduler()``/``init_scheduler()`` build a fresh instance instead
    of reusing one that has already been shut down.
    """
    global scheduler
    if scheduler and scheduler.running:
        scheduler.shutdown()
        logger.info("定时任务调度器已关闭")
    # Drop the stopped (or never-started) instance so it can be recreated.
    scheduler = None


# ==================== 定时任务函数 ====================

def cleanup_expired_cache():
    """Scan cache keys and log how many have already expired.

    Redis evicts expired keys by itself; this job only reports the count
    for observability. Any failure (e.g. Redis unreachable) is logged and
    swallowed so the scheduler thread keeps running.
    """
    # Fix: removed unused `import time` from the original implementation.
    try:
        from utils.myredis import r

        # A TTL of -2 means the key no longer exists — it expired between
        # the pattern scan and the TTL lookup.
        cache_pattern = "cache:*"
        keys = r.get_keys(cache_pattern)

        expired_count = sum(1 for key in keys if r.ttl(key) == -2)

        if expired_count > 0:
            logger.info(f"清理过期缓存: 发现 {expired_count} 个已过期的缓存key")
        else:
            logger.debug("缓存清理: 无过期缓存需要清理")

    except Exception as e:
        logger.error(f"清理过期缓存失败: {e}")


def cleanup_expired_sms_codes():
    """Report how many SMS verification codes are still valid vs expired.

    Redis removes expired keys automatically; this job only gathers
    statistics. Errors are logged and swallowed so the scheduler thread
    is never killed.
    """
    try:
        from utils.myredis import r

        # TTL semantics: -2 => key already gone, > 0 => seconds remaining.
        ttls = [r.ttl(key) for key in r.get_keys("sms:*")]
        expired_count = sum(1 for t in ttls if t == -2)
        valid_count = sum(1 for t in ttls if t > 0)

        logger.info(f"验证码统计: 有效 {valid_count} 个，已过期 {expired_count} 个")

    except Exception as e:
        logger.error(f"清理过期验证码失败: {e}")


def update_statistics():
    """Recompute site-wide counters and cache them in Redis for 24 hours.

    Counts all users plus the contents with status == 1 (presumably
    "published" — confirm against the Content model), then stores the JSON
    payload under "cache:statistics". Failures are logged and swallowed so
    the scheduler thread survives.
    """
    try:
        import json

        from database import SessionLocal
        from models.models import User, Content
        from utils.myredis import r

        session = SessionLocal()
        try:
            user_count = session.query(User).count()
            content_count = (
                session.query(Content)
                .filter(Content.status == 1)
                .count()
            )

            payload = json.dumps({
                "user_count": user_count,
                "content_count": content_count,
                "updated_at": datetime.now().isoformat(),
            })

            # 24h TTL; the value is refreshed by the daily 02:00 cron job.
            r.setex_str("cache:statistics", 86400, payload)

            logger.info(f"统计数据更新: 用户数={user_count}, 内容数={content_count}")
        finally:
            session.close()

    except Exception as e:
        logger.error(f"更新统计数据失败: {e}")


def sync_es_index():
    """Push all contents to the Elasticsearch index, if ES is available.

    Skips quietly when the ES client module is not installed (ImportError)
    or the cluster is unreachable; any other failure is logged and
    swallowed so the scheduler thread keeps running.
    """
    try:
        from config.es_client import sync_all_contents_to_es, check_es_connection
        from database import SessionLocal

        # Bail out early when the cluster cannot be reached.
        if not check_es_connection():
            logger.warning("Elasticsearch未连接，跳过索引同步")
            return

        session = SessionLocal()
        try:
            if sync_all_contents_to_es(session):
                logger.info("ES索引同步完成")
            else:
                logger.warning("ES索引同步失败")
        finally:
            session.close()

    except ImportError:
        logger.debug("Elasticsearch模块未安装，跳过索引同步")
    except Exception as e:
        logger.error(f"同步ES索引失败: {e}")


def cleanup_unpaid_orders():
    """Delete orders still unpaid (status == 0) ten minutes after creation.

    Deletion goes through the ORM session object-by-object so the cascade
    configured on the Order model also removes its order items.
    NOTE(review): datetime.now() is naive — this assumes Order.created_at
    is stored in local time; confirm against the model definition.
    """
    try:
        from datetime import timedelta
        from database import SessionLocal
        from models.order_models import Order

        session = SessionLocal()
        try:
            cutoff = datetime.now() - timedelta(minutes=10)
            stale = (
                session.query(Order)
                .filter(Order.status == 0, Order.created_at < cutoff)
                .all()
            )

            if not stale:
                return

            count = len(stale)
            for order in stale:
                # Per-object delete triggers the model-level cascade.
                session.delete(order)
            session.commit()
            logger.info(f"清理未支付订单: 已删除 {count} 笔超过10分钟未支付的订单")
        except Exception as e:
            session.rollback()
            logger.error(f"清理未支付订单失败: {e}")
        finally:
            session.close()
    except Exception as e:
        logger.error(f"清理未支付订单任务执行异常: {e}")

