"""
任务调度服务 - 实现基于优先级的任务调度
"""
import asyncio
import threading
import time
from datetime import datetime
from queue import Queue
from typing import Dict, List, Optional, Any
from sqlalchemy.orm import Session
from sqlalchemy import desc, func
from core.database import SessionLocal
from models.schemas import TaskStatus
from core.config import get_settings
from core.logger import get_logger, get_task_logger,api_logger
from services.parse_service import parse_service
from models.database import User, ParseTask, ParseResult

logger = get_logger(__name__)
settings = get_settings()

class TaskSchedulerService:
    """任务调度服务"""
    
    def __init__(self):
        self._running = False
        self._thread = None
        self._max_concurrent_tasks = settings.max_concurrent_tasks or 3
        self._check_interval = settings.check_pending_tasks_interval  # 检查间隔（秒）
        self._task_queue = Queue()
        # 添加队列锁，确保队列操作的线程安全
        self._queue_lock = threading.Lock()
    
    def start(self):
        """启动任务调度器"""
        if self._running:
            return
        # 启动时立即检查一次待处理任务
        threading.Thread(target=self._check_pending_tasks_on_startup, daemon=True).start()
        self._thread = threading.Thread(target=self._scheduler_worker, daemon=True)
        self._thread.start()
        self._running = True
        logger.info("任务调度器已启动")
        
        
    
    def stop(self):
        """停止任务调度器"""
        self._running = False
        if self._thread:
            self._thread.join()
        logger.info("任务调度器已停止")
    

    async def _process_parse_task_async(
        task_id: str,
        user_id: int,
        file_path: str,
        options: dict):
        """异步处理解析任务"""
        from core.database import SessionLocal
        
        db = SessionLocal()
        task_logger = get_task_logger(task_id)
        
        try:
            # 获取任务记录
            task = db.query(ParseTask).filter(ParseTask.task_id == task_id).first()
            if not task:
                task_logger.error("Task not found in database")
                return
            
            # 更新任务状态
            task.status = TaskStatus.PROCESSING
            task.started_at = datetime.now()
            db.commit()
            
            task_logger.info("Async task processing started")
            
            # 执行解析
            result = await parse_service.parse_document(
                file_path=file_path,
                task_id=task_id,
                options=options,
                task_logger=task_logger
            )
            
            # 更新任务状态
            task.status = TaskStatus.COMPLETED
            task.progress = 100
            task.completed_at = datetime.now()
            task.result_path = result.get("result_path")
            db.commit()
            
            # 保存解析结果
            for content_type, content_info in result.get("results", {}).items():
                parse_result = ParseResult(
                    task_id=task.id,
                    content_type=content_type,
                    content_path=content_info.get("path"),
                    file_size=content_info.get("size"),
                    metadata=content_info.get("metadata")
                )
                db.add(parse_result)
            
            db.commit()
            
            task_logger.info("Async task completed successfully")
            api_logger.log_task_event("parse", "async_task_completed", {"task_id": task_id})
            
        except Exception as e:
            # 更新任务状态
            task.status = TaskStatus.FAILED
            task.error_message = str(e)
            task.completed_at = datetime.now()
            db.commit()
            
            task_logger.error("Async task failed", error=e)
            api_logger.log_error(e, {"task_id": task_id, "endpoint": "async_task"})
            
        finally:
            db.close()

    def _scheduler_worker(self):
        """调度器工作线程"""
        logger.info("任务调度器工作线程已启动")
        while self._running:
            try:
                # 定期从数据库刷新任务队列
                self._refresh_task_queue()
                task = self._get_task_from_queue()
                if task:
                    logger.info(f"从队列获取任务: {task.task_id} (优先级: {task.priority}),开始解析...")
                    asyncio.run(self._start_task(task))
                else:
                    logger.info(f"任务队列已空，等待{self._check_interval}秒后做下一次检查")
                    time.sleep(self._check_interval)
            except Exception as e:
                logger.error(f"任务调度器工作线程异常: {e}")
                time.sleep(5)  # 出错后休眠5秒
    
    def _check_pending_tasks_on_startup(self):
        """启动时检查待处理任务"""
        try:
            logger.info("检查启动时的待处理任务...")
            db = SessionLocal()
            
            # 查询所有处于pending状态的异步任务
            pending_tasks = db.query(ParseTask).filter(
                ParseTask.status == TaskStatus.PENDING
            ).all()
            
            if pending_tasks:
                logger.info(f"发现 {len(pending_tasks)} 个待处理任务，将重新加入调度队列")
                for task in pending_tasks:
                    logger.info(f"待处理任务: {task.task_id} (优先级: {task.priority})")
            else:
                logger.info("没有发现待处理任务")
            
            # 查询所有处于pending状态的同步任务
            processing_asks = db.query(ParseTask).filter(
                ParseTask.status == TaskStatus.PROCESSING
            ).all()
            
            if processing_asks:
                logger.info(f"发现 {len(processing_asks)} 个处理中的任务，将标记为失败状态")
                for task in processing_asks:
                    logger.info(f"识别到处理中的任务: {task.task_id} (优先级: {task.priority})")
                    # 将同步任务标记为失败，因为同步任务应该立即处理
                    task.status = TaskStatus.FAILED
                    task.error_message = "系统重启时发现处理中的任务，已标记为失败"
                    task.completed_at = datetime.now()
                
                db.commit()
                logger.info(f"已将 {len(processing_asks)} 个处理中的任务标记为失败")
            else:
                logger.info("系统启动时，没有发现处理中的任务")
                
        except Exception as e:
            logger.error(f"检查启动时待处理任务失败: {e}")
        finally:
            db.close()

      
    def _refresh_task_queue(self):
        """从数据库刷新任务队列"""
        try:
            # 从数据库获取待处理任务
            pending_tasks = self._get_pending_tasks_from_db()
            logger.info(f"定时检查任务，从数据库获取到 {len(pending_tasks)} 个待处理任务")
            if not pending_tasks:
                return []

            with self._queue_lock:
                # 清空现有队列
                while not self._task_queue.empty():
                    self._task_queue.get()

                # 将任务放入队列
                for task in pending_tasks:
                    self._task_queue.put(task)
                
                if pending_tasks:
                    logger.info(f"已刷新任务队列，新增 {len(pending_tasks)} 个任务")
                
                return pending_tasks

        except Exception as e:
            logger.error(f"刷新任务队列失败: {e}")
    
    def _get_pending_tasks_from_db(self) -> List[ParseTask]:
        """从数据库获取待处理的任务（按优先级排序）"""
        try:
            db = SessionLocal()
            
            # 查询待处理的任务，按优先级升序排列（1为最高优先级）
            tasks = db.query(ParseTask).filter(
                ParseTask.status == TaskStatus.PENDING
            ).order_by(
                ParseTask.priority.asc(),  # 优先级升序
                ParseTask.created_at.asc()  # 创建时间升序
            ).all()
            
            logger.debug(f"从数据库获取到 {len(tasks)} 个待处理任务")
            for task in tasks:
                logger.debug(f"待处理任务: {task.task_id} (优先级: {task.priority})")
            
            return tasks
            
        except Exception as e:
            logger.error(f"从数据库获取待处理任务失败: {e}")
            return []
        finally:
            db.close()
    
    def _get_task_from_queue(self) -> Optional[ParseTask]:
        """从队列中获取任务"""
        try:
            with self._queue_lock:
                if not self._task_queue.empty():
                    task = self._task_queue.get()
                    logger.info(f"从队列获取任务: {task.task_id} (优先级: {task.priority})")
                    return task
                return None
        except Exception as e:
            logger.error(f"从队列获取任务失败: {e}")
            return None
    
    
    
    async def _start_task(self, task: ParseTask):
        """启动任务"""
        try:
            logger.info(f"开始启动任务 {task.task_id} (优先级: {task.priority})")
            await self._process_parse_task_async(task.task_id,task.user_id,task.file_path,task.parse_options)
            logger.info(f"文档解析成功： {task.task_id} (优先级: {task.priority})")
            
        except Exception as e:
            logger.error(f"文档解析失败： {task.task_id}: {e}")
    

    def get_scheduler_status(self) -> Dict[str, Any]:
        """获取调度器状态"""
        return {
            "running": self._running,
            "max_concurrent_tasks": self._max_concurrent_tasks,
            "current_tasks": len(self._current_tasks),
            "current_task_ids": list(self._current_tasks.keys())
        }

# Module-level scheduler singleton used by the helper functions below.
task_scheduler = TaskSchedulerService()

def start_task_scheduler():
    """Start the module-level task scheduler singleton."""
    return task_scheduler.start()

def stop_task_scheduler():
    """Stop the module-level task scheduler singleton."""
    return task_scheduler.stop()

def get_task_scheduler_status() -> Dict[str, Any]:
    """Return the current state of the module-level task scheduler."""
    status = task_scheduler.get_scheduler_status()
    return status