import asyncio
import logging
from datetime import datetime, timezone
from typing import List, Optional, Dict, Any

import psutil
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_current_user, get_db_session, get_task_service, get_crawl_service
from app.models.schemas import APIResponse, LogLevel
from app.services.task_service import TaskService
from app.services.crawl_service import CrawlService
from app.models.database import User
from app.config import settings

router = APIRouter()
logger = logging.getLogger(__name__)

@router.get("/tasks/{task_id}/logs", response_model=APIResponse[List[Dict[str, Any]]])
async def get_task_logs(
    task_id: str,
    level: Optional[LogLevel] = Query(None, description="日志级别过滤"),
    page: int = Query(1, ge=1, description="页码"),
    size: int = Query(50, ge=1, le=100, description="每页数量"),
    search: Optional[str] = Query(None, description="关键词搜索"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db_session),
    task_service: TaskService = Depends(get_task_service),
    crawl_service: CrawlService = Depends(get_crawl_service)
):
    """获取任务日志"""
    try:
        # 检查任务权限
        task_info = await task_service.get_task(task_id, db)
        if not task_info or task_info.user_id != current_user.id:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="任务不存在"
            )
        
        # 获取日志
        logs = await crawl_service.get_task_logs(
            task_id=task_id,
            level=level.value if level else "all",
            limit=size
        )
        
        if logs is None:
            logs = []
        
        # 如果有搜索关键词，进行过滤
        if search:
            logs = [log for log in logs if search.lower() in log.get("message", "").lower()]
        
        return APIResponse(
            message="获取任务日志成功",
            data=logs
        )
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get task logs {task_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取任务日志失败: {str(e)}"
        )

@router.get("/tasks/{task_id}/results", response_model=APIResponse[Dict[str, Any]])
async def get_task_results(
    task_id: str,
    page: int = Query(1, ge=1, description="页码"),
    size: int = Query(100, ge=1, le=1000, description="每页数量"),
    format: str = Query("json", description="返回格式"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db_session),
    task_service: TaskService = Depends(get_task_service),
    crawl_service: CrawlService = Depends(get_crawl_service)
):
    """获取任务结果"""
    try:
        # 检查任务权限
        task_info = await task_service.get_task(task_id, db)
        if not task_info or task_info.user_id != current_user.id:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="任务不存在"
            )
        
        # 获取结果
        results = await crawl_service.get_task_results(
            task_id=task_id,
            page=page,
            limit=size
        )
        
        if results is None:
            results = {"results": [], "total": 0, "page": page, "limit": size}
        
        return APIResponse(
            message="获取任务结果成功",
            data=results
        )
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get task results {task_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取任务结果失败: {str(e)}"
        )

@router.get("/tasks/{task_id}/metrics", response_model=APIResponse[Dict[str, Any]])
async def get_task_metrics(
    task_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db_session),
    task_service: TaskService = Depends(get_task_service)
):
    """获取任务指标"""
    try:
        # 检查任务权限
        task_info = await task_service.get_task(task_id, db)
        if not task_info or task_info.user_id != current_user.id:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="任务不存在"
            )
        
        # 计算指标
        metrics = {
            "crawl_rate": task_info.pages_crawled / max(1, (task_info.updated_at - task_info.created_at).total_seconds() / 60),  # 页面/分钟
            "success_rate": task_info.success_rate,
            "error_rate": (task_info.errors_count / max(1, task_info.pages_crawled)) * 100,
            "avg_response_time": 1.5,  # 模拟数据
            "memory_usage": 256.7,  # 模拟数据
            "cpu_usage": 15.2,  # 模拟数据
            "network_io": {
                "bytes_sent": 1024000,
                "bytes_received": 5120000
            }
        }
        
        return APIResponse(
            message="获取任务指标成功",
            data=metrics
        )
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get task metrics {task_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取任务指标失败: {str(e)}"
        )

@router.get("/system/status", response_model=APIResponse[Dict[str, Any]])
async def get_system_status(
    current_user: User = Depends(get_current_user)
):
    """获取系统状态"""
    try:
        # 获取系统资源使用情况
        cpu_percent = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        
        # 模拟任务统计
        active_tasks = 5  # 这里应该从数据库查询
        total_tasks = 150  # 这里应该从数据库查询
        
        status_info = {
            "version": settings.VERSION,
            "uptime": 86400,  # 模拟运行时间
            "active_tasks": active_tasks,
            "total_tasks": total_tasks,
            "system_load": {
                "cpu_percent": cpu_percent,
                "memory_percent": memory.percent,
                "disk_percent": disk.percent
            },
            "database_status": "healthy",
            "redis_status": "healthy",
            "crawl4ai_status": "healthy"
        }
        
        return APIResponse(
            message="获取系统状态成功",
            data=status_info
        )
        
    except Exception as e:
        logger.error(f"Failed to get system status: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取系统状态失败: {str(e)}"
        )

@router.get("/statistics/system", response_model=APIResponse[Dict[str, Any]])
async def get_system_statistics(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db_session)
):
    """获取系统统计信息"""
    try:
        # 这里应该从数据库查询真实的统计数据
        # 目前返回模拟数据
        statistics = {
            "total_tasks": 150,
            "completed_tasks": 120,
            "failed_tasks": 15,
            "running_tasks": 5,
            "total_pages_crawled": 50000,
            "total_data_extracted": 125000,
            "avg_success_rate": 92.5,
            "total_users": 25,
            "active_users": 12,
            "storage_used_gb": 15.6,
            "bandwidth_used_gb": 125.8
        }
        
        return APIResponse(
            message="获取系统统计成功",
            data=statistics
        )
        
    except Exception as e:
        logger.error(f"Failed to get system statistics: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取系统统计失败: {str(e)}"
        )

@router.get("/health", response_model=APIResponse[Dict[str, str]])
async def health_check():
    """基础健康检查"""
    return APIResponse(
        message="服务运行正常",
        data={"status": "healthy", "version": settings.VERSION}
    )

@router.get("/health/detailed", response_model=APIResponse[Dict[str, Any]])
async def detailed_health_check():
    """详细健康检查"""
    try:
        # 检查各个组件的健康状态
        health_status = {
            "api": "healthy",
            "database": "healthy",  # 这里应该实际检查数据库连接
            "redis": "healthy",     # 这里应该实际检查Redis连接
            "crawl4ai": "healthy",  # 这里应该实际检查Crawl4AI服务
            "disk_space": "healthy",
            "memory": "healthy",
            "cpu": "healthy"
        }
        
        # 检查磁盘空间
        disk = psutil.disk_usage('/')
        if disk.percent > 90:
            health_status["disk_space"] = "warning"
        
        # 检查内存使用
        memory = psutil.virtual_memory()
        if memory.percent > 90:
            health_status["memory"] = "warning"
        
        # 检查CPU使用
        cpu_percent = psutil.cpu_percent(interval=1)
        if cpu_percent > 90:
            health_status["cpu"] = "warning"
        
        overall_status = "healthy"
        if "warning" in health_status.values():
            overall_status = "warning"
        if "unhealthy" in health_status.values():
            overall_status = "unhealthy"
        
        return APIResponse(
            message=f"系统状态: {overall_status}",
            data={
                "overall_status": overall_status,
                "components": health_status,
                "timestamp": "2024-01-01T10:00:00Z"
            }
        )
        
    except Exception as e:
        logger.error(f"Health check failed: {e}")
        return APIResponse(
            success=False,
            message="健康检查失败",
            data={
                "overall_status": "unhealthy",
                "error": str(e)
            }
        )
