"""爬虫监控和统计API"""
import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

import aioredis
from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi.responses import JSONResponse
from pydantic import BaseModel

# Module-level logger for this API module.
logger = logging.getLogger(__name__)

# FastAPI application exposing the crawler monitoring endpoints below.
app = FastAPI(title="Weibo Crawler Monitor", version="1.0.0")


class CrawlerStats:
    """Redis-backed crawl success/failure statistics manager.

    All state lives in Redis so multiple crawler processes can share it.
    Key layout (all prefixes are fixed in ``__init__``):

    - ``crawler:stats:<spider>``          hash of per-spider counters
    - ``crawler:stats:total``             hash of aggregate counters
    - ``crawler:stats:<spider>:failures`` list of recent failure reasons
    - ``crawler:metrics:<name>``          sorted set of timestamped samples
    - ``crawler:health:<spider>``         hash holding the latest health report
    """

    def __init__(self, redis_url: str = "redis://localhost:6379"):
        """Initialize the statistics manager.

        Args:
            redis_url: Redis connection URL.
        """
        self.redis_url = redis_url
        self.redis_client = None  # created lazily in connect()
        self.stats_key = "crawler:stats"
        self.metrics_key = "crawler:metrics"
        self.health_key = "crawler:health"

    async def connect(self):
        """Open the Redis connection if not already open (idempotent)."""
        if not self.redis_client:
            self.redis_client = await aioredis.from_url(
                self.redis_url,
                encoding="utf-8",
                decode_responses=True
            )

    async def close(self):
        """Close the Redis connection, if any."""
        if self.redis_client:
            await self.redis_client.close()
            self.redis_client = None

    async def record_success(self, spider_name: str = "weibo"):
        """Record one successful crawl.

        Args:
            spider_name: Name of the spider.
        """
        await self.connect()

        # Bump the per-spider and aggregate counters.
        await self.redis_client.hincrby(f"{self.stats_key}:{spider_name}", "success_count", 1)
        await self.redis_client.hincrby(f"{self.stats_key}:total", "success_count", 1)

        # Keep the cached per-spider success rate in sync with the counters.
        await self._update_success_rate(spider_name)

        # Remember when this spider last succeeded.
        await self.redis_client.hset(
            f"{self.stats_key}:{spider_name}",
            "last_success",
            datetime.utcnow().isoformat()
        )

    async def record_failure(self, spider_name: str = "weibo", reason: Optional[str] = None):
        """Record one failed crawl.

        Args:
            spider_name: Name of the spider.
            reason: Optional human-readable failure reason.
        """
        await self.connect()

        # Bump the per-spider and aggregate counters.
        await self.redis_client.hincrby(f"{self.stats_key}:{spider_name}", "failure_count", 1)
        await self.redis_client.hincrby(f"{self.stats_key}:total", "failure_count", 1)

        # Keep the cached per-spider success rate in sync with the counters.
        await self._update_success_rate(spider_name)

        # Log the failure reason, capped at the 100 most recent entries.
        if reason:
            await self.redis_client.lpush(
                f"{self.stats_key}:{spider_name}:failures",
                f"{datetime.utcnow().isoformat()}:{reason}"
            )
            await self.redis_client.ltrim(f"{self.stats_key}:{spider_name}:failures", 0, 99)

    @staticmethod
    def _compute_rate(stats: Dict[str, Any]) -> Optional[float]:
        """Return the success percentage from a counters hash.

        Args:
            stats: Hash with optional "success_count"/"failure_count" fields
                (values may be strings, as returned by decode_responses=True).

        Returns:
            Success rate in percent, or None when there are no attempts yet.
        """
        success = int(stats.get("success_count", 0))
        failure = int(stats.get("failure_count", 0))
        total = success + failure
        if total == 0:
            return None
        return (success / total) * 100

    async def _update_success_rate(self, spider_name: str):
        """Recompute and cache the success rate for one stats hash.

        Args:
            spider_name: Name of the spider (key suffix of the stats hash).
        """
        stats = await self.redis_client.hgetall(f"{self.stats_key}:{spider_name}")
        rate = self._compute_rate(stats)
        if rate is not None:
            await self.redis_client.hset(
                f"{self.stats_key}:{spider_name}",
                "success_rate",
                f"{rate:.2f}"
            )

    async def get_stats(self, spider_name: Optional[str] = None) -> Dict[str, Any]:
        """Fetch statistics.

        Args:
            spider_name: Spider to report on; None returns aggregate stats
                for all spiders.

        Returns:
            Statistics dictionary.
        """
        await self.connect()

        if spider_name:
            stats = await self.redis_client.hgetall(f"{self.stats_key}:{spider_name}")
            return {
                "spider": spider_name,
                "success_count": int(stats.get("success_count", 0)),
                "failure_count": int(stats.get("failure_count", 0)),
                "success_rate": float(stats.get("success_rate", 0)),
                "last_success": stats.get("last_success"),
                "status": await self.get_health_status(spider_name)
            }

        total_stats = await self.redis_client.hgetall(f"{self.stats_key}:total")
        # BUG FIX: the "total" hash never receives a cached "success_rate"
        # (the recorders only refresh per-spider rates), so the old code
        # always reported 0.0 here.  Derive the rate from the counters.
        overall_rate = self._compute_rate(total_stats)
        return {
            "total_success": int(total_stats.get("success_count", 0)),
            "total_failure": int(total_stats.get("failure_count", 0)),
            "overall_success_rate": round(overall_rate, 2) if overall_rate is not None else 0.0,
            "spiders": {
                "weibo": await self.get_stats("weibo")
            }
        }

    async def record_metric(self, metric_name: str, value: float):
        """Record one metric sample.

        Args:
            metric_name: Metric name.
            value: Sample value.
        """
        await self.connect()

        # The member encodes "timestamp:value"; the score is the epoch time
        # so samples can be range-queried and expired by age.
        timestamp = datetime.utcnow().isoformat()
        await self.redis_client.zadd(
            f"{self.metrics_key}:{metric_name}",
            {f"{timestamp}:{value}": datetime.utcnow().timestamp()}
        )

        # Drop samples older than 24 hours.
        cutoff = datetime.utcnow().timestamp() - 86400
        await self.redis_client.zremrangebyscore(
            f"{self.metrics_key}:{metric_name}",
            0,
            cutoff
        )

    async def get_metrics(self, metric_name: str, hours: int = 1) -> List[Dict]:
        """Fetch recent metric samples.

        Args:
            metric_name: Metric name.
            hours: Look-back window in hours.

        Returns:
            List of {"timestamp": str, "value": float} dicts.
        """
        await self.connect()

        cutoff = datetime.utcnow().timestamp() - (hours * 3600)
        data = await self.redis_client.zrangebyscore(
            f"{self.metrics_key}:{metric_name}",
            cutoff,
            '+inf'
        )

        metrics = []
        for item in data:
            # rsplit: the ISO timestamp itself contains colons, so split
            # off only the trailing value.
            timestamp, value = item.rsplit(':', 1)
            metrics.append({
                "timestamp": timestamp,
                "value": float(value)
            })

        return metrics

    async def update_health(self, spider_name: str, status: str, message: Optional[str] = None):
        """Store the current health report for a spider.

        Args:
            spider_name: Name of the spider.
            status: Status value ("healthy", "warning", "error").
            message: Optional status message.
        """
        await self.connect()

        health_data = {
            "status": status,
            "timestamp": datetime.utcnow().isoformat(),
            "message": message or ""
        }

        await self.redis_client.hset(
            f"{self.health_key}:{spider_name}",
            "current",
            json.dumps(health_data)
        )

        # Reports go stale (expire) after 5 minutes without a refresh.
        await self.redis_client.expire(f"{self.health_key}:{spider_name}", 300)

    async def get_health_status(self, spider_name: str) -> Dict[str, Any]:
        """Fetch the latest health report for a spider.

        Args:
            spider_name: Name of the spider.

        Returns:
            Health report dict; a placeholder when no report exists
            (or the last one has expired).
        """
        await self.connect()

        health_data = await self.redis_client.hget(
            f"{self.health_key}:{spider_name}",
            "current"
        )

        if health_data:
            return json.loads(health_data)
        return {
            "status": "unknown",
            "timestamp": None,
            "message": "No health check data available"
        }


# Shared module-level statistics manager used by every route below.
stats_manager = CrawlerStats()


# API routes
@app.on_event("startup")
async def startup_event():
    """应用启动事件"""
    await stats_manager.connect()
    logger.info("监控API已启动")
    

@app.on_event("shutdown")
async def shutdown_event():
    """应用关闭事件"""
    await stats_manager.close()
    logger.info("监控API已关闭")
    

@app.get("/api/v1/crawler/status")
async def get_crawler_status():
    """获取爬虫状态
    
    Returns:
        爬虫状态信息
    """
    try:
        stats = await stats_manager.get_stats()
        return JSONResponse(
            status_code=200,
            content={
                "status": "success",
                "data": stats,
                "timestamp": datetime.utcnow().isoformat()
            }
        )
    except Exception as e:
        logger.error(f"获取状态失败: {e}")
        raise HTTPException(status_code=500, detail=str(e))
        

@app.get("/api/v1/crawler/stats/{spider_name}")
async def get_spider_stats(spider_name: str):
    """获取特定爬虫统计
    
    Args:
        spider_name: 爬虫名称
        
    Returns:
        爬虫统计信息
    """
    try:
        stats = await stats_manager.get_stats(spider_name)
        return JSONResponse(
            status_code=200,
            content={
                "status": "success",
                "data": stats,
                "timestamp": datetime.utcnow().isoformat()
            }
        )
    except Exception as e:
        logger.error(f"获取统计失败: {e}")
        raise HTTPException(status_code=500, detail=str(e))
        

@app.get("/api/v1/crawler/health")
async def health_check():
    """健康检查端点
    
    Returns:
        健康状态
    """
    try:
        # 检查微博爬虫健康状态
        weibo_health = await stats_manager.get_health_status("weibo")
        
        # 获取成功率
        stats = await stats_manager.get_stats("weibo")
        success_rate = stats.get("success_rate", 0)
        
        # 判断整体健康状态
        if success_rate >= 90:
            overall_status = "healthy"
        elif success_rate >= 70:
            overall_status = "warning"
        else:
            overall_status = "error"
            
        return JSONResponse(
            status_code=200 if overall_status != "error" else 503,
            content={
                "status": overall_status,
                "success_rate": success_rate,
                "spiders": {
                    "weibo": weibo_health
                },
                "timestamp": datetime.utcnow().isoformat()
            }
        )
    except Exception as e:
        logger.error(f"健康检查失败: {e}")
        return JSONResponse(
            status_code=503,
            content={
                "status": "error",
                "error": str(e),
                "timestamp": datetime.utcnow().isoformat()
            }
        )
        

@app.post("/api/v1/crawler/record/success")
async def record_success(spider_name: str = "weibo"):
    """记录成功爬取
    
    Args:
        spider_name: 爬虫名称
        
    Returns:
        操作结果
    """
    try:
        await stats_manager.record_success(spider_name)
        return JSONResponse(
            status_code=200,
            content={
                "status": "success",
                "message": "Success recorded"
            }
        )
    except Exception as e:
        logger.error(f"记录失败: {e}")
        raise HTTPException(status_code=500, detail=str(e))
        

@app.post("/api/v1/crawler/record/failure")
async def record_failure(spider_name: str = "weibo", reason: str = None):
    """记录失败爬取
    
    Args:
        spider_name: 爬虫名称
        reason: 失败原因
        
    Returns:
        操作结果
    """
    try:
        await stats_manager.record_failure(spider_name, reason)
        return JSONResponse(
            status_code=200,
            content={
                "status": "success",
                "message": "Failure recorded"
            }
        )
    except Exception as e:
        logger.error(f"记录失败: {e}")
        raise HTTPException(status_code=500, detail=str(e))


from typing import List
import json  # 添加缺失的导入