"""
流量监控模块 - 统计HTTP请求流量使用情况
"""

import time
import asyncio
from typing import Optional, Dict, Any
import aiohttp
import redis.asyncio as redis
import redis as sync_redis  # 添加同步Redis客户端
from app.config import settings
from app.utils.logger import get_monitor_logger


class TrafficMonitor:
    """Proxy traffic monitor.

    Aggregates per-request byte counts into Redis hashes along four
    dimensions: daily totals, per-proxy, per-task-type, and hourly.
    Provides both an async recorder (for aiohttp code paths) and a sync
    recorder (for Celery tasks); both write the exact same key layout.
    """

    # All traffic keys expire after this many seconds (30 days).  Shared by
    # the async and sync recorders so their retention can never drift apart
    # (the sync path previously used 3 days while its comment said 30).
    RETENTION_SECONDS = 30 * 24 * 3600

    def __init__(self):
        # Redis clients are created lazily on first use (see get_redis /
        # get_sync_redis) so constructing the monitor has no I/O side effects.
        self.redis_client = None
        self.sync_redis_client = None
        self.logger = get_monitor_logger()
        # Monitoring can be switched off via settings; enabled by default.
        self.enabled = getattr(settings, 'ENABLE_TRAFFIC_MONITOR', True)

    async def get_redis(self):
        """Return the shared async Redis client, creating it on first use."""
        if not self.redis_client:
            self.redis_client = redis.Redis.from_url(settings.REDIS_URL)
        return self.redis_client

    def get_sync_redis(self):
        """Return the shared sync Redis client (Celery path), creating it on first use."""
        if not self.sync_redis_client:
            self.sync_redis_client = sync_redis.Redis.from_url(settings.REDIS_URL, decode_responses=False)
        return self.sync_redis_client

    def _queue_traffic_ops(self, pipe,
                           proxy_url: Optional[str],
                           request_size: int,
                           response_size: int,
                           task_type: str,
                           success: bool):
        """Buffer the hincrby/expire commands shared by both recorders.

        Works with either an async or a sync Redis pipeline because the
        commands are only queued here, never executed.

        Returns:
            Tuple ``(date_key, proxy_key, total_size)`` for the caller's logging.
        """
        timestamp = int(time.time())
        date_key = time.strftime("%Y-%m-%d", time.localtime(timestamp))
        hour_key = time.strftime("%Y-%m-%d-%H", time.localtime(timestamp))
        total_size = request_size + response_size

        # 1. Daily totals.
        total_key = f"traffic:total:{date_key}"
        pipe.hincrby(total_key, "request_bytes", request_size)
        pipe.hincrby(total_key, "response_bytes", response_size)
        pipe.hincrby(total_key, "total_bytes", total_size)
        pipe.hincrby(total_key, "request_count", 1)
        pipe.hincrby(total_key, "success_count" if success else "error_count", 1)

        # 2. Per-proxy totals (credentials are stripped by _get_proxy_key).
        proxy_key = self._get_proxy_key(proxy_url)
        proxy_hash = f"traffic:proxy:{proxy_key}:{date_key}"
        pipe.hincrby(proxy_hash, "total_bytes", total_size)
        pipe.hincrby(proxy_hash, "request_count", 1)

        # 3. Per-task-type totals.
        task_hash = f"traffic:task:{task_type}:{date_key}"
        pipe.hincrby(task_hash, "total_bytes", total_size)
        pipe.hincrby(task_hash, "request_count", 1)

        # 4. Hourly totals.
        hourly_hash = f"traffic:hourly:{hour_key}"
        pipe.hincrby(hourly_hash, "total_bytes", total_size)
        pipe.hincrby(hourly_hash, "request_count", 1)

        # 5. Retention: refresh the 30-day TTL on every touched key.
        for key in (total_key, proxy_hash, task_hash, hourly_hash):
            pipe.expire(key, self.RETENTION_SECONDS)

        return date_key, proxy_key, total_size

    async def record_traffic(self,
                           proxy_url: Optional[str],
                           request_size: int,
                           response_size: int,
                           task_type: str = "unknown",
                           success: bool = True,
                           url: str = ""):
        """Record one request's traffic via the async Redis client.

        Args:
            proxy_url: Proxy used for the request, or None for a direct hit.
            request_size: Estimated outgoing bytes.
            response_size: Received bytes.
            task_type: Logical task bucket for the stats.
            success: Whether the request counts as successful.
            url: Target URL (truncated to 50 chars in the info log).

        Never raises: failures are logged and swallowed so stats can't
        break the caller's request path.
        """
        self.logger.debug(f"🔍 流量统计调用: enabled={self.enabled}, task_type={task_type}, request_size={request_size}, response_size={response_size}")

        if not self.enabled:
            self.logger.debug("⚠️ 流量统计已禁用，跳过记录")
            return

        try:
            redis_client = await self.get_redis()
            pipe = redis_client.pipeline()
            date_key, proxy_key, total_size = self._queue_traffic_ops(
                pipe, proxy_url, request_size, response_size, task_type, success)

            self.logger.debug(f"📊 准备记录流量: date_key={date_key}, total_size={total_size}")

            result = await pipe.execute()
            self.logger.debug(f"📊 Redis管道执行结果: {len(result)} 个操作完成")

            self.logger.info(f"📊 流量统计: {task_type} | {proxy_key} | "
                           f"请求:{request_size}B 响应:{response_size}B 总计:{total_size}B | "
                           f"成功:{success} | URL:{url[:50]}...")

        except Exception as e:
            import traceback
            self.logger.error(f"❌ 流量统计记录失败: {e}")
            self.logger.error(f"❌ 流量统计错误堆栈: {traceback.format_exc()}")

    def record_traffic_sync(self,
                           proxy_url: Optional[str],
                           request_size: int,
                           response_size: int,
                           task_type: str = "unknown",
                           success: bool = True,
                           url: str = ""):
        """Synchronously record one request's traffic — for Celery tasks.

        Same key layout and retention as record_traffic; only the Redis
        client differs.  Never raises (errors are logged and swallowed).
        """
        self.logger.debug(f"🔍 同步流量统计调用: enabled={self.enabled}, task_type={task_type}, request_size={request_size}, response_size={response_size}")

        if not self.enabled:
            self.logger.debug("⚠️ 流量统计已禁用，跳过记录")
            return

        try:
            redis_client = self.get_sync_redis()
            pipe = redis_client.pipeline()
            date_key, proxy_key, total_size = self._queue_traffic_ops(
                pipe, proxy_url, request_size, response_size, task_type, success)

            self.logger.debug(f"📊 准备同步记录流量: date_key={date_key}, total_size={total_size}")

            result = pipe.execute()
            self.logger.debug(f"📊 同步Redis管道执行结果: {len(result)} 个操作完成")

            # NOTE: unlike the async path, no per-request info log is emitted
            # here (it was disabled in the original; kept off to preserve
            # behavior and keep worker logs quiet).

        except Exception as e:
            import traceback
            self.logger.error(f"❌ 同步流量统计记录失败: {e}")
            self.logger.error(f"❌ 同步流量统计错误堆栈: {traceback.format_exc()}")

    def _get_proxy_key(self, proxy_url: Optional[str]) -> str:
        """Reduce a proxy URL to "host:port", hiding credentials.

        Returns "direct" when no proxy is used and "unknown_proxy" when
        the URL cannot be parsed.
        """
        if not proxy_url:
            return "direct"

        try:
            from urllib.parse import urlparse
            parsed = urlparse(proxy_url)
            return f"{parsed.hostname}:{parsed.port}"
        except Exception:  # was a bare except: never mask SystemExit/KeyboardInterrupt
            return "unknown_proxy"

    async def get_daily_stats(self, date: Optional[str] = None) -> Dict[str, Any]:
        """Return aggregated traffic stats for one day (defaults to today).

        Hash fields arrive as bytes because the async client is created
        without decode_responses — hence the b'...' keys below.
        Returns {} on Redis errors.
        """
        if not date:
            date = time.strftime("%Y-%m-%d")

        try:
            redis_client = await self.get_redis()
            stats = await redis_client.hgetall(f"traffic:total:{date}")

            if not stats:
                # No traffic recorded for this date: all-zero report with the
                # same keys as the populated branch (total_mb included).
                return {
                    'date': date,
                    'total_bytes': 0,
                    'total_mb': 0.0,
                    'request_bytes': 0,
                    'response_bytes': 0,
                    'request_count': 0,
                    'success_count': 0,
                    'error_count': 0,
                    'success_rate': 0.0,
                    'avg_response_size': 0
                }

            total_bytes = int(stats.get(b'total_bytes', 0))
            request_bytes = int(stats.get(b'request_bytes', 0))
            response_bytes = int(stats.get(b'response_bytes', 0))
            request_count = int(stats.get(b'request_count', 0))
            success_count = int(stats.get(b'success_count', 0))
            error_count = int(stats.get(b'error_count', 0))

            return {
                'date': date,
                'total_bytes': total_bytes,
                'total_mb': round(total_bytes / 1024 / 1024, 2),
                'request_bytes': request_bytes,
                'response_bytes': response_bytes,
                'request_count': request_count,
                'success_count': success_count,
                'error_count': error_count,
                # max(..., 1) guards both ratios against division by zero.
                'success_rate': round(success_count / max(request_count, 1) * 100, 2),
                'avg_response_size': response_bytes // max(request_count, 1)
            }
        except Exception as e:
            self.logger.error(f"❌ 获取日流量统计失败: {e}")
            return {}

    async def get_proxy_stats(self, date: Optional[str] = None) -> Dict[str, Dict[str, Any]]:
        """Return per-proxy traffic stats for one day (defaults to today).

        Returns {} on Redis errors.
        NOTE(review): KEYS is an O(N) scan of the keyspace; fine at small
        scale, but consider SCAN if the instance grows.
        """
        if not date:
            date = time.strftime("%Y-%m-%d")

        try:
            redis_client = await self.get_redis()
            pattern = f"traffic:proxy:*:{date}"
            keys = await redis_client.keys(pattern)

            proxy_stats = {}
            for key in keys:
                key_str = key.decode() if isinstance(key, bytes) else key
                # Key layout: traffic:proxy:<host:port>:<date> — index 2 is the host.
                proxy_name = key_str.split(':')[2]
                stats = await redis_client.hgetall(key)

                total_bytes = int(stats.get(b'total_bytes', 0))
                request_count = int(stats.get(b'request_count', 0))

                proxy_stats[proxy_name] = {
                    'total_bytes': total_bytes,
                    'total_mb': round(total_bytes / 1024 / 1024, 2),
                    'request_count': request_count,
                    'avg_size': total_bytes // max(request_count, 1)
                }

            return proxy_stats
        except Exception as e:
            self.logger.error(f"❌ 获取代理流量统计失败: {e}")
            return {}

    async def get_hourly_stats(self, hour_key: str) -> Dict[str, Any]:
        """Return traffic stats for one hour bucket ("YYYY-MM-DD-HH").

        Returns {} when the bucket is empty or on Redis errors.
        """
        try:
            redis_client = await self.get_redis()
            stats = await redis_client.hgetall(f"traffic:hourly:{hour_key}")

            if not stats:
                return {}

            # Hash fields are bytes (decode_responses is off).
            total_bytes = int(stats.get(b'total_bytes', 0))
            request_count = int(stats.get(b'request_count', 0))

            return {
                'total_bytes': total_bytes,
                'total_mb': round(total_bytes / 1024 / 1024, 2),
                'request_count': request_count,
                'hour': hour_key
            }

        except Exception as e:
            self.logger.error(f"❌ 获取小时统计失败: {e}")
            return {}


class TrafficAwareSession:
    """HTTP session wrapper that reports every request to the global traffic monitor."""

    def __init__(self, task_type: str = "monitor"):
        # Label under which this session's traffic is bucketed.
        self.task_type = task_type
        self.session = None

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def get(self, url: str, proxy: Optional[str] = None, **kwargs) -> aiohttp.ClientResponse:
        """GET request that records request/response byte counts via traffic_monitor."""
        outbound_bytes = self._estimate_request_size(url, kwargs)

        try:
            async with self.session.get(url, proxy=proxy, **kwargs) as response:
                # Fully read the body so its size can be measured.
                body = await response.read()

                await traffic_monitor.record_traffic(
                    proxy_url=proxy,
                    request_size=outbound_bytes,
                    response_size=len(body),
                    task_type=self.task_type,
                    success=response.status == 200,
                    url=url
                )

                # Stash the body on the response object for the caller.
                response._content = body
                return response

        except Exception:
            # The request itself failed: count outbound bytes, zero response.
            await traffic_monitor.record_traffic(
                proxy_url=proxy,
                request_size=outbound_bytes,
                response_size=0,
                task_type=self.task_type,
                success=False,
                url=url
            )
            raise

    def _estimate_request_size(self, url: str, kwargs: Dict[str, Any]) -> int:
        """Rough byte estimate of the outgoing request (URL + headers + payload)."""
        total = len(url.encode('utf-8'))

        # Approximate each header as it appears on the wire.
        for name, value in kwargs.get('headers', {}).items():
            total += len(f"{name}: {value}\r\n".encode('utf-8'))

        # Body payloads, if any.
        if 'data' in kwargs:
            total += len(str(kwargs['data']).encode('utf-8'))
        if 'json' in kwargs:
            import json
            total += len(json.dumps(kwargs['json']).encode('utf-8'))

        return total + 200  # flat allowance for HTTP protocol overhead


# Module-level singleton: importers share one monitor (and its lazily-created Redis clients).
traffic_monitor = TrafficMonitor()


# Traffic statistics query helper
async def print_traffic_stats(date: str = None):
    """Print a human-readable traffic report for *date* (today when None)."""
    daily = await traffic_monitor.get_daily_stats(date)
    per_proxy = await traffic_monitor.get_proxy_stats(date)

    # Daily summary section.
    print(f"\n📊 流量统计报告 - {daily.get('date', 'N/A')}")
    print("=" * 50)
    print(f"总流量: {daily.get('total_mb', 0)} MB")
    print(f"请求数: {daily.get('request_count', 0)}")
    print(f"成功率: {daily.get('success_rate', 0)}%")
    print(f"平均响应大小: {daily.get('avg_response_size', 0)} 字节")

    # One line per proxy, in the order the stats dict yields them.
    print(f"\n📡 代理使用情况:")
    for proxy_name, usage in per_proxy.items():
        print(f"  {proxy_name}: {usage['total_mb']} MB ({usage['request_count']} 请求)")