import asyncio
import json
import logging
import os.path
import time
from datetime import datetime
from ipaddress import ip_address
from typing import Dict, List, Optional

from fastapi import Request, Response
from geoip2.database import Reader
from geoip2.errors import AddressNotFoundError
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from fastapi.responses import StreamingResponse

from app.core.redis_conn import redis_conn
from app.models.system_access_log_model import AccessLogModel
from app.utils.timezone import now
from app.core.context import get_request_id

logger = logging.getLogger(__name__)


class AccessLogMiddleware(BaseHTTPMiddleware):
    """Middleware that records an access-log entry for every request.

    Entries are buffered in a per-process Redis list and flushed to the
    database in batches of ``batch_size`` (see ``flush_to_db``). Each entry
    is enriched with GeoIP location data when the GeoLite2 database file
    is available on disk.
    """

    REDIS_KEY_PREFIX = "access_logs"
    GEOIP_DB_PATH = "assets/GeoLite2-City.mmdb"
    # Paths excluded from logging; a leading "*" matches any path suffix.
    WHITE_LIST = ["/openapi.json", "/docs", "/health", "*access_logs/logs"]

    def __init__(self, app, batch_size: int = 1):
        super().__init__(app)
        self.batch_size = batch_size
        # The process id distinguishes workers, so each keeps its own
        # Redis buffer and counter.
        self.instance_id = os.getpid()
        self.redis_key = f"{self.REDIS_KEY_PREFIX}:{self.instance_id}"
        self._flush_lock = asyncio.Lock()
        self._counter_key = f"access_logs_counter:{self.instance_id}"
        self._geo_reader: Optional[Reader] = None

    @property
    def geo_reader(self) -> Optional[Reader]:
        """Lazily open the GeoIP database; None when the file is missing
        or cannot be opened."""
        if self._geo_reader is None and os.path.exists(self.GEOIP_DB_PATH):
            try:
                self._geo_reader = Reader(self.GEOIP_DB_PATH)
            except Exception as e:
                logger.error(f"Failed to load GeoIP database: {e}")
        return self._geo_reader

    async def get_and_increment_counter(self) -> int:
        """Atomically increment and return the buffered-log counter."""
        return await redis_conn.incr(self._counter_key)

    async def reset_counter(self) -> None:
        """Reset the buffered-log counter."""
        await redis_conn.delete(self._counter_key)

    def get_geo_info(self, ip: str) -> Dict:
        """Look up coarse location info for *ip*.

        Returns a dict with ``country``/``city``/``latitude``/``longitude``/
        ``timezone`` keys. Values are all None for private, unknown, or
        malformed addresses, or when no GeoIP database is loaded.
        (Latitude/longitude are floats, not strings.)
        """
        default_geo = {"country": None, "city": None, "latitude": None, "longitude": None, "timezone": None}

        if not self.geo_reader:
            return default_geo

        try:
            # Private addresses (LAN, loopback) carry no meaningful geo data.
            if ip_address(ip).is_private:
                return default_geo

            response = self.geo_reader.city(ip)
            return {
                "country": response.country.name,
                "city": response.city.name,
                "latitude": response.location.latitude,
                "longitude": response.location.longitude,
                "timezone": response.location.time_zone,
            }
        except (AddressNotFoundError, ValueError):
            # Address not in the database, or ip string is unparsable.
            return default_geo

    async def get_request_body(self, request: Request) -> Optional[str]:
        """Return the request body as text, or None if empty/unreadable."""
        try:
            body = await request.body()
            return body.decode() if body else None
        except Exception:
            # Best-effort: an unreadable body must not break the request.
            return None

    async def get_response_body(self, response: Response) -> Optional[str]:
        """Drain and return the response body as text.

        The consumed ``body_iterator`` is replaced with a fresh one so the
        response can still be sent to the client. Streaming responses are
        never drained (returns None).
        """
        if isinstance(response, StreamingResponse):
            return None

        try:
            # Consume the original body iterator.
            body = b""
            async for chunk in response.body_iterator:
                body += chunk

            if not body:
                # Restore an (empty) iterator so the response remains sendable.
                async def empty_body_iterator():
                    yield b""

                response.body_iterator = empty_body_iterator()
                return None

            # Re-install an iterator that replays the captured body.
            async def body_iterator():
                yield body

            response.body_iterator = body_iterator()

            try:
                return body.decode()
            except UnicodeDecodeError:
                # Binary payload: fall back to its repr.
                return str(body)

        except Exception as e:
            logger.error(f"Error getting response body: {e}")

            # Ensure the response still has a usable body iterator.
            async def empty_body_iterator():
                yield b""

            response.body_iterator = empty_body_iterator()
            return None

    async def get_request_params(self, request: Request) -> Dict:
        """Collect query and path parameters for the log entry."""
        return {
            "query_params": dict(request.query_params),
            "path_params": dict(request.path_params),
        }

    def is_path_in_whitelist(self, path: str) -> bool:
        """True if *path* matches WHITE_LIST (supports "*suffix" patterns)."""
        for pattern in self.WHITE_LIST:
            if pattern.startswith("*"):
                if path.endswith(pattern[1:]):
                    return True
            elif path == pattern:
                return True
        return False

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Skip all logging work for whitelisted paths up front (previously
        # the check ran only after call_next, so excluded paths still paid
        # for a request-body read).
        if self.is_path_in_whitelist(request.url.path):
            return await call_next(request)

        start_time = time.time()
        # BUGFIX: response must be initialized before the try block —
        # the except handler below references it, and previously raised
        # NameError whenever call_next (or the body read) failed.
        response: Optional[Response] = None

        try:
            # Capture request body and parameters before the app consumes them.
            request_body = await self.get_request_body(request)
            request_params = await self.get_request_params(request)

            response = await call_next(request)
            process_time = (time.time() - start_time) * 1000  # ms

            response_body = await self.get_response_body(response)

            ip = request.client.host
            geo_info = self.get_geo_info(ip)

            # Timestamps are stored as ISO strings so they survive JSON.
            current_time = now().isoformat()

            log_data = {
                "request_id": get_request_id(),
                "path": request.url.path,
                "method": request.method,
                "ip": ip,
                "user_agent": request.headers.get("user-agent"),
                "status_code": response.status_code,
                "process_time": process_time,
                "created_at": current_time,
                "request_body": request_body,
                "request_params": request_params,
                "response_body": response_body,
                **geo_info,
            }

            # Buffer in Redis and bump the per-process counter.
            await redis_conn.lpush(self.redis_key, json.dumps(log_data))
            counter = await self.get_and_increment_counter()

            # Flush once the batch threshold is reached (fire-and-forget;
            # flush_to_db serializes concurrent triggers via _flush_lock).
            if counter >= self.batch_size:
                await self.reset_counter()
                asyncio.create_task(self.flush_to_db())

            return response

        except Exception as e:
            logger.error(f"Error in access log middleware: {e}")
            if response is None:
                # The app itself never produced a response — re-raise so
                # the server's normal error handling applies.
                raise
            # Logging failed after a response existed: still serve it.
            return response

    async def flush_to_db(self):
        """Drain buffered logs from Redis and bulk-insert them into the DB.

        Serialized by ``_flush_lock`` so concurrent triggers cannot
        interleave. On database failure, entries that still serialize
        cleanly are pushed back to Redis for a later retry.
        """
        async with self._flush_lock:
            logs_to_flush: List[dict] = []

            # Pop entries one at a time. (A transactional pipeline was used
            # here before, but each iteration executed a single rpop, so it
            # added overhead without batching anything.)
            while len(logs_to_flush) < self.batch_size:
                raw = await redis_conn.rpop(self.redis_key)
                if not raw:
                    break
                try:
                    log_dict = json.loads(raw)

                    if not isinstance(log_dict, dict):
                        logger.warning(f"Skipping invalid log data format: {log_dict}")
                        continue

                    # Drop None values to prevent serialization issues.
                    log_dict = {k: v for k, v in log_dict.items() if v is not None}

                    # Sanitize an embedded profile, if present.
                    if isinstance(log_dict.get("profile"), dict):
                        profile = log_dict["profile"]
                        if "gender" in profile:
                            try:
                                # Keep only the leading integer (strips any
                                # trailing annotation text).
                                profile["gender"] = int(str(profile["gender"]).split()[0])
                            except (ValueError, IndexError):
                                profile["gender"] = 0  # Default value if conversion fails

                    logs_to_flush.append(log_dict)

                except json.JSONDecodeError as e:
                    logger.error(f"Failed to decode JSON log data: {e}")
                    continue
                except Exception as e:
                    logger.error(f"Error processing log data: {e}")
                    continue

            if not logs_to_flush:
                return

            try:
                # Validate each entry before the bulk insert.
                validated_logs = []
                for log_data in logs_to_flush:
                    try:
                        # Must round-trip through JSON for the retry path below.
                        json.dumps(log_data)
                        validated_logs.append(
                            AccessLogModel(**{**log_data, "created_at": datetime.fromisoformat(log_data["created_at"])})
                        )
                    except (TypeError, ValueError, json.JSONDecodeError) as e:
                        logger.error(f"Invalid log data format: {e}")
                        continue

                if validated_logs:
                    await AccessLogModel.bulk_create(validated_logs)
                else:
                    logger.warning("No valid logs to save after validation")

            except Exception as e:
                # If saving fails, re-queue only the entries that still
                # serialize to JSON so they are retried on the next flush.
                logger.error(f"Failed to save logs to database: {e}")
                valid_logs = []
                for log in logs_to_flush:
                    try:
                        valid_logs.append(json.dumps(log))
                    except (TypeError, json.JSONDecodeError):
                        continue
                if valid_logs:
                    await redis_conn.rpush(self.redis_key, *valid_logs)

    async def close(self):
        """Release the GeoIP reader, if one was opened."""
        if self._geo_reader:
            self._geo_reader.close()
            self._geo_reader = None