# -*- coding: utf-8 -*-
"""
日志管理服务

按照模块文档2.4.3节要求实现日志管理功能：
- 日志查询和筛选
- 日志分析和统计
- 日志导出功能
"""

import os
import json
import gzip
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime, timedelta
from sqlalchemy.orm import Session
from sqlalchemy import and_, or_, desc, func, text
from pathlib import Path
import logging

from ..core.database import get_db
from ..core.exceptions import BusinessError, ValidationError
from ..models.rbac import PermissionLog, User
from ..models.system_config import SystemConfig

logger = logging.getLogger(__name__)


class LogService:
    """Log management service (module doc section 2.4.3): log querying,
    analysis/statistics, and export built on top of PermissionLog records."""

    def __init__(self, db: Session):
        # Injected SQLAlchemy session; owned by the caller (not closed here).
        self.db = db

    async def query_logs(
        self,
        log_type: Optional[str] = None,
        level: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        user_id: Optional[int] = None,
        keyword: Optional[str] = None,
        ip_address: Optional[str] = None,
        page: int = 1,
        size: int = 50
    ) -> Dict[str, Any]:
        """
        Query log records with optional filters and pagination.

        Args:
            log_type: Log category (system/operation/error/security/performance/access)
            level: Log level (DEBUG/INFO/WARN/ERROR). NOTE(review): currently not
                applied as a filter -- PermissionLog has no level column; kept
                for API compatibility
            start_time: Range start (inclusive)
            end_time: Range end (inclusive)
            user_id: Filter by operator user id
            keyword: Substring match on operation description/type
            ip_address: Exact client IP match
            page: 1-based page number (values < 1 are clamped to 1)
            size: Page size (values < 1 are clamped to 1)

        Returns:
            Dict with "items", "total", "page", "size" and "pages".

        Raises:
            BusinessError: If the query fails.
        """
        try:
            # Guard against invalid pagination: page < 1 produced a negative
            # OFFSET and size <= 0 a ZeroDivisionError in the "pages" math.
            page = max(page, 1)
            size = max(size, 1)

            query = self.db.query(PermissionLog)

            # Time range filter
            if start_time:
                query = query.filter(PermissionLog.operated_at >= start_time)
            if end_time:
                query = query.filter(PermissionLog.operated_at <= end_time)

            # User filter
            if user_id:
                query = query.filter(PermissionLog.operator_id == user_id)

            # IP address filter
            if ip_address:
                query = query.filter(PermissionLog.client_ip == ip_address)

            # Log category filter, approximated via operation_type LIKE patterns
            if log_type:
                type_mapping = {
                    "system": ["SYSTEM_%"],
                    "operation": ["CREATE", "UPDATE", "DELETE", "LOGIN", "LOGOUT"],
                    "error": ["%ERROR%", "%FAIL%"],
                    "security": ["LOGIN", "LOGOUT", "AUTH_%", "PERMISSION_%"],
                    "performance": ["PERFORMANCE_%"],
                    "access": ["ACCESS_%", "LOGIN", "LOGOUT"]
                }

                if log_type in type_mapping:
                    patterns = type_mapping[log_type]
                    conditions = [PermissionLog.operation_type.like(pattern) for pattern in patterns]
                    query = query.filter(or_(*conditions))

            # Keyword search over description and operation type
            if keyword:
                query = query.filter(
                    or_(
                        PermissionLog.operation_desc.like(f"%{keyword}%"),
                        PermissionLog.operation_type.like(f"%{keyword}%")
                    )
                )

            # Total count before pagination
            total = query.count()

            # Newest first, then paginate
            logs = query.order_by(desc(PermissionLog.operated_at)).offset(
                (page - 1) * size
            ).limit(size).all()

            # Resolve all usernames with a single IN query instead of one
            # query per returned row (N+1 fix).
            operator_ids = {log.operator_id for log in logs if log.operator_id is not None}
            username_map = {}
            if operator_ids:
                for user in self.db.query(User).filter(User.id.in_(operator_ids)).all():
                    username_map[user.id] = user.username

            log_items = []
            for log in logs:
                log_items.append({
                    "id": log.id,
                    "timestamp": log.operated_at.isoformat(),
                    "type": self._classify_log_type(log.operation_type),
                    "level": self._get_log_level(log.operation_type),
                    "operation": log.operation_type,
                    "description": log.operation_desc,
                    "user_id": log.operator_id,
                    "username": username_map.get(log.operator_id, "unknown"),
                    "target_type": log.target_type,
                    "target_id": log.target_id,
                    "client_ip": log.client_ip,
                    # Older rows/models may lack the user_agent attribute.
                    "user_agent": getattr(log, 'user_agent', None)
                })

            return {
                "items": log_items,
                "total": total,
                "page": page,
                "size": size,
                "pages": (total + size - 1) // size
            }

        except Exception as e:
            logger.error(f"查询日志失败: {e}", exc_info=True)
            raise BusinessError(f"查询日志失败: {str(e)}")

    async def analyze_logs(
        self,
        analysis_type: str,
        time_range: int = 24,
        group_by: str = "hour"
    ) -> Dict[str, Any]:
        """
        Run a log analysis over the last ``time_range`` hours.

        Args:
            analysis_type: One of activity/error/security/performance
            time_range: Window size in hours, ending now
            group_by: Grouping dimension (hour/day/user/ip/operation);
                values a helper does not support yield empty data

        Returns:
            Analysis result produced by the matching helper.

        Raises:
            ValidationError: If analysis_type is unsupported.
            BusinessError: If the analysis fails.
        """
        try:
            end_time = datetime.now()
            start_time = end_time - timedelta(hours=time_range)

            # Dispatch to the dedicated analyzer for the requested type.
            if analysis_type == "activity":
                return await self._analyze_activity_logs(start_time, end_time, group_by)
            elif analysis_type == "error":
                return await self._analyze_error_logs(start_time, end_time, group_by)
            elif analysis_type == "security":
                return await self._analyze_security_logs(start_time, end_time, group_by)
            elif analysis_type == "performance":
                return await self._analyze_performance_logs(start_time, end_time, group_by)
            else:
                raise ValidationError(f"不支持的分析类型: {analysis_type}")

        except ValidationError:
            # Bug fix: the broad handler below used to swallow this and
            # re-raise it as BusinessError, masking the caller error.
            raise
        except Exception as e:
            logger.error(f"日志分析失败: {e}", exc_info=True)
            raise BusinessError(f"日志分析失败: {str(e)}")

    async def export_logs(
        self,
        export_format: str = "json",
        log_type: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        max_records: int = 10000
    ) -> Dict[str, Any]:
        """
        Export log data to a timestamped file under the export directory.

        Args:
            export_format: Output format ("json" or "csv")
            log_type: Optional log category filter
            start_time: Range start (inclusive)
            end_time: Range end (inclusive)
            max_records: Upper bound on exported records

        Returns:
            Metadata about the written file (name, path, size, record count,
            format, export timestamp).

        Raises:
            ValidationError: If export_format is unsupported.
            BusinessError: If the export fails.
        """
        try:
            # Validate the format up front: previously a bad format was only
            # rejected after running the (potentially large) query, and the
            # error was then masked as BusinessError by the broad handler.
            if export_format not in ("json", "csv"):
                raise ValidationError(f"不支持的导出格式: {export_format}")

            # Reuse the query path so filters behave identically everywhere.
            query_result = await self.query_logs(
                log_type=log_type,
                start_time=start_time,
                end_time=end_time,
                page=1,
                size=max_records
            )

            log_data = query_result["items"]

            # Timestamped filename, e.g. logs_export_20240101_120000.json
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"logs_export_{timestamp}.{export_format}"

            export_path = self._get_export_path()
            file_path = export_path / filename

            if export_format == "json":
                export_data = {
                    "export_info": {
                        "exported_at": datetime.now().isoformat(),
                        "total_records": len(log_data),
                        "time_range": {
                            "start": start_time.isoformat() if start_time else None,
                            "end": end_time.isoformat() if end_time else None
                        },
                        "filters": {
                            "log_type": log_type
                        }
                    },
                    "logs": log_data
                }

                with open(file_path, 'w', encoding='utf-8') as f:
                    json.dump(export_data, f, ensure_ascii=False, indent=2)

            else:  # csv (format already validated above)
                import csv

                with open(file_path, 'w', newline='', encoding='utf-8') as f:
                    if log_data:
                        writer = csv.DictWriter(f, fieldnames=log_data[0].keys())
                        writer.writeheader()
                        writer.writerows(log_data)

            file_size = file_path.stat().st_size

            return {
                "filename": filename,
                "file_path": str(file_path),
                "file_size": file_size,
                "record_count": len(log_data),
                "format": export_format,
                "exported_at": datetime.now().isoformat()
            }

        except ValidationError:
            # Caller error (bad format) -- do not mask it as BusinessError.
            raise
        except Exception as e:
            logger.error(f"导出日志失败: {e}", exc_info=True)
            raise BusinessError(f"导出日志失败: {str(e)}")

    async def get_log_statistics(
        self,
        time_range: int = 24
    ) -> Dict[str, Any]:
        """
        Aggregate log statistics over the last ``time_range`` hours.

        Returns totals, per-type counts, the top-10 most active users, and
        an hourly activity distribution for the window.

        Raises:
            BusinessError: If any query fails.
        """
        try:
            end_time = datetime.now()
            start_time = end_time - timedelta(hours=time_range)

            # Overall volume in the window
            total_logs = self.db.query(PermissionLog).filter(
                PermissionLog.operated_at.between(start_time, end_time)
            ).count()

            # Per-category counts (best effort; see _count_logs_by_type)
            type_stats = {}
            for log_type in ["system", "operation", "error", "security", "access"]:
                type_stats[log_type] = await self._count_logs_by_type(
                    log_type, start_time, end_time
                )

            # Top 10 most active users in the window
            user_stats = self.db.query(
                PermissionLog.operator_id,
                func.count(PermissionLog.id).label('count')
            ).filter(
                PermissionLog.operated_at.between(start_time, end_time)
            ).group_by(PermissionLog.operator_id).order_by(
                desc('count')
            ).limit(10).all()

            # Resolve usernames with one IN query instead of one per row
            # (N+1 fix).
            top_ids = [uid for uid, _ in user_stats if uid is not None]
            username_map = {}
            if top_ids:
                for user in self.db.query(User).filter(User.id.in_(top_ids)).all():
                    username_map[user.id] = user.username

            user_activity = [
                {
                    "user_id": user_id,
                    "username": username_map.get(user_id, f"用户{user_id}"),
                    "activity_count": count
                }
                for user_id, count in user_stats
            ]

            # Hourly distribution. Bug fix: this loop was capped at 24
            # buckets, so time_range > 24 only reported the OLDEST 24 hours;
            # now it covers the whole window.
            hourly_stats = []
            for i in range(time_range):
                hour_start = start_time + timedelta(hours=i)
                hour_end = hour_start + timedelta(hours=1)

                # Skip the trailing partial hour, as before.
                if hour_end > end_time:
                    break

                count = self.db.query(PermissionLog).filter(
                    PermissionLog.operated_at.between(hour_start, hour_end)
                ).count()

                hourly_stats.append({
                    "hour": hour_start.hour,
                    "timestamp": hour_start.isoformat(),
                    "count": count
                })

            return {
                "time_range_hours": time_range,
                "total_logs": total_logs,
                "type_distribution": type_stats,
                "top_active_users": user_activity,
                "hourly_distribution": hourly_stats,
                "statistics_generated_at": datetime.now().isoformat()
            }

        except Exception as e:
            logger.error(f"获取日志统计失败: {e}", exc_info=True)
            raise BusinessError(f"获取日志统计失败: {str(e)}")

    async def _analyze_activity_logs(
        self,
        start_time: datetime,
        end_time: datetime,
        group_by: str
    ) -> Dict[str, Any]:
        """Analyze activity volume, grouped by hour or by user.

        Best-effort helper: failures are logged and reported via an
        "error" key in the result instead of being raised.
        """
        try:
            if group_by == "hour":
                # One COUNT per hourly bucket, aligned to the hour.
                results = []
                current = start_time.replace(minute=0, second=0, microsecond=0)

                while current < end_time:
                    next_hour = current + timedelta(hours=1)
                    count = self.db.query(PermissionLog).filter(
                        PermissionLog.operated_at.between(current, next_hour)
                    ).count()

                    results.append({
                        "timestamp": current.isoformat(),
                        "hour": current.hour,
                        "activity_count": count
                    })
                    current = next_hour

                return {
                    "analysis_type": "activity",
                    "group_by": group_by,
                    "data": results
                }
            elif group_by == "user":
                # Top-20 users by activity volume
                user_stats = self.db.query(
                    PermissionLog.operator_id,
                    func.count(PermissionLog.id).label('count')
                ).filter(
                    PermissionLog.operated_at.between(start_time, end_time)
                ).group_by(PermissionLog.operator_id).order_by(
                    desc('count')
                ).limit(20).all()

                # Resolve usernames with one IN query instead of one per
                # row (N+1 fix).
                ids = [uid for uid, _ in user_stats if uid is not None]
                username_map = {}
                if ids:
                    for user in self.db.query(User).filter(User.id.in_(ids)).all():
                        username_map[user.id] = user.username

                results = [
                    {
                        "user_id": user_id,
                        "username": username_map.get(user_id, f"用户{user_id}"),
                        "activity_count": count
                    }
                    for user_id, count in user_stats
                ]

                return {
                    "analysis_type": "activity",
                    "group_by": group_by,
                    "data": results
                }
            else:
                # Unsupported group_by values yield an empty result set.
                return {"data": []}

        except Exception as e:
            logger.error(f"分析活动日志失败: {e}", exc_info=True)
            return {"data": [], "error": str(e)}

    async def _analyze_error_logs(
        self,
        start_time: datetime,
        end_time: datetime,
        group_by: str
    ) -> Dict[str, Any]:
        """Analyze error logs (operation types containing ERROR/FAIL).

        Supports ``group_by == "operation"``; any other grouping returns
        totals only. Failures are logged and reported via an "error" key.
        """
        try:
            # Errors are identified by ERROR/FAIL markers in operation_type.
            error_filter = and_(
                PermissionLog.operated_at.between(start_time, end_time),
                or_(
                    PermissionLog.operation_type.like('%ERROR%'),
                    PermissionLog.operation_type.like('%FAIL%')
                )
            )
            error_logs = self.db.query(PermissionLog).filter(error_filter).all()

            if group_by == "operation":
                # Tally errors per operation type, most frequent first.
                tally = {}
                for entry in error_logs:
                    tally[entry.operation_type] = tally.get(entry.operation_type, 0) + 1

                ranked = sorted(tally.items(), key=lambda item: item[1], reverse=True)

                return {
                    "analysis_type": "error",
                    "group_by": group_by,
                    "total_errors": len(error_logs),
                    "data": [
                        {"operation_type": op, "error_count": n}
                        for op, n in ranked
                    ]
                }

            return {
                "analysis_type": "error",
                "total_errors": len(error_logs),
                "data": []
            }

        except Exception as e:
            logger.error(f"分析错误日志失败: {e}")
            return {"data": [], "error": str(e)}

    async def _analyze_security_logs(
        self,
        start_time: datetime,
        end_time: datetime,
        group_by: str
    ) -> Dict[str, Any]:
        """Analyze security-related logs (LOGIN/AUTH/PERMISSION operations).

        Supports ``group_by == "ip"``; any other grouping returns totals
        only. Failures are logged and reported via an "error" key.
        """
        try:
            # Security events are identified by LOGIN/AUTH/PERMISSION markers.
            security_filter = and_(
                PermissionLog.operated_at.between(start_time, end_time),
                or_(
                    PermissionLog.operation_type.like('%LOGIN%'),
                    PermissionLog.operation_type.like('%AUTH%'),
                    PermissionLog.operation_type.like('%PERMISSION%')
                )
            )
            security_logs = self.db.query(PermissionLog).filter(security_filter).all()

            if group_by != "ip":
                return {
                    "analysis_type": "security",
                    "total_events": len(security_logs),
                    "data": []
                }

            # Aggregate per source IP, tracking login attempts separately.
            per_ip = {}
            for entry in security_logs:
                source = entry.client_ip or "unknown"
                bucket = per_ip.setdefault(source, {"total": 0, "login_attempts": 0})
                bucket["total"] += 1
                if "LOGIN" in entry.operation_type:
                    bucket["login_attempts"] += 1

            # Busiest IPs first
            ranked = sorted(per_ip.items(), key=lambda item: item[1]["total"], reverse=True)

            return {
                "analysis_type": "security",
                "group_by": group_by,
                "total_events": len(security_logs),
                "data": [
                    {
                        "ip_address": source,
                        "total_activities": stats["total"],
                        "login_attempts": stats["login_attempts"]
                    }
                    for source, stats in ranked
                ]
            }

        except Exception as e:
            logger.error(f"分析安全日志失败: {e}")
            return {"data": [], "error": str(e)}

    async def _analyze_performance_logs(
        self,
        start_time: datetime,
        end_time: datetime,
        group_by: str
    ) -> Dict[str, Any]:
        """Analyze performance logs.

        Simplified implementation that returns static placeholder metrics;
        the time window is accepted for interface symmetry with the other
        analyzers but is not yet used.
        """
        try:
            placeholder_metrics = [
                ("avg_response_time", 150, "ms"),
                ("max_response_time", 500, "ms"),
                ("slow_queries", 5, "count"),
            ]
            return {
                "analysis_type": "performance",
                "group_by": group_by,
                "data": [
                    {"metric": name, "value": value, "unit": unit}
                    for name, value, unit in placeholder_metrics
                ]
            }
        except Exception as e:
            logger.error(f"分析性能日志失败: {e}")
            return {"data": [], "error": str(e)}

    async def _count_logs_by_type(
        self,
        log_type: str,
        start_time: datetime,
        end_time: datetime
    ) -> int:
        """Count logs of a given category within [start_time, end_time].

        The category is approximated by LIKE patterns on operation_type.
        Unknown categories count as 0. Best-effort: query errors are logged
        and reported as 0 instead of propagating to the caller.
        """
        try:
            # NOTE(review): keep in sync with the mapping in query_logs
            # (this one deliberately omits LOGIN/LOGOUT from "operation").
            type_mapping = {
                "system": ["SYSTEM_%"],
                "operation": ["CREATE", "UPDATE", "DELETE"],
                "error": ["%ERROR%", "%FAIL%"],
                "security": ["LOGIN", "LOGOUT", "AUTH_%", "PERMISSION_%"],
                "access": ["ACCESS_%", "LOGIN", "LOGOUT"]
            }

            if log_type not in type_mapping:
                return 0

            patterns = type_mapping[log_type]
            conditions = [PermissionLog.operation_type.like(pattern) for pattern in patterns]

            return self.db.query(PermissionLog).filter(
                and_(
                    PermissionLog.operated_at.between(start_time, end_time),
                    or_(*conditions)
                )
            ).count()
        except Exception as e:
            # Bug fix: was a silent `except Exception: return 0`; keep the
            # best-effort contract but make failures visible in the log.
            logger.warning(f"按类型统计日志数量失败({log_type}): {e}")
            return 0

    def _classify_log_type(self, operation_type: str) -> str:
        """Map an operation type string to a log category.

        Rules are checked in priority order: error markers win over
        security markers, which win over CRUD operations, then system;
        anything else falls through to "access".
        """
        rules = (
            (("ERROR", "FAIL"), "error"),
            (("LOGIN", "LOGOUT", "AUTH"), "security"),
            (("CREATE", "UPDATE", "DELETE"), "operation"),
            (("SYSTEM",), "system"),
        )
        for markers, category in rules:
            if any(marker in operation_type for marker in markers):
                return category
        return "access"

    def _get_log_level(self, operation_type: str) -> str:
        """Infer a log level from the operation type string.

        ERROR/FAIL markers map to ERROR, WARN/WARNING to WARN, DEBUG to
        DEBUG; everything else defaults to INFO.
        """
        if "ERROR" in operation_type or "FAIL" in operation_type:
            return "ERROR"
        if "WARN" in operation_type or "WARNING" in operation_type:
            return "WARN"
        if "DEBUG" in operation_type:
            return "DEBUG"
        return "INFO"

    def _get_export_path(self) -> Path:
        """Return the export directory, creating it (and parents) on demand."""
        target = Path("data") / "exports"
        target.mkdir(parents=True, exist_ok=True)
        return target