"""
日志服务
"""

import csv
import io
import json
from datetime import datetime, timedelta
from typing import Any, BinaryIO, Dict, List, Optional

from fastapi import HTTPException
from sqlalchemy import and_, desc, func, select
from sqlalchemy.ext.asyncio import AsyncSession

from models.log import Log
from models.execution import Execution
from utils.logger import get_logger

logger = get_logger(__name__)


class LogService:
    """Log service: querying, exporting, statistics and retention cleanup.

    All methods expect an externally-managed ``AsyncSession``; only
    :meth:`cleanup_old_logs` commits (and rolls back on failure).
    """

    def __init__(self):
        pass

    @staticmethod
    def _build_conditions(
        execution_id: Optional[int] = None,
        level: Optional[str] = None,
        step: Optional[str] = None,
        repo_id: Optional[int] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        search: Optional[str] = None
    ) -> List[Any]:
        """Build the list of SQLAlchemy filter clauses shared by log queries."""
        conditions = []

        # ``is not None`` so a legitimate id of 0 is not silently ignored.
        if execution_id is not None:
            conditions.append(Log.execution_id == execution_id)

        if level:
            conditions.append(Log.level == level)

        if step:
            conditions.append(Log.step == step)

        if repo_id is not None:
            conditions.append(Log.repo_id == repo_id)

        if start_time is not None:
            conditions.append(Log.created_at >= start_time)

        if end_time is not None:
            conditions.append(Log.created_at <= end_time)

        if search:
            # Case-insensitive substring match against the message text.
            conditions.append(
                func.lower(Log.message).contains(func.lower(search))
            )

        return conditions

    async def get_logs(
        self,
        db: AsyncSession,
        execution_id: Optional[int] = None,
        level: Optional[str] = None,
        step: Optional[str] = None,
        repo_id: Optional[int] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        page: int = 1,
        size: int = 50,
        search: Optional[str] = None
    ) -> Dict[str, Any]:
        """Return a paginated, newest-first page of logs matching the filters.

        Args:
            db: Async database session.
            execution_id / level / step / repo_id: Optional equality filters.
            start_time / end_time: Inclusive ``created_at`` range bounds.
            page: 1-based page number.
            size: Page size.
            search: Case-insensitive substring match against the message.

        Returns:
            Dict with ``items`` (serialized rows), ``total``, ``page``,
            ``size`` and ``pages`` (ceiling of total/size).

        Raises:
            Exception: Re-raised after logging on any query failure.
        """
        try:
            conditions = self._build_conditions(
                execution_id=execution_id,
                level=level,
                step=step,
                repo_id=repo_id,
                start_time=start_time,
                end_time=end_time,
                search=search
            )

            base_query = select(Log)
            count_query = select(func.count(Log.id))
            # ``and_()`` with zero clauses is deprecated (an error in
            # SQLAlchemy 2.x), so only apply WHERE when filters exist.
            if conditions:
                base_query = base_query.where(and_(*conditions))
                count_query = count_query.where(and_(*conditions))

            count_result = await db.execute(count_query)
            # ``scalar()`` can be None; normalize so arithmetic below is safe.
            total_count = count_result.scalar() or 0

            query = (
                base_query
                .order_by(desc(Log.created_at))
                .offset((page - 1) * size)
                .limit(size)
            )
            result = await db.execute(query)
            logs = result.scalars().all()

            log_items = [
                {
                    "id": log.id,
                    "execution_id": log.execution_id,
                    "level": log.level,
                    "message": log.message,
                    "step": log.step,
                    "repo_id": log.repo_id,
                    "details": log.details,
                    "created_at": log.created_at.isoformat() if log.created_at else None
                }
                for log in logs
            ]

            return {
                "items": log_items,
                "total": total_count,
                "page": page,
                "size": size,
                # Ceiling division; guard against a zero page size.
                "pages": (total_count + size - 1) // size if size > 0 else 0
            }

        except Exception as e:
            logger.error("获取日志列表失败", error=str(e))
            raise

    async def get_log_by_id(
        self,
        db: AsyncSession,
        log_id: int
    ) -> Optional[Log]:
        """Fetch a single log row by primary key, or None if absent."""
        try:
            log = await db.get(Log, log_id)
            return log
        except Exception as e:
            logger.error("获取日志详情失败", log_id=log_id, error=str(e))
            raise

    async def get_execution_logs(
        self,
        db: AsyncSession,
        execution_id: int,
        level: Optional[str] = None,
        step: Optional[str] = None,
        page: int = 1,
        size: int = 50
    ) -> Dict[str, Any]:
        """Return the paginated logs of one execution.

        Raises:
            HTTPException: 404 if the execution does not exist.
        """
        try:
            # Validate the parent execution first so callers get a clean 404.
            execution = await db.get(Execution, execution_id)
            if not execution:
                raise HTTPException(status_code=404, detail="执行记录不存在")

            return await self.get_logs(
                db=db,
                execution_id=execution_id,
                level=level,
                step=step,
                page=page,
                size=size
            )

        except HTTPException:
            raise
        except Exception as e:
            logger.error("获取执行日志失败", execution_id=execution_id, error=str(e))
            raise

    async def export_logs(
        self,
        db: AsyncSession,
        format: str = "json",
        execution_id: Optional[int] = None,
        level: Optional[str] = None,
        step: Optional[str] = None,
        repo_id: Optional[int] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        search: Optional[str] = None
    ) -> BinaryIO:
        """Export matching logs as a UTF-8 byte stream in the given format.

        Args:
            format: One of ``json``, ``csv`` or ``txt``.

        Raises:
            HTTPException: 400 for an unsupported format.
        """
        try:
            # Fetch everything in one oversized page (no pagination for export).
            # NOTE(review): exports are silently capped at 10000 rows.
            logs_data = await self.get_logs(
                db=db,
                execution_id=execution_id,
                level=level,
                step=step,
                repo_id=repo_id,
                start_time=start_time,
                end_time=end_time,
                page=1,
                size=10000,
                search=search
            )

            logs = logs_data["items"]

            if format == "json":
                return await self._export_json(logs)
            elif format == "csv":
                return await self._export_csv(logs)
            elif format == "txt":
                return await self._export_txt(logs)
            else:
                raise HTTPException(status_code=400, detail=f"不支持的导出格式: {format}")

        except HTTPException:
            raise
        except Exception as e:
            logger.error("导出日志失败", format=format, error=str(e))
            raise

    async def _export_json(self, logs: List[Dict[str, Any]]) -> BinaryIO:
        """Serialize logs to a pretty-printed, UTF-8 encoded JSON byte stream."""
        payload = json.dumps(logs, ensure_ascii=False, indent=2)
        return io.BytesIO(payload.encode('utf-8'))

    async def _export_csv(self, logs: List[Dict[str, Any]]) -> BinaryIO:
        """Serialize logs to a CSV byte stream (empty stream for no rows)."""
        output = io.StringIO()

        if not logs:
            return io.BytesIO(output.getvalue().encode('utf-8'))

        fieldnames = ['id', 'execution_id', 'level', 'message', 'step', 'repo_id', 'details', 'created_at']
        writer = csv.DictWriter(output, fieldnames=fieldnames)

        writer.writeheader()
        for log in logs:
            # Copy so the caller's dict is not mutated; flatten ``details``
            # (a nested structure) into a JSON string for the CSV cell.
            log_row = log.copy()
            if log_row.get('details'):
                log_row['details'] = json.dumps(log_row['details'], ensure_ascii=False)
            writer.writerow(log_row)

        return io.BytesIO(output.getvalue().encode('utf-8'))

    async def _export_txt(self, logs: List[Dict[str, Any]]) -> BinaryIO:
        """Serialize logs to a human-readable plain-text byte stream."""
        output = io.StringIO()

        for log in logs:
            created_at = log.get('created_at', 'N/A')
            level = log.get('level', 'N/A')
            step = log.get('step', 'N/A')
            message = log.get('message', '')
            repo_id = log.get('repo_id')

            line = f"[{created_at}] [{level}] [{step}]"
            if repo_id:
                line += f" [Repo:{repo_id}]"
            line += f" {message}\n"

            output.write(line)

        return io.BytesIO(output.getvalue().encode('utf-8'))

    async def get_log_statistics(
        self,
        db: AsyncSession,
        execution_id: Optional[int] = None,
        days: int = 7
    ) -> Dict[str, Any]:
        """Aggregate log counts by level, step and date over a trailing window.

        Args:
            execution_id: Optional filter to a single execution.
            days: Size of the trailing window ending now.

        Returns:
            Dict with ``period``, ``level_stats``, ``step_stats``,
            ``date_stats`` and ``total_logs``.
        """
        try:
            # NOTE(review): naive UTC timestamps — assumes Log.created_at is
            # also stored naive-UTC; confirm before switching to aware datetimes.
            end_time = datetime.utcnow()
            start_time = end_time - timedelta(days=days)

            conditions = [
                Log.created_at >= start_time,
                Log.created_at <= end_time
            ]

            if execution_id is not None:
                conditions.append(Log.execution_id == execution_id)

            # Per-level counts (one query per known level).
            level_stats = {}
            for level in ['info', 'warning', 'error', 'debug']:
                count_query = select(func.count(Log.id)).where(
                    and_(*conditions, Log.level == level)
                )
                result = await db.execute(count_query)
                level_stats[level] = result.scalar() or 0

            # Per-step counts, most frequent first.
            step_stats_query = select(
                Log.step,
                func.count(Log.id).label('count')
            ).where(
                and_(*conditions)
            ).group_by(Log.step).order_by(desc('count'))

            step_result = await db.execute(step_stats_query)
            step_stats = {row.step: row.count for row in step_result}

            # Per-day counts in chronological order.
            date_stats_query = select(
                func.date(Log.created_at).label('date'),
                func.count(Log.id).label('count')
            ).where(
                and_(*conditions)
            ).group_by(func.date(Log.created_at)).order_by('date')

            date_result = await db.execute(date_stats_query)
            date_stats = {str(row.date): row.count for row in date_result}

            return {
                "period": {
                    "start_time": start_time.isoformat(),
                    "end_time": end_time.isoformat(),
                    "days": days
                },
                "level_stats": level_stats,
                "step_stats": step_stats,
                "date_stats": date_stats,
                "total_logs": sum(level_stats.values())
            }

        except Exception as e:
            logger.error("获取日志统计失败", error=str(e))
            raise

    async def cleanup_old_logs(
        self,
        db: AsyncSession,
        retention_days: int = 30
    ) -> Dict[str, Any]:
        """Delete logs older than the retention window and commit.

        Args:
            retention_days: Rows with ``created_at`` older than this many
                days are deleted.

        Returns:
            Dict describing the outcome, including ``deleted_count``.

        Raises:
            Exception: Re-raised after rollback and logging on failure.
        """
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=retention_days)

            # Count first so the response can report how many rows were removed.
            count_query = select(func.count(Log.id)).where(Log.created_at < cutoff_date)
            count_result = await db.execute(count_query)
            expired_count = count_result.scalar() or 0

            if expired_count == 0:
                return {
                    "status": "success",
                    "message": "没有过期日志需要清理",
                    "deleted_count": 0,
                    "retention_days": retention_days
                }

            from sqlalchemy import delete
            delete_query = delete(Log).where(Log.created_at < cutoff_date)
            await db.execute(delete_query)
            await db.commit()

            logger.info(
                "清理过期日志完成",
                deleted_count=expired_count,
                retention_days=retention_days,
                cutoff_date=cutoff_date.isoformat()
            )

            return {
                "status": "success",
                "message": f"已清理 {expired_count} 条过期日志",
                "deleted_count": expired_count,
                "retention_days": retention_days,
                "cutoff_date": cutoff_date.isoformat()
            }

        except Exception as e:
            await db.rollback()
            logger.error("清理过期日志失败", error=str(e))
            raise

    async def search_logs(
        self,
        db: AsyncSession,
        query: str,
        execution_id: Optional[int] = None,
        page: int = 1,
        size: int = 50
    ) -> Dict[str, Any]:
        """Full-text-style search over log messages (thin wrapper on get_logs)."""
        try:
            return await self.get_logs(
                db=db,
                execution_id=execution_id,
                search=query,
                page=page,
                size=size
            )
        except Exception as e:
            logger.error("搜索日志失败", query=query, error=str(e))
            raise

    async def get_log_levels(self) -> List[str]:
        """Return the fixed set of supported log levels."""
        return ['info', 'warning', 'error', 'debug']

    async def get_log_steps(self, db: AsyncSession) -> List[str]:
        """Return the distinct non-empty step names present in the log table.

        Best-effort: returns an empty list on query failure instead of raising.
        """
        try:
            query = select(Log.step).distinct()
            result = await db.execute(query)
            steps = [row.step for row in result if row.step]
            return steps
        except Exception as e:
            logger.error("获取日志步骤失败", error=str(e))
            return []


# Global log-service singleton shared by the application
log_service = LogService()