"""
日志相关的Celery任务
"""

import asyncio
from datetime import datetime
from typing import Dict, Any, Optional

from celery import current_task

from .celery import celery
from database import AsyncSessionLocal
from services.log_service import log_service
from utils.logger import get_logger

logger = get_logger(__name__)


@celery.task
def export_logs(
    format: str = "json",
    execution_id: Optional[int] = None,
    level: Optional[str] = None,
    step: Optional[str] = None,
    repo_id: Optional[int] = None,
    start_time: Optional[str] = None,
    end_time: Optional[str] = None,
    search: Optional[str] = None
) -> Dict[str, Any]:
    """
    Export logs to a file (Celery task).

    Args:
        format: Export format ("json", "csv" or "txt").
        execution_id: Execution record ID to filter by.
        level: Log level to filter by.
        step: Execution step to filter by.
        repo_id: Repository ID to filter by.
        start_time: Range start as an ISO-format string.
        end_time: Range end as an ISO-format string.
        search: Search keyword.

    Returns:
        Export result dict (file path, filename, format, size, log count).
    """
    task_id = current_task.request.id

    logger.info(
        "开始导出日志",
        task_id=task_id,
        format=format,
        execution_id=execution_id
    )

    try:
        # Parse the ISO time strings.  fromisoformat() before Python 3.11
        # rejects a trailing 'Z', so normalize it to an explicit UTC offset.
        # Invalid input is logged and treated as "no bound" instead of
        # failing the whole export.
        parsed_start_time = None
        parsed_end_time = None

        if start_time:
            try:
                parsed_start_time = datetime.fromisoformat(start_time.replace('Z', '+00:00'))
            except ValueError:
                logger.warning("开始时间格式无效", start_time=start_time)

        if end_time:
            try:
                parsed_end_time = datetime.fromisoformat(end_time.replace('Z', '+00:00'))
            except ValueError:
                logger.warning("结束时间格式无效", end_time=end_time)

        # asyncio.run creates a fresh event loop, runs the coroutine to
        # completion and closes/uninstalls the loop afterwards.  The previous
        # new_event_loop()/set_event_loop() pattern left a *closed* loop
        # installed as the thread's current event loop.
        return asyncio.run(
            _export_logs_async(
                format=format,
                execution_id=execution_id,
                level=level,
                step=step,
                repo_id=repo_id,
                start_time=parsed_start_time,
                end_time=parsed_end_time,
                search=search
            )
        )

    except Exception as e:
        logger.error(
            "导出日志失败",
            task_id=task_id,
            format=format,
            error=str(e)
        )
        raise


async def _export_logs_async(
    format: str,
    execution_id: Optional[int] = None,
    level: Optional[str] = None,
    step: Optional[str] = None,
    repo_id: Optional[int] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    search: Optional[str] = None
) -> Dict[str, Any]:
    """
    Export logs to a temp file (async implementation).

    Fetches the matching log entries (capped at 10000 by the page size) to
    report the exported count, asks the log service to render them in the
    requested format, and writes the result into the system temp directory.

    NOTE: the ``datetime`` annotations above require a module-level
    ``from datetime import datetime`` import — annotations are evaluated
    when this function is defined.

    Returns:
        Dict with status, message, file path/name, format, file size,
        log count and export timestamp.
    """
    import os
    import tempfile

    async with AsyncSessionLocal() as db:
        # Fetch matching logs first so the result can report how many
        # entries the export contains.
        logs_data = await log_service.get_logs(
            db=db,
            execution_id=execution_id,
            level=level,
            step=step,
            repo_id=repo_id,
            start_time=start_time,
            end_time=end_time,
            page=1,
            size=10000,
            search=search
        )

        logs = logs_data["items"]
        total_count = len(logs)

        # Render the export via the log service (returns a bytes buffer).
        export_file = await log_service.export_logs(
            db=db,
            format=format,
            execution_id=execution_id,
            level=level,
            step=step,
            repo_id=repo_id,
            start_time=start_time,
            end_time=end_time,
            search=search
        )

        # Write to a timestamped file in the system temp directory.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"logs_export_{timestamp}.{format}"

        temp_dir = tempfile.gettempdir()
        file_path = os.path.join(temp_dir, filename)

        with open(file_path, 'wb') as f:
            f.write(export_file.getvalue())

        return {
            "status": "success",
            "message": "日志导出成功",
            "file_path": file_path,
            "filename": filename,
            "format": format,
            "size": os.path.getsize(file_path),
            "log_count": total_count,
            "export_time": datetime.now().isoformat()
        }


@celery.task
def cleanup_old_logs(retention_days: int = 30) -> Dict[str, Any]:
    """
    Clean up expired logs (Celery task).

    Args:
        retention_days: Number of days of logs to retain.

    Returns:
        Cleanup result from the log service.
    """
    task_id = current_task.request.id

    logger.info(
        "开始清理过期日志",
        task_id=task_id,
        retention_days=retention_days
    )

    try:
        # asyncio.run creates a fresh event loop, runs the coroutine and
        # closes/uninstalls the loop afterwards.  The previous
        # new_event_loop()/set_event_loop() pattern left a *closed* loop
        # installed as the thread's current event loop.
        return asyncio.run(_cleanup_old_logs_async(retention_days))

    except Exception as e:
        logger.error(
            "清理过期日志失败",
            task_id=task_id,
            retention_days=retention_days,
            error=str(e)
        )
        raise


async def _cleanup_old_logs_async(retention_days: int) -> Dict[str, Any]:
    """Delete logs older than the retention window (async implementation)."""
    async with AsyncSessionLocal() as db:
        return await log_service.cleanup_old_logs(db, retention_days)


@celery.task
def generate_log_report(
    execution_id: Optional[int] = None,
    days: int = 7
) -> Dict[str, Any]:
    """
    Generate a log report (Celery task).

    Args:
        execution_id: Execution record ID to filter by.
        days: Number of days to aggregate statistics over.

    Returns:
        Log report with statistics and recent error/warning entries.
    """
    task_id = current_task.request.id

    logger.info(
        "开始生成日志报告",
        task_id=task_id,
        execution_id=execution_id,
        days=days
    )

    try:
        # asyncio.run creates a fresh event loop, runs the coroutine and
        # closes/uninstalls the loop afterwards.  The previous
        # new_event_loop()/set_event_loop() pattern left a *closed* loop
        # installed as the thread's current event loop.
        return asyncio.run(_generate_log_report_async(execution_id, days))

    except Exception as e:
        logger.error(
            "生成日志报告失败",
            task_id=task_id,
            execution_id=execution_id,
            error=str(e)
        )
        raise


async def _generate_log_report_async(
    execution_id: Optional[int] = None,
    days: int = 7
) -> Dict[str, Any]:
    """Build a log report: aggregate statistics plus the most recent
    error and warning entries (async implementation)."""
    async with AsyncSessionLocal() as db:
        # Aggregate statistics for the reporting period.
        stats = await log_service.get_log_statistics(
            db=db,
            execution_id=execution_id,
            days=days
        )

        # Latest 10 entries for each severity of interest, fetched in the
        # same order as before (errors first, then warnings).
        recent = {}
        for severity in ("error", "warning"):
            page = await log_service.get_logs(
                db=db,
                execution_id=execution_id,
                level=severity,
                page=1,
                size=10
            )
            recent[severity] = page["items"]

        return {
            "status": "success",
            "message": "日志报告生成成功",
            "execution_id": execution_id,
            "period": stats["period"],
            "statistics": {
                key: stats[key]
                for key in ("total_logs", "level_stats", "step_stats", "date_stats")
            },
            "recent_errors": recent["error"],
            "recent_warnings": recent["warning"],
            "generated_at": stats["period"]["end_time"]
        }


@celery.task
def archive_logs(
    execution_id: Optional[int] = None,
    older_than_days: int = 90
) -> Dict[str, Any]:
    """
    Archive logs (Celery task).

    Args:
        execution_id: Execution record ID to filter by.
        older_than_days: Archive logs older than this many days.

    Returns:
        Archive result (count, archive file path, cutoff date).
    """
    task_id = current_task.request.id

    logger.info(
        "开始归档日志",
        task_id=task_id,
        execution_id=execution_id,
        older_than_days=older_than_days
    )

    try:
        # asyncio.run creates a fresh event loop, runs the coroutine and
        # closes/uninstalls the loop afterwards.  The previous
        # new_event_loop()/set_event_loop() pattern left a *closed* loop
        # installed as the thread's current event loop.
        return asyncio.run(_archive_logs_async(execution_id, older_than_days))

    except Exception as e:
        logger.error(
            "归档日志失败",
            task_id=task_id,
            execution_id=execution_id,
            error=str(e)
        )
        raise


async def _archive_logs_async(
    execution_id: Optional[int] = None,
    older_than_days: int = 90
) -> Dict[str, Any]:
    """Archive old logs by exporting them to a JSON file in the temp
    directory (async implementation)."""
    import os
    import tempfile
    from datetime import datetime, timedelta

    # Everything logged before this cutoff is eligible for archiving.
    cutoff = datetime.utcnow() - timedelta(days=older_than_days)

    async with AsyncSessionLocal() as db:
        # Count the candidates first (batch capped at 50000 by page size).
        candidates = await log_service.get_logs(
            db=db,
            execution_id=execution_id,
            end_time=cutoff,
            page=1,
            size=50000
        )
        archived_count = len(candidates["items"])

        if not archived_count:
            return {
                "status": "success",
                "message": "没有需要归档的日志",
                "archived_count": 0,
                "archive_date": cutoff.isoformat()
            }

        # Render the archive as JSON via the log service.
        archive_file = await log_service.export_logs(
            db=db,
            format="json",
            execution_id=execution_id,
            end_time=cutoff
        )

        # NOTE(review): filename timestamp uses local time while the cutoff
        # uses UTC — mirrors the original behavior.
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        archive_name = f"logs_archive_{stamp}.json"
        archive_path = os.path.join(tempfile.gettempdir(), archive_name)

        with open(archive_path, 'wb') as out:
            out.write(archive_file.getvalue())

        return {
            "status": "success",
            "message": f"已归档 {archived_count} 条日志",
            "archived_count": archived_count,
            "archive_file": archive_path,
            "filename": archive_name,
            "archive_date": cutoff.isoformat(),
            "file_size": os.path.getsize(archive_path)
        }