#!/usr/bin/env python3

import logging
import time
import uuid
from datetime import datetime
from typing import Any, Dict, List, Optional

from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import JSONResponse

from .schemas import (ErrorResponse, HealthResponse, ProcessingResponse,
                      TextInput)
from .services import TextProcessingService

# Configure the module-level logger
logger = logging.getLogger(__name__)

# Create the API router for the text-processing endpoints
router = APIRouter(prefix="/api/v1/text", tags=["文本处理"])

# Dependency injection: provide a TextProcessingService instance
def get_text_service() -> TextProcessingService:
    """Dependency provider that supplies a text-processing service.

    NOTE(review): a fresh TextProcessingService is built on every call —
    confirm construction is cheap (or internally cached) before relying on
    per-request instantiation.
    """
    service = TextProcessingService()
    return service

@router.post(
    "/process",
    response_model=ProcessingResponse,
    summary="处理文本内容",
    description="接收文本内容，使用StructBERT进行分类，使用PALM2.0生成摘要"
)
async def process_text(
    text_input: TextInput,
    service: TextProcessingService = Depends(get_text_service)
) -> ProcessingResponse:
    """
    Main endpoint for processing a single text.

    Args:
        text_input: Incoming payload (text plus processing flags and ids).
        service: Injected text-processing service.

    Returns:
        ProcessingResponse: Wrapped processing result tagged with a request id.

    Raises:
        HTTPException: 500 with a structured detail payload on any failure.
    """
    request_id = str(uuid.uuid4())
    # perf_counter is monotonic, so the measured duration cannot go negative
    # or jump if the wall clock is adjusted mid-request.
    start_time = time.perf_counter()

    try:
        logger.info(f"开始处理文本请求 {request_id}, 用户: {text_input.user_id}")

        # Delegate the actual classification / summarization work.
        result = await service.process_text(
            text=text_input.text,
            enable_classification=text_input.enable_classification,
            enable_summary=text_input.enable_summary,
            user_id=text_input.user_id,
            session_id=text_input.session_id
        )

        processing_time = time.perf_counter() - start_time
        result.processing_time = processing_time

        logger.info(f"文本处理完成 {request_id}, 耗时: {processing_time:.2f}秒")

        return ProcessingResponse(
            success=True,
            message="文本处理成功",
            data=result,
            request_id=request_id
        )

    except Exception as e:
        processing_time = time.perf_counter() - start_time
        error_msg = f"文本处理失败: {str(e)}"
        # logger.exception preserves the traceback, which logger.error dropped.
        logger.exception(f"请求 {request_id} 处理失败: {error_msg}, 耗时: {processing_time:.2f}秒")

        # Chain the original exception so the root cause stays attached.
        raise HTTPException(
            status_code=500,
            detail={
                "success": False,
                "message": error_msg,
                "error_code": "PROCESSING_ERROR",
                "request_id": request_id
            }
        ) from e

@router.get(
    "/health",
    response_model=HealthResponse,
    summary="健康检查",
    description="检查文本处理服务的运行状态和模型加载情况"
)
async def health_check(
    service: TextProcessingService = Depends(get_text_service)
) -> HealthResponse:
    """
    Health-check endpoint.

    Args:
        service: Injected text-processing service.

    Returns:
        HealthResponse: Service and model health information.

    Raises:
        HTTPException: 500 with a structured detail payload on failure.
    """
    try:
        health_info = await service.get_health_status()
        return HealthResponse(**health_info)

    except Exception as e:
        # Keep the traceback in the log for post-mortem debugging.
        logger.exception(f"健康检查失败: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail={
                "success": False,
                "message": f"健康检查失败: {str(e)}",
                "error_code": "HEALTH_CHECK_ERROR"
            }
        ) from e

@router.post(
    "/batch",
    response_model=Dict[str, Any],
    summary="批量处理文本",
    description="批量处理多个文本内容"
)
async def batch_process_text(
    texts: Dict[str, TextInput],
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Batch text-processing endpoint.

    Args:
        texts: Mapping of caller-chosen ids to text payloads.
        service: Injected text-processing service.

    Returns:
        Dict with per-id results, per-id errors, the request id and timing.

    Raises:
        HTTPException: 500 with a structured detail payload if the batch
            itself fails (individual item failures go into "errors" instead).
    """
    request_id = str(uuid.uuid4())
    # Monotonic clock: the measured duration is immune to wall-clock changes.
    start_time = time.perf_counter()
    results = {}
    errors = {}

    try:
        logger.info(f"开始批量处理文本请求 {request_id}, 文本数量: {len(texts)}")

        for text_id, text_input in texts.items():
            try:
                result = await service.process_text(
                    text=text_input.text,
                    enable_classification=text_input.enable_classification,
                    enable_summary=text_input.enable_summary,
                    user_id=text_input.user_id,
                    session_id=text_input.session_id
                )
                results[text_id] = result

            except Exception as e:
                # A single failed item must not abort the whole batch;
                # record it and keep the traceback in the log.
                errors[text_id] = str(e)
                logger.exception(f"批量处理中文本 {text_id} 失败: {str(e)}")

        processing_time = time.perf_counter() - start_time
        logger.info(f"批量处理完成 {request_id}, 成功: {len(results)}, 失败: {len(errors)}, 耗时: {processing_time:.2f}秒")

        return {
            "success": True,
            "message": f"批量处理完成，成功: {len(results)}, 失败: {len(errors)}",
            "results": results,
            "errors": errors,
            "request_id": request_id,
            "processing_time": processing_time
        }

    except Exception as e:
        processing_time = time.perf_counter() - start_time
        error_msg = f"批量处理失败: {str(e)}"
        # logger.exception preserves the traceback, which logger.error dropped.
        logger.exception(f"批量请求 {request_id} 失败: {error_msg}, 耗时: {processing_time:.2f}秒")

        raise HTTPException(
            status_code=500,
            detail={
                "success": False,
                "message": error_msg,
                "error_code": "BATCH_PROCESSING_ERROR",
                "request_id": request_id
            }
        ) from e

# Performance-monitoring and management endpoints

@router.get("/metrics", summary="获取性能指标")
async def get_performance_metrics(
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Return detailed performance metrics from the service.

    Returns:
        Dict with status, the metrics payload, and an ISO-format timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        metrics = service.get_performance_metrics()
        return {
            "status": "success",
            "data": metrics,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # logger.exception keeps the traceback that logger.error dropped.
        logger.exception(f"获取性能指标失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"获取性能指标失败: {str(e)}"
        ) from e

@router.post("/cache/clear", summary="清理缓存")
async def clear_cache(
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Clear the service's result cache.

    Returns:
        Dict with status, the service's clear-cache result, and a timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        result = service.clear_cache()
        logger.info("缓存清理成功")
        return {
            "status": "success",
            "message": "缓存已清理",
            "data": result,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"清理缓存失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"清理缓存失败: {str(e)}"
        ) from e

@router.put("/config", summary="更新服务配置")
async def update_config(
    config_update: Dict[str, Any],
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Update the service configuration.

    Args:
        config_update: Arbitrary key/value configuration updates; validation
            is delegated to the service (no schema is enforced here).

    Returns:
        Dict with status, the service's update result, and a timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        result = service.update_config(config_update)
        logger.info(f"配置更新成功: {config_update}")
        return {
            "status": "success",
            "message": "配置更新成功",
            "data": result,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"更新配置失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"更新配置失败: {str(e)}"
        ) from e

@router.post("/models/warm-up", summary="模型预热")
async def warm_up_models(
    sample_texts: Optional[List[str]] = None,
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Warm up both models, optionally with caller-supplied sample texts.

    Args:
        sample_texts: Optional list of sample texts used for warm-up.

    Returns:
        Dict with per-model warm-up outcomes and a timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        # Warm up the StructBERT classifier.
        structbert_result = await service.structbert_manager.warm_up(sample_texts)

        # Warm up the PALM2.0 summarizer.
        palm2_result = await service.palm2_manager.warm_up(sample_texts)

        result = {
            "structbert_warm_up": structbert_result,
            "palm2_warm_up": palm2_result,
            # NOTE(review): "success" is true when at least ONE model warmed
            # up ("or", not "and") — presumably deliberate best-effort
            # semantics; confirm.
            "success": structbert_result or palm2_result
        }

        logger.info(f"模型预热完成: {result}")
        return {
            "status": "success",
            "message": "模型预热完成",
            "data": result,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"模型预热失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"模型预热失败: {str(e)}"
        ) from e

@router.get("/models/status", summary="获取模型状态")
async def get_models_status(
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Return the detailed status of both models.

    Returns:
        Dict with per-model status payloads and a timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        structbert_status = await service.structbert_manager.get_model_status()
        palm2_status = await service.palm2_manager.get_model_status()

        return {
            "status": "success",
            "data": {
                "structbert": structbert_status,
                "palm2": palm2_status
            },
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"获取模型状态失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"获取模型状态失败: {str(e)}"
        ) from e

@router.post("/models/reload", summary="重新加载模型")
async def reload_models(
    reload_structbert: bool = True,
    reload_palm2: bool = True,
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Reload the selected models.

    Args:
        reload_structbert: Whether to reload the StructBERT model.
        reload_palm2: Whether to reload the PALM2.0 model.

    Returns:
        Dict with per-model reload results and a timestamp.

    Raises:
        HTTPException: 500 on failure (a failure mid-way may leave one
            model cleaned up but not reloaded).
    """
    try:
        results = {}

        if reload_structbert:
            # Tear down before reloading so resources are released first.
            await service.structbert_manager.cleanup()
            structbert_result = await service.structbert_manager.load_model()
            results["structbert"] = structbert_result

        if reload_palm2:
            # Tear down before reloading so resources are released first.
            await service.palm2_manager.cleanup()
            palm2_result = await service.palm2_manager.load_model()
            results["palm2"] = palm2_result

        logger.info(f"模型重新加载完成: {results}")
        return {
            "status": "success",
            "message": "模型重新加载完成",
            "data": results,
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"模型重新加载失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"模型重新加载失败: {str(e)}"
        ) from e

@router.post("/cleanup", summary="清理资源")
async def cleanup_resources(
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Release the service's resources.

    Returns:
        Dict with status and a timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        await service.cleanup_resources()
        logger.info("资源清理完成")
        return {
            "status": "success",
            "message": "资源清理完成",
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"资源清理失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"资源清理失败: {str(e)}"
        ) from e

@router.get("/stats/reset", summary="重置统计信息")
async def reset_statistics(
    service: TextProcessingService = Depends(get_text_service)
) -> Dict[str, Any]:
    """
    Reset all service and model statistics.

    NOTE(review): this endpoint mutates state but is exposed via GET; POST
    would match HTTP semantics better — left as GET to avoid breaking
    existing callers. Also, if get_text_service constructs a fresh service
    per request, reassigning service.stats may not affect a shared
    instance — confirm the service's lifetime.

    Returns:
        Dict containing the previous statistics and the reset time.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        # Snapshot the old counters before resetting so the caller can see
        # what was discarded. Key set must match what the service expects.
        old_stats = service.stats.copy()
        service.stats = {
            'requests_processed': 0,
            'cache_hits': 0,
            'cache_misses': 0,
            'average_processing_time': 0,
            'last_gc_time': time.time(),
            'memory_peak': 0
        }

        # Reset per-model statistics as well.
        service.structbert_manager.reset_stats()
        service.palm2_manager.reset_stats()

        logger.info("统计信息已重置")
        return {
            "status": "success",
            "message": "统计信息已重置",
            "data": {
                "previous_stats": old_stats,
                "reset_time": datetime.now().isoformat()
            },
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"重置统计信息失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"重置统计信息失败: {str(e)}"
        ) from e

@router.get("/models/cache-info", summary="获取模型缓存信息")
async def get_model_cache_info():
    """
    Return model cache information from the path manager and loader.

    Returns:
        Dict with cache info, loader info, and an ISO-format timestamp.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        # Deferred imports — presumably to avoid an import cycle or slow
        # module load at startup; confirm before hoisting to module level.
        from ..config.model_path_config import get_model_path_manager
        from .models.modelscope_loader import get_modelscope_loader

        manager = get_model_path_manager()
        loader = get_modelscope_loader()

        cache_info = manager.get_cache_info()
        loader_info = loader.get_loaded_models_info()

        return {
            "status": "success",
            "data": {
                "cache_info": cache_info,
                "loader_info": loader_info
            },
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Preserve the traceback for debugging; chain the root cause.
        logger.exception(f"获取模型缓存信息失败: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"获取模型缓存信息失败: {str(e)}"
        ) from e
