from fastapi import APIRouter, HTTPException, status, Depends
from pydantic import BaseModel
from typing import List, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy import text
from app.utils.database import get_db_session
import asyncio
import logging
import time
from concurrent.futures import ThreadPoolExecutor
import uvicorn

# Logging setup. NOTE(review): basicConfig is a no-op if the root logger was
# already configured by the main application — confirm this is intentional.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Router for the high-concurrency demo endpoints; mounted by the main app.
router = APIRouter(prefix="/high-concurrency", tags=["high_concurrency"])

# Request and response models
class ProcessDataRequest(BaseModel):
    """Request body for a single processing call: an arbitrary JSON object."""
    # Free-form payload; no schema is enforced beyond "JSON object".
    data: Dict[str, Any]

class ProcessDataResponse(BaseModel):
    """Outcome of processing one item."""
    success: bool            # True when the item was processed without error
    message: str             # Human-readable status text
    data: Dict[str, Any]     # Processing result; empty dict on failure

class BatchProcessRequest(BaseModel):
    """Request body for the batch endpoint: a list of single-item requests."""
    items: List[ProcessDataRequest]

class BatchProcessResponse(BaseModel):
    """Aggregate outcome of a batch run, with one entry per submitted item."""
    success: bool                        # Overall call status (set even if some items failed)
    message: str                         # Summary text including the success count
    processed_count: int                 # Number of items processed successfully
    results: List[ProcessDataResponse]  # Per-item outcomes, in submission order

# Database connection-pool configuration:
# a connection pool is used to support high-concurrency access;
# the pool settings themselves live in database.py.

# Asynchronous processing helper
async def async_process_data(data: Dict[str, Any], db: Session) -> Dict[str, Any]:
    """Process one payload and stamp it with the database server's current time.

    Args:
        data: Arbitrary JSON-like payload supplied by the caller.
        db: Synchronous SQLAlchemy session from the request scope.

    Returns:
        A dict with the original payload under ``data``, the DB server time
        under ``processed_at``, and ``status`` set to ``"processed"``.

    Raises:
        HTTPException: 500 if the database call (or anything else) fails.
    """
    try:
        # Simulate some per-item processing latency.
        await asyncio.sleep(0.01)

        # Raw SQL for a cheap round-trip; the backtick quoting assumes MySQL.
        # NOTE(review): db.execute() on a *sync* Session blocks the event loop
        # for the duration of the query — consider run_in_executor or an
        # AsyncSession for genuinely concurrent DB work. TODO confirm intent.
        result = db.execute(text("SELECT NOW() AS `current_time`"))
        current_time = result.scalar()

        return {
            "processed_at": str(current_time),
            "data": data,
            "status": "processed"
        }
    except Exception as e:
        # logger.exception keeps the traceback, which logger.error(str(e)) lost.
        logger.exception(f"处理数据时出错: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"处理数据时出错: {str(e)}"
        )

@router.post("/process", response_model=ProcessDataResponse)
async def process_data(request: ProcessDataRequest, db: Session = Depends(get_db_session)):
    """Handle a single high-concurrency processing request.

    Delegates to async_process_data and wraps its result in a
    ProcessDataResponse; relies on the pooled DB session for scalability.
    HTTPExceptions raised by the helper pass through untouched; anything
    else is converted to a 500.
    """
    try:
        payload = await async_process_data(request.data, db)
        response = ProcessDataResponse(
            success=True,
            message="数据处理成功",
            data=payload
        )
    except HTTPException:
        # Already shaped for the client — propagate as-is.
        raise
    except Exception as e:
        logger.error(f"处理请求时出错: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"处理请求时出错: {str(e)}"
        )
    return response

@router.post("/batch-process", response_model=BatchProcessResponse)
async def batch_process_data(request: BatchProcessRequest, db: Session = Depends(get_db_session)):
    """Process several payloads concurrently in one request.

    Fans out one async_process_data task per item, gathers all outcomes
    (capturing per-item exceptions instead of aborting the batch), and
    returns a per-item entry plus a summary count.
    """
    try:
        # Fan out and collect; return_exceptions keeps failed items in-line.
        outcomes = await asyncio.gather(
            *(item_req.data and async_process_data(item_req.data, db) or async_process_data(item_req.data, db) for item_req in ()),  # placeholder removed below
        ) if False else await asyncio.gather(
            *(async_process_data(item_req.data, db) for item_req in request.items),
            return_exceptions=True
        )

        item_responses: List[ProcessDataResponse] = []
        ok_count = 0

        for i, result in enumerate(outcomes):
            if isinstance(result, Exception):
                # Failed item: log it and emit a failure entry in its slot.
                logger.error(f"处理第 {i+1} 项数据时出错: {str(result)}")
                item_responses.append(ProcessDataResponse(
                    success=False,
                    message=f"处理第 {i+1} 项数据时出错: {str(result)}",
                    data={}
                ))
                continue
            item_responses.append(ProcessDataResponse(
                success=True,
                message="数据处理成功",
                data=result
            ))
            ok_count += 1

        processed_count = ok_count
        return BatchProcessResponse(
            success=True,
            message=f"批量处理完成，成功处理 {processed_count}/{len(request.items)} 项数据",
            processed_count=processed_count,
            results=item_responses
        )
    except Exception as e:
        logger.error(f"批量处理请求时出错: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量处理请求时出错: {str(e)}"
        )

@router.get("/stress-test")
async def stress_test(concurrent_requests: int = 100, delay: float = 0.01):
    """Spawn `concurrent_requests` no-op async workers and report throughput.

    Each worker only sleeps for `delay` seconds, so the figures measure the
    event loop's fan-out/fan-in overhead rather than real work.

    Raises:
        HTTPException: 400 if `concurrent_requests` is not positive;
            500 on any unexpected failure.
    """
    # Guard before dividing: the original crashed with ZeroDivisionError
    # (surfaced as a 500) when concurrent_requests was 0 or negative.
    if concurrent_requests < 1:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="concurrent_requests must be a positive integer"
        )
    try:
        start_time = time.time()

        # Each worker simulates a request by sleeping, then reports done.
        async def worker():
            await asyncio.sleep(delay)
            return {"status": "completed"}

        await asyncio.gather(*(worker() for _ in range(concurrent_requests)))

        total_time = time.time() - start_time

        return {
            "success": True,
            "concurrent_requests": concurrent_requests,
            "total_time": total_time,
            "average_time_per_request": total_time / concurrent_requests,
            "requests_per_second": concurrent_requests / total_time if total_time > 0 else 0
        }
    except Exception as e:
        logger.error(f"压力测试时出错: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"压力测试时出错: {str(e)}"
        )