#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
FastAPI异步编程示例 - 展示Python异步Web开发的强大功能

本文件展示FastAPI的异步特性，包括异步路由、异步数据库操作、异步HTTP客户端、
并发处理、流式响应等高级功能。

与SpringBoot对比：
1. FastAPI原生支持async/await，SpringBoot需要WebFlux
2. Python异步语法更简洁，Java需要CompletableFuture或Reactor
3. FastAPI异步性能更高，特别是I/O密集型任务
4. Python异步生态更丰富（aiohttp、asyncpg等）

作者: Assistant
日期: 2024
"""

import asyncio
import aiohttp
import aiofiles
import time
from datetime import datetime, timedelta
from typing import List, Optional, Dict, Any, AsyncGenerator
from contextlib import asynccontextmanager

from fastapi import FastAPI, HTTPException, Depends, BackgroundTasks, Request
from fastapi.responses import StreamingResponse, JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
from starlette.concurrency import run_in_threadpool

import uvicorn
import logging
import json
import random
import asyncpg
from concurrent.futures import ThreadPoolExecutor
import httpx

# ============================================================================
# 1. 异步数据模型
# ============================================================================

class TaskRequest(BaseModel):
    """
    Request payload for creating an asynchronous task.

    ``task_type`` selects the worker coroutine: "io_intensive",
    "cpu_intensive" or "mixed"; any other value falls back to the
    default delay task (see AsyncTaskManager.create_task).
    """
    name: str = Field(..., description="任务名称")
    duration: int = Field(..., ge=1, le=60, description="任务持续时间（秒）")
    task_type: str = Field(..., description="任务类型")
    params: Dict[str, Any] = Field(default_factory=dict, description="任务参数")


class TaskResponse(BaseModel):
    """
    Externally visible status record for an asynchronous task.

    ``status`` takes the values "pending", "running", "completed",
    "failed" or "cancelled"; ``progress`` is a percentage in [0, 100].
    """
    task_id: str = Field(..., description="任务ID")
    name: str = Field(..., description="任务名称")
    status: str = Field(..., description="任务状态")
    progress: float = Field(..., description="任务进度")
    result: Optional[Any] = Field(None, description="任务结果")
    created_at: datetime = Field(..., description="创建时间")
    started_at: Optional[datetime] = Field(None, description="开始时间")
    completed_at: Optional[datetime] = Field(None, description="完成时间")
    error: Optional[str] = Field(None, description="错误信息")


class ApiResponse(BaseModel):
    """
    Uniform envelope used by every JSON endpoint in this app.

    ``timestamp`` defaults to the (naive, local-time) moment the
    response model is instantiated.
    """
    success: bool = Field(..., description="请求是否成功")
    message: str = Field(..., description="响应消息")
    data: Optional[Any] = Field(None, description="响应数据")
    timestamp: datetime = Field(default_factory=datetime.now, description="响应时间")


class WebsiteInfo(BaseModel):
    """
    Summary of a single probed website.

    On request failure the probing code reuses this model with
    ``status_code`` 0 and the error text placed in ``title``.
    """
    url: str = Field(..., description="网站URL")
    title: str = Field(..., description="网站标题")
    status_code: int = Field(..., description="HTTP状态码")
    response_time: float = Field(..., description="响应时间（毫秒）")
    content_length: int = Field(..., description="内容长度")
    headers: Dict[str, str] = Field(..., description="响应头")


# ============================================================================
# 2. 异步任务管理器
# ============================================================================

class AsyncTaskManager:
    """
    Manage background asyncio tasks: creation, status tracking, results.

    State lives in two dicts: ``tasks`` keeps the externally visible
    TaskResponse record for every task ever created (including finished
    ones); ``running_tasks`` keeps the in-flight asyncio.Task objects and
    is pruned when a task ends or is cancelled.
    """

    def __init__(self):
        # task_id -> status record; retained after completion for queries.
        self.tasks: Dict[str, TaskResponse] = {}
        # task_id -> asyncio.Task still running; removed on completion.
        self.running_tasks: Dict[str, asyncio.Task] = {}
        # Pool for CPU-bound work; shut down by the app's lifespan handler.
        self.executor = ThreadPoolExecutor(max_workers=10)

    async def create_task(self, task_request: TaskRequest) -> str:
        """
        Register a new task record and start the matching worker coroutine.

        Returns the generated task id immediately; the work itself runs in
        the background via asyncio.create_task.
        """
        task_id = f"task_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"

        self.tasks[task_id] = TaskResponse(
            task_id=task_id,
            name=task_request.name,
            status="pending",
            progress=0.0,
            created_at=datetime.now()
        )

        # Dispatch on task_type; unknown types fall back to the default task.
        runners = {
            "io_intensive": self._io_intensive_task,
            "cpu_intensive": self._cpu_intensive_task,
            "mixed": self._mixed_task,
        }
        runner = runners.get(task_request.task_type, self._default_task)
        self.running_tasks[task_id] = asyncio.create_task(runner(task_id, task_request))

        return task_id

    # -- shared state transitions -------------------------------------------

    def _mark_running(self, task_id: str) -> None:
        """Transition a task to 'running' and stamp its start time."""
        record = self.tasks[task_id]
        record.status = "running"
        record.started_at = datetime.now()

    def _mark_completed(self, task_id: str, result: Dict[str, Any]) -> None:
        """Transition a task to 'completed' (100%) with its final result."""
        record = self.tasks[task_id]
        record.status = "completed"
        record.progress = 100.0
        record.result = result
        record.completed_at = datetime.now()

    def _mark_failed(self, task_id: str, exc: Exception) -> None:
        """Transition a task to 'failed' and record the error message."""
        record = self.tasks[task_id]
        record.status = "failed"
        record.error = str(exc)
        logging.error(f"Task {task_id} failed: {str(exc)}")

    def _forget(self, task_id: str) -> None:
        """Remove the finished asyncio.Task from the running set, if present."""
        self.running_tasks.pop(task_id, None)

    # -- worker coroutines --------------------------------------------------

    async def _io_intensive_task(self, task_id: str, task_request: TaskRequest):
        """
        I/O-bound worker: alternates sleeps with real HTTP requests,
        updating progress after each of 10 steps.
        """
        try:
            self._mark_running(task_id)

            duration = task_request.duration
            steps = 10
            step_duration = duration / steps
            results = []

            for i in range(steps):
                # Simulated async I/O wait for this step.
                await asyncio.sleep(step_duration)

                # One HTTP round-trip per step; failures are recorded per
                # step rather than failing the whole task.
                async with aiohttp.ClientSession() as session:
                    try:
                        async with session.get('https://httpbin.org/delay/0.1') as response:
                            data = await response.json()
                            results.append(f"Step {i+1}: {data.get('url', 'N/A')}")
                    except Exception as e:
                        results.append(f"Step {i+1}: Error - {str(e)}")

                progress = (i + 1) / steps * 100
                self.tasks[task_id].progress = progress
                logging.info(f"Task {task_id} progress: {progress:.1f}%")

            self._mark_completed(task_id, {
                "type": "io_intensive",
                "steps_completed": len(results),
                "results": results,
                "total_duration": duration
            })

        except Exception as e:
            self._mark_failed(task_id, e)

        finally:
            self._forget(task_id)

    async def _cpu_intensive_task(self, task_id: str, task_request: TaskRequest):
        """
        CPU-bound worker: trial-division prime search, executed in a
        worker thread so the event loop stays responsive.
        """
        try:
            self._mark_running(task_id)

            def cpu_bound_work(duration: int) -> Dict[str, Any]:
                """Find primes by trial division for `duration` seconds."""
                start_time = time.time()
                primes = []
                num = 2

                while time.time() - start_time < duration:
                    # all() short-circuits exactly like the original
                    # break-on-divisor loop.
                    if all(num % d != 0 for d in range(2, int(num ** 0.5) + 1)):
                        primes.append(num)
                    num += 1
                    # NOTE: progress cannot be pushed to self.tasks from this
                    # worker thread; the record stays at 0 until completion.

                return {
                    "primes_found": len(primes),
                    "largest_prime": max(primes) if primes else 0,
                    "actual_duration": time.time() - start_time
                }

            result = await run_in_threadpool(cpu_bound_work, task_request.duration)

            self._mark_completed(task_id, {"type": "cpu_intensive", **result})

        except Exception as e:
            self._mark_failed(task_id, e)

        finally:
            self._forget(task_id)

    async def _mixed_task(self, task_id: str, task_request: TaskRequest):
        """
        Mixed worker: async file writes over the first half of the
        duration, then a CPU-bound sum in the thread pool.
        """
        try:
            self._mark_running(task_id)

            duration = task_request.duration
            half_duration = duration // 2

            # Phase 1: three async file writes spread over the first half.
            io_results = []
            for i in range(3):
                await asyncio.sleep(half_duration / 3)

                filename = f"/tmp/async_test_{task_id}_{i}.txt"
                async with aiofiles.open(filename, 'w') as f:
                    await f.write(f"Task {task_id} - Step {i+1} - {datetime.now()}")

                # BUG FIX: report the path actually written instead of the
                # hard-coded "(unknown)" placeholder.
                io_results.append(f"Created file: {filename}")

                self.tasks[task_id].progress = (i + 1) / 6 * 100  # 6 steps overall

            # Phase 2: CPU-bound sum of squares, off the event loop.
            def cpu_work():
                return sum(i ** 2 for i in range(1000000))

            cpu_result = await run_in_threadpool(cpu_work)

            self._mark_completed(task_id, {
                "type": "mixed",
                "io_operations": io_results,
                "cpu_result": cpu_result,
                "total_duration": duration
            })

        except Exception as e:
            self._mark_failed(task_id, e)

        finally:
            self._forget(task_id)

    async def _default_task(self, task_id: str, task_request: TaskRequest):
        """
        Fallback worker for unknown task types: a plain timed sleep split
        into five steps with progress updates.
        """
        try:
            self._mark_running(task_id)

            duration = task_request.duration
            steps = 5
            step_duration = duration / steps

            for i in range(steps):
                await asyncio.sleep(step_duration)
                self.tasks[task_id].progress = (i + 1) / steps * 100

            self._mark_completed(task_id, {
                "type": "default",
                "message": f"Task {task_request.name} completed successfully",
                "duration": duration
            })

        except Exception as e:
            self._mark_failed(task_id, e)

        finally:
            self._forget(task_id)

    # -- queries and control ------------------------------------------------

    def get_task(self, task_id: str) -> Optional[TaskResponse]:
        """Return the status record for `task_id`, or None if unknown."""
        return self.tasks.get(task_id)

    def get_all_tasks(self) -> List[TaskResponse]:
        """Return status records for every task ever created."""
        return list(self.tasks.values())

    async def cancel_task(self, task_id: str) -> bool:
        """
        Cancel a running task.

        Returns True if a running task was cancelled, False when the id is
        unknown or the task already finished.
        """
        running = self.running_tasks.get(task_id)
        if running is None:
            return False

        running.cancel()

        record = self.tasks.get(task_id)
        if record is not None:
            record.status = "cancelled"
            record.completed_at = datetime.now()

        self.running_tasks.pop(task_id, None)
        return True


# ============================================================================
# 3. 异步HTTP客户端服务
# ============================================================================

class AsyncHttpService:
    """
    Thin async-context-manager wrapper around an aiohttp session.

    Enter the context to open a shared ClientSession (30s total timeout);
    exiting closes it. Used for single and concurrent website probes.
    """

    def __init__(self):
        self.session: Optional[aiohttp.ClientSession] = None

    async def __aenter__(self):
        # One shared session serves every request made through this service.
        self.session = aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=30)
        )
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session is not None:
            await self.session.close()

    async def fetch_website_info(self, url: str) -> WebsiteInfo:
        """
        Fetch one URL and summarize status, timing, size and <title>.

        Never raises: any error is folded into a WebsiteInfo whose title
        carries the error text and whose status_code is 0.
        """
        began = time.time()

        try:
            async with self.session.get(url) as resp:
                body = await resp.text()
                elapsed_ms = (time.time() - began) * 1000

                # Naive <title> extraction from the raw HTML.
                page_title = "Unknown"
                marker = body.find('<title>')
                if marker != -1:
                    open_end = marker + 7
                    close = body.find('</title>', open_end)
                    if close > open_end:
                        page_title = body[open_end:close].strip()

                return WebsiteInfo(
                    url=url,
                    title=page_title,
                    status_code=resp.status,
                    response_time=elapsed_ms,
                    content_length=len(body),
                    headers=dict(resp.headers)
                )

        except Exception as exc:
            elapsed_ms = (time.time() - began) * 1000
            return WebsiteInfo(
                url=url,
                title=f"Error: {str(exc)}",
                status_code=0,
                response_time=elapsed_ms,
                content_length=0,
                headers={}
            )

    async def fetch_multiple_websites(self, urls: List[str]) -> List[WebsiteInfo]:
        """
        Probe many URLs concurrently, preserving input order.

        Exceptions surfaced by gather() become placeholder WebsiteInfo
        records instead of propagating.
        """
        outcomes = await asyncio.gather(
            *(self.fetch_website_info(u) for u in urls),
            return_exceptions=True,
        )

        return [
            WebsiteInfo(
                url=urls[pos],
                title=f"Exception: {str(item)}",
                status_code=0,
                response_time=0,
                content_length=0,
                headers={}
            ) if isinstance(item, Exception) else item
            for pos, item in enumerate(outcomes)
        ]


# ============================================================================
# 4. 应用程序生命周期和依赖
# ============================================================================

# Global singleton shared by all requests; handed to routes via the
# get_task_manager dependency and torn down in lifespan().
task_manager = AsyncTaskManager()

@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Application lifespan hook.

    Startup only logs; shutdown cancels every in-flight task and then
    shuts down the task manager's thread pool.
    """
    # --- startup ---
    logging.info("FastAPI异步应用启动")
    logging.info("初始化异步任务管理器")

    yield  # application serves requests here

    # --- shutdown ---
    logging.info("FastAPI异步应用关闭")
    logging.info("清理异步资源...")

    # Snapshot the keys: cancel_task mutates running_tasks while we iterate.
    for pending_id in list(task_manager.running_tasks.keys()):
        await task_manager.cancel_task(pending_id)

    task_manager.executor.shutdown(wait=True)

def get_task_manager() -> AsyncTaskManager:
    """
    FastAPI dependency returning the module-level AsyncTaskManager singleton.
    """
    return task_manager


# ============================================================================
# 5. FastAPI应用实例
# ============================================================================

app = FastAPI(
    title="FastAPI异步编程示例",
    description="演示FastAPI异步特性的高级示例应用",
    version="1.0.0",
    lifespan=lifespan
)

# 配置CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# 配置日志
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)


# ============================================================================
# 6. 异步路由定义
# ============================================================================

@app.get("/", tags=["首页"])
async def root():
    """
    Landing endpoint demonstrating a basic async route.

    Sleeps briefly to simulate async work, then returns a static feature
    overview plus the current timestamp.
    """
    await asyncio.sleep(0.1)  # simulated async I/O

    feature_list = [
        "异步任务管理",
        "并发HTTP请求",
        "流式响应",
        "后台任务",
        "实时数据流",
    ]
    return {
        "message": "欢迎使用FastAPI异步编程示例",
        "features": feature_list,
        "timestamp": datetime.now(),
    }


@app.post("/tasks", response_model=ApiResponse, tags=["异步任务"])
async def create_task(
    task_request: TaskRequest,
    background_tasks: BackgroundTasks,
    manager: AsyncTaskManager = Depends(get_task_manager)
):
    """
    Create an asynchronous task and schedule a follow-up audit log.

    Returns the new task id with its estimated duration; any failure is
    surfaced as a 500 with the error message as detail.
    """
    try:
        task_id = await manager.create_task(task_request)

        # Fire-and-forget bookkeeping, runs after the response is sent.
        background_tasks.add_task(log_task_creation, task_id, task_request.name)

        logger.info(f"创建异步任务: {task_id} - {task_request.name}")

        payload = {
            "task_id": task_id,
            "status": "pending",
            "estimated_duration": task_request.duration
        }
        return ApiResponse(
            success=True,
            message="异步任务创建成功",
            data=payload
        )

    except Exception as e:
        logger.error(f"创建异步任务失败: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/tasks/{task_id}", response_model=ApiResponse, tags=["异步任务"])
async def get_task_status(
    task_id: str,
    manager: AsyncTaskManager = Depends(get_task_manager)
):
    """
    Look up one task's status record; 404 when the id is unknown.
    """
    record = manager.get_task(task_id)
    if record is None:
        raise HTTPException(status_code=404, detail=f"任务 {task_id} 不存在")

    return ApiResponse(
        success=True,
        message="获取任务状态成功",
        data=record
    )


@app.get("/tasks", response_model=ApiResponse, tags=["异步任务"])
async def get_all_tasks(
    manager: AsyncTaskManager = Depends(get_task_manager)
):
    """
    List every known task, newest first.
    """
    ordered = sorted(
        manager.get_all_tasks(),
        key=lambda t: t.created_at,
        reverse=True,
    )

    return ApiResponse(
        success=True,
        message=f"获取任务列表成功，共 {len(ordered)} 个任务",
        data=ordered
    )


@app.delete("/tasks/{task_id}", response_model=ApiResponse, tags=["异步任务"])
async def cancel_task(
    task_id: str,
    manager: AsyncTaskManager = Depends(get_task_manager)
):
    """
    Cancel a running task; 404 when the id is unknown or already finished.
    """
    cancelled = await manager.cancel_task(task_id)
    if not cancelled:
        raise HTTPException(status_code=404, detail=f"任务 {task_id} 不存在或已完成")

    logger.info(f"取消异步任务: {task_id}")

    return ApiResponse(
        success=True,
        message="任务取消成功",
        data={"cancelled_task_id": task_id}
    )


@app.post("/websites/batch-check", response_model=ApiResponse, tags=["并发请求"])
async def batch_check_websites(urls: List[str]):
    """
    Probe up to 20 URLs concurrently and report per-site info plus
    total and average wall-clock time.

    Raises:
        HTTPException(400): if more than 20 URLs are submitted.
    """
    if len(urls) > 20:
        raise HTTPException(status_code=400, detail="最多支持20个URL")

    began = time.time()
    async with AsyncHttpService() as client:
        site_reports = await client.fetch_multiple_websites(urls)
    elapsed_ms = (time.time() - began) * 1000

    logger.info(f"批量检查 {len(urls)} 个网站，耗时 {elapsed_ms:.2f}ms")

    return ApiResponse(
        success=True,
        message=f"批量检查完成，共检查 {len(urls)} 个网站",
        data={
            "websites": site_reports,
            "total_time_ms": elapsed_ms,
            "average_time_ms": elapsed_ms / len(urls) if urls else 0
        }
    )


@app.get("/stream/numbers", tags=["流式响应"])
async def stream_numbers(count: int = 10, delay: float = 0.5):
    """
    Stream `count` numbers as Server-Sent Events, one every `delay` seconds.

    Each event carries the number, an ISO timestamp and percent progress;
    a final ``{"finished": true}`` event terminates the stream.

    Raises:
        HTTPException(400): if more than 100 numbers are requested.
    """
    if count > 100:
        raise HTTPException(status_code=400, detail="最多支持100个数字")

    async def generate_numbers():
        """
        Async generator yielding SSE-formatted frames.
        """
        for i in range(count):
            data = {
                "number": i + 1,
                "timestamp": datetime.now().isoformat(),
                "progress": (i + 1) / count * 100
            }

            yield f"data: {json.dumps(data)}\n\n"

            if i < count - 1:  # no delay after the final number
                await asyncio.sleep(delay)

        # Terminal frame so clients know the stream is complete.
        yield f"data: {{\"finished\": true}}\n\n"

    return StreamingResponse(
        generate_numbers(),
        # BUG FIX: the payload uses SSE framing ("data: ...\n\n"), so the
        # media type must be text/event-stream — browser EventSource clients
        # reject streams served as text/plain.
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
    )


@app.get("/stream/logs", tags=["流式响应"])
async def stream_logs(duration: int = 10):
    """
    Stream randomly generated log entries as Server-Sent Events for
    `duration` seconds, demonstrating a long-lived push connection.

    Raises:
        HTTPException(400): if duration exceeds 60 seconds.
    """
    if duration > 60:
        raise HTTPException(status_code=400, detail="最长支持60秒")

    async def generate_logs():
        """
        Async generator producing SSE frames with simulated log records.
        """
        start_time = time.time()
        log_count = 0

        while time.time() - start_time < duration:
            log_count += 1

            # Pick a random severity for each simulated entry.
            log_types = ["INFO", "DEBUG", "WARNING", "ERROR"]
            log_type = random.choice(log_types)

            log_entry = {
                "id": log_count,
                "timestamp": datetime.now().isoformat(),
                "level": log_type,
                "message": f"这是第 {log_count} 条 {log_type} 日志",
                "module": f"module_{random.randint(1, 5)}",
                "elapsed": time.time() - start_time
            }

            yield f"data: {json.dumps(log_entry, ensure_ascii=False)}\n\n"

            # Random delay to mimic a real log source.
            await asyncio.sleep(random.uniform(0.1, 1.0))

        # Terminal frame reporting the total number of logs emitted.
        yield f"data: {{\"finished\": true, \"total_logs\": {log_count}}}\n\n"

    return StreamingResponse(
        generate_logs(),
        # BUG FIX: frames use SSE framing, so advertise text/event-stream;
        # text/plain breaks browser EventSource clients.
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
    )


@app.get("/concurrent/demo", response_model=ApiResponse, tags=["并发演示"])
async def concurrent_demo():
    """
    Run three sleep-based tasks plus one real HTTP call concurrently and
    report their results and the batch's wall-clock time.
    """
    began = time.time()

    async def task1():
        await asyncio.sleep(1)
        return "任务1完成"

    async def task2():
        await asyncio.sleep(1.5)
        return "任务2完成"

    async def task3():
        await asyncio.sleep(0.8)
        return "任务3完成"

    async def http_task():
        async with aiohttp.ClientSession() as session:
            async with session.get('https://httpbin.org/delay/1') as response:
                data = await response.json()
                return f"HTTP任务完成: {data.get('url', 'N/A')}"

    # All four run at once; exceptions come back as values, not raises.
    outcomes = await asyncio.gather(
        task1(),
        task2(),
        task3(),
        http_task(),
        return_exceptions=True
    )

    elapsed = time.time() - began

    # Fold any per-task exception into a failure string, keeping order.
    summary = [
        f"任务{pos+1}失败: {str(item)}" if isinstance(item, Exception) else item
        for pos, item in enumerate(outcomes)
    ]

    logger.info(f"并发演示完成，耗时 {elapsed:.2f}秒")

    return ApiResponse(
        success=True,
        message="并发演示完成",
        data={
            "results": summary,
            "total_time_seconds": elapsed,
            "note": "如果串行执行，预计需要4.3秒以上"
        }
    )


@app.get("/performance/comparison", response_model=ApiResponse, tags=["性能对比"])
async def performance_comparison():
    """
    Time ten 0.1s I/O waits handled asynchronously vs synchronously.

    The async variant overlaps the sleeps via gather(); the sync variant
    runs them back-to-back inside a worker thread.
    """
    async def timed_async() -> float:
        began = time.time()
        # Launch all ten waits together so they overlap.
        await asyncio.gather(*(asyncio.sleep(0.1) for _ in range(10)))
        return time.time() - began

    def timed_sync() -> float:
        began = time.time()
        for _ in range(10):
            time.sleep(0.1)  # blocking wait, executed sequentially
        return time.time() - began

    async_time = await timed_async()
    sync_time = await run_in_threadpool(timed_sync)

    improvement = (sync_time - async_time) / sync_time * 100

    return ApiResponse(
        success=True,
        message="性能对比完成",
        data={
            "async_time_seconds": async_time,
            "sync_time_seconds": sync_time,
            "improvement_percentage": improvement,
            "note": "异步版本在I/O密集型任务中表现更优"
        }
    )


# ============================================================================
# 7. 后台任务函数
# ============================================================================

async def log_task_creation(task_id: str, task_name: str):
    """
    Background job run after task creation: wait briefly, then log the
    new task's id and name.
    """
    await asyncio.sleep(1)  # simulated async bookkeeping work
    logger.info("后台任务: 记录任务创建 - ID: %s, 名称: %s" % (task_id, task_name))


# ============================================================================
# 8. 异常处理
# ============================================================================

@app.exception_handler(asyncio.TimeoutError)
async def timeout_exception_handler(request: Request, exc: asyncio.TimeoutError):
    """
    Convert an asyncio.TimeoutError escaping a route into a 408 response
    wrapped in the unified ApiResponse envelope.
    """
    logger.warning(f"请求超时: {request.url}")
    body = ApiResponse(
        success=False,
        message="请求超时",
        data=None
    )
    # BUG FIX: .dict() leaves the `timestamp` field as a datetime, which
    # JSONResponse's json.dumps cannot serialize (the handler itself would
    # crash). Round-trip through the model's JSON encoder to get plain
    # JSON-compatible types.
    return JSONResponse(
        status_code=408,
        content=json.loads(body.json())
    )


@app.exception_handler(aiohttp.ClientError)
async def http_client_exception_handler(request: Request, exc: aiohttp.ClientError):
    """
    Convert an aiohttp client error escaping a route into a 502 response
    wrapped in the unified ApiResponse envelope.
    """
    logger.warning(f"HTTP客户端异常: {str(exc)}")
    body = ApiResponse(
        success=False,
        message=f"外部服务异常: {str(exc)}",
        data=None
    )
    # BUG FIX: .dict() keeps `timestamp` as a datetime that json.dumps
    # rejects; serialize via the model's JSON encoder instead.
    return JSONResponse(
        status_code=502,
        content=json.loads(body.json())
    )


# ============================================================================
# 9. 启动配置
# ============================================================================

if __name__ == "__main__":
    # Launch the app with uvicorn when this file is run as a script.
    uvicorn.run(
        "03_fastapi_async:app",
        host="0.0.0.0",
        port=8001,
        reload=True,
        log_level="info",
        access_log=True,
        loop="asyncio",  # explicitly select the asyncio event loop
        workers=1        # async apps typically run as a single worker process
    )


# ============================================================================
# 10. FastAPI异步编程总结
# ============================================================================

"""
FastAPI异步编程特性总结:

1. 原生异步支持:
   - async/await语法简洁直观
   - 自动处理异步上下文
   - 无需额外配置

2. 高并发性能:
   - 单线程处理大量并发请求
   - I/O密集型任务性能优异
   - 内存占用低

3. 异步生态丰富:
   - aiohttp: 异步HTTP客户端
   - aiofiles: 异步文件操作
   - asyncpg: 异步PostgreSQL驱动
   - motor: 异步MongoDB驱动

4. 流式响应:
   - Server-Sent Events (SSE)
   - 实时数据推送
   - 长连接支持

5. 后台任务:
   - BackgroundTasks支持
   - 异步任务队列
   - 任务状态管理

6. 并发控制:
   - asyncio.gather()并发执行
   - asyncio.Semaphore()限制并发
   - asyncio.Queue()异步队列

7. 异常处理:
   - 异步异常捕获
   - 超时处理
   - 资源清理

8. 与SpringBoot对比:
   - FastAPI: 原生异步，语法简洁
   - SpringBoot: 需要WebFlux，学习曲线陡峭
   - FastAPI: 更适合I/O密集型应用
   - SpringBoot: 更适合企业级复杂应用

9. 最佳实践:
   - 合理使用async/await
   - 避免阻塞操作
   - 使用连接池
   - 监控异步任务
   - 处理异常和超时

10. 适用场景:
    - API网关
    - 微服务
    - 实时应用
    - 爬虫服务
    - 数据处理管道
"""