"""
后台任务系统主应用

完整的 Celery + FastAPI 集成示例
"""

from fastapi import FastAPI, HTTPException, status, BackgroundTasks as FastAPIBackgroundTasks
from fastapi.responses import JSONResponse
from celery.result import AsyncResult
from celery import chain, group, chord
from typing import Dict, Any, List
import time
import sys
from pathlib import Path

# 处理导入路径
if __name__ == "__main__" or "." not in __name__:
    sys.path.insert(0, str(Path(__file__).parent.parent.parent))
    from stage2_advanced.chapter04_background_tasks.celery_app import celery_app
    from stage2_advanced.chapter04_background_tasks import tasks, schemas
    from stage2_advanced.chapter04_background_tasks.config import TaskQueues
else:
    from .celery_app import celery_app
    from . import tasks, schemas
    from .config import TaskQueues


# ========== 创建 FastAPI 应用 ==========

# FastAPI application instance (served by uvicorn; see __main__ block below).
app = FastAPI(
    title="后台任务系统",
    description="FastAPI + Celery 后台任务处理系统",
    version="1.0.0",
)


# ========== 根路径 ==========

@app.get("/", tags=["根路径"])
def root():
    """Root endpoint: service metadata plus an overview of available routes."""
    endpoint_map = {
        "basic_tasks": "POST /tasks/basic/*",
        "email_tasks": "POST /tasks/email/*",
        "file_tasks": "POST /tasks/files/*",
        "advanced": "POST /tasks/advanced/*",
        "task_status": "GET /tasks/{task_id}",
        "stats": "GET /stats",
    }
    celery_state = "active" if celery_app else "inactive"
    return {
        "message": "后台任务系统 API",
        "version": "1.0.0",
        "docs": "/docs",
        "celery_status": celery_state,
        "endpoints": endpoint_map,
    }


# ========== FastAPI BackgroundTasks 示例 ==========

def simple_background_task(name: str, email: str):
    """Lightweight job for FastAPI's built-in BackgroundTasks.

    Simulates slow side-effect work (e.g. an SMTP round-trip) by sleeping,
    then reports to stdout. Suitable only for quick, fire-and-forget tasks.
    """
    time.sleep(2)  # stand-in for real (slow) e-mail delivery
    print(f"Sending welcome email to {name} ({email})")


@app.post("/fastapi/background-task", tags=["FastAPI BackgroundTasks"])
async def create_fastapi_background_task(
    name: str,
    email: str,
    background_tasks: FastAPIBackgroundTasks
):
    """
    Schedule a job via FastAPI's built-in BackgroundTasks.

    - **name**: user name
    - **email**: e-mail address

    Suited to lightweight work (sending mail, writing logs, ...) that
    runs in-process after the HTTP response has been sent.
    """
    # Queued on the request's BackgroundTasks; executed post-response.
    background_tasks.add_task(simple_background_task, name, email)

    return {
        "message": "Background task added",
        "note": "Task will be executed after response is sent"
    }


# ========== 基础 Celery 任务 ==========

@app.post("/tasks/basic/add", response_model=schemas.TaskResponse, tags=["基础任务"])
async def create_add_task(x: int, y: int):
    """
    Enqueue a simple addition task on the default queue.

    - **x**: first operand
    - **y**: second operand
    """
    queued = tasks.add.apply_async(args=[x, y])

    return {
        "task_id": queued.id,
        "task_name": "add",
        "status": queued.status,
        "queue": "default",
    }


@app.post("/tasks/basic/multiply", response_model=schemas.TaskResponse, tags=["基础任务"])
async def create_multiply_task(x: int, y: int):
    """
    Enqueue a multiplication task on the default queue.

    - **x**: first operand
    - **y**: second operand
    """
    queued = tasks.multiply.apply_async(args=[x, y])

    return {
        "task_id": queued.id,
        "task_name": "multiply",
        "status": queued.status,
        "queue": "default",
    }


@app.post("/tasks/basic/long-running", response_model=schemas.TaskResponse, tags=["基础任务"])
async def create_long_running_task(duration: int = 10):
    """
    Enqueue a long-running task whose progress can be polled via GET /tasks/{task_id}.

    - **duration**: run time in seconds
    """
    queued = tasks.long_running_task.apply_async(args=[duration])

    return {
        "task_id": queued.id,
        "task_name": "long_running_task",
        "status": queued.status,
        "queue": "default",
    }


@app.post("/tasks/basic/retry", response_model=schemas.TaskResponse, tags=["基础任务"])
async def create_retry_task(success_rate: float = 0.3):
    """
    Enqueue a task that may fail and retry itself.

    - **success_rate**: probability of success per attempt (0.0-1.0)

    Raises:
        HTTPException 400: if success_rate is outside [0.0, 1.0] — a rate
            of 0 (or negative) could never succeed and would keep retrying
            in the worker for no reason.
    """
    # Validate up front instead of letting the worker churn on an
    # impossible probability.
    if not 0.0 <= success_rate <= 1.0:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="success_rate must be between 0.0 and 1.0"
        )

    task = tasks.task_with_retry.apply_async(args=[success_rate])

    return {
        "task_id": task.id,
        "task_name": "task_with_retry",
        "status": task.status,
        "queue": "default"
    }


# ========== 邮件任务 ==========

@app.post("/tasks/email/send", response_model=schemas.TaskResponse, tags=["邮件任务"])
async def send_email(email_request: schemas.EmailRequest):
    """
    Enqueue a single e-mail for delivery on the dedicated email queue.

    - **to_email**: recipient address
    - **subject**: message subject
    - **body**: message body
    - **html**: whether the body is HTML
    """
    message_args = [
        email_request.to_email,
        email_request.subject,
        email_request.body,
        email_request.html,
    ]
    queued = tasks.send_email.apply_async(args=message_args, queue=TaskQueues.EMAIL)

    return {
        "task_id": queued.id,
        "task_name": "send_email",
        "status": queued.status,
        "queue": TaskQueues.EMAIL,
    }


@app.post("/tasks/email/bulk", response_model=schemas.TaskResponse, tags=["邮件任务"])
async def send_bulk_emails(bulk_request: schemas.BulkEmailRequest):
    """
    Enqueue a batch of e-mails as a single task on the email queue.

    - **emails**: list of e-mail payloads
    """
    # Pydantic models -> plain dicts so the payload is JSON-serializable.
    payload = [message.model_dump() for message in bulk_request.emails]

    queued = tasks.send_bulk_emails.apply_async(
        args=[payload],
        queue=TaskQueues.EMAIL,
    )

    return {
        "task_id": queued.id,
        "task_name": "send_bulk_emails",
        "status": queued.status,
        "queue": TaskQueues.EMAIL,
    }


# ========== 文件处理任务 ==========

@app.post("/tasks/files/process-image", response_model=schemas.TaskResponse, tags=["文件处理"])
async def process_image(request: schemas.ImageProcessRequest):
    """
    Enqueue an image-processing job on the file-processing queue.

    - **image_path**: path to the image
    - **operations**: operations to apply (e.g. ['thumbnail', 'watermark'])
    """
    queued = tasks.process_image.apply_async(
        args=[request.image_path, request.operations],
        queue=TaskQueues.FILE_PROCESSING,
    )

    return {
        "task_id": queued.id,
        "task_name": "process_image",
        "status": queued.status,
        "queue": TaskQueues.FILE_PROCESSING,
    }


@app.post("/tasks/files/thumbnail", response_model=schemas.TaskResponse, tags=["文件处理"])
async def generate_thumbnail(request: schemas.ThumbnailRequest):
    """
    Enqueue thumbnail generation on the file-processing queue.

    - **image_path**: path to the source image
    - **sizes**: list of target sizes
    """
    queued = tasks.generate_thumbnail.apply_async(
        args=[request.image_path, request.sizes],
        queue=TaskQueues.FILE_PROCESSING,
    )

    return {
        "task_id": queued.id,
        "task_name": "generate_thumbnail",
        "status": queued.status,
        "queue": TaskQueues.FILE_PROCESSING,
    }


# ========== 报表任务 ==========

@app.post("/tasks/report/generate", response_model=schemas.TaskResponse, tags=["报表任务"])
async def generate_report(request: schemas.ReportRequest):
    """
    Enqueue report generation on the report queue.

    - **report_type**: type of report
    - **start_date**: start of the reporting period
    - **end_date**: end of the reporting period
    """
    report_args = [request.report_type, request.start_date, request.end_date]
    queued = tasks.generate_report.apply_async(
        args=report_args,
        queue=TaskQueues.REPORT,
    )

    return {
        "task_id": queued.id,
        "task_name": "generate_report",
        "status": queued.status,
        "queue": TaskQueues.REPORT,
    }


# ========== 高级任务模式 ==========

@app.post("/tasks/advanced/chain", tags=["高级任务"])
async def create_chain_task(request: schemas.ChainTaskRequest):
    """
    Create a task chain (sequential execution).

    - **data**: initial data fed into the first step

    Pipeline: step1 -> step2 -> step3, each step receiving the previous
    step's result.
    """
    # Celery's `|` operator builds a chain of signatures.
    pipeline = tasks.step1.s(request.data) | tasks.step2.s() | tasks.step3.s()
    result = pipeline.apply_async()

    return {
        "task_id": result.id,
        "task_name": "chain_task",
        "status": "PENDING",
        "message": "Chain task created: step1 -> step2 -> step3"
    }


@app.post("/tasks/advanced/group", tags=["高级任务"])
async def create_group_task(request: schemas.GroupTaskRequest):
    """
    Create a task group (parallel execution).

    - **item_ids**: list of item IDs; one task per item, all run in parallel.
    """
    signatures = [tasks.process_item.s(item_id) for item_id in request.item_ids]
    result = group(signatures).apply_async()

    return {
        "task_id": result.id,
        "task_name": "group_task",
        "status": "PENDING",
        "message": f"Group task created with {len(request.item_ids)} items"
    }


@app.post("/tasks/advanced/chord", tags=["高级任务"])
async def create_chord_task(request: schemas.GroupTaskRequest):
    """
    Create a chord (parallel fan-out plus an aggregation callback).

    - **item_ids**: list of item IDs

    All header tasks run in parallel; once every one finishes, the
    callback task runs with their collected results.
    """
    header = [tasks.process_item.s(item_id) for item_id in request.item_ids]
    callback = tasks.add.s(1, 2)  # placeholder aggregation task (demo only)
    result = chord(header, callback).apply_async()

    return {
        "task_id": result.id,
        "task_name": "chord_task",
        "status": "PENDING",
        "message": "Chord task created (group + callback)"
    }


# ========== 任务管理 ==========

@app.get("/tasks/{task_id}", response_model=schemas.TaskResult, tags=["任务管理"])
async def get_task_status(task_id: str):
    """
    Fetch a task's state and (when available) its result.

    - **task_id**: task ID

    Possible states:
    - PENDING: waiting to run (also reported for unknown task IDs)
    - STARTED: begun executing
    - PROGRESS: custom in-flight state with progress metadata
    - SUCCESS / FAILURE / RETRY / REVOKED
    """
    result = AsyncResult(task_id, app=celery_app)
    state = result.state

    payload = {
        "task_id": task_id,
        "status": result.status,
        "result": None,
        "error": None,
        "traceback": None,
        "started_at": None,
        "completed_at": None,
        "runtime": None,
    }

    if state == 'PENDING':
        payload["result"] = "Task is waiting to be executed"
    elif state == 'SUCCESS':
        payload["result"] = result.result
    elif state == 'FAILURE':
        payload["error"] = str(result.info)
        payload["traceback"] = result.traceback
    else:
        # PROGRESS and any other state expose whatever the task stored
        # in its meta (e.g. progress percentage).
        payload["result"] = result.info

    return payload


@app.post("/tasks/{task_id}/cancel", tags=["任务管理"])
async def cancel_task(task_id: str):
    """
    Cancel (revoke) a task.

    - **task_id**: task ID
    """
    # terminate=True also kills the task if it is already running.
    AsyncResult(task_id, app=celery_app).revoke(terminate=True)

    return {
        "message": f"Task {task_id} has been cancelled",
        "task_id": task_id,
        "status": "REVOKED",
    }


@app.post("/tasks/{task_id}/retry", tags=["任务管理"])
async def retry_task(task_id: str):
    """
    Re-submit a failed task as a brand-new task.

    - **task_id**: ID of the failed task

    Raises:
        HTTPException 400: if the task is not in FAILURE state, or if the
            result backend did not record the task's name/arguments
            (that requires ``result_extended=True`` in the Celery config).
    """
    task = AsyncResult(task_id, app=celery_app)

    if task.state != 'FAILURE':
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Task is not in FAILURE state"
        )

    # AsyncResult.name/args/kwargs are only populated when the result
    # backend stores extended task metadata (result_extended=True).
    # Without the guard, send_task(None) fails with an opaque error.
    if not task.name:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Task metadata unavailable; enable result_extended to allow retry"
        )

    # Re-dispatch the same task with the originally recorded arguments.
    new_task = celery_app.send_task(
        task.name,
        args=task.args or (),
        kwargs=task.kwargs or {},
    )

    return {
        "message": "Task has been retried",
        "original_task_id": task_id,
        "new_task_id": new_task.id
    }


# ========== 统计和监控 ==========

@app.get("/stats", tags=["统计"])
async def get_stats():
    """Return a snapshot of worker and task statistics from Celery."""
    inspector = celery_app.control.inspect()

    # Each inspect call broadcasts to the workers; empty dict when none reply.
    worker_stats = inspector.stats()

    return {
        "workers": worker_stats or {},
        "active_tasks": inspector.active() or {},
        "scheduled_tasks": inspector.scheduled() or {},
        "reserved_tasks": inspector.reserved() or {},
        "total_workers": len(worker_stats) if worker_stats else 0,
    }


@app.get("/workers", tags=["Worker 管理"])
async def get_workers():
    """List every known Celery worker with basic status information."""
    inspector = celery_app.control.inspect()
    stats = inspector.stats()
    active = inspector.active()

    if not stats:
        return {
            "message": "No workers available",
            "workers": []
        }

    active_map = active or {}
    workers = [
        {
            "name": name,
            "status": "active",
            "pool": info.get("pool", {}).get("implementation"),
            "active_tasks": len(active_map.get(name, [])),
            "total_processed": info.get("total", {}),
        }
        for name, info in stats.items()
    ]

    return {
        "total": len(workers),
        "workers": workers,
    }


@app.post("/workers/shutdown", tags=["Worker 管理"])
async def shutdown_workers():
    """Broadcast a shutdown signal to every worker (use with care!)."""
    # Broadcast command: all connected workers will begin shutting down.
    celery_app.control.shutdown()

    return {"message": "Shutdown signal sent to all workers"}


# ========== 队列管理 ==========

@app.get("/queues", tags=["队列管理"])
async def get_queues():
    """Report queues currently consumed by workers plus the configured ones."""
    consumed = celery_app.control.inspect().active_queues()

    defined = {
        "default": TaskQueues.DEFAULT,
        "high_priority": TaskQueues.HIGH_PRIORITY,
        "low_priority": TaskQueues.LOW_PRIORITY,
        "email": TaskQueues.EMAIL,
        "report": TaskQueues.REPORT,
        "file_processing": TaskQueues.FILE_PROCESSING,
    }

    return {
        "queues": consumed or {},
        "defined_queues": defined,
    }


# ========== 主程序入口 ==========

if __name__ == "__main__":
    import uvicorn

    banner = "=" * 50
    print(banner)
    print("🚀 启动后台任务系统")
    print(banner)
    print("📖 API 文档：http://127.0.0.1:8040/docs")
    print("⚠️  确保 Redis 和 Celery Worker 已启动！")
    print(banner)
    print("\n启动 Celery Worker:")
    print("celery -A stage2_advanced.chapter04_background_tasks.celery_app worker --loglevel=info")
    print("\n启动 Celery Beat (定时任务):")
    print("celery -A stage2_advanced.chapter04_background_tasks.celery_app beat --loglevel=info")
    print(banner)

    # Pick the import string matching how this module was loaded
    # (direct script vs. package import) so uvicorn's reloader works.
    app_path = (
        "main:app"
        if "." not in __name__
        else "stage2_advanced.chapter04_background_tasks.main:app"
    )
    uvicorn.run(app_path, host="0.0.0.0", port=8040, reload=True)

