from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional
from datetime import datetime

from app.api.deps import get_current_user, get_db
from app.models.user import User
from app.services.crawler_service import CrawlerService
from app.schemas.crawler import (
    CrawlerJobCreate, CrawlerJobUpdate, CrawlerJobResponse, CrawlerJobListResponse,
    CrawlerJobDetailResponse, ScheduledCrawlerTaskCreate, ScheduledCrawlerTaskUpdate,
    ScheduledCrawlerTaskResponse, CrawlerExecuteRequest, CrawlerExecuteResponse,
    CrawlerJobStatus
)
from app.core.logging import get_logger

# Module-level logger and the APIRouter instance that all route decorators
# below attach their endpoints to (mounted by the application elsewhere).
logger = get_logger(__name__)
router = APIRouter()


@router.post("/jobs", response_model=CrawlerJobResponse, status_code=status.HTTP_201_CREATED)
async def create_crawler_job(
    job_data: CrawlerJobCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Create a new crawler job owned by the authenticated user.

    Returns the persisted job with HTTP 201.
    """
    owner_id = int(current_user.id)  # type: ignore
    return await CrawlerService(db).create_job(job_data, owner_id)


@router.get("/jobs", response_model=CrawlerJobListResponse)
async def get_crawler_jobs(
    skip: int = Query(0, ge=0, description="跳过的记录数"),
    limit: int = Query(100, ge=1, le=1000, description="返回的记录数"),
    # Renamed locally: the original parameter name `status` shadowed the
    # imported `fastapi.status` module. The alias keeps the public query
    # parameter name (`?status=...`) unchanged for API clients.
    job_status: Optional[CrawlerJobStatus] = Query(
        None, alias="status", description="任务状态过滤"
    ),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """List the current user's crawler jobs with offset/limit pagination.

    Returns a page envelope with the validated jobs, the total count, a
    1-based page number derived from skip/limit, and the page size.
    """
    service = CrawlerService(db)
    jobs, total = await service.get_jobs(int(current_user.id), skip, limit, job_status)  # type: ignore

    return CrawlerJobListResponse(
        jobs=[CrawlerJobResponse.model_validate(job) for job in jobs],  # type: ignore
        total=total,
        page=skip // limit + 1,  # limit >= 1 is enforced by Query(ge=1)
        size=len(jobs)
    )


@router.get("/jobs/{job_id}", response_model=CrawlerJobDetailResponse)
async def get_crawler_job(
    job_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Fetch one crawler job by id, scoped to the current user.

    Raises 404 when the job does not exist or is not visible to the caller.
    """
    job = await CrawlerService(db).get_job(job_id, int(current_user.id))  # type: ignore
    if job:
        return job
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Crawler job not found"
    )


@router.put("/jobs/{job_id}", response_model=CrawlerJobResponse)
async def update_crawler_job(
    job_id: int,
    job_data: CrawlerJobUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Apply an update payload to an existing crawler job.

    Raises 404 when the job does not exist or is not owned by the caller.
    """
    updated = await CrawlerService(db).update_job(job_id, int(current_user.id), job_data)  # type: ignore
    if updated:
        return updated
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Crawler job not found"
    )


@router.delete("/jobs/{job_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_crawler_job(
    job_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Delete a crawler job; 204 on success, 404 when it cannot be found."""
    deleted = await CrawlerService(db).delete_job(job_id, int(current_user.id))  # type: ignore
    if not deleted:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Crawler job not found"
        )


@router.post("/jobs/{job_id}/execute")
async def execute_crawler_job(
    job_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Trigger execution of a stored crawler job.

    Returns the raw result dict from the service on success; raises 400
    with the service-reported error when execution fails.
    """
    outcome = await CrawlerService(db).execute_job(job_id, int(current_user.id))  # type: ignore
    if outcome.get("success", False):
        return outcome
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=outcome.get("error", "Failed to execute crawler job")
    )


@router.post("/execute", response_model=CrawlerExecuteResponse)
async def execute_immediate_crawl(
    request: CrawlerExecuteRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Run a one-off crawl immediately without persisting a job record."""
    raw_result = await CrawlerService(db).execute_immediate(request)
    return CrawlerExecuteResponse(**raw_result)


# 定时任务相关端点
@router.post("/scheduled-tasks", response_model=ScheduledCrawlerTaskResponse, status_code=status.HTTP_201_CREATED)
async def create_scheduled_crawler_task(
    task_data: ScheduledCrawlerTaskCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Create a scheduled (recurring) crawler task for the current user."""
    uid = int(current_user.id)  # type: ignore
    return await CrawlerService(db).create_scheduled_task(task_data, uid)


@router.get("/scheduled-tasks", response_model=List[ScheduledCrawlerTaskResponse])
async def get_scheduled_crawler_tasks(
    skip: int = Query(0, ge=0, description="跳过的记录数"),
    limit: int = Query(100, ge=1, le=1000, description="返回的记录数"),
    is_active: Optional[bool] = Query(None, description="是否激活过滤"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """List scheduled crawler tasks, optionally filtered by active flag.

    The service also returns a total count, which this endpoint discards.
    """
    tasks, _total = await CrawlerService(db).get_scheduled_tasks(int(current_user.id), skip, limit, is_active)  # type: ignore
    return tasks


@router.get("/scheduled-tasks/{task_id}", response_model=ScheduledCrawlerTaskResponse)
async def get_scheduled_crawler_task(
    task_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Fetch one scheduled crawler task by id, scoped to the current user.

    Raises 404 when the task does not exist or is not visible to the caller.
    """
    task = await CrawlerService(db).get_scheduled_task(task_id, int(current_user.id))  # type: ignore
    if task:
        return task
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Scheduled crawler task not found"
    )


@router.put("/scheduled-tasks/{task_id}", response_model=ScheduledCrawlerTaskResponse)
async def update_scheduled_crawler_task(
    task_id: int,
    task_data: ScheduledCrawlerTaskUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Apply an update payload to an existing scheduled crawler task.

    Raises 404 when the task does not exist or is not owned by the caller.
    """
    updated = await CrawlerService(db).update_scheduled_task(task_id, int(current_user.id), task_data)  # type: ignore
    if updated:
        return updated
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Scheduled crawler task not found"
    )


@router.delete("/scheduled-tasks/{task_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_scheduled_crawler_task(
    task_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Delete a scheduled crawler task; 204 on success, 404 when not found."""
    removed = await CrawlerService(db).delete_scheduled_task(task_id, int(current_user.id))  # type: ignore
    if not removed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Scheduled crawler task not found"
        )


@router.get("/health")
async def crawler_health_check():
    """Crawler service health check.

    Reports whether the crawl4ai backend is available, the optimized
    crawler's performance metrics, and the current UTC time.
    """
    # Local import keeps the style of the existing lazy imports below.
    from datetime import timezone

    from app.tools.crawler import crawler_tool
    from app.tools.optimized_crawler import optimized_crawler_tool

    return {
        "status": "healthy",
        "crawl4ai_available": crawler_tool.is_available(),
        "performance_metrics": optimized_crawler_tool.get_performance_metrics(),
        # datetime.utcnow() is deprecated since Python 3.12 and produces a
        # naive datetime; use an aware UTC timestamp instead. Note the
        # ISO string now carries an explicit "+00:00" offset.
        "timestamp": datetime.now(timezone.utc).isoformat()
    }