from typing import List, Optional

from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query, status
from fastapi.responses import PlainTextResponse
from sqlalchemy.orm import Session

# 修改导入方式
from app.crud.crud_job import crud_job
from app.crud.crud_spider import CRUDSpider
from app.db.session import get_db
from app.schemas import common, job
from app.services.job_service import job_service

# Create a fresh CRUDSpider instance for spider lookups in this router
# (crud_job, by contrast, is imported as a shared singleton).
crud_spider = CRUDSpider()

router = APIRouter()

@router.post("/run/{spider_id}", response_model=job.JobRead, status_code=status.HTTP_202_ACCEPTED)
def run_spider(
    spider_id: str,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db)
):
    """Manually trigger one spider run, executed as a background task.

    Returns 202 with the newly created job record; 404 if the spider is
    unknown, 409 if a run for this spider is already active.
    """
    # The spider must exist before anything gets scheduled.
    if not crud_spider.get(db, id=spider_id):
        raise HTTPException(status_code=404, detail="爬虫不存在")

    # Refuse to start a second run while one is already in flight.
    if crud_job.get_active_jobs(db, spider_id=spider_id):
        raise HTTPException(status_code=409, detail="爬虫已在运行中")

    # Persist the run as a new job record, then hand it off to the
    # background runner; the response returns immediately with 202.
    created = crud_job.create(
        db,
        obj_in=job.JobCreate(spider_id=spider_id, job_type=job.JobType.SPIDER_RUN),
    )
    background_tasks.add_task(
        job_service.run_spider_job,
        spider_id=spider_id,
        job_id=created.id,
    )
    return created

@router.get("/", response_model=List[job.JobRead])
def get_jobs(
    spider_id: Optional[str] = None,
    # Named `job_status` so it does not shadow the `status` module imported
    # from fastapi; the HTTP query parameter is still `?status=` via the alias.
    job_status: Optional[job.JobStatus] = Query(None, alias="status"),
    skip: int = 0,
    limit: int = 20,
    db: Session = Depends(get_db)
):
    """List job run history, optionally filtered by spider and/or status.

    Supports pagination via `skip`/`limit` (default page size 20).
    """
    filters = {}
    if spider_id:
        filters["spider_id"] = spider_id
    # Explicit None check: filter on any provided enum value.
    if job_status is not None:
        filters["status"] = job_status

    return crud_job.get_multi(db, skip=skip, limit=limit, filters=filters)

@router.get("/{job_id}", response_model=job.JobRead)
def get_job(
    job_id: str,
    db: Session = Depends(get_db)
):
    """Return status and details for a single job; 404 if it does not exist."""
    found = crud_job.get(db, id=job_id)
    if not found:
        raise HTTPException(status_code=404, detail="任务不存在")
    return found

@router.get("/{job_id}/logs", response_class=PlainTextResponse)
def get_job_logs(
    job_id: str,
    since: Optional[int] = None,
    tail: Optional[int] = 100,
    db: Session = Depends(get_db)
):
    """Return the logs of a specific job as plain text.

    `since` and `tail` are passed through to the log backend
    (presumably a timestamp offset and a line count — confirm in job_service).
    404 if the job is unknown; 500 if log retrieval fails.
    """
    db_job = crud_job.get(db, id=job_id)
    if not db_job:
        raise HTTPException(status_code=404, detail="任务不存在")

    try:
        return job_service.get_job_logs(job_id, since=since, tail=tail)
    except Exception as e:
        # Chain the original exception (`from e`) so the root cause is
        # preserved in tracebacks/server logs instead of being swallowed.
        raise HTTPException(status_code=500, detail=f"获取日志失败: {str(e)}") from e

@router.post("/{job_id}/stop", response_model=common.Msg)
def stop_job(
    job_id: str,
    db: Session = Depends(get_db)
):
    """Attempt to stop a running job.

    404 if the job is unknown, 409 if it is not in a stoppable state,
    500 if the stop request fails or errors out.
    """
    db_job = crud_job.get(db, id=job_id)
    if not db_job:
        raise HTTPException(status_code=404, detail="任务不存在")

    # Only jobs that have not finished yet can be stopped.
    if db_job.status not in [job.JobStatus.QUEUED, job.JobStatus.STARTING, job.JobStatus.RUNNING]:
        raise HTTPException(status_code=409, detail="任务不在运行中或无法停止")

    # Keep the try body minimal: in the original code the HTTPException raised
    # for a failed stop was itself caught by `except Exception` and re-wrapped
    # with the wrong detail message. Only the service call is guarded here.
    try:
        success = job_service.stop_job(job_id)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"停止任务时发生错误: {str(e)}") from e

    if success:
        return {"message": "停止请求已发送"}
    raise HTTPException(status_code=500, detail="停止任务失败")