from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.db.postgres import get_session
from app.models import DailyTopic, CrawlingTask
from app.schemas.social_task import TaskCreate, TaskRead, TaskRunResult
from app.services.fames_spider.orchestrator import run_crawling_task
from app.services.fames_spider.adapters.weibo import WeiboAdapter
from app.services.fames_spider.adapters.xhs import XhsAdapter
from app.services.fames_spider.adapters.douyin import DouyinAdapter
from app.services.fames_spider.adapters.kuaishou import KuaishouAdapter
from app.services.fames_spider.adapters.bilibili import BilibiliAdapter
from app.services.fames_spider.adapters.zhihu import ZhihuAdapter
from app.services.fames_spider.adapters.tieba import TiebaAdapter
from app.services.fames_spider.session import build_platform_auth
import uuid
import json


# Router for social-media crawling task endpoints, mounted under /social/tasks.
router = APIRouter(prefix="/social/tasks", tags=["social-tasks"])


@router.post("/topics/{topic_id}")
async def create_tasks(topic_id: str, payload: TaskCreate, session: AsyncSession = Depends(get_session)):
    """Create one pending CrawlingTask row per requested platform for a topic.

    Returns the generated task ids as ``{"task_ids": [...]}``.
    Raises HTTP 404 when the topic does not exist.
    """
    topic = await session.get(DailyTopic, topic_id)
    if topic is None:
        raise HTTPException(status_code=404, detail="话题不存在")

    # These serialized payloads are identical for every platform, so build them once.
    keywords_json = json.dumps(payload.keywords or [])
    config_json = json.dumps({"accounts": payload.accounts or []})
    # '1970-01-01' acts as a "no date" sentinel when the caller omits a schedule.
    scheduled = payload.scheduled_date or '1970-01-01'

    task_ids = []
    for platform in payload.platforms:
        new_task_id = uuid.uuid4().hex
        task_ids.append(new_task_id)
        session.add(CrawlingTask(
            task_id=new_task_id,
            topic_code=topic.topic_id,
            topic_ref_id=topic.id,
            platform=str(platform),
            search_keywords_json=keywords_json,
            task_status='pending',
            start_time=None,
            end_time=None,
            total_crawled=0,
            success_count=0,
            error_count=0,
            error_message=None,
            config_params_json=config_json,
            scheduled_date=scheduled,
        ))

    await session.flush()
    await session.commit()
    return {"task_ids": task_ids}


@router.get("/", response_model=list[TaskRead])
async def list_tasks(session: AsyncSession = Depends(get_session)):
    """Return every crawling task, newest first (by created_at)."""
    stmt = select(CrawlingTask).order_by(CrawlingTask.created_at.desc())
    rows = (await session.execute(stmt)).scalars().all()
    return [TaskRead.model_validate(row) for row in rows]


def _build_adapters(headers, cookie_dict):
    """Map each supported platform key to a configured adapter instance.

    XHS additionally needs the cookie dict; the other adapters take headers only.
    """
    return {
        'weibo': WeiboAdapter(headers=headers),
        'xhs': XhsAdapter(headers=headers, cookies=cookie_dict),
        'douyin': DouyinAdapter(headers=headers),
        'kuaishou': KuaishouAdapter(headers=headers),
        'bilibili': BilibiliAdapter(headers=headers),
        'zhihu': ZhihuAdapter(headers=headers),
        'tieba': TiebaAdapter(headers=headers),
    }


def _load_json(raw, default):
    """Best-effort JSON parse of a stored column value.

    Returns `default` when `raw` is empty/None or not valid JSON — the stored
    columns are written by this module but may be missing on legacy rows.
    """
    if not raw:
        return default
    try:
        return json.loads(raw)
    except Exception:
        return default


@router.post("/{task_id}/run", response_model=TaskRunResult)
async def run_task(task_id: str, session: AsyncSession = Depends(get_session)):
    """Execute a stored crawling task once and record its outcome.

    Raises:
        HTTPException 404: the task id does not exist.
        HTTPException 500: the crawl itself raised; the task row is marked
            'failed' with the error message persisted before re-raising.
    """
    obj_q = await session.execute(select(CrawlingTask).where(CrawlingTask.task_id == task_id))
    obj = obj_q.scalars().first()
    if not obj:
        raise HTTPException(status_code=404, detail="任务不存在")

    headers, cookie_dict = await build_platform_auth(obj.platform)
    adapters = _build_adapters(headers, cookie_dict)

    kw = _load_json(obj.search_keywords_json, [])
    cfg = _load_json(obj.config_params_json, {})
    # Guard against a non-dict stored config (the original swallowed the
    # resulting AttributeError and fell back to an empty account list).
    accs = (cfg.get('accounts') or []) if isinstance(cfg, dict) else []

    try:
        count = await run_crawling_task(session, task_id, adapters, None, None, {obj.platform: accs}, kw)
    except Exception as exc:
        # Bug fix: previously a crawl failure left the task stuck in its old
        # status with no error recorded, and the client saw an unhandled 500.
        obj.task_status = 'failed'
        obj.error_message = str(exc)
        await session.commit()
        raise HTTPException(status_code=500, detail=f"任务执行失败: {exc}") from exc

    obj.total_crawled = (obj.total_crawled or 0) + count
    # NOTE(review): status is 'completed' even when error_count > 0 — presumably
    # intentional (partial success counts as done); confirm with product rules.
    obj.task_status = 'completed'
    await session.flush()
    await session.refresh(obj)
    await session.commit()
    return TaskRunResult(
        task_id=task_id,
        status=obj.task_status,
        total_crawled=obj.total_crawled or 0,
        success_count=obj.success_count or 0,
        error_count=obj.error_count or 0,
    )
