from __future__ import annotations

import asyncio
import logging
from copy import deepcopy
from dataclasses import dataclass
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime
from typing import Any, Awaitable, Callable, Dict, List, Optional
from urllib.parse import parse_qsl, urlencode, urljoin, urlparse

import httpx
from sqlalchemy import select

from ..config import get_settings
from ..db import SessionLocal
from ..models import RSSFeedItem

logger = logging.getLogger(__name__)
settings = get_settings()

NewItemsCallback = Callable[[List[RSSFeedItem]], Awaitable[None]]


@dataclass
class RSSHubFeed:
    """A configured RSSHub feed subscription.

    Attributes:
        name: Feed identifier; used for lookup by name and as the ``source``
            value when persisting fetched items.
        path: RSSHub route path joined onto the configured base URL, or an
            absolute http(s) URL fetched as-is.
    """

    name: str
    path: str

class RSSHubIngestor:
    """Background task that polls RSSHub feeds and writes new entries to the DB.

    Each configured feed is fetched as JSON, entries are normalized and
    deduplicated by ``guid`` (scoped per feed/source), and newly persisted
    records are handed to an optional async callback.
    """

    def __init__(
        self,
        feeds: List[RSSHubFeed],
        *,
        base_url: str,
        poll_interval: int = 300,
        request_timeout: int = 10,
        items_per_fetch: int = 50,
        on_new_items: Optional[NewItemsCallback] = None,
    ) -> None:
        """Configure the ingestor.

        Args:
            feeds: Feeds to poll; an empty list leaves the ingestor disabled.
            base_url: RSSHub instance base URL (trailing slashes stripped).
            poll_interval: Seconds between polls; clamped to >= 30.
            request_timeout: Per-request timeout in seconds; clamped to >= 5.
            items_per_fetch: ``limit`` query value per fetch; clamped to >= 1.
            on_new_items: Optional async callback invoked with newly stored
                records after each poll.
        """
        self._feeds = feeds
        self._feeds_by_name = {feed.name: feed for feed in feeds}
        self._base_url = base_url.rstrip("/")
        # Clamp user-provided values to sane lower bounds.
        self._poll_interval = max(poll_interval, 30)
        self._request_timeout = max(request_timeout, 5)
        self._items_per_fetch = max(items_per_fetch, 1)
        self._on_new_items = on_new_items

        self._task: Optional[asyncio.Task] = None
        self._stop_event = asyncio.Event()
        # Serializes the background loop against manual fetch_now() calls.
        self._lock = asyncio.Lock()
        self._last_status: Dict[str, Dict[str, Any]] = {}
        self._last_polled_at: Optional[datetime] = None

    @property
    def enabled(self) -> bool:
        """True when at least one feed is configured."""
        return bool(self._feeds)

    @property
    def running(self) -> bool:
        """True while the background polling task is alive."""
        return self._task is not None and not self._task.done()

    def list_feeds(self) -> List[Dict[str, str]]:
        """Return the configured feeds as plain dicts (name + path)."""
        return [{"name": feed.name, "path": feed.path} for feed in self._feeds]

    def get_status(self) -> Dict[str, Any]:
        """Return the last poll timestamp and a copy of the per-feed summary."""
        return {
            "last_polled_at": self._last_polled_at,
            # deepcopy so callers cannot mutate internal state.
            "feeds": deepcopy(self._last_status),
        }

    def set_on_new_items(self, callback: Optional[NewItemsCallback]) -> None:
        """Replace (or clear, with None) the new-items callback."""
        self._on_new_items = callback

    async def start(self) -> None:
        """Start the background polling task (no-op if disabled or running)."""
        if not self.enabled:
            logger.info("RSSHubIngestor 未启用（未配置 feeds）")
            return
        # Reuse the `running` property instead of re-deriving task state.
        if self.running:
            logger.info("RSSHubIngestor 已在运行")
            return
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run())
        logger.info(
            "RSSHubIngestor 已启动，feed=%s，poll_interval=%ss",
            [f.name for f in self._feeds],
            self._poll_interval,
        )

    async def stop(self) -> None:
        """Signal the polling loop to stop and wait for the task to finish."""
        self._stop_event.set()
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
            self._task = None
        logger.info("RSSHubIngestor 已停止")

    async def _run(self) -> None:
        """Polling loop: fetch all feeds, then sleep until timeout or stop."""
        client_timeout = httpx.Timeout(self._request_timeout)
        async with httpx.AsyncClient(timeout=client_timeout) as client:
            while not self._stop_event.is_set():
                try:
                    async with self._lock:
                        await self._poll(client, self._feeds)
                except Exception as exc:
                    logger.error("RSSHubIngestor 轮询失败: %s", exc, exc_info=True)
                try:
                    # Wake early when stop() sets the event; on timeout, poll again.
                    await asyncio.wait_for(
                        self._stop_event.wait(), timeout=self._poll_interval
                    )
                except asyncio.TimeoutError:
                    continue

    async def fetch_now(self, feed_name: Optional[str] = None) -> Dict[str, Dict[str, Any]]:
        """Poll immediately, optionally restricted to a single feed.

        Args:
            feed_name: When given, only that feed is fetched.

        Returns:
            Per-feed summary dicts (see :meth:`_poll`); empty when disabled.

        Raises:
            ValueError: If ``feed_name`` does not match a configured feed.
        """
        if not self.enabled:
            return {}

        feeds: List[RSSHubFeed]
        if feed_name:
            feed = self._feeds_by_name.get(feed_name)
            if not feed:
                raise ValueError(f"Feed not found: {feed_name}")
            feeds = [feed]
        else:
            feeds = self._feeds

        if not feeds:
            return {}

        client_timeout = httpx.Timeout(self._request_timeout)
        async with httpx.AsyncClient(timeout=client_timeout) as client:
            async with self._lock:
                return await self._poll(client, feeds)

    async def _poll(self, client: httpx.AsyncClient, feeds: List[RSSHubFeed]) -> Dict[str, Dict[str, Any]]:
        """Fetch each feed once, persist new items, and return a summary.

        Summary shape per feed: ``{"new_items": int, "fetched_items": int,
        "error": Optional[str]}``. Also updates the cached status and the
        last-polled timestamp when any feed was processed.
        """
        summary: Dict[str, Dict[str, Any]] = {}
        for feed in feeds:
            url = self._build_feed_url(feed.path)
            try:
                resp = await client.get(url)
                resp.raise_for_status()
                payload = resp.json()
            except Exception as exc:  # network / HTTP status / JSON decode
                logger.warning(
                    "抓取 RSSHub 失败 feed=%s url=%s err=%s", feed.name, url, exc
                )
                summary[feed.name] = {
                    "new_items": 0,
                    "fetched_items": 0,
                    "error": str(exc),
                }
                continue

            items = self._extract_items(payload)
            if not items:
                summary[feed.name] = {
                    "new_items": 0,
                    "fetched_items": 0,
                    "error": None,
                }
                continue

            new_records = await self._persist_items(feed.name, items)
            if new_records and self._on_new_items:
                try:
                    await self._on_new_items(new_records)
                except Exception as exc:
                    # Distribution failure must not undo a successful ingest.
                    logger.error("分发 RSS 新内容失败 feed=%s err=%s", feed.name, exc, exc_info=True)
            summary[feed.name] = {
                "new_items": len(new_records),
                "fetched_items": len(items),
                "error": None,
            }

        if summary:
            self._last_status.update(summary)
            self._last_polled_at = datetime.now(timezone.utc)
        return summary

    def _build_feed_url(self, path: str) -> str:
        """Build the fetch URL, injecting JSON output and an item limit.

        ``path`` may be an absolute http(s) URL or a route path joined onto
        the configured base URL. Query parameters already present in the
        path win over the injected ``format``/``limit`` defaults.
        """
        if path.startswith(("http://", "https://")):
            base = path
        else:
            base = urljoin(f"{self._base_url}/", path.lstrip("/"))

        parsed = urlparse(base)
        query = dict(parse_qsl(parsed.query))
        query.setdefault("format", "json")
        query.setdefault("limit", str(self._items_per_fetch))
        return parsed._replace(query=urlencode(query)).geturl()

    def _extract_items(self, payload: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Pull the entry list out of an RSSHub JSON payload.

        Accepts both ``items`` and ``item`` keys. A single dict entry is
        wrapped in a list (some serializers emit an object instead of an
        array for one entry); non-dict entries are dropped.
        """
        if not payload:
            return []
        raw_items = payload.get("items") or payload.get("item") or []
        if isinstance(raw_items, dict):
            # Single-entry payload serialized as an object, not a list.
            raw_items = [raw_items]
        return [item for item in raw_items if isinstance(item, dict)]

    async def _persist_items(
        self, source: str, items: List[Dict[str, Any]]
    ) -> List[RSSFeedItem]:
        """Insert items not yet stored for ``source`` and return new records.

        Deduplicates by guid both against the database and within the
        current batch; commits only when at least one record was created.
        """
        normalized = [self._normalize_item(item) for item in items]
        normalized = [item for item in normalized if item.get("guid")]
        if not normalized:
            return []

        guids = [item["guid"] for item in normalized]
        async with SessionLocal() as session:
            stmt = (
                select(RSSFeedItem.guid)
                .where(RSSFeedItem.source == source)
                .where(RSSFeedItem.guid.in_(guids))
            )
            result = await session.execute(stmt)
            seen = set(result.scalars().all())

            to_create: List[RSSFeedItem] = []
            for item in normalized:
                guid = item["guid"]
                if guid in seen:
                    continue
                # Track batch guids too, so duplicate entries inside one
                # payload are inserted only once.
                seen.add(guid)
                record = RSSFeedItem(
                    source=source,
                    guid=guid,
                    title=item.get("title"),
                    link=item.get("link"),
                    author=item.get("author"),
                    summary=item.get("summary"),
                    published_at=item.get("published_at"),
                    raw=item.get("raw"),
                )
                session.add(record)
                to_create.append(record)

            if not to_create:
                await session.rollback()
                return []

            await session.commit()
            logger.info("RSSHub 新增 %s 条 feed=%s", len(to_create), source)
            return to_create

    def _normalize_item(self, item: Dict[str, Any]) -> Dict[str, Any]:
        """Map a raw feed entry onto the fields stored on RSSFeedItem.

        Falls back through several common key spellings for guid, timestamp,
        author and summary; the untouched entry is kept under ``raw``.
        """
        guid = (
            item.get("guid")
            or item.get("id")
            or item.get("link")
            or item.get("title")
        )
        published = self._parse_datetime(
            item.get("pubDate")
            or item.get("published")
            or item.get("date")
            or item.get("updated")
        )
        summary = item.get("description") or item.get("content")
        if isinstance(summary, list):
            # Content may arrive as a list of fragments; keep string parts.
            summary = " ".join(
                entry for entry in summary if isinstance(entry, str)
            )
        elif isinstance(summary, dict):
            summary = summary.get("value") or summary.get("content")

        return {
            "guid": str(guid).strip() if guid else None,
            "title": item.get("title"),
            "link": item.get("link"),
            "author": item.get("author") or item.get("creator"),
            "summary": summary,
            "published_at": published,
            "raw": item,
        }

    def _parse_datetime(self, value: Any) -> Optional[datetime]:
        """Best-effort conversion to an aware datetime (naive treated as UTC).

        Supports datetime objects, numeric timestamps (assumed epoch
        seconds — TODO confirm upstream unit), ISO-8601 strings, and
        RFC 2822 strings. Returns None for anything unparseable.
        """
        if value is None:
            return None
        if isinstance(value, datetime):
            return value if value.tzinfo else value.replace(tzinfo=timezone.utc)
        if isinstance(value, (int, float)):
            return datetime.fromtimestamp(float(value), tz=timezone.utc)
        if isinstance(value, str):
            value = value.strip()
            if not value:
                return None
            # ISO-8601 first; "Z" suffix normalized for fromisoformat.
            try:
                dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
                return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
            except ValueError:
                pass
            # Fall back to RFC 2822 ("Tue, 02 Jan 2024 03:04:05 GMT" style).
            try:
                dt = parsedate_to_datetime(value)
                if dt:
                    return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
            except Exception:  # noqa: BLE001
                return None
        return None


def _build_ingestor(on_new_items: Optional[NewItemsCallback] = None) -> RSSHubIngestor:
    """Create an RSSHubIngestor wired to the application settings.

    When RSSHub support is disabled in settings, the ingestor is built with
    an empty feed list and therefore stays inert.
    """
    if settings.rsshub_enabled:
        configured_feeds = [
            RSSHubFeed(name=feed_name, path=feed_path)
            for feed_name, feed_path in settings.rsshub_feed_list
        ]
    else:
        configured_feeds = []
    return RSSHubIngestor(
        configured_feeds,
        base_url=settings.rsshub_base_url,
        poll_interval=settings.rsshub_poll_interval,
        request_timeout=settings.rsshub_request_timeout,
        items_per_fetch=settings.rsshub_items_per_fetch,
        on_new_items=on_new_items,
    )


rss_ingestor = _build_ingestor()
