from __future__ import annotations

import json
import re
from dataclasses import dataclass, asdict
from datetime import datetime, timezone, timedelta
from pathlib import Path
from typing import Dict, List

from .events import read_owner_events, index_by_id
from .paths import get_home


def _parse_iso(s: str) -> datetime:
    try:
        if s.endswith("Z"):
            s = s[:-1] + "+00:00"
        return datetime.fromisoformat(s)
    except Exception:
        return datetime.now(timezone.utc)


def _cache_path(owner: str, target: str) -> Path:
    """Return the notification-cache file for *owner*/*target*, creating its directory."""
    directory = get_home() / "index" / "notifications" / owner
    directory.mkdir(parents=True, exist_ok=True)
    return directory / (target + ".json")


def _load(owner: str, target: str) -> Dict:
    """Load the cached notification state for *owner*/*target*.

    Returns a fresh default state when the cache file is missing, unreadable,
    not valid JSON, or holds a JSON value that is not an object — callers use
    ``.get`` on the result and require a dict.
    """
    default: Dict = {"last_created_at": "1970-01-01T00:00:00Z", "items": []}
    p = _cache_path(owner, target)
    if not p.exists():
        return default
    try:
        data = json.loads(p.read_text(encoding="utf-8"))
    except (OSError, ValueError):
        # OSError: file vanished or unreadable between exists() and read;
        # ValueError covers json.JSONDecodeError and bad UTF-8.
        # Best-effort cache: start over rather than crash.
        return default
    # Guard against a cache file containing e.g. a bare list or string.
    return data if isinstance(data, dict) else default


def _save(owner: str, target: str, data: Dict) -> None:
    """Atomically persist *data* as pretty-printed JSON.

    Writes to a sibling temp file and renames it over the real cache file so
    a crash mid-write cannot leave truncated JSON behind (which ``_load``
    would silently discard, losing the whole notification history).
    """
    p = _cache_path(owner, target)
    tmp = p.with_name(p.name + ".tmp")
    tmp.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8")
    # Path.replace is atomic on POSIX and overwrites an existing file on Windows.
    tmp.replace(p)


def _build_notifications_for_events(events, owner: str, target: str) -> List[Dict[str, str]]:
    """Derive notification items for *target* from a batch of events.

    Emits "comment"/"reaction"/"task_comment" items for replies to events
    authored by *target*, and "mention" items when ``@target`` appears (at a
    word boundary) in an event's ``mentions`` header or its body.  Events
    authored by *target* never notify *target* itself.
    """
    by_id = index_by_id(events)
    items: List[Dict[str, str]] = []

    def add(item_type: str, actor: str, created_at: str, summary: str, source_id: str) -> None:
        items.append({
            "type": item_type,
            "actor": actor,
            "created_at": created_at,
            "summary": summary,
            "source_id": source_id,
        })

    # Match @target only at word boundaries so "@alice" does not fire for
    # "@alicesmith" or "bob@alice.example" (a plain substring test did).
    # re.escape guards against targets containing regex metacharacters.
    mention_re = re.compile(r"(?<!\w)" + re.escape(f"@{target}") + r"(?!\w)")

    for e in events:
        t = e.type
        if t in ("comment", "reaction", "task_comment"):
            parent_id = e.headers.get("in_reply_to", "")
            parent = by_id.get(parent_id)
            # Notify only when the parent event belongs to target and the
            # reply comes from someone else.
            if parent and parent.actor == target and e.actor != target:
                if t == "reaction":
                    add("reaction", e.actor, e.created_at,
                        f"{e.headers.get('reaction','like')} on {parent_id}", e.id)
                else:
                    # comment / task_comment: first 140 chars, flattened to one line.
                    add(t, e.actor, e.created_at,
                        e.content[:140].replace("\n", " "), e.id)

        mentions = e.headers.get("mentions", "")
        if e.actor != target and (mention_re.search(mentions) or mention_re.search(e.content)):
            add("mention", e.actor, e.created_at,
                e.content[:140].replace("\n", " "), e.id)

    return items


def update_notifications_cache(memory_dir: Path, owner: str, target: str) -> Dict:
    """Refresh *target*'s notification cache from *owner*'s event log.

    Scans only events newer than the cached high-water mark, merges the
    resulting items into the cache (deduplicated by ``source_id``, newest
    built item winning), keeps the 2000 most recent, advances the high-water
    mark to the newest event seen, and persists the cache.  Returns the
    (possibly unchanged) cache dict.
    """
    cache = _load(owner, target)
    watermark = _parse_iso(cache.get("last_created_at", "1970-01-01T00:00:00Z"))

    events = read_owner_events(memory_dir, owner, None)
    fresh = [ev for ev in events if _parse_iso(ev.created_at) > watermark]
    if not fresh:
        return cache

    # Merge: existing items first, then newly built ones overwrite on id clash.
    merged_by_id = {
        item.get("source_id"): item
        for item in cache.get("items", [])
        if item.get("source_id")
    }
    for item in _build_notifications_for_events(fresh, owner, target):
        merged_by_id[item["source_id"]] = item

    # Newest first, capped at 2000 entries.
    cache["items"] = sorted(
        merged_by_id.values(),
        key=lambda item: item.get("created_at", ""),
        reverse=True,
    )[:2000]

    newest = max((_parse_iso(ev.created_at) for ev in events), default=watermark)
    cache["last_created_at"] = newest.isoformat().replace("+00:00", "Z")
    _save(owner, target, cache)
    return cache


def get_notifications_within(memory_dir: Path, owner: str, target: str, last_hours: int) -> List[Dict[str, str]]:
    """Return *target*'s notifications created within the last *last_hours* hours.

    Refreshes the cache first, filters items by ``created_at``, and returns
    them newest-first with the internal ``source_id`` field stripped.  The
    returned dicts are copies: the previous implementation ``pop``-ed
    ``source_id`` from the cached items in place, so a read call silently
    mutated the cache structure shared with update_notifications_cache.
    """
    cache = update_notifications_cache(memory_dir, owner, target)
    since = datetime.now(timezone.utc) - timedelta(hours=last_hours)
    recent = [
        it for it in cache.get("items", [])
        if _parse_iso(it.get("created_at", "1970-01-01T00:00:00Z")) >= since
    ]
    # Newest first.
    recent.sort(key=lambda it: it.get("created_at", ""), reverse=True)
    # Copy each item minus source_id instead of mutating the cached dicts.
    return [{k: v for k, v in it.items() if k != "source_id"} for it in recent]

