from __future__ import annotations

import json
import os
import time
import logging
from datetime import datetime
from typing import Any, Dict

from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from prometheus_fastapi_instrumentator import Instrumentator
from sqlalchemy import Column, DateTime, Integer, String, create_engine, text
from sqlalchemy.dialects.sqlite import JSON as SQLITE_JSON
from sqlalchemy.engine import Engine
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import declarative_base, sessionmaker, Session
import httpx
from urllib.parse import urlparse
import secrets

# Qwen (DashScope OpenAI-compatible) endpoint configuration, overridable via env.
QWEN_API_BASE = os.getenv("QWEN_API_BASE", "https://dashscope.aliyuncs.com/compatible-mode/v1")
# Do not provide a hardcoded default key; empty means "not configured" and will use stub
QWEN_API_KEY = os.getenv("QWEN_API_KEY", "")
QWEN_MODEL = os.getenv("QWEN_MODEL", "qwen3-30b-a3b")


# Default SQLite path under repo/server
DEFAULT_DB_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'dev.db'))
# DB_URL env var wins when set; otherwise fall back to the local SQLite file.
DB_URL = os.getenv("DB_URL") or f"sqlite:///{DEFAULT_DB_PATH}"

# -------- logging setup --------
# LOG_LEVEL accepts standard logging level names; unknown values fall back to INFO.
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
logging.basicConfig(
    level=getattr(logging, LOG_LEVEL, logging.INFO),
    format="%(asctime)s %(levelname)s [%(name)s] %(message)s",
)
logger = logging.getLogger("ai_trip_backend")

# Reduce noisy per-request access logs in journald (configurable).
# Fix: removed a redundant duplicate `import os` here; `os` is already imported
# at the top of the file.
ACCESS_LOG_LEVEL = os.getenv("ACCESS_LOG_LEVEL", "WARNING").upper()
try:
    # Best-effort: uvicorn may not be installed/loaded in some contexts.
    logging.getLogger("uvicorn.access").setLevel(getattr(logging, ACCESS_LOG_LEVEL, logging.WARNING))
except Exception:
    pass

def _ensure_sqlite_dir(db_url: str) -> None:
    try:
        if not db_url.lower().startswith("sqlite:"):
            return
        # Skip memory DB
        if ":memory:" in db_url:
            return
        parsed = urlparse(db_url)
        # Absolute path: sqlite:////abs/path.db -> startswith 'sqlite:////'
        if db_url.startswith("sqlite:////"):
            file_path = parsed.path  # '/abs/path.db'
        # Relative path: sqlite:///rel/path.db
        elif db_url.startswith("sqlite:///"):
            file_path = parsed.path.lstrip("/")  # 'rel/path.db'
            file_path = os.path.abspath(file_path)
        else:
            # Other forms not handled
            return
        dir_path = os.path.dirname(file_path)
        if dir_path:
            os.makedirs(dir_path, exist_ok=True)
    except Exception as e:
        logger.warning("Failed ensuring SQLite dir for %s: %s", db_url, e)

# Ensure DB directory exists for SQLite URLs (default or provided)
_ensure_sqlite_dir(DB_URL)

# SQLAlchemy setup
# NOTE(review): check_same_thread is a SQLite-specific connect arg (needed because
# FastAPI serves requests from multiple threads); a non-SQLite DB_URL would reject
# it — confirm if DB_URL can ever point at another engine.
engine: Engine = create_engine(DB_URL, connect_args={"check_same_thread": False})
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False)
Base = declarative_base()


class KV(Base):
    """Single-table JSON key/value store backing every dataset in this service."""
    __tablename__ = "kv"
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Dataset key, e.g. "chatsV1", "session:<sid>", "rate:ai:<ident>".
    k = Column(String(64), unique=True, index=True, nullable=False)
    # Arbitrary JSON payload; NULL is treated as "use the caller's default".
    v = Column(SQLITE_JSON, nullable=True)
    # NOTE(review): utcnow stores naive UTC timestamps; fine for SQLite, not tz-aware.
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)


def init_db():
    """Create all tables and apply SQLite PRAGMAs (best-effort)."""
    Base.metadata.create_all(bind=engine)
    # NOTE(review): these PRAGMAs run on one pooled connection; per-connection
    # settings such as foreign_keys may not apply to other pool members —
    # confirm whether FK enforcement is relied upon elsewhere.
    pragmas = (
        "PRAGMA foreign_keys=ON",
        "PRAGMA journal_mode=WAL",
        "PRAGMA busy_timeout=5000",
    )
    with engine.connect() as conn:
        try:
            for stmt in pragmas:
                conn.execute(text(stmt))
        except OperationalError:
            pass
    logger.info("Database initialized (SQLite) at %s", DB_URL)

# -------- storage quota and eviction (lightweight) --------
# Global storage budget for backend DB file (SQLite only; other DB engines skip file-based checks)
STORAGE_BUDGET_BYTES = int(os.getenv("STORAGE_BUDGET_BYTES", str(15 * 1024 * 1024 * 1024)))  # default 15GB
STORAGE_BUDGET_MARGIN = float(os.getenv("STORAGE_BUDGET_MARGIN", "0.98"))  # start trimming when size > 98% budget

# Dataset-specific default limits (server-side)
MAX_MESSAGES_PER_THREAD = int(os.getenv("MAX_MESSAGES_PER_THREAD", "200"))  # newest messages kept per thread/group
MAX_REMOTE_ITINS = int(os.getenv("MAX_REMOTE_ITINS", "200"))  # most recently updated snapshots kept

# Aggressive trim levels if DB exceeds budget (apply in order, then stop)
AGGRESSIVE_LEVELS = [
    {"per_thread": 100, "remote_itins": 100},
    {"per_thread": 50, "remote_itins": 50},
]


def _sqlite_file_path_from_url(db_url: str) -> str | None:
    try:
        if not db_url.lower().startswith("sqlite:"):
            return None
        if ":memory:" in db_url:
            return None
        parsed = urlparse(db_url)
        if db_url.startswith("sqlite:////"):
            return parsed.path
        elif db_url.startswith("sqlite:///"):
            return os.path.abspath(parsed.path.lstrip("/"))
    except Exception:
        return None
    return None


def _current_db_size_bytes() -> int:
    """Size of the SQLite DB file in bytes; 0 when unknown or not file-backed."""
    try:
        path = _sqlite_file_path_from_url(DB_URL)
        if path and os.path.exists(path):
            return os.path.getsize(path)
    except Exception:
        pass
    return 0


def _prune_chats_map(data: dict, per_thread_limit: int) -> dict:
    if not isinstance(data, dict):
        return data
    out: dict[str, list] = {}
    for k, arr in data.items():
        if isinstance(arr, list):
            out[k] = arr[-per_thread_limit:]
        else:
            out[k] = arr
    return out


def _prune_remote_itins_map(data: dict, max_count: int) -> dict:
    if not isinstance(data, dict):
        return data
    try:
        items = list(data.items())
        items.sort(key=lambda kv: (kv[1] or {}).get("updatedAt", 0), reverse=True)
        kept = items[:max_count]
        return {k: v for k, v in kept}
    except Exception:
        return data


def _prune_value_for_key(key: str, value: Any, per_thread_limit: int, max_remote_itins: int) -> Any:
    """Apply the dataset-specific size cap for key; unknown keys pass through."""
    as_map = value if isinstance(value, dict) else {}
    if key in (CHATS_KEY, GROUP_CHATS_KEY):
        return _prune_chats_map(as_map, per_thread_limit)
    if key == CHAT_REMOTE_ITINERARIES_KEY:
        return _prune_remote_itins_map(as_map, max_remote_itins)
    return value


def _enforce_budget(session: Session) -> None:
    """Best-effort global storage cap.

    When the SQLite file exceeds STORAGE_BUDGET_MARGIN of the budget, trim
    the heaviest datasets level by level (AGGRESSIVE_LEVELS) and stop as soon
    as the file is back under the margin. Never raises.
    """
    try:
        threshold = int(STORAGE_BUDGET_BYTES * STORAGE_BUDGET_MARGIN)
        size = _current_db_size_bytes()
        if size <= threshold:
            return
        logger.warning("DB size %s exceeds margin of budget %s; applying aggressive trims", size, STORAGE_BUDGET_BYTES)
        for level in AGGRESSIVE_LEVELS:
            remote_cap = level.get("remote_itins", 50)
            thread_cap = level.get("per_thread", 50)

            # Remote itinerary snapshots first — typically the heaviest JSON.
            current = get_kv(session, CHAT_REMOTE_ITINERARIES_KEY, {})
            trimmed = _prune_remote_itins_map(current if isinstance(current, dict) else {}, remote_cap)
            if trimmed != current:
                set_kv(session, CHAT_REMOTE_ITINERARIES_KEY, trimmed)

            # Then group chats, then direct chats (same order as before).
            for chats_key in (GROUP_CHATS_KEY, CHATS_KEY):
                current = get_kv(session, chats_key, {})
                trimmed = _prune_chats_map(current if isinstance(current, dict) else {}, thread_cap)
                if trimmed != current:
                    set_kv(session, chats_key, trimmed)

            size = _current_db_size_bytes()
            if size <= threshold:
                logger.info("DB size now within margin after trims: %s bytes", size)
                return
        logger.error("DB size still above margin after aggressive trims: %s bytes (budget=%s)", size, STORAGE_BUDGET_BYTES)
    except Exception as e:
        logger.warning("Budget enforcement failed: %s", e)


def get_kv(session, key: str, default: Any) -> Any:
    """Fetch the JSON value stored under key; default when missing or NULL."""
    row = session.query(KV).filter(KV.k == key).first()
    if row is None or row.v is None:
        return default
    return row.v


def set_kv(session, key: str, value: Any) -> None:
    """Insert or overwrite the JSON value stored under key, then commit."""
    now = datetime.utcnow()
    existing: KV | None = session.query(KV).filter(KV.k == key).first()
    if existing is not None:
        existing.v = value
        existing.updated_at = now
    else:
        session.add(KV(k=key, v=value, updated_at=now))
    session.commit()


app = FastAPI(title="AI Trip Planner Backend (FastAPI + SQLite)")

# CORS origins: read from env FRONTEND_ORIGINS (comma separated), fallback to localhost and optional public IP
origins_env = os.getenv("FRONTEND_ORIGINS", "").strip()
if origins_env:
    frontend_origins = [o.strip() for o in origins_env.split(",") if o.strip()]
else:
    # Development / deployment defaults.
    frontend_origins = [
        "http://127.0.0.1:8000",
        "http://localhost:8000",
        "http://47.99.192.240",  # public server IP
    ]

app.add_middleware(
    CORSMiddleware,
    allow_origins=frontend_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register metrics middleware before app starts
Instrumentator().instrument(app).expose(app)


@app.on_event("startup")
def on_startup():
    """Initialize the database and log the effective configuration."""
    init_db()
    logger.info("App startup complete. CORS: %s", ", ".join(frontend_origins))
    if not QWEN_API_KEY:
        logger.warning("QWEN_API_KEY not set; /ai/itinerary/generate will return a stub response")
    else:
        logger.info("Qwen configured: base=%s model=%s", QWEN_API_BASE, QWEN_MODEL)


@app.get("/health")
def health():
    """Liveness probe: always reports ok."""
    return {"status": "ok"}


def json_body(request: Request) -> Dict[str, Any]:
    """Best-effort JSON parse of a request body, tolerating empty bodies.

    Returns {} for an absent/zero content-length body and on any failure.
    NOTE(review): reads Starlette's private cached `request._body`; if the
    body has not been read yet this falls through to {} — confirm callers.
    """
    try:
        if request.headers.get("content-length") in (None, "0"):
            return {}
        raw = request._body  # type: ignore[attr-defined]
        if isinstance(raw, (bytes, bytearray)):
            raw = raw.decode()
        return json.loads(raw)
    except Exception:
        return {}


# Simple helpers to align with frontend keys/localStorage schema
ITIN_KEY = "itinerariesV2"
CHATS_KEY = "chatsV1"
GROUP_CHATS_KEY = "groupChatsV1"
COMPANIONS_KEY = "companionsV1"
GROUPS_KEY = "groupsV1"
USERS_KEY = "usersV1"
# Remote itinerary snapshots for chat sync
CHAT_REMOTE_ITINERARIES_KEY = "chatRemoteItinerariesV1"
# Companion invites (pending acceptance)
COMPANION_INVITES_KEY = "companionInvitesV1"  # list of invites {id, fromUserId, toUserId, status, createdAt, decidedAt?}
# Feedbacks list (append-only, capped)
FEEDBACKS_KEY = "feedbacksV1"
# RUM analytics (traffic) keys
RUM_BY_DAY_KEY = "rumByDayV1"  # { YYYYMMDD: { pv:int, uv:int, paths: { path: count } } }
RUM_UV_DAY_PREFIX = "rumUvDayV1:"  # per-day map of sid -> lastSeenTs to compute UV
# Third-party API metrics
TPM_BY_DAY_KEY = "thirdpartyByDayV1"  # { YYYYMMDD: { provider: { endpoint: { total:int, ok:int, fail:int, sumMs:int } } } }
# AI generation logs (append-only, capped) list of objects
AI_GEN_LOGS_KEY = "aiGenLogsV1"
MAX_AI_GEN_LOG_ITEMS = int(os.getenv("MAX_AI_GEN_LOG_ITEMS", "500"))  # cap for the aiGenLogsV1 list
# Legacy global session key (unused now, kept for backward-compat comment)
SESSION_KEY = "currentUserId"

# Name of the browser cookie carrying the anonymous session id.
SESSION_COOKIE = "sid"
# Admin whitelist (userId set via /auth/session); comma-separated
_env_admins = [x.strip() for x in os.getenv("ADMIN_USER_IDS", "").split(",") if x.strip()]
ADMIN_USER_IDS = set(_env_admins or [])
# Optional admin password; when set, admin endpoints require BOTH: userId in ADMIN_USER_IDS and valid password
ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "").strip()

def _is_secure(request: Request) -> bool:
    # Behind proxies (nginx), prefer X-Forwarded-Proto
    proto = request.headers.get("x-forwarded-proto") or request.url.scheme
    return (proto or "").lower() == "https"

def _get_or_create_sid(request: Request, response: Response) -> str:
    """Return the session id cookie, minting and setting a new one when absent."""
    existing = request.cookies.get(SESSION_COOKIE)
    if existing:
        return existing
    sid = secrets.token_urlsafe(16)
    response.set_cookie(
        key=SESSION_COOKIE,
        value=sid,
        max_age=30 * 24 * 3600,  # 30 days
        httponly=True,
        secure=_is_secure(request),
        samesite="lax",
        path="/",
    )
    return sid


# -------- rate limiting for AI generate --------
AI_RATE_LIMIT_WINDOW_SECS = int(os.getenv("AI_RATE_LIMIT_WINDOW_SECS", "3600"))  # sliding window length
AI_RATE_LIMIT_MAX = int(os.getenv("AI_RATE_LIMIT_MAX", "15"))  # max generations per window
# Comma-separated whitelist of userIds (e.g., phone numbers). Hardcode two defaults plus env additions.
# NOTE(review): phone numbers hardcoded in source; consider moving them fully to env.
_default_whitelist = {"15857119350", "18267159528"}
_env_whitelist = set([w.strip() for w in os.getenv("AI_RATE_LIMIT_WHITELIST", "").split(",") if w.strip()])
AI_RATE_LIMIT_WHITELIST = _default_whitelist.union(_env_whitelist)


def _get_user_id_by_sid(session: Session, sid: str) -> str | None:
    """Resolve a session cookie sid to its stored userId via the session:* KV record.

    Best-effort: any storage error is treated as "not logged in" (None).
    """
    try:
        return get_kv(session, f"session:{sid}", None)
    except Exception:
        return None


def enforce_ai_rate_limit(request: Request, response: Response) -> None:
    """Sliding-window rate limit for AI itinerary generation.

    Keyed by userId when logged in, otherwise by the anonymous sid.
    Whitelisted users bypass the limit. Raises HTTP 429 when exceeded;
    rejected attempts are not recorded against the window.
    """
    now = int(time.time())
    window_start = now - AI_RATE_LIMIT_WINDOW_SECS
    with SessionLocal() as s:
        sid = _get_or_create_sid(request, response)
        user_id = _get_user_id_by_sid(s, sid)
        if user_id and str(user_id) in AI_RATE_LIMIT_WHITELIST:
            return
        ident = str(user_id) if user_id is not None else f"sid:{sid}"
        bucket_key = f"rate:ai:{ident}"
        history = get_kv(s, bucket_key, [])
        if not isinstance(history, list):
            history = []
        # Keep only timestamps still inside the window.
        recent = [int(t) for t in history if isinstance(t, (int, float)) and int(t) >= window_start]
        if len(recent) >= AI_RATE_LIMIT_MAX:
            raise HTTPException(status_code=429, detail="AI generate rate limit exceeded. Please retry later.")
        recent.append(now)
        set_kv(s, bucket_key, recent)


def get_session():
    """FastAPI dependency: yield a DB session and always close it afterwards."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


def route_get_set(list_key: str, default_value: Any):
    """Build (GET, PUT) handlers for a shared (non user-scoped) KV dataset.

    GET returns the stored value (or default) and quietly bumps a poll
    counter; PUT prunes, persists, enforces the storage budget, and resets
    the counter while logging how many polls happened since the last write.
    """
    poll_counter_key = f"polls_since_put:{list_key}"

    async def get_handler(request: Request):
        with SessionLocal() as s:
            data = get_kv(s, list_key, default_value)
            # GET is polled by the frontend; stay quiet but count the polls.
            try:
                try:
                    seen = int(get_kv(s, poll_counter_key, 0))
                except Exception:
                    seen = 0
                set_kv(s, poll_counter_key, seen + 1)
            except Exception:
                pass
            return JSONResponse(content=data)

    async def put_handler(request: Request):
        body = await request.json()
        with SessionLocal() as s:
            # Lightweight prune per dataset key before persisting.
            pruned = _prune_value_for_key(
                list_key,
                body,
                per_thread_limit=MAX_MESSAGES_PER_THREAD,
                max_remote_itins=MAX_REMOTE_ITINS,
            )
            set_kv(s, list_key, pruned)
            # After write, enforce global storage budget if needed.
            _enforce_budget(s)
            try:
                size_hint = len(pruned) if hasattr(pruned, "__len__") else "?"
            except Exception:
                size_hint = "?"
            # Read and reset poll counter (best-effort).
            try:
                polls = int(get_kv(s, poll_counter_key, 0) or 0)
            except Exception:
                polls = 0
            try:
                set_kv(s, poll_counter_key, 0)
            except Exception:
                pass
            logger.debug("PUT %s <- size=%s, polls_since_last_update=%s", list_key, size_hint, polls)
            return JSONResponse(content={"ok": True})

    return get_handler, put_handler


def route_get_set_scoped(base_key: str, default_value: Any):
    """Create GET/PUT handlers that scope data by current userId (or fallback to SID).
    Prevents different users from seeing each other's lists for sensitive datasets like itineraries.
    Key format: f"{base_key}:{ident}" where ident is userId or "sid:<sid>".
    """

    def _resolve(s, request: Request, response: Response):
        # Identity is the logged-in userId when available, else the anonymous sid.
        sid = _get_or_create_sid(request, response)
        user_id = _get_user_id_by_sid(s, sid)
        ident = str(user_id) if user_id is not None else f"sid:{sid}"
        return ident, f"{base_key}:{ident}"

    async def get_handler(request: Request, response: Response):
        with SessionLocal() as s:
            ident, key = _resolve(s, request, response)
            data = get_kv(s, key, default_value)
            # GET is polled by the frontend; stay quiet but count polls (scoped).
            try:
                counter_key = f"polls_since_put:{key}"
                try:
                    seen = int(get_kv(s, counter_key, 0))
                except Exception:
                    seen = 0
                set_kv(s, counter_key, seen + 1)
            except Exception:
                pass
            return JSONResponse(content=data)

    async def put_handler(request: Request, response: Response):
        body = await request.json()
        with SessionLocal() as s:
            ident, key = _resolve(s, request, response)
            # Lightweight prune before persisting.
            pruned = _prune_value_for_key(
                base_key,
                body,
                per_thread_limit=MAX_MESSAGES_PER_THREAD,
                max_remote_itins=MAX_REMOTE_ITINS,
            )
            set_kv(s, key, pruned)
            _enforce_budget(s)
            try:
                size_hint = len(pruned) if hasattr(pruned, "__len__") else "?"
            except Exception:
                size_hint = "?"
            # Read and reset poll counter (scoped, best-effort).
            counter_key = f"polls_since_put:{key}"
            try:
                polls = int(get_kv(s, counter_key, 0) or 0)
            except Exception:
                polls = 0
            try:
                set_kv(s, counter_key, 0)
            except Exception:
                pass
            logger.debug("PUT %s <- ident=%s size=%s, polls_since_last_update=%s", base_key, ident, size_hint, polls)
            return JSONResponse(content={"ok": True})

    return get_handler, put_handler


def _require_admin(request: Request, response: Response, s: Session) -> str:
    """Require admin access and return the admin userId on success.

    Rules:
    - If ADMIN_PASSWORD is set: require session userId in ADMIN_USER_IDS AND password provided via
      either header 'X-Admin-Password: <pwd>' or 'Authorization: Bearer <pwd>'.
    - If ADMIN_PASSWORD is empty: fall back to userId whitelist only (legacy; less secure).

    Raises:
        HTTPException: 403 when not whitelisted or wrong password; 401 when the
        password is required but missing.
    """
    sid = _get_or_create_sid(request, response)
    user_id = get_kv(s, f"session:{sid}", None)
    uid = str(user_id) if user_id is not None else None

    if not uid or uid not in ADMIN_USER_IDS:
        raise HTTPException(status_code=403, detail="admin only")

    if ADMIN_PASSWORD:
        # Accept either custom header or Bearer token.
        supplied = request.headers.get("x-admin-password") or ""
        if not supplied:
            auth = request.headers.get("authorization") or ""
            if auth.lower().startswith("bearer "):
                supplied = auth.split(" ", 1)[1].strip()
        if not supplied:
            raise HTTPException(status_code=401, detail="admin password required")
        # Fix: use a constant-time comparison so the password cannot be probed
        # via response-timing differences (plain != short-circuits).
        if not secrets.compare_digest(supplied.encode("utf-8"), ADMIN_PASSWORD.encode("utf-8")):
            raise HTTPException(status_code=403, detail="invalid admin password")
    return uid


def _traffic_summary_7d(rum_by: Any) -> dict:
    """Summarize RUM traffic: PV/UV for the last 7 recorded days (ascending) and top-20 paths.

    Replaces an unreadable nested-lambda expression that also crashed with
    AttributeError when no day had recorded paths (`[] and _acc` produced a
    list, which was then asked for `.items()`).
    """
    if not isinstance(rum_by, dict):
        rum_by = {}
    recent = sorted([k for k in rum_by.keys() if isinstance(k, str)], reverse=True)[:7]
    days: list = []
    for d in reversed(recent):  # ascending chronological order
        rec = rum_by.get(d) or {}
        if not isinstance(rec, dict):
            rec = {}
        days.append({"day": d, "pv": int(rec.get("pv") or 0), "uv": int(rec.get("uv") or 0)})
    path_counts: dict = {}
    for d in recent:
        rec = rum_by.get(d) or {}
        paths = rec.get("paths") if isinstance(rec, dict) else None
        if not isinstance(paths, dict):
            continue
        for path, cnt in paths.items():
            try:
                path_counts[path] = path_counts.get(path, 0) + int(cnt)
            except Exception:
                continue
    top = sorted(path_counts.items(), key=lambda kv: kv[1], reverse=True)[:20]
    return {"days": days, "topPaths": [{"path": p, "count": c} for p, c in top]}


def _thirdparty_summary_7d(tpm_by: Any) -> dict:
    """Per-day provider totals for the last 7 recorded days plus the top-20 endpoints overall."""
    if not isinstance(tpm_by, dict):
        tpm_by = {}
    last_days = sorted([k for k in tpm_by.keys() if isinstance(k, str)])[-7:]
    days_out: list = []
    endpoint_totals: dict = {}
    for d in last_days:
        prov_map = tpm_by.get(d) or {}
        if not isinstance(prov_map, dict):
            prov_map = {}
        providers: list = []
        for prov in sorted(prov_map.keys()):
            ep_map = prov_map.get(prov) or {}
            if not isinstance(ep_map, dict):
                ep_map = {}
            total = 0
            for ep, metrics in ep_map.items():
                try:
                    t = int((metrics or {}).get("total", 0))
                except Exception:
                    t = 0
                total += t
                endpoint_totals[(prov, ep)] = endpoint_totals.get((prov, ep), 0) + t
            providers.append({"provider": prov, "total": total})
        days_out.append({"day": d, "providers": providers})
    top = sorted(endpoint_totals.items(), key=lambda kv: kv[1], reverse=True)[:20]
    return {
        "days": days_out,
        "topEndpoints": [
            {"provider": prov, "endpoint": ep, "total": cnt}
            for (prov, ep), cnt in top
        ],
    }


@app.get("/admin/summary")
def admin_summary(request: Request, response: Response):
    """Admin dashboard snapshot: DB size/budget, per-dataset counts, AI rate-limiter
    stats, RUM traffic and third-party API summaries, and effective limits.
    """
    with SessionLocal() as s:
        admin_uid = _require_admin(request, response, s)
        # Load every dataset once (defaults on missing keys).
        itins = get_kv(s, ITIN_KEY, []) or []
        chats = get_kv(s, CHATS_KEY, {}) or {}
        gchats = get_kv(s, GROUP_CHATS_KEY, {}) or {}
        comps = get_kv(s, COMPANIONS_KEY, []) or []
        groups = get_kv(s, GROUPS_KEY, []) or []
        users = get_kv(s, USERS_KEY, []) or []
        remotes = get_kv(s, CHAT_REMOTE_ITINERARIES_KEY, {}) or {}
        feedbacks = get_kv(s, FEEDBACKS_KEY, []) or []

        # Thread and message counts (non-list thread values are ignored).
        chats_threads = len(chats) if isinstance(chats, dict) else 0
        chats_msgs = sum(len(a) for a in chats.values() if isinstance(a, list)) if isinstance(chats, dict) else 0
        gchats_groups = len(gchats) if isinstance(gchats, dict) else 0
        gchats_msgs = sum(len(a) for a in gchats.values() if isinstance(a, list)) if isinstance(gchats, dict) else 0

        # Rate limiter stats (best-effort scan of rate:ai:* rows).
        now = int(time.time())
        window_start = now - AI_RATE_LIMIT_WINDOW_SECS
        rate_rows = s.query(KV).filter(KV.k.like("rate:ai:%")).all()
        rate_idents = len(rate_rows)
        rate_hits_window = 0
        for row in rate_rows:
            try:
                arr = row.v or []
                if isinstance(arr, list):
                    rate_hits_window += sum(1 for t in arr if isinstance(t, (int, float)) and int(t) >= window_start)
            except Exception:
                pass

        size_bytes = _current_db_size_bytes()
        traffic_summary = _traffic_summary_7d(get_kv(s, RUM_BY_DAY_KEY, {}) or {})
        thirdparty_summary = _thirdparty_summary_7d(get_kv(s, TPM_BY_DAY_KEY, {}) or {})

        return {
            "admin": admin_uid,
            "db": {
                "sizeBytes": size_bytes,
                "budgetBytes": STORAGE_BUDGET_BYTES,
                "margin": STORAGE_BUDGET_MARGIN,
            },
            "datasets": {
                "itineraries": {"count": len(itins) if isinstance(itins, list) else 0},
                "chats": {"threads": chats_threads, "messages": chats_msgs},
                "groupChats": {"groups": gchats_groups, "messages": gchats_msgs},
                "chatRemoteItineraries": {"count": len(remotes) if isinstance(remotes, dict) else 0},
                "companions": {"count": len(comps) if isinstance(comps, list) else 0},
                "groups": {"count": len(groups) if isinstance(groups, list) else 0},
                "users": {"count": len(users) if isinstance(users, list) else 0},
                "feedbacks": {"count": len(feedbacks) if isinstance(feedbacks, list) else 0},
                "aiRate": {"idents": rate_idents, "hitsLastWindow": rate_hits_window},
                # Traffic summary (last 7 days PV/UV and top paths)
                "traffic": traffic_summary,
                # Third-party metrics (last 7 days, totals and top endpoints per provider)
                "thirdparty": thirdparty_summary,
            },
            "config": {
                "perThreadLimit": MAX_MESSAGES_PER_THREAD,
                "remoteItinsLimit": MAX_REMOTE_ITINS,
                "aiRateMax": AI_RATE_LIMIT_MAX,
                "aiRateWindowSecs": AI_RATE_LIMIT_WINDOW_SECS,
            },
        }


@app.get("/admin/aigen/logs")
def admin_ai_gen_logs(request: Request, response: Response, limit: int = 100):
    """Return recent AI itinerary generation logs, newest first (admin only).

    Query params: limit (default 100; values <= 0 become 50, capped at 500).
    """
    limit = 50 if limit <= 0 else min(limit, 500)
    with SessionLocal() as s:
        _require_admin(request, response, s)
        logs = get_kv(s, AI_GEN_LOGS_KEY, [])
        if not isinstance(logs, list):
            logs = []
        newest_first = logs[-limit:]
        newest_first.reverse()
        return newest_first


# -------- Feedback endpoints --------
# Hard cap on stored feedback items; oldest entries are dropped beyond this.
MAX_FEEDBACK_ITEMS = int(os.getenv("MAX_FEEDBACK_ITEMS", "2000"))


@app.get("/feedback")
def get_feedback(request: Request):
    """Return all stored feedback items."""
    with SessionLocal() as s:
        return JSONResponse(content=get_kv(s, FEEDBACKS_KEY, []))


@app.post("/feedback")
async def post_feedback(request: Request, response: Response):
    """Append one feedback item (text capped at 4000 chars, list capped globally).

    Fixes:
    - The sid cookie was previously set on a throwaway ``Response()`` and
      discarded, so anonymous users got a brand-new sid on every post; the
      cookie is now set on the real injected response (matching the other
      endpoints in this file).
    - An invalid/empty JSON body no longer produces a 500; it is treated as {}.
    - A non-string "text" value is coerced to str instead of crashing on slice.
    """
    try:
        payload = await request.json()
    except Exception:
        payload = {}
    with SessionLocal() as s:
        sid = _get_or_create_sid(request, response)
        user_id = get_kv(s, f"session:{sid}", None)
        ua = request.headers.get("user-agent", "")
        text_val = (payload or {}).get("text", "")
        if not isinstance(text_val, str):
            text_val = str(text_val)
        item = {
            "ts": int(time.time()),
            "userId": user_id,
            "sid": sid,
            "ua": ua,
            "text": text_val[:4000],
            "meta": (payload or {}).get("meta", {}),
        }
        arr = get_kv(s, FEEDBACKS_KEY, [])
        if not isinstance(arr, list):
            arr = []
        arr.append(item)
        # Cap total items (keep latest)
        if len(arr) > MAX_FEEDBACK_ITEMS:
            arr = arr[-MAX_FEEDBACK_ITEMS:]
        set_kv(s, FEEDBACKS_KEY, arr)
        _enforce_budget(s)
        return {"ok": True}

# -------- Companion Invite Endpoints --------
def _current_user_id(request: Request, response: Response, s: Session) -> int | None:
    """Current logged-in userId as int, or None when anonymous/unparseable."""
    sid = _get_or_create_sid(request, response)
    raw = get_kv(s, f"session:{sid}", None)
    if raw is None:
        return None
    try:
        return int(raw)
    except Exception:
        return None

def _load_users(s: Session) -> list[dict]:
    """All registered users from KV; empty list on missing/corrupt payloads."""
    raw = get_kv(s, USERS_KEY, [])
    if isinstance(raw, list):
        return raw
    return []

def _find_user_by_phone(s: Session, phone: str):
    """First user whose phone matches (string compare); None when absent."""
    wanted = str(phone)
    return next((u for u in _load_users(s) if str(u.get("phone")) == wanted), None)

def _load_invites(s: Session) -> list[dict]:
    """Companion invite list from KV; empty list on missing/corrupt payloads."""
    raw = get_kv(s, COMPANION_INVITES_KEY, [])
    if isinstance(raw, list):
        return raw
    return []

def _save_invites(s: Session, inv: list[dict]):
    """Persist invites, keeping only the newest 500 to bound growth."""
    capped = inv[-500:] if len(inv) > 500 else inv
    set_kv(s, COMPANION_INVITES_KEY, capped)

@app.get("/companions/invites")
def list_companion_invites(request: Request, response: Response):
    """Invites where the current user is sender or recipient; [] when anonymous."""
    with SessionLocal() as s:
        uid = _current_user_id(request, response, s)
        if uid is None:
            return []
        me = str(uid)
        return [
            invite
            for invite in _load_invites(s)
            if me in (str(invite.get("fromUserId")), str(invite.get("toUserId")))
        ]

@app.post("/companions/invites")
async def create_companion_invite(request: Request, response: Response):
    """Create a pending companion invite to the user owning `phone`.

    Deduplicates: a pending invite in either direction between the two users
    is reported back instead of creating a second one.
    """
    payload = await request.json()
    with SessionLocal() as s:
        uid = _current_user_id(request, response, s)
        if uid is None:
            raise HTTPException(status_code=401, detail="not logged in")
        phone = str((payload or {}).get("phone") or "").strip()
        if not phone:
            raise HTTPException(status_code=400, detail="phone required")
        # Resolve target user by phone.
        target = _find_user_by_phone(s, phone)
        if not target:
            raise HTTPException(status_code=404, detail="user_not_found")
        to_uid = target.get("id")
        if str(to_uid) == str(uid):
            raise HTTPException(status_code=400, detail="cannot_invite_self")
        invites = _load_invites(s)
        # A pending invite in either direction counts as a duplicate
        # (uid != to_uid is guaranteed by the self-invite check above).
        pair = {str(uid), str(to_uid)}
        for existing in invites:
            if existing.get("status") != "pending":
                continue
            if {str(existing.get("fromUserId")), str(existing.get("toUserId"))} == pair:
                return {"ok": True, "inviteId": existing.get("id"), "duplicate": True}
        new_invite = {
            "id": int(time.time() * 1000),
            "fromUserId": uid,
            "toUserId": to_uid,
            "status": "pending",
            "createdAt": int(time.time() * 1000),
        }
        invites.append(new_invite)
        _save_invites(s, invites)
        return {"ok": True, "inviteId": new_invite["id"], "status": "pending"}

def _load_companions_raw(s: Session) -> list[dict]:
    """Raw companion records from KV; empty list on missing/corrupt payloads."""
    raw = get_kv(s, COMPANIONS_KEY, [])
    if isinstance(raw, list):
        return raw
    return []

def _save_companions_raw(s: Session, arr: list[dict]):
    """Persist the full companion records list (no capping applied here)."""
    set_kv(s, COMPANIONS_KEY, arr)

def _already_companions(s: Session, a: int, b: int) -> bool:
    """True when a record with owner `a` pointing at user `b` already exists.

    Only one direction is checked because pairs are always written in both
    directions together; malformed records are skipped.
    """
    owner, other = str(a), str(b)
    for record in _load_companions_raw(s):
        try:
            if str(record.get("ownerUserId")) == owner and str(record.get("userId")) == other:
                return True
        except Exception:
            continue
    return False

def _add_companion_pair(s: Session, a: int, b: int):
    """Append the two directed companion records linking users a and b.

    No-op when the pair already exists. Display name falls back from
    nickname to phone to a generated label.
    """
    if _already_companions(s, a, b):
        return
    comps = _load_companions_raw(s)
    users = _load_users(s)

    def _lookup(uid):
        # Linear scan; the user list is expected to be small.
        for u in users:
            if str(u.get("id")) == str(uid):
                return u
        return None

    now = int(time.time() * 1000)

    def _record(owner, other, other_info):
        return {
            "id": f"comp-{owner}-{other}-{now}",
            "userId": other,
            "name": other_info.get("nickname") or other_info.get("phone") or f"用户{other}",
            "contact": other_info.get("phone") or "",
            "ownerUserId": owner,
            "createdAt": now,
        }

    info_a = _lookup(a) or {}
    info_b = _lookup(b) or {}
    comps.append(_record(a, b, info_b))
    comps.append(_record(b, a, info_a))
    _save_companions_raw(s, comps)

@app.post("/companions/invites/{invite_id}/accept")
def accept_companion_invite(invite_id: int, request: Request, response: Response):
    """Accept a pending companion invite addressed to the current user.

    Only the recipient may accept.  Replaying an already-decided invite is
    idempotent (returns its stored status).  On acceptance the mutual
    companion pair is created best-effort and the invite list is saved.

    Raises:
        HTTPException 401: not logged in.
        HTTPException 403: caller is not the invite recipient.
        HTTPException 404: invite id does not exist.
    """
    with SessionLocal() as s:
        uid = _current_user_id(request, response, s)
        if uid is None:
            raise HTTPException(status_code=401, detail="not logged in")
        inv = _load_invites(s)
        for i in inv:
            if str(i.get("id")) != str(invite_id):
                continue
            if i.get("status") != "pending":
                # Idempotent replay: report the already-decided status.
                return {"ok": True, "status": i.get("status")}
            if str(i.get("toUserId")) != str(uid):
                raise HTTPException(status_code=403, detail="not recipient")
            i["status"] = "accepted"
            i["decidedAt"] = int(time.time() * 1000)
            try:
                # Best-effort: a malformed invite must not block acceptance.
                _add_companion_pair(s, int(i.get("fromUserId")), int(i.get("toUserId")))
            except Exception:
                pass
            _save_invites(s, inv)
            return {"ok": True, "status": "accepted"}
        # Bug fix: previously an unknown invite id fell through and still
        # returned {"ok": True, "status": "accepted"}.  Mirror the decline
        # endpoint and report not-found explicitly.
        raise HTTPException(status_code=404, detail="invite_not_found")

@app.post("/companions/invites/{invite_id}/decline")
def decline_companion_invite(invite_id: int, request: Request, response: Response):
    """Decline a pending companion invite addressed to the current user."""
    with SessionLocal() as s:
        current = _current_user_id(request, response, s)
        if current is None:
            raise HTTPException(status_code=401, detail="not logged in")
        invites = _load_invites(s)
        target = None
        for item in invites:
            if str(item.get("id")) == str(invite_id):
                target = item
                break
        if target is None:
            raise HTTPException(status_code=404, detail="invite_not_found")
        if target.get("status") != "pending":
            # Already decided: replay the stored status instead of erroring.
            return {"ok": True, "status": target.get("status")}
        if str(target.get("toUserId")) != str(current):
            raise HTTPException(status_code=403, detail="not recipient")
        target["status"] = "declined"
        target["decidedAt"] = int(time.time() * 1000)
        _save_invites(s, invites)
        return {"ok": True, "status": "declined"}


# -------- Traffic RUM collection --------
# Days of aggregated per-day PV/UV data to retain (older days are pruned).
RUM_RETENTION_DAYS = int(os.getenv("RUM_RETENTION_DAYS", "60"))
# Cap on distinct paths tracked per day; lowest-count paths are dropped beyond this.
RUM_MAX_PATHS_PER_DAY = int(os.getenv("RUM_MAX_PATHS_PER_DAY", "200"))
# Cap on unique-visitor (sid) entries stored per day to bound storage growth.
RUM_MAX_UV_PER_DAY = int(os.getenv("RUM_MAX_UV_PER_DAY", "200000"))


@app.post("/rum/collect")
async def rum_collect(request: Request, response: Response):
    """Lightweight traffic collection: increments PV/UV and per-path counts by day.

    Stores only aggregated counts. UV is approximated by unique sid per day.
    Always answers {"ok": True}; collection failures are logged, never surfaced.
    """
    try:
        try:
            payload = await request.json()
        except Exception:
            payload = {}
        path = str((payload or {}).get("path") or "/")[:300]
        # Respect very small privacy footprint: ignore referrer if too long
        ref = str((payload or {}).get("ref") or "")[:300]
        # Title not stored; kept for future extensibility
        _ = (payload or {}).get("title")

        # Identify visitor by sid cookie.
        # Bug fix: a throwaway Response() was previously passed here, so a newly
        # minted sid cookie never reached the client and every cookie-less hit
        # counted as a brand-new visitor, inflating UV.  Injecting the real
        # Response lets FastAPI deliver the Set-Cookie header.
        sid = _get_or_create_sid(request, response)
        now = datetime.utcnow()
        day = now.strftime("%Y%m%d")
        with SessionLocal() as s:
            by = get_kv(s, RUM_BY_DAY_KEY, {})
            if not isinstance(by, dict):
                by = {}
            day_obj = by.get(day) or {"pv": 0, "uv": 0, "paths": {}}
            # PV: every accepted request counts one page view.
            try:
                day_obj["pv"] = int(day_obj.get("pv", 0)) + 1
            except Exception:
                day_obj["pv"] = 1
            # Per-path hit counters.
            paths = day_obj.get("paths") or {}
            if not isinstance(paths, dict):
                paths = {}
            if path:
                try:
                    paths[path] = int(paths.get(path, 0)) + 1
                except Exception:
                    paths[path] = 1
            # Cap distinct paths per day (keep top counts)
            if len(paths) > RUM_MAX_PATHS_PER_DAY:
                items = sorted(paths.items(), key=lambda kv: kv[1], reverse=True)[:RUM_MAX_PATHS_PER_DAY]
                paths = {k: v for k, v in items}
            day_obj["paths"] = paths
            # UV by sid: the per-day sid map size approximates unique visitors.
            uv_key = f"{RUM_UV_DAY_PREFIX}{day}"
            uv_map = get_kv(s, uv_key, {})
            if not isinstance(uv_map, dict):
                uv_map = {}
            if sid not in uv_map:
                # New sid is only admitted while under the per-day cap.
                if len(uv_map) < RUM_MAX_UV_PER_DAY:
                    uv_map[sid] = int(time.time())
            else:
                # Known sid: refresh its last-seen timestamp.
                uv_map[sid] = int(time.time())
            day_obj["uv"] = len(uv_map)
            by[day] = day_obj
            # Retention: keep last N days
            days_sorted = sorted([k for k in by.keys() if isinstance(k, str)], reverse=True)
            keep = set(days_sorted[:RUM_RETENTION_DAYS])
            by = {k: by[k] for k in days_sorted if k in keep}
            # Persist
            set_kv(s, uv_key, uv_map)
            set_kv(s, RUM_BY_DAY_KEY, by)
            _enforce_budget(s)
        return {"ok": True}
    except Exception as e:
        logger.warning("/rum/collect failed: %s", e)
        # Non-fatal
        return {"ok": True}


# -------- Third-party API metrics collection --------
# Days of per-provider/per-endpoint aggregates to retain before pruning.
TPM_RETENTION_DAYS = int(os.getenv("TPM_RETENTION_DAYS", "60"))


@app.post("/metrics/thirdparty")
async def metrics_thirdparty(request: Request):
    """Collect third-party API call metrics.
    Body: { provider: 'amap'|'gaode'|string, endpoint: string, ms?: number, ok?: boolean }
    Aggregates per day per provider per endpoint: total, ok, fail, sumMs.
    """
    try:
        try:
            body = await request.json()
        except Exception:
            body = {}
        body = body or {}
        provider = str(body.get("provider") or "").strip()[:50] or "unknown"
        endpoint = str(body.get("endpoint") or "").strip()[:120] or "/"
        elapsed_ms = int(float(body.get("ms") or 0))
        succeeded = bool(body.get("ok"))

        day = datetime.utcnow().strftime("%Y%m%d")
        with SessionLocal() as s:
            by_day = get_kv(s, TPM_BY_DAY_KEY, {})
            if not isinstance(by_day, dict):
                by_day = {}
            # Drill down: day -> provider -> endpoint -> counters.
            day_bucket = by_day.get(day) or {}
            provider_bucket = day_bucket.get(provider) or {}
            stats = provider_bucket.get(endpoint) or {"total": 0, "ok": 0, "fail": 0, "sumMs": 0}
            stats["total"] = int(stats.get("total", 0)) + 1
            stats["ok"] = int(stats.get("ok", 0)) + (1 if succeeded else 0)
            stats["fail"] = int(stats.get("fail", 0)) + (0 if succeeded else 1)
            stats["sumMs"] = int(stats.get("sumMs", 0)) + max(0, elapsed_ms)
            provider_bucket[endpoint] = stats
            day_bucket[provider] = provider_bucket
            by_day[day] = day_bucket
            # Retention: keep only the most recent TPM_RETENTION_DAYS day keys.
            recent = sorted([k for k in by_day.keys() if isinstance(k, str)], reverse=True)
            keep = set(recent[:TPM_RETENTION_DAYS])
            by_day = {k: by_day[k] for k in recent if k in keep}
            set_kv(s, TPM_BY_DAY_KEY, by_day)
            _enforce_budget(s)
        return {"ok": True}
    except Exception as e:
        logger.warning("/metrics/thirdparty failed: %s", e)
        return {"ok": True}


# Generic KV-backed REST endpoints.
# Each pair below exposes GET (read) and PUT (replace whole collection) for one
# stored key, using the route factory helpers defined earlier in this module.
# `route_get_set_scoped` partitions data per userId/sid; `route_get_set` is global.

# Itineraries (scoped per userId or sid)
get_itins, put_itins = route_get_set_scoped(ITIN_KEY, [])
app.add_api_route("/itineraries", get_itins, methods=["GET"])
app.add_api_route("/itineraries", put_itins, methods=["PUT"])

# Chats (direct)
get_chats, put_chats = route_get_set(CHATS_KEY, {})
app.add_api_route("/chats", get_chats, methods=["GET"])
app.add_api_route("/chats", put_chats, methods=["PUT"])

# Group chats
get_gchats, put_gchats = route_get_set(GROUP_CHATS_KEY, {})
app.add_api_route("/groupChats", get_gchats, methods=["GET"])
app.add_api_route("/groupChats", put_gchats, methods=["PUT"])

# Companions
get_comp, put_comp = route_get_set(COMPANIONS_KEY, [])
app.add_api_route("/companions", get_comp, methods=["GET"])
app.add_api_route("/companions", put_comp, methods=["PUT"])

# Groups
get_groups, put_groups = route_get_set(GROUPS_KEY, [])
app.add_api_route("/groups", get_groups, methods=["GET"])
app.add_api_route("/groups", put_groups, methods=["PUT"])

# Users
get_users, put_users = route_get_set(USERS_KEY, [])
app.add_api_route("/users", get_users, methods=["GET"])
app.add_api_route("/users", put_users, methods=["PUT"])

# Chat Remote Itinerary snapshots (server-backed so peers/devices can see updates)
get_remote_itins, put_remote_itins = route_get_set(CHAT_REMOTE_ITINERARIES_KEY, {})
app.add_api_route("/chatRemoteItineraries", get_remote_itins, methods=["GET"])
app.add_api_route("/chatRemoteItineraries", put_remote_itins, methods=["PUT"])


# Auth session (simple current user id store; aligns with frontend api.js)
@app.get("/auth/session")
def get_session_user(request: Request, response: Response):
    """Per-client session. Identify by cookie sid; store mapping session:<sid> -> userId."""
    sid = _get_or_create_sid(request, response)
    with SessionLocal() as db:
        return {"userId": get_kv(db, f"session:{sid}", None)}


@app.put("/auth/session")
async def set_session_user(request: Request, response: Response):
    """Bind the caller's sid cookie to the userId supplied in the request body."""
    payload = await request.json()
    sid = _get_or_create_sid(request, response)
    with SessionLocal() as db:
        set_kv(db, f"session:{sid}", payload.get("userId"))
    return {"ok": True}


def _hash_pwd_compatible(pwd: str) -> str:
    # Match frontend hashPwd: btoa(unescape(encodeURIComponent(p))).split('').reverse().join('')
    try:
        import base64
        b = pwd.encode("utf-8")
        b64 = base64.b64encode(b).decode("ascii")
        return b64[::-1]
    except Exception:
        return ""


@app.post("/auth/login")
async def auth_login(request: Request, response: Response):
    """Authenticate by phone + password and bind this client's session to the user.

    Raises:
        HTTPException 400: missing credentials or unknown phone.
        HTTPException 401: wrong (or missing) password hash.
    """
    body = await request.json()
    phone = str(body.get("phone", "")).strip()
    password = str(body.get("password", ""))
    if not phone or not password:
        raise HTTPException(status_code=400, detail="missing phone or password")
    with SessionLocal() as s:
        users = get_kv(s, USERS_KEY, [])
        if not isinstance(users, list):
            users = []
        user = None
        for u in users:
            try:
                if str(u.get("phone")) == phone:
                    user = u
                    break
            except Exception:
                continue
        if not user:
            # NOTE(review): distinct 'user_not_found' vs 'invalid_password' details
            # allow account enumeration; kept for frontend parity — confirm.
            raise HTTPException(status_code=400, detail="user_not_found")
        hp = _hash_pwd_compatible(password)
        stored = user.get("passwordHash")
        # Security fix: constant-time comparison instead of `!=` to avoid
        # timing side channels; a missing/non-string stored hash also rejects.
        if not isinstance(stored, str) or not secrets.compare_digest(stored, hp):
            raise HTTPException(status_code=401, detail="invalid_password")
        # set session and return
        sid = _get_or_create_sid(request, response)
        set_kv(s, f"session:{sid}", user.get("id"))
        return {"ok": True, "userId": user.get("id"), "user": {"id": user.get("id"), "nickname": user.get("nickname"), "phone": user.get("phone")}}


@app.post("/auth/logout")
async def auth_logout(request: Request, response: Response):
    """Clear the user binding for this client's session cookie."""
    sid = _get_or_create_sid(request, response)
    with SessionLocal() as db:
        set_kv(db, f"session:{sid}", None)
    return {"ok": True}

@app.post("/auth/register")
async def auth_register(request: Request, response: Response):
    """Simple user registration.
    Body: { phone: str, password: str, nickname?: str }
    Constraints:
      - phone unique
      - password length >= 6
      - phone format: mainland CN 11 digits starting with 1 (frontend parity)
    Auto login (set session) on success.
    """
    body = await request.json()
    phone = str(body.get("phone", "")).strip()
    password = str(body.get("password", ""))
    nickname = str(body.get("nickname") or "").strip() or phone
    if not phone or not password:
        raise HTTPException(status_code=400, detail="missing phone or password")
    if not phone.isdigit() or len(phone) != 11 or not phone.startswith("1"):
        raise HTTPException(status_code=400, detail="invalid_phone")
    if len(password) < 6:
        raise HTTPException(status_code=400, detail="weak_password")
    with SessionLocal() as s:
        users = get_kv(s, USERS_KEY, [])
        if not isinstance(users, list):
            users = []
        for u in users:
            # Bug fix: the duplicate check used to raise HTTPException *inside*
            # `try/except Exception: continue`, which swallowed the 400 and let
            # duplicate phones register.  Read defensively, raise outside the try.
            try:
                existing_phone = str(u.get("phone"))
            except Exception:
                continue
            if existing_phone == phone:
                raise HTTPException(status_code=400, detail="phone_exists")
        now_ms = int(time.time() * 1000)
        # NOTE(review): millisecond-timestamp ids can collide under concurrent
        # registrations; presumed acceptable for this KV-backed dev store.
        user = {
            "id": now_ms,
            "phone": phone,
            "nickname": nickname,
            "passwordHash": _hash_pwd_compatible(password),
            "createdAt": now_ms,
        }
        users.append(user)
        set_kv(s, USERS_KEY, users)
        # Auto-login: bind this client's sid cookie to the new user id.
        sid = _get_or_create_sid(request, response)
        set_kv(s, f"session:{sid}", user.get("id"))
        return {"ok": True, "userId": user.get("id"), "user": {"id": user.get("id"), "nickname": user.get("nickname"), "phone": user.get("phone")}}


# 404 fallback for unmatched routes (optional)
@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    """Render every HTTPException as a uniform {"detail": ...} JSON body."""
    payload = {"detail": exc.detail}
    return JSONResponse(status_code=exc.status_code, content=payload)


# -------- AI itinerary generation --------
def build_itinerary_prompt(query: str | None, days: int | None, preferences: dict | None) -> str:
    """Compose the strict-JSON itinerary prompt sent to the model.

    Embeds the target schema, the user's request (defaulting to a generic
    China road-trip query), the requested day count ("auto" when absent),
    and free-form preferences serialized as JSON.
    """
    prefs = preferences or {}
    days_text = str(days) if (isinstance(days, int) and days > 0) else "auto"
    user_query = (query or "生成一次中国境内自驾旅行行程").strip()
    lines = [
        "You are an itinerary planning assistant. Produce ONLY a JSON object that matches the schema below.",
        "The JSON must be strictly valid and contain no comments or trailing text.",
        "",
        "Schema: {",
        "  \"id\": string | number, // temporary id, can be timestamp",
        "  \"name\": string,",
        "  \"createdAt\": number, // epoch millis",
        "  \"days\": [",
        "    { \"events\": [",
        "      {",
        "        \"type\": \"drive\" | \"activity\" | \"accommodation\",",
        "        \"text\": string,",
        "        \"subtype\": \"food\" | \"spot\" | null, // for activity",
        "        \"from\": string | null, // for drive",
        "        \"to\": string | null,   // for drive",
        "        \"distance\": string | null, // like '120.5 km'",
        "        \"duration\": string | null, // like '2.5 h'",
        "        \"fromCoords\": [number, number] | null, // [lat, lng] optional",
        "        \"toCoords\": [number, number] | null    // [lat, lng] optional",
        "      }",
        "    ] }",
        "  ]",
        "}",
        "",
        f"User request: {user_query}",
        f"Days: {days_text}",
        f"Preferences (free-form JSON): {json.dumps(prefs, ensure_ascii=False)}",
        "",
        "Constraints:",
        "- Keep driving events as 'drive' with from/to names; activities as 'activity' with subtype 'food' or 'spot'; accommodations as 'accommodation'.",
        "- If unknown coordinates, set fromCoords/toCoords to null.",
        "- Distance and duration are optional strings; leave null if not computed.",
        "- Ensure days is a non-empty array; events per day may be empty.",
        "- Return ONLY the JSON.",
    ]
    return "\n".join(lines)


async def call_qwen(prompt: str) -> dict:
    """Call the Qwen chat-completions endpoint and return the parsed itinerary dict.

    When QWEN_API_KEY is empty, returns a canned two-day stub itinerary so the
    app keeps working without upstream credentials.

    Raises:
        HTTPException 502: upstream HTTP error, unrecognized response shape,
            or content that is not valid JSON.
    """
    if not QWEN_API_KEY:
        # Fallback stub if no key: return a trivial 2-day itinerary
        now = int(datetime.utcnow().timestamp() * 1000)
        logger.info("Returning STUB itinerary (no QWEN_API_KEY)")
        return {
            "id": now,
            "name": "示例行程",
            "createdAt": now,
            "days": [
                {"events": [
                    {"type": "drive", "text": "杭州 → 黄山", "from": "杭州", "to": "黄山", "subtype": None, "distance": None, "duration": None, "fromCoords": None, "toCoords": None},
                    {"type": "activity", "text": "宏村南湖书院", "subtype": "spot", "from": None, "to": None, "distance": None, "duration": None, "fromCoords": None, "toCoords": None},
                    {"type": "accommodation", "text": "黄山国际大酒店", "subtype": None, "from": None, "to": None, "distance": None, "duration": None, "fromCoords": None, "toCoords": None}
                ]},
                {"events": [
                    {"type": "drive", "text": "黄山 → 杭州", "from": "黄山", "to": "杭州", "subtype": None, "distance": None, "duration": None, "fromCoords": None, "toCoords": None}
                ]}
            ]
        }

    headers = {
        "Authorization": f"Bearer {QWEN_API_KEY}",
        "Content-Type": "application/json",
    }
    # OpenAI-compatible payload
    payload = {
        "model": QWEN_MODEL,
        "messages": [
            {"role": "system", "content": "You are a helpful itinerary planner."},
            {"role": "user", "content": prompt},
        ],
        "temperature": 0.3,
        # DashScope requires disabling thinking for non-streaming requests on A3/A3B models
        "enable_thinking": False,
        "result_format": "message",
        "stream": False,
    }
    url = f"{QWEN_API_BASE}/chat/completions"
    prompt_len = len(prompt)
    t0 = time.time()
    logger.info(
        "Calling Qwen model=%s base=%s prompt_len=%s",
        QWEN_MODEL,
        QWEN_API_BASE,
        prompt_len,
    )
    try:
        async with httpx.AsyncClient(timeout=90) as client:
            r = await client.post(url, headers=headers, json=payload)
            took = (time.time() - t0) * 1000
            logger.info("Qwen HTTP %s in %.0fms", r.status_code, took)
            if r.status_code >= 400:
                # Log the (truncated) error body before raise_for_status raises.
                try:
                    logger.error("Qwen error body: %s", r.text[:1000])
                except Exception:
                    pass
            r.raise_for_status()
            data = r.json()
            # Prefer OpenAI-compatible shape
            content = None
            if isinstance(data, dict):
                try:
                    content = data["choices"][0]["message"]["content"]
                except Exception:
                    # Fallback to Responses API shape
                    try:
                        content = data["output"]["choices"][0]["message"]["content"]
                    except Exception:
                        # Neither shape matched: surface a 502 (HTTPException is
                        # not caught by the httpx.HTTPError handler below).
                        logger.error("Qwen response parse error: %s", json.dumps(data)[:500])
                        raise HTTPException(502, detail="Invalid response from model")
            # content may be string JSON
            try:
                logger.debug("Qwen content (trunc): %s", (content if isinstance(content, str) else str(content))[:1000])
            except Exception:
                pass
            try:
                obj = json.loads(content)
                return obj
            except Exception:
                # Covers non-JSON content and the None left by a non-dict body.
                logger.error("Qwen content is not valid JSON: %s", str(content)[:500])
                raise HTTPException(502, detail="Model did not return valid JSON")
    except httpx.HTTPError as e:
        logger.exception("Qwen HTTP error: %s", e)
        raise HTTPException(502, detail="Upstream model HTTP error")


def coerce_itinerary_shape(obj: dict) -> dict:
    """Normalize a model-produced itinerary so the frontend can render it.

    Fills missing id/name/createdAt, guarantees a non-empty `days` list, and
    keeps only well-formed events with one of the known type tags.
    """
    now = int(datetime.utcnow().timestamp() * 1000)
    if not isinstance(obj, dict):
        obj = {}
    obj.setdefault("id", now)
    obj.setdefault("name", "行程")
    obj.setdefault("createdAt", now)
    raw_days = obj.get("days")
    if not isinstance(raw_days, list) or not raw_days:
        raw_days = [{"events": []}]
    allowed_types = ("drive", "activity", "accommodation")
    copied_fields = ("text", "subtype", "from", "to", "distance", "duration", "fromCoords", "toCoords")
    shaped_days = []
    for day in raw_days:
        raw_events = (day or {}).get("events")
        if not isinstance(raw_events, list):
            raw_events = []
        kept = []
        for ev in raw_events:
            if not isinstance(ev, dict) or ev.get("type") not in allowed_types:
                continue
            item = {"type": ev.get("type")}
            for field in copied_fields:
                item[field] = ev.get(field)
            kept.append(item)
        shaped_days.append({"events": kept})
    obj["days"] = shaped_days
    return obj


# --- Geocoding and enrichment for drive events ---
# Local POI dictionary to avoid external requests for common places.
# Values are (lat, lng) pairs in decimal degrees (matches the prompt's [lat, lng] convention).
POI_DB: dict[str, tuple[float, float]] = {
    "杭州": (30.274085, 120.15507),
    "黄山": (30.132000, 118.166000),
    "黄山风景区": (30.132000, 118.166000),
    "宏村": (29.853000, 117.997000),
    "桐庐": (29.793000, 119.693000),
    # Common Hangzhou attractions
    "西溪湿地": (30.273000, 120.051000),
    "灵隐寺": (30.242000, 120.097000),
    "宋城": (30.210000, 120.095000),
    "西湖": (30.259000, 120.153000),
}

# External geocoder backend: 'none' disables network geocoding entirely.
GEOCODER_PROVIDER = os.getenv("GEOCODER_PROVIDER", "none").lower()  # 'none' | 'nominatim'
# When truthy, drive events get distance/duration filled in (OSRM with haversine fallback).
ENRICH_DRIVE_STATS = os.getenv("ENRICH_DRIVE_STATS", "1") not in ("0", "false", "False")


def _haversine_meters(a: tuple[float, float], b: tuple[float, float]) -> float:
    import math
    lat1, lon1 = a
    lat2, lon2 = b
    R = 6371000.0
    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlambda = math.radians(lon2 - lon1)
    x = (math.sin(dphi/2) ** 2) + math.cos(phi1) * math.cos(phi2) * (math.sin(dlambda/2) ** 2)
    return 2 * R * math.atan2(math.sqrt(x), math.sqrt(1 - x))


async def _geocode_name(name: str) -> tuple[float, float] | None:
    """Resolve a place name to a (lat, lng) pair, or None when unresolvable.

    Lookup order: local POI_DB dict, then the KV geocode cache, then the
    optional external Nominatim provider when GEOCODER_PROVIDER enables it.
    Every failure is swallowed — this is a best-effort helper.
    """
    name = (name or "").strip()
    if not name:
        return None
    # 1) local POI
    if name in POI_DB:
        return POI_DB[name]
    # 2) DB cache
    try:
        with SessionLocal() as s:
            cached = get_kv(s, f"geocode:{name}", None)
            if isinstance(cached, (list, tuple)) and len(cached) == 2:
                try:
                    lat = float(cached[0])
                    lng = float(cached[1])
                    return (lat, lng)
                except Exception:
                    # Corrupt cache entry: fall through to external lookup.
                    pass
    except Exception:
        pass
    # 3) Optional external geocoding
    if GEOCODER_PROVIDER == "nominatim":
        try:
            url = "https://nominatim.openstreetmap.org/search"
            params = {"format": "json", "q": name, "countrycodes": "cn", "limit": 1, "addressdetails": 0}
            # Identifying User-Agent per Nominatim usage policy.
            headers = {"User-Agent": "ai-trip-planner/1.0 (backend)"}
            async with httpx.AsyncClient(timeout=15) as client:
                r = await client.get(url, params=params, headers=headers)
                if r.status_code == 200:
                    arr = r.json()
                    if isinstance(arr, list) and arr:
                        lat = float(arr[0]["lat"])  # type: ignore[index]
                        lon = float(arr[0]["lon"])  # type: ignore[index]
                        # Cache
                        try:
                            with SessionLocal() as s:
                                set_kv(s, f"geocode:{name}", [lat, lon])
                        except Exception:
                            pass
                        return (lat, lon)
        except Exception:
            # best-effort
            pass
    return None


async def _osrm_distance_duration(a: tuple[float, float], b: tuple[float, float]) -> tuple[str, str]:
    """Return (kmStr, hoursStr) using OSRM, fallback to haversine at 60km/h."""
    try:
        # OSRM expects lng,lat coordinate order — hence the index swap below.
        url = f"https://router.project-osrm.org/route/v1/driving/{a[1]},{a[0]};{b[1]},{b[0]}?overview=false&geometries=geojson&steps=false"
        async with httpx.AsyncClient(timeout=20) as client:
            r = await client.get(url)
            if r.status_code == 200:
                data = r.json()
                route = (data.get("routes") or [{}])[0]
                dist = float(route.get("distance", 0.0))  # meters
                dur = float(route.get("duration", 0.0))   # seconds
                if dist > 0 and dur > 0:
                    return (f"{dist/1000:.1f} km", f"{dur/3600:.2f} h")
    except Exception:
        # Network/parse failure: fall through to the offline estimate.
        pass
    # Fallback: straight-line distance at an assumed average of 60 km/h.
    meters = _haversine_meters(a, b)
    km = meters / 1000.0
    h = km / 60.0
    return (f"{km:.1f} km", f"{h:.2f} h")


async def enrich_itinerary_coords(itin: dict) -> dict:
    """Fill or correct coordinates (and optionally distance/duration) on drive events.

    For each 'drive' event, geocodes the from/to names and overrides any
    model-provided coordinates that are missing or more than 20 km away from
    the geocoded location.  Mutates `itin` in place and returns it; nothing
    is persisted here.
    """
    days = (itin or {}).get("days") or []
    if not isinstance(days, list):
        return itin
    changed = False
    for d in days:
        evs = (d or {}).get("events") or []
        if not isinstance(evs, list):
            continue
        for ev in evs:
            if not isinstance(ev, dict):
                continue
            if ev.get("type") != "drive":
                continue
            # Validate model-provided coords; if missing or far off from geocoded name, correct them.
            fc = ev.get("fromCoords")
            tc = ev.get("toCoords")
            from_name = (ev.get("from") or "").strip()
            to_name = (ev.get("to") or "").strip()
            a = (await _geocode_name(from_name)) if from_name else None
            b = (await _geocode_name(to_name)) if to_name else None
            def _norm(c):
                # Coerce a 2-element list/tuple into a (lat, lng) float pair, else None.
                try:
                    if isinstance(c, (list, tuple)) and len(c) == 2:
                        lat = float(c[0]); lng = float(c[1]);
                        # Fix obvious lat/lng swap if values look inverted (lng ~70-140 CN, lat ~3-54)
                        if (lng < -90 or lng > 180 or lat < -90 or lat > 90) or (70 <= lat <= 140 and 0 <= lng <= 60):
                            return (lng, lat)
                        return (lat, lng)
                except Exception:
                    pass
                return None
            fc_norm = _norm(fc)
            tc_norm = _norm(tc)
            # Thresholds: if distance between model coords and geocoded name exceeds 20km, correct to geocoded
            THRESH_M = 20000.0
            if a:
                if not fc_norm or _haversine_meters(fc_norm, a) > THRESH_M:
                    ev["fromCoords"] = [a[0], a[1]]
                    fc_norm = a
                    changed = True
            if b:
                if not tc_norm or _haversine_meters(tc_norm, b) > THRESH_M:
                    ev["toCoords"] = [b[0], b[1]]
                    tc_norm = b
                    changed = True
            # If still missing both but we have names, set from scratch
            if (not fc_norm or not tc_norm) and a and b:
                ev["fromCoords"] = [a[0], a[1]]
                ev["toCoords"] = [b[0], b[1]]
                changed = True
            # Compute distance/duration if requested and still missing
            if ENRICH_DRIVE_STATS and a and b and not (ev.get("distance") and ev.get("duration")):
                try:
                    km_str, h_str = await _osrm_distance_duration(a, b)
                    ev["distance"] = ev.get("distance") or km_str
                    ev["duration"] = ev.get("duration") or h_str
                except Exception:
                    pass
    # No need to persist server-side; just return enriched
    return itin


@app.post("/ai/itinerary/generate")
async def ai_generate(request: Request, payload: Dict[str, Any]):
    """
    Request body: { query?: string, days?: number, preferences?: object }
    Response: an itinerary JSON matching the frontend structure.

    Pipeline: rate-limit -> build prompt -> call Qwen (or stub) -> coerce
    shape -> enrich drive coordinates -> store a generation log (best-effort).
    """
    # Enforce per-user/per-sid rate limit with whitelist
    enforce_ai_rate_limit(request, Response())
    query = payload.get("query")
    days = payload.get("days")
    preferences = payload.get("preferences")
    def _trunc(s: Any, n: int = 80) -> str:
        # Truncate any value's string form for compact logging.
        try:
            t = str(s)
        except Exception:
            t = ""
        return t if len(t) <= n else t[:n] + "…"
    ct = request.headers.get("content-type")
    logger.info("/ai/itinerary/generate ct=%s query='%s' days=%s prefs_keys=%s",
                ct, _trunc(query), days,
                list((preferences or {}).keys()) if isinstance(preferences, dict) else [])
    prompt = build_itinerary_prompt(query, days, preferences)
    obj = await call_qwen(prompt)
    shaped = coerce_itinerary_shape(obj)
    # Enrich drive events with coordinates (and optional stats) before returning
    try:
        shaped = await enrich_itinerary_coords(shaped)
    except Exception as e:
        logger.warning("enrich_itinerary_coords failed: %s", e)
    try:
        logger.debug("Shaped itinerary (trunc): %s", json.dumps(shaped, ensure_ascii=False)[:1200])
    except Exception:
        pass
    # quick summary for logs
    try:
        dcount = len(shaped.get("days", []) or [])
        evcount = sum(len((d or {}).get("events", []) or []) for d in shaped.get("days", []) or [])
    except Exception:
        dcount = evcount = "?"
    logger.info("AI itinerary generated: days=%s events=%s", dcount, evcount)
    # Persist generation log (best-effort, non-blocking on failure)
    try:
        with SessionLocal() as s:
            sid = _get_or_create_sid(request, Response())
            user_id = _get_user_id_by_sid(s, sid)
            logs = get_kv(s, AI_GEN_LOGS_KEY, [])
            if not isinstance(logs, list):
                logs = []
            # Keep response summary trimmed to reduce storage
            def _summ_itin(itin: dict) -> dict:
                # Compact preview: first day only, first two events of that day.
                try:
                    days_arr = (itin or {}).get("days") or []
                    if isinstance(days_arr, list):
                        # Only include first 2 events of first 1 day for preview
                        preview_days = []
                        for di, d in enumerate(days_arr[:1]):
                            evs = (d or {}).get("events") or []
                            preview_days.append({"events": evs[:2]})
                    else:
                        preview_days = []
                    return {
                        "id": itin.get("id"),
                        "name": itin.get("name"),
                        "daysCount": len(days_arr) if isinstance(days_arr, list) else 0,
                        "previewDays": preview_days,
                    }
                except Exception:
                    return {}
            entry = {
                "ts": int(time.time()),
                "sid": sid,
                "userId": user_id,
                "query": (query or "")[:200],
                "days": days,
                "prefKeys": list((preferences or {}).keys()) if isinstance(preferences, dict) else [],
                "result": _summ_itin(shaped),
            }
            logs.append(entry)
            if len(logs) > MAX_AI_GEN_LOG_ITEMS:
                # Ring-buffer-style cap on stored generation logs.
                logs = logs[-MAX_AI_GEN_LOG_ITEMS:]
            set_kv(s, AI_GEN_LOGS_KEY, logs)
    except Exception as e:
        logger.debug("store ai gen log failed: %s", e)
    return shaped


@app.post("/debug/echo")
async def debug_echo(request: Request):
    """Echo the raw request body plus its JSON parse (or the parse error) for debugging."""
    raw = await request.body()
    content_type = request.headers.get("content-type")
    try:
        text = raw.decode("utf-8")
    except Exception:
        # Keep going on invalid UTF-8; replace undecodable bytes.
        text = raw.decode("utf-8", errors="replace")
    try:
        parsed = json.loads(text)
    except Exception as exc:
        parsed = {"error": str(exc)}
    logger.info("/debug/echo ct=%s bytes=%s", content_type, len(raw))
    return {"content_type": content_type, "raw": text, "json": parsed}


if __name__ == "__main__":
    import uvicorn

    # Dev entrypoint: auto-reloading server on port 9000 (run under a process
    # manager / without reload in production).
    uvicorn.run("app.main:app", host="0.0.0.0", port=9000, reload=True)
