from __future__ import annotations

import sqlite3
from dataclasses import dataclass
from typing import Optional, List, Tuple
import json
import time
from pathlib import Path

from .validator import validate_payload


SCHEMA = """
create table if not exists mail_queue (
  id integer primary key autoincrement,
  payload text not null,
  status text not null default 'pending', -- pending|processing|done|failed
  tries integer not null default 0,
  max_retries integer not null default 3,
  error text,
  idempotency_key text,
  priority integer not null default 0,
  created_at integer not null,
  updated_at integer not null
);

create unique index if not exists idx_mail_queue_idem on mail_queue(idempotency_key);

create table if not exists mail_receipt (
  id integer primary key autoincrement,
  queue_id integer not null,
  provider_id text,
  status text not null,
  created_at integer not null
);

create table if not exists mail_dlq (
  id integer primary key autoincrement,
  original_queue_id integer,
  payload text not null,
  error text,
  created_at integer not null
);

create table if not exists mail_suppression (
  email text primary key,
  reason text,
  created_at integer not null
);
"""


@dataclass
class QueueItem:
    """In-memory view of one row claimed from the mail_queue table."""

    id: int  # primary key of the mail_queue row
    payload: dict  # JSON-decoded message payload
    tries: int  # delivery attempts recorded so far
    max_retries: int  # retry budget stored on the row (enforcement is the caller's job)
    priority: int  # higher values are fetched first (order by priority desc)
    idempotency_key: str | None  # optional dedup key, unique when present


def now_ts() -> int:
    return int(time.time())


class MailQueue:
    """SQLite-backed mail queue with retries, delivery receipts, a
    dead-letter queue (DLQ) and a recipient suppression list.

    A single connection is shared across calls (``check_same_thread=False``).
    NOTE(review): there is no locking here, so callers must serialize
    access themselves if one instance is used from multiple threads.
    """

    def __init__(self, db_path: str) -> None:
        """Open (or create) the database at *db_path* and apply the schema."""
        self._db_path = db_path
        # Auto-create the parent directory so connect() succeeds on a
        # fresh deployment.
        db_dir = Path(self._db_path).parent
        if db_dir != Path("."):
            db_dir.mkdir(parents=True, exist_ok=True)
        self._conn = sqlite3.connect(self._db_path, check_same_thread=False)
        # WAL lets readers proceed while a writer holds the database.
        self._conn.execute("PRAGMA journal_mode=WAL;")
        # SCHEMA uses "if not exists" throughout, so this is idempotent.
        self._conn.executescript(SCHEMA)
        self._conn.commit()

    def enqueue(self, payload: dict, max_retries: int = 3, *, idempotency_key: str | None = None, priority: int = 0) -> int:
        """Insert *payload* as a pending item and return its queue id.

        If *idempotency_key* matches an existing row, that row's id is
        returned instead of inserting a duplicate.

        Raises:
            sqlite3.IntegrityError: for constraint violations not explained
                by the idempotency key (re-raised after the lookup fails).
            Plus whatever ``validate_payload`` raises for a bad payload.
        """
        # Reject malformed payloads before they ever hit the queue.
        validate_payload(payload)

        cur = self._conn.cursor()
        try:
            cur.execute(
                "insert into mail_queue(payload, status, tries, max_retries, error, idempotency_key, priority, created_at, updated_at) values (?, 'pending', 0, ?, NULL, ?, ?, ?, ?)",
                (json.dumps(payload, ensure_ascii=False), max_retries, idempotency_key, priority, now_ts(), now_ts()),
            )
            self._conn.commit()
            # lastrowid is Optional[int] per the DB-API; after a successful
            # INSERT it is set, so int() keeps the declared return type honest.
            return int(cur.lastrowid)
        except sqlite3.IntegrityError:
            # The failed INSERT may have opened an implicit transaction;
            # roll it back before re-querying.
            self._conn.rollback()
            if idempotency_key:
                # Unique-index hit: treat as an idempotent re-submit and
                # hand back the existing row's id.
                cur.execute("select id from mail_queue where idempotency_key=?", (idempotency_key,))
                row = cur.fetchone()
                if row:
                    return int(row[0])
            raise

    def fetch_batch(self, batch_size: int) -> List[QueueItem]:
        """Claim up to *batch_size* pending items and return them.

        Items are ordered by priority (descending) then insertion order,
        and are flipped to 'processing' so a subsequent fetch on this
        connection does not hand them out again.
        """
        cur = self._conn.cursor()
        cur.execute(
            "select id, payload, tries, max_retries, priority, idempotency_key from mail_queue where status='pending' order by priority desc, id asc limit ?",
            (batch_size,),
        )
        rows = cur.fetchall()
        items: List[QueueItem] = []
        for rid, payload, tries, max_retries, prio, idem in rows:
            items.append(QueueItem(id=rid, payload=json.loads(payload), tries=tries, max_retries=max_retries, priority=prio, idempotency_key=idem))
        # Mark the claimed rows as processing.
        if rows:
            ids = [r[0] for r in rows]
            qmarks = ",".join(["?"] * len(ids))
            cur.execute(f"update mail_queue set status='processing', updated_at=? where id in ({qmarks})", [now_ts(), *ids])
            self._conn.commit()
        return items

    def mark_done(self, qid: int, provider_id: Optional[str]) -> None:
        """Mark item *qid* as delivered and record a 'sent' receipt."""
        cur = self._conn.cursor()
        cur.execute("update mail_queue set status='done', updated_at=? where id=?", (now_ts(), qid))
        cur.execute(
            "insert into mail_receipt(queue_id, provider_id, status, created_at) values (?, ?, 'sent', ?)",
            (qid, provider_id, now_ts()),
        )
        self._conn.commit()

    def mark_failed(self, qid: int, error: str, can_retry: bool) -> None:
        """Record a failed delivery attempt for item *qid*.

        With ``can_retry=True`` the item goes back to 'pending' for another
        attempt; otherwise it is marked 'failed' and its payload is copied
        into the dead-letter queue. The retry-budget decision belongs to
        the caller (via *can_retry*); ``tries`` is incremented either way.
        """
        cur = self._conn.cursor()
        if can_retry:
            cur.execute(
                "update mail_queue set status='pending', tries=tries+1, error=?, updated_at=? where id=?",
                (error, now_ts(), qid),
            )
        else:
            # Permanent failure: mark the row failed and copy the payload
            # into the DLQ for later inspection/requeue.
            cur.execute("select payload from mail_queue where id=?", (qid,))
            row = cur.fetchone()
            payload = row[0] if row else None
            cur.execute(
                "update mail_queue set status='failed', tries=tries+1, error=?, updated_at=? where id=?",
                (error, now_ts(), qid),
            )
            if payload is not None:
                cur.execute(
                    "insert into mail_dlq(original_queue_id, payload, error, created_at) values (?, ?, ?, ?)",
                    (qid, payload, error, now_ts()),
                )
        self._conn.commit()

    # ---- Query helpers ----
    def list_recent(self, limit: int = 50):
        """Return the newest *limit* queue rows (payload truncated to 400 chars)."""
        cur = self._conn.cursor()
        cur.execute(
            "select id, status, tries, max_retries, substr(payload,1,400), created_at, updated_at from mail_queue order by id desc limit ?",
            (limit,),
        )
        return cur.fetchall()

    def list_by_status(self, status: str, limit: int = 50):
        """Return up to *limit* rows with the given status, newest first."""
        cur = self._conn.cursor()
        cur.execute(
            "select id, status, tries, max_retries, substr(payload,1,400), created_at, updated_at from mail_queue where status=? order by id desc limit ?",
            (status, limit),
        )
        return cur.fetchall()

    def search_by_recipient(self, email: str, limit: int = 50):
        """Naive recipient search via LIKE on the JSON payload text.

        Since payload is stored as JSON text this is a simple substring
        match on the quoted address; production use might want FTS or an
        external log instead.
        """
        cur = self._conn.cursor()
        pattern = f'%"{email}"%'
        cur.execute(
            "select id, status, tries, max_retries, substr(payload,1,400), created_at, updated_at from mail_queue where payload like ? order by id desc limit ?",
            (pattern, limit),
        )
        return cur.fetchall()

    def list_receipts(self, limit: int = 100):
        """Return the newest *limit* receipt rows."""
        cur = self._conn.cursor()
        cur.execute(
            "select r.id, r.queue_id, r.provider_id, r.status, r.created_at from mail_receipt r order by r.id desc limit ?",
            (limit,),
        )
        return cur.fetchall()

    # ---- DLQ management ----
    def list_dlq(self, limit: int = 100):
        """Return the newest *limit* DLQ rows (payload truncated to 400 chars)."""
        cur = self._conn.cursor()
        cur.execute("select id, original_queue_id, substr(payload,1,400), error, created_at from mail_dlq order by id desc limit ?", (limit,))
        return cur.fetchall()

    def requeue_from_dlq(self, dlq_id: int, max_retries: int | None = None, priority: int | None = None) -> int:
        """Re-enqueue a DLQ entry's payload and return the new queue id.

        Raises:
            ValueError: if *dlq_id* does not exist.
        """
        cur = self._conn.cursor()
        cur.execute("select payload from mail_dlq where id=?", (dlq_id,))
        row = cur.fetchone()
        if not row:
            raise ValueError("DLQ id 不存在")
        payload = json.loads(row[0])
        # "is not None" so an explicit 0 is honored (the previous "or"
        # form silently replaced 0 with the default).
        return self.enqueue(
            payload,
            max_retries=max_retries if max_retries is not None else 3,
            priority=priority if priority is not None else 0,
        )

    # ---- Suppression list ----
    def add_suppression(self, email: str, reason: str = "manual") -> None:
        """Add (or refresh) *email* on the suppression list; stored lowercased."""
        cur = self._conn.cursor()
        cur.execute(
            "insert or replace into mail_suppression(email, reason, created_at) values (?, ?, ?)",
            (email.lower(), reason, now_ts()),
        )
        self._conn.commit()

    def remove_suppression(self, email: str) -> None:
        """Remove *email* from the suppression list (lowercased lookup)."""
        cur = self._conn.cursor()
        cur.execute("delete from mail_suppression where email=?", (email.lower(),))
        self._conn.commit()

    def list_suppressions(self, limit: int = 100):
        """Return up to *limit* suppression rows, newest first."""
        cur = self._conn.cursor()
        cur.execute("select email, reason, created_at from mail_suppression order by created_at desc limit ?", (limit,))
        return cur.fetchall()

    def get_suppressed_set(self) -> set[str]:
        """Return all suppressed addresses as a set for O(1) membership tests."""
        cur = self._conn.cursor()
        cur.execute("select email from mail_suppression")
        return {r[0] for r in cur.fetchall()}


