#!/usr/bin/env python3
"""
tasks_cli.py

Utility helpers for maintaining task catalogues (REGISTER_TASK / REGISTER_META)
used by the project Makefiles.
"""

from __future__ import annotations

import argparse
import glob
import re
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Set


STATUS_ALLOWED = {"todo", "wip", "done", "blocked", "waiting", "optional"}


REGISTER_TASK_PATTERN = re.compile(
    r"""
    ^\$\(
    \s*call\s+
    REGISTER_TASK
    ,
    (?P<id>[^,]+)
    ,
    (?P<status>[^,]+)
    ,
    (?P<rest>.+)
    $
    """,
    re.VERBOSE,
)

REGISTER_META_PATTERN = re.compile(
    r"""
    ^\$\(
    \s*call\s+
    REGISTER_META
    ,
    (?P<id>[^,]+)
    ,
    (?P<owner>[^,]*)
    ,
    (?P<due>[^,]*)
    ,
    (?P<prio>[^,]*)
    ,
    (?P<tags>[^,]*)
    ,
    (?P<crit>[^,]*)
    ,
    (?P<evid>[^,]*)
    \)
    \s*$
    """,
    re.VERBOSE,
)

REGISTER_SUCCESS_PATTERN = re.compile(
    r"""
    ^\$\(
    \s*call\s+
    REGISTER_SUCCESS
    ,
    (?P<id>[^,]+)
    ,
    (?P<path>[^)]*)
    \)
    \s*$
    """,
    re.VERBOSE,
)


class TaskUpdateError(RuntimeError):
    """Recoverable task-catalogue failure; main() turns it into exit code 1."""


def discover_task_files(root: Path) -> List[Path]:
    """Return catalogue files below *root*, or *root* itself when it is a file.

    Raises TaskUpdateError when *root* does not exist.
    """
    if root.is_file():
        return [root]
    if not root.exists():
        raise TaskUpdateError(f"Path not found: {root}")
    candidates = (entry for entry in root.rglob("*.mk") if entry.is_file())
    return sorted(candidates)


def collect_tasks(files: Iterable[Path]) -> Dict[str, Dict[str, str]]:
    """Parse REGISTER_TASK / REGISTER_META / REGISTER_SUCCESS entries.

    Returns a mapping of task id -> field dictionary.  Each file is read from
    disk exactly once and cached, so *files* may be a one-shot iterable (the
    previous implementation iterated it three times and re-read every file on
    each pass, yielding empty META/SUCCESS data for generators).  META and
    SUCCESS entries are applied in separate passes so they may appear before
    their REGISTER_TASK line in file order; entries for unknown task ids are
    ignored.
    """
    # Cache (path, lines) once: supports one-shot iterables, hits disk once.
    cached = [(path, path.read_text().splitlines()) for path in files]

    tasks: Dict[str, Dict[str, str]] = {}
    for path, file_lines in cached:
        for idx, line in enumerate(file_lines):
            match = REGISTER_TASK_PATTERN.match(line.strip())
            if not match:
                continue
            rest = match.group("rest")
            # REGISTER_TASK_PATTERN does not consume the closing ")", so it
            # trails on ``rest``; strip it so it cannot pollute the last field
            # (ref/title/stream depending on how many fields were supplied).
            if rest.endswith(")"):
                rest = rest[:-1]
            parts = rest.split(",", 3)
            task_id = match.group("id")
            tasks[task_id] = {
                "id": task_id,
                "status": match.group("status"),
                "class": parts[0] if parts else "",
                "stream": parts[1] if len(parts) > 1 else "",
                "title": parts[2] if len(parts) > 2 else "",
                "ref": parts[3] if len(parts) > 3 else "",
                "owner": "",
                "due": "",
                "prio": "",
                "tags": "",
                "crit": "",
                "evid": "",
                "success": "",
                "file": path,
                "line": idx,
            }

    for _, file_lines in cached:
        for line in file_lines:
            match = REGISTER_META_PATTERN.match(line.strip())
            if not match:
                continue
            task = tasks.get(match.group("id"))
            if task is None:
                continue
            task.update(
                {
                    "owner": match.group("owner"),
                    "due": match.group("due"),
                    "prio": match.group("prio"),
                    "tags": match.group("tags"),
                    "crit": match.group("crit"),
                    "evid": match.group("evid"),
                }
            )

    for _, file_lines in cached:
        for line in file_lines:
            match = REGISTER_SUCCESS_PATTERN.match(line.strip())
            if not match:
                continue
            task = tasks.get(match.group("id"))
            if task is not None:
                task["success"] = match.group("path").strip()

    return tasks


def split_tokens(raw: str) -> List[str]:
    """Split *raw* on any whitespace run, never yielding empty tokens.

    ``str.split()`` with no separator already discards empty strings, so the
    previous ``if token`` filter was redundant.
    """
    return raw.split()


def expand_evidence_pattern(bases: Iterable[Path], pattern: str) -> List[Path]:
    """Expand a glob *pattern* into resolved paths.

    An absolute pattern is globbed as-is; a relative one is tried under every
    base directory.  The result preserves first-seen order with duplicates
    removed.
    """
    if Path(pattern).is_absolute():
        search_roots = [str(Path(pattern))]
    else:
        search_roots = [str((base / pattern).resolve()) for base in bases]

    # Dict keys keep insertion order, giving ordered de-duplication for free.
    seen: Dict[Path, None] = {}
    for search_root in search_roots:
        for hit in glob.glob(search_root, recursive=True):
            seen.setdefault(Path(hit).resolve(), None)
    return list(seen)


def update_status(
    files: Iterable[Path],
    task_id: str,
    status: Optional[str],
    owner: Optional[str],
    due: Optional[str],
    prio: Optional[str],
    tags: Optional[str],
    crit: Optional[str],
    evid: Optional[str],
) -> Path:
    """Rewrite *task_id* in place inside whichever catalogue file defines it.

    When *status* is truthy it replaces the REGISTER_TASK status field.  Any
    metadata argument that is not None is merged into the task's
    REGISTER_META line; if no such line exists, one is inserted directly
    below the REGISTER_TASK line.  The file is written back only when
    something actually changed.

    Returns:
        The path of the file containing the task.

    Raises:
        TaskUpdateError: if the task is not found, or *status* is not one of
            STATUS_ALLOWED.
    """
    # Pass 1: locate the file (and its full line list, newlines kept) that
    # holds the REGISTER_TASK line for task_id.
    target_file: Optional[Path] = None
    lines: List[str] = []
    for path in files:
        raw_lines = path.read_text().splitlines(keepends=True)
        for idx, line in enumerate(raw_lines):
            match = REGISTER_TASK_PATTERN.match(line.strip())
            if not match:
                continue
            if match.group("id") != task_id:
                continue
            target_file = path
            lines = raw_lines
            break
        if target_file is not None:
            break

    if target_file is None:
        raise TaskUpdateError(f"Task '{task_id}' not found under {files}")

    # Pass 2: rewrite the REGISTER_TASK line with the new status, if given.
    changed = False
    for idx, line in enumerate(lines):
        match = REGISTER_TASK_PATTERN.match(line.strip())
        if not match or match.group("id") != task_id:
            continue
        if status:
            if status not in STATUS_ALLOWED:
                raise TaskUpdateError(
                    f"Invalid status '{status}'. Allowed values: {sorted(STATUS_ALLOWED)}"
                )
            # 'rest' still carries the original trailing ")" (the task regex
            # does not consume it), so no closing paren is appended here.
            # NOTE(review): the rebuilt line starts at column 0 — any leading
            # indentation on the original line is dropped; confirm catalogue
            # lines are never indented.
            new_line = (
                f"$(call REGISTER_TASK,{task_id},{status},{match.group('rest')}\n"
            )
            if new_line != line:
                lines[idx] = new_line
                changed = True
        break

    # Only arguments explicitly provided (not None) participate in the merge;
    # empty strings are valid values and clear the field.
    meta_updates = {
        "owner": owner,
        "due": due,
        "prio": prio,
        "tags": tags,
        "crit": crit,
        "evid": evid,
    }
    meta_updates = {k: v for k, v in meta_updates.items() if v is not None}

    if meta_updates:
        meta_found = False
        for idx, line in enumerate(lines):
            match = REGISTER_META_PATTERN.match(line.strip())
            if not match or match.group("id") != task_id:
                continue
            meta_found = True
            # Keep existing values for fields the caller did not supply.
            updated = {
                "owner": owner if owner is not None else match.group("owner"),
                "due": due if due is not None else match.group("due"),
                "prio": prio if prio is not None else match.group("prio"),
                "tags": tags if tags is not None else match.group("tags"),
                "crit": crit if crit is not None else match.group("crit"),
                "evid": evid if evid is not None else match.group("evid"),
            }
            lines[idx] = (
                "$(call REGISTER_META,"
                f"{task_id},{updated['owner']},{updated['due']},"
                f"{updated['prio']},{updated['tags']},{updated['crit']},"
                f"{updated['evid']})\n"
            )
            changed = True
            break
        if not meta_found:
            # No META line yet: synthesise one from the supplied values.
            insertion_line = (
                "$(call REGISTER_META,"
                f"{task_id},{owner or ''},{due or ''},"
                f"{prio or ''},{tags or ''},{crit or ''},{evid or ''})\n"
            )
            # append immediately after REGISTER_TASK
            for idx, line in enumerate(lines):
                match = REGISTER_TASK_PATTERN.match(line.strip())
                if not match or match.group("id") != task_id:
                    continue
                lines.insert(idx + 1, insertion_line)
                changed = True
                break

    if changed:
        target_file.write_text("".join(lines))

    return target_file


def cmd_set(args: argparse.Namespace) -> None:
    """Handle the ``set`` subcommand: update one task's status/metadata."""
    catalogue_files = discover_task_files(Path(args.root))
    touched = update_status(
        files=catalogue_files,
        task_id=args.task,
        status=args.status,
        owner=args.owner,
        due=args.due,
        prio=args.prio,
        tags=args.tags,
        crit=args.crit,
        evid=args.evid,
    )
    status_note = f"(status -> {args.status}) " if args.status else ""
    print(f"[tasks-cli] updated {args.task} {status_note}({touched})")


def cmd_order(args: argparse.Namespace) -> None:
    """Handle ``order``: print pending task IDs, highest priority first."""
    tasks = collect_tasks(discover_task_files(Path(args.root)))
    wanted = {token.strip() for token in args.statuses.split(",") if token.strip()}
    if not wanted:
        wanted = set(STATUS_ALLOWED)

    class_rank = {"essential": 0, "optional": 1}
    prio_rank = {"P0": 0, "P1": 1, "P2": 2, "P3": 3, "P4": 4, "P5": 5}

    def sort_key(task):
        # Rank by class, then priority bucket, then due date, then id;
        # missing priority/due sort last.
        due = task["due"]
        return (
            class_rank.get(task["class"], 1),
            prio_rank.get(task["prio"], 9),
            int(due.replace("-", "")) if due else 99999999,
            task["id"],
        )

    selected = sorted(
        (task for task in tasks.values() if task["status"] in wanted),
        key=sort_key,
    )
    print(" ".join(task["id"] for task in selected))


def cmd_gate(args: argparse.Namespace) -> None:
    """Handle ``gate``: verify evidence artefacts exist for matching tasks.

    Tasks are filtered by status, stream, explicit id list and required tags;
    every whitespace-separated glob in a matching task's EVID metadata must
    then resolve to at least one file under the search roots.  Prints one
    "[gate] ERROR" line per failure and raises TaskUpdateError if any task
    fails.
    """
    root = Path(args.root)
    repo = Path(args.repo).resolve()
    # Extra roots searched for evidence after the primary repo.
    extra_repos = [
        Path(token).resolve()
        for token in split_tokens(args.extra_repos)
        if token.strip()
    ]
    # De-duplicate while keeping the primary repo first.
    search_roots: List[Path] = []
    for candidate in [repo, *extra_repos]:
        if candidate not in search_roots:
            search_roots.append(candidate)

    files = discover_task_files(root)
    tasks = collect_tasks(files)

    # Empty --statuses falls back to validating "done" tasks only.
    statuses: Set[str] = {s.strip() for s in args.statuses.split(",") if s.strip()}
    if not statuses:
        statuses = {"done"}

    required_tags = set(split_tokens(args.tags))
    permitted_streams = set(split_tokens(args.streams))
    explicit_tasks = set(split_tokens(args.tasks))
    all_ids = set(tasks.keys())

    failures: List[str] = []
    validated: List[tuple[str, int]] = []

    for task in tasks.values():
        # Skip tasks excluded by any active filter; empty filters pass all.
        if statuses and task["status"] not in statuses:
            continue
        if permitted_streams and task["stream"] not in permitted_streams:
            continue
        if explicit_tasks and task["id"] not in explicit_tasks:
            continue

        # The task must carry every required tag (subset test).
        task_tags = set(split_tokens(task.get("tags", "")))
        if required_tags and not required_tags <= task_tags:
            continue

        evidence_raw = task.get("evid", "").strip()
        if not evidence_raw:
            failures.append(f"{task['id']}: missing EVID metadata")
            continue

        # Each glob in EVID must match at least one file somewhere.
        missing_patterns: List[str] = []
        artefact_count = 0
        for pattern in split_tokens(evidence_raw):
            matches = expand_evidence_pattern(search_roots, pattern)
            if not matches:
                missing_patterns.append(pattern)
            else:
                artefact_count += len(matches)

        if missing_patterns:
            failures.append(
                f"{task['id']}: missing evidence files for {', '.join(missing_patterns)}"
            )
        else:
            validated.append((task["id"], artefact_count))

    # Explicitly requested task ids that are absent from the catalogue are
    # themselves a failure, even when no matching task was validated.
    if explicit_tasks:
        missing_ids = sorted(explicit_tasks - all_ids)
        if missing_ids:
            failures.append(
                "Tasks not found in catalogue: " + ", ".join(missing_ids)
            )

    if not validated and not failures:
        print("[gate] no tasks matched the provided filters; nothing to verify")
        return

    if failures:
        for line in failures:
            print(f"[gate] ERROR: {line}")
        raise TaskUpdateError("evidence gate failed")

    print(f"[gate] OK ({len(validated)} tasks validated)")
    for task_id, artefacts in validated:
        print(f"[gate]   {task_id}: {artefacts} artefact(s)")


def cmd_autodone(args: argparse.Namespace) -> None:
    """Handle ``autodone``: mark queued tasks done while indicators exist.

    Repeatedly selects the highest-ranked task (class, then priority, then
    due date, then id) whose status is not "done"; if that task's success
    indicator file exists the task is flipped to done (catalogue rewritten
    via update_status) and the walk continues.  The walk stops at the first
    task whose indicator is missing, so tasks behind it are never examined.
    """
    root = Path(args.root)
    repo = Path(args.repo).resolve()
    # Extra roots searched for success indicators after the primary repo.
    extra_roots = [
        Path(token).resolve()
        for token in split_tokens(args.extra_success_roots)
        if token.strip()
    ]
    success_default = Path(args.success_dir)

    files = discover_task_files(root)
    tasks = collect_tasks(files)

    # Empty --statuses falls back to considering every known status.
    statuses = {s.strip() for s in args.statuses.split(",") if s.strip()} or set(STATUS_ALLOWED)

    class_rank = {"essential": 0, "optional": 1}
    prio_rank = {f"P{i}": i for i in range(10)}

    def due_rank(value: str) -> int:
        # Missing due dates sort last.
        return int(value.replace("-", "")) if value else 99999999

    # De-duplicate while keeping the primary repo first.
    success_roots: List[Path] = []
    for candidate in [repo, *extra_roots]:
        if candidate not in success_roots:
            success_roots.append(candidate)
    # NOTE(review): dead branch — the loop above always appends ``repo`` to
    # an initially empty list, so success_roots is never empty here.
    if not success_roots:
        success_roots.append(repo)

    def success_path(task: Dict[str, str]) -> Path:
        # REGISTER_SUCCESS override wins; otherwise default to
        # <success_dir>/<task id>.md.  Relative paths resolve against the
        # first root where the file exists, else against the primary root.
        override = task.get("success", "").strip()
        path = Path(override) if override else success_default / f"{task['id']}.md"
        if path.is_absolute():
            return path
        for base in success_roots:
            candidate = (base / path).resolve()
            if candidate.exists():
                return candidate
        return (success_roots[0] / path).resolve()

    changed = 0

    while True:
        # Re-rank on every iteration since statuses change as we go.
        pending = [task for task in tasks.values() if task.get("status", "") in statuses]
        pending.sort(
            key=lambda task: (
                class_rank.get(task.get("class", "optional"), 1),
                prio_rank.get(task.get("prio", "P9"), 9),
                due_rank(task.get("due", "")),
                task["id"],
            )
        )

        next_task = None
        for task in pending:
            if task.get("status", "").strip() != "done":
                next_task = task
                break

        if not next_task:
            break

        indicator = success_path(next_task)
        if not indicator.exists():
            break

        # Persist the status change, then mirror it in the in-memory dict so
        # the next iteration skips this task.
        update_status(
            files=files,
            task_id=next_task["id"],
            status="done",
            owner=None,
            due=None,
            prio=None,
            tags=None,
            crit=None,
            evid=None,
        )
        tasks[next_task["id"]]["status"] = "done"
        print(f"[autodone] {next_task['id']} -> done (indicator {indicator})")
        changed += 1

    if not changed:
        print("[autodone] no changes detected")


def build_parser() -> argparse.ArgumentParser:
    """Build the argument parser with the set/order/gate/autodone subcommands."""
    parser = argparse.ArgumentParser(description="Task catalogue maintenance helpers")
    commands = parser.add_subparsers(dest="command", required=True)

    # -- set: update a single task ------------------------------------------
    cmd = commands.add_parser("set", help="Update task status / metadata")
    cmd.add_argument("--root", default=".", type=Path, help="Directory to scan")
    cmd.add_argument("--task", required=True, help="Task identifier (e.g. FLEX_P51)")
    cmd.add_argument(
        "--status",
        choices=sorted(STATUS_ALLOWED),
        help="New status value (todo, wip, done, blocked, waiting, optional)",
    )
    cmd.add_argument("--owner", help="Owner shorthand")
    cmd.add_argument("--due", help="Due date YYYY-MM-DD")
    cmd.add_argument("--prio", help="Priority bucket (e.g. P1)")
    cmd.add_argument("--tags", help="Space-separated tags")
    cmd.add_argument("--crit", help="Acceptance criteria (short string)")
    cmd.add_argument("--evid", help="Evidence path/glob")

    # -- order: print the sorted pending queue ------------------------------
    cmd = commands.add_parser(
        "order", help="Return pending task IDs sorted by priority and due date"
    )
    cmd.add_argument("--root", default=".", type=Path)
    cmd.add_argument(
        "--statuses",
        default="todo,wip,waiting,blocked,optional",
        help="Comma-separated statuses to include",
    )

    # -- gate: evidence validation ------------------------------------------
    cmd = commands.add_parser(
        "gate", help="Validate evidence artefacts for matching tasks"
    )
    cmd.add_argument("--root", default=".", type=Path, help="Catalogue directory")
    cmd.add_argument(
        "--repo",
        default=".",
        type=Path,
        help="Repository root used to resolve evidence paths",
    )
    cmd.add_argument(
        "--statuses",
        default="done",
        help="Comma-separated statuses to validate (default: done)",
    )
    cmd.add_argument(
        "--tags",
        default="",
        help="Space-separated tags that must be present on the task",
    )
    cmd.add_argument(
        "--streams",
        default="",
        help="Space-separated stream codes to include (e.g., SP5 SP6)",
    )
    cmd.add_argument(
        "--tasks",
        default="",
        help="Explicit task IDs to validate (space-separated)",
    )
    cmd.add_argument(
        "--extra-repos",
        default="",
        help="Additional repository roots (space-separated) searched for evidence",
    )

    # -- autodone: walk the queue marking tasks done ------------------------
    cmd = commands.add_parser(
        "autodone", help="Mark tasks done when success indicators exist"
    )
    cmd.add_argument("--root", default=".", type=Path, help="Catalogue directory")
    cmd.add_argument("--repo", default=".", type=Path, help="Repository root")
    cmd.add_argument(
        "--success-dir",
        default="logs/success",
        help="Directory containing success indicator logs",
    )
    cmd.add_argument(
        "--statuses",
        default="todo,wip,waiting,blocked,optional",
        help="Comma-separated statuses to consider when walking the queue",
    )
    cmd.add_argument(
        "--extra-success-roots",
        default="",
        help="Additional roots (space-separated) searched for success indicators after --repo",
    )

    return parser


def main(argv: Optional[List[str]] = None) -> None:
    """CLI entry point: parse arguments and dispatch to the subcommand."""
    parser = build_parser()
    args = parser.parse_args(argv)
    handlers = {
        "set": cmd_set,
        "order": cmd_order,
        "gate": cmd_gate,
        "autodone": cmd_autodone,
    }
    handler = handlers.get(args.command)
    try:
        if handler is None:
            parser.error(f"Unknown command {args.command}")
        else:
            handler(args)
    except TaskUpdateError as exc:
        parser.exit(1, f"[tasks-cli] error: {exc}\n")


if __name__ == "__main__":
    main()
