
"""Utility helpers for reading application log files for the WebUI."""

from __future__ import annotations

import asyncio
import logging
import os
import time
from datetime import datetime
from pathlib import Path
from typing import AsyncGenerator, Dict, List, Optional, Tuple, Union

from ..models import LogEntry, LogFileInfo, LogLevel


# Environment variable that overrides the log directory location.
LOG_DIR_ENV = "ALMR_LOG_DIR"
# Fallback subdirectory (relative to the current working directory).
DEFAULT_LOG_SUBDIR = "log"
# Last-resort per-user data location.
USER_DATA_LOG_DIR = Path.home() / ".alist-mikananirss" / "log"
# How many of the newest *.log files are scanned for cross-file queries.
DEFAULT_MAX_FILES = 4
# Cap on the number of entries aggregated across files.
DEFAULT_MAX_ENTRIES = 1200
# Hard upper bound for a single page / search request.
DEFAULT_PAGE_LIMIT = 500
# Default tail size when bootstrapping a live log stream.
DEFAULT_STREAM_SNAPSHOT_LIMIT = 200
logger = logging.getLogger(__name__)


def resolve_log_dir(candidate: Optional[Union[str, Path]] = None) -> Path:
    """Return an existing log directory, trying several safe defaults.

    Candidates are considered in priority order: the explicit *candidate*
    argument, the ``ALMR_LOG_DIR`` environment variable, ``./log`` under the
    current working directory, and finally the per-user data location. The
    first candidate that can be created (or that already exists as a
    directory) is returned.

    Raises:
        RuntimeError: If none of the candidate paths is usable.
    """
    # Skip falsy values (None / empty string) for the explicit and env inputs.
    candidates: List[Path] = [
        Path(value)
        for value in (candidate, os.environ.get(LOG_DIR_ENV))
        if value
    ]
    candidates.append(Path.cwd() / DEFAULT_LOG_SUBDIR)
    candidates.append(USER_DATA_LOG_DIR)

    for location in candidates:
        try:
            location.mkdir(parents=True, exist_ok=True)
            if location.is_dir():
                return location
        except OSError:
            # Not creatable (permissions, read-only fs, ...) — try the next one.
            continue

    raise RuntimeError("Unable to resolve log directory; check permissions and environment configuration.")


class LogService:
    """Read and parse log files generated by the application.

    All file access funnels through :meth:`_safe_join_log_dir`, which strips
    directory components from requested names and confines lookups to the
    resolved log directory.
    """

    def __init__(self, log_dir: Optional[Union[str, Path]] = None) -> None:
        # May raise RuntimeError if no usable log directory can be resolved.
        self.log_dir = resolve_log_dir(log_dir)

    async def get_log_files(self) -> List[LogFileInfo]:
        """Return metadata for every ``*.log`` file in the log dir, newest first."""
        files: List[LogFileInfo] = []
        # NOTE(review): p.stat() in the sort key would raise if a file vanished
        # between glob and stat — assumed acceptable; confirm if log rotation
        # deletes files concurrently.
        for path in sorted(self.log_dir.glob("*.log"), key=lambda p: p.stat().st_mtime, reverse=True):
            if not path.is_file():
                continue
            stat = path.stat()
            files.append(
                LogFileInfo(
                    name=path.name,
                    path=str(path.resolve()),
                    size=stat.st_size,
                    modified_time=datetime.fromtimestamp(stat.st_mtime).isoformat(),
                    log_level=self._probe_primary_level(path),
                )
            )
        return files

    async def get_log_content(
        self,
        file: str,
        limit: int = 100,
        offset: int = 0,
        level: Optional[str] = None,
        search: Optional[str] = None,
    ) -> Dict[str, object]:
        """Return one page of parsed entries from *file*.

        Args:
            file: Log file name (any directory components are discarded).
            limit: Page size; clamped to ``1..DEFAULT_PAGE_LIMIT``.
            offset: Zero-based start index into the filtered entries.
            level: Optional level filter (case-insensitive exact match).
            search: Optional case-insensitive substring filter.

        Returns:
            Dict with ``entries`` (serialized entry dicts), ``total`` (count
            after filtering) and ``has_more`` (pagination flag).

        Raises:
            FileNotFoundError: If the file is missing or escapes the log dir.
        """
        resolved_limit = min(max(limit, 1), DEFAULT_PAGE_LIMIT)
        resolved_offset = max(offset, 0)
        started_at = time.perf_counter()

        # Parsing is blocking file I/O; keep it off the event loop.
        entries = await asyncio.to_thread(self._load_entries, file)
        filtered = self._filter_entries(entries, level=level, search=search)

        slice_end = resolved_offset + resolved_limit
        page_entries = filtered[resolved_offset:slice_end]
        duration_ms = int((time.perf_counter() - started_at) * 1000)
        # Only record slow queries (>300 ms) to keep the log quiet.
        if duration_ms > 300:
            logger.info(
                "log_content_served",
                extra={
                    "action": "logs_query",
                    "file": file,
                    "duration_ms": duration_ms,
                    "limit": resolved_limit,
                    "offset": resolved_offset,
                    "filtered_total": len(filtered),
                },
            )
        return {
            "entries": [entry.model_dump() for entry in page_entries],
            "total": len(filtered),
            "has_more": slice_end < len(filtered),
        }

    async def get_recent_logs(self, limit: int = 10) -> List[Dict[str, object]]:
        """Return the *limit* newest entries across the most recent log files."""
        resolved_limit = min(max(limit, 1), DEFAULT_PAGE_LIMIT)
        entries = await asyncio.to_thread(self._collect_all_entries, DEFAULT_MAX_FILES, DEFAULT_MAX_ENTRIES)
        recent = sorted(entries, key=self._entry_timestamp, reverse=True)[:resolved_limit]
        return [entry.model_dump() for entry in recent]

    async def search_logs(
        self,
        query: str,
        level: Optional[str] = None,
        limit: int = 100,
    ) -> Dict[str, object]:
        """Search recent log files for *query*, newest matches first.

        Args:
            query: Case-insensitive substring to match against message,
                level, and module fields.
            level: Optional level filter (case-insensitive exact match).
            limit: Maximum entries returned; clamped to ``1..DEFAULT_PAGE_LIMIT``.

        Returns:
            Dict with ``entries``, ``total`` (all matches) and ``has_more``.
        """
        resolved_limit = min(max(limit, 1), DEFAULT_PAGE_LIMIT)
        started_at = time.perf_counter()
        entries = await asyncio.to_thread(self._collect_all_entries, DEFAULT_MAX_FILES, DEFAULT_MAX_ENTRIES)
        filtered = self._filter_entries(entries, level=level, search=query)
        filtered = sorted(filtered, key=self._entry_timestamp, reverse=True)
        limited = filtered[:resolved_limit]
        duration_ms = int((time.perf_counter() - started_at) * 1000)
        # Only record slow searches (>300 ms).
        if duration_ms > 300:
            logger.info(
                "logs_search_completed",
                extra={
                    "action": "logs_search",
                    "duration_ms": duration_ms,
                    "query": query,
                    "level": level,
                    "matched": len(filtered),
                    "returned": len(limited),
                },
            )
        return {
            "entries": [entry.model_dump() for entry in limited],
            "total": len(filtered),
            "has_more": len(filtered) > len(limited),
        }

    async def get_tail_entries(
        self,
        file: str,
        limit: int = DEFAULT_STREAM_SNAPSHOT_LIMIT,
        level: Optional[str] = None,
        search: Optional[str] = None,
    ) -> Dict[str, object]:
        """Return the most recent log entries for bootstrapping live streams.

        Unlike :meth:`get_log_content`, the slice is taken from the *end* of
        the filtered entries, and the limit may exceed ``DEFAULT_PAGE_LIMIT``
        (up to ``DEFAULT_MAX_ENTRIES``).
        """
        resolved_limit = min(max(limit, 1), max(DEFAULT_PAGE_LIMIT, DEFAULT_MAX_ENTRIES))
        entries = await asyncio.to_thread(self._load_entries, file)
        filtered = self._filter_entries(entries, level=level, search=search)
        tail = filtered[-resolved_limit:]
        return {
            "entries": [entry.model_dump() for entry in tail],
            "total": len(filtered),
            "has_more": len(filtered) > len(tail),
        }

    async def get_log_file_path(self, file_name: str) -> Optional[Path]:
        """Return the on-disk path for *file_name*, or None if it is absent."""
        path = self._safe_join_log_dir(file_name)
        return path if path.exists() and path.is_file() else None

    def _get_log_file_path(self, file_name: str) -> Path:
        # Synchronous variant: sanitised path, no existence check.
        return self._safe_join_log_dir(file_name)

    def _safe_join_log_dir(self, file_name: str) -> Path:
        """Join *file_name* onto the log dir, rejecting paths that escape it.

        ``Path(file_name).name`` discards any directory components, and the
        resolved result is verified to still live inside the log directory
        (symlinks could otherwise point elsewhere).

        Raises:
            FileNotFoundError: If the resolved path escapes the log directory.
        """
        safe_name = Path(file_name).name
        path = (self.log_dir / safe_name).resolve()
        try:
            log_root = self.log_dir.resolve()
        except FileNotFoundError:  # pragma: no cover - unexpected deletion
            log_root = self.log_dir
        if log_root not in path.parents and path != log_root:
            raise FileNotFoundError(f"Log file '{file_name}' not found in log directory.")
        return path

    async def stream_log_entries(
        self,
        file: str,
        poll_interval: float = 1.0,
        start_at_end: bool = True,
        level: Optional[str] = None,
        search: Optional[str] = None,
    ) -> AsyncGenerator[LogEntry, None]:
        """Yield log entries as they are appended to the requested log file.

        Polls the file every *poll_interval* seconds. Handles rotation (file
        disappears → re-resolve and restart from offset 0) and truncation
        (file shrinks → restart from offset 0). Entries failing the optional
        level/search filters are skipped.

        Raises:
            FileNotFoundError: If the file does not exist when streaming starts.
        """
        path = self._get_log_file_path(file)
        if not path.exists():
            raise FileNotFoundError(file)

        # start_at_end=True skips historical content and only streams new lines.
        position = path.stat().st_size if start_at_end else 0

        try:
            while True:
                if not path.exists():
                    # File was rotated/removed; wait, then start over at the top.
                    await asyncio.sleep(poll_interval)
                    path = self._get_log_file_path(file)
                    position = 0
                    continue

                size = path.stat().st_size
                if size < position:
                    # File shrank (truncated/rotated in place): reread from start.
                    position = 0

                # Blocking read runs in a worker thread.
                lines, position = await asyncio.to_thread(
                    self._read_lines_from_position,
                    path,
                    position,
                )

                for line in lines:
                    parsed = self._parse_log_line(line)
                    if parsed and self.matches_filters(parsed, level=level, search=search):
                        yield parsed

                await asyncio.sleep(poll_interval)
        except asyncio.CancelledError:  # pragma: no cover - cooperative cancellation
            raise
        except GeneratorExit:  # pragma: no cover - generator closed by caller
            return

    def _read_lines_from_position(self, path: Path, position: int) -> Tuple[List[str], int]:
        """Read new lines from the file starting at the provided byte offset.

        Returns the stripped lines plus the new byte offset to resume from.
        Undecodable bytes are dropped (``errors="ignore"``).
        """
        lines: List[str] = []
        new_position = position

        with path.open("r", encoding="utf-8", errors="ignore") as handle:
            handle.seek(position)
            for raw_line in handle:
                lines.append(raw_line.rstrip("\n"))
            new_position = handle.tell()

        return lines, new_position

    def matches_filters(
        self,
        entry: LogEntry,
        level: Optional[str] = None,
        search: Optional[str] = None,
    ) -> bool:
        """Public helper to test whether an entry satisfies level/search filters."""
        # normalised=False: the private helper upper/lower-cases the inputs itself.
        return self._matches_filters(entry, level, search, normalised=False)

    def _matches_filters(
        self,
        entry: LogEntry,
        level: Optional[str],
        search: Optional[str],
        normalised: bool = False,
    ) -> bool:
        """Return True if *entry* passes the level and search filters.

        Args:
            level: Level to match exactly (case-insensitive).
            search: Substring matched against message, level, and module.
            normalised: When True, *level*/*search* are assumed to already be
                upper-/lower-cased (lets callers normalise once per batch).
        """
        level_upper = level if normalised else level.upper() if level else None
        search_lower = search if normalised else search.lower() if search else None
        if level_upper and (entry.level or "").upper() != level_upper:
            return False
        if search_lower:
            haystacks: List[str] = []
            if entry.message:
                haystacks.append(entry.message.lower())
            if entry.level:
                haystacks.append(entry.level.lower())
            if entry.module:
                haystacks.append(entry.module.lower())
            if search_lower not in " ".join(haystacks):
                return False
        return True

    def _filter_entries(
        self,
        entries: List[LogEntry],
        level: Optional[str] = None,
        search: Optional[str] = None,
    ) -> List[LogEntry]:
        """Filter *entries* by level/search, normalising the inputs only once."""
        level_upper = level.upper() if level else None
        search_lower = search.lower() if search else None
        filtered: List[LogEntry] = []
        for entry in entries:
            if self._matches_filters(entry, level_upper, search_lower, normalised=True):
                filtered.append(entry)
        return filtered

    def _collect_all_entries(self, max_files: int = DEFAULT_MAX_FILES, max_entries: Optional[int] = DEFAULT_MAX_ENTRIES) -> List[LogEntry]:
        """Load entries from the *max_files* newest log files, capped at *max_entries*."""
        entries: List[LogEntry] = []
        log_files = sorted(
            self.log_dir.glob("*.log"),
            key=lambda p: p.stat().st_mtime,
            reverse=True,
        )[:max_files]
        for path in log_files:
            if not path.is_file():
                continue
            entries.extend(self._load_entries_from_path(path))
            if max_entries and len(entries) >= max_entries:
                entries = entries[:max_entries]
                break
        return entries

    def _load_entries(self, file_name: str) -> List[LogEntry]:
        """Load all parseable entries from *file_name* inside the log dir.

        Raises:
            FileNotFoundError: If the file is missing or escapes the log dir.
        """
        path = self._safe_join_log_dir(file_name)
        if not path.exists() or not path.is_file():
            raise FileNotFoundError(f"Log file '{file_name}' not found in {self.log_dir}")
        return self._load_entries_from_path(path)

    def _load_entries_from_path(self, path: Path) -> List[LogEntry]:
        """Parse *path* line by line, skipping blanks and unparseable lines."""
        entries: List[LogEntry] = []
        with path.open("r", encoding="utf-8", errors="ignore") as handle:
            for line in handle:
                line = line.rstrip()
                if not line:
                    continue
                parsed = self._parse_log_line(line)
                if parsed:
                    entries.append(parsed)
        return entries

    def _parse_log_line(self, line: str) -> Optional[LogEntry]:
        """Parse a ``timestamp | LEVEL | message`` line into a LogEntry.

        Returns None for lines that do not split into exactly three pipe-
        separated fields (e.g. traceback continuation lines). An empty level
        field defaults to INFO.
        """
        parts = [part.strip() for part in line.split("|", maxsplit=2)]
        if len(parts) == 3:
            timestamp_str, level_str, message = parts
            timestamp_iso = self._normalise_timestamp(timestamp_str)
            level = level_str.upper() if level_str else LogLevel.INFO.value
            return LogEntry(
                timestamp=timestamp_iso,
                level=level,
                message=message,
                module=None,
                line_number=None,
                thread_id=None,
            )

        return None

    def _normalise_timestamp(self, timestamp_str: str) -> str:
        """Convert a timestamp string to seconds-precision ISO format.

        Tries ISO format first, then ``YYYY-MM-DD HH:MM:SS``; falls back to
        the current time when both fail, so callers always get a valid value.
        """
        try:
            parsed_dt = datetime.fromisoformat(timestamp_str)
        except ValueError:
            try:
                parsed_dt = datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S")
            except ValueError:
                parsed_dt = datetime.now()
        return parsed_dt.isoformat(timespec="seconds")

    def _entry_timestamp(self, entry: LogEntry) -> datetime:
        """Sort key: the entry's timestamp as a datetime (now() on parse failure)."""
        try:
            return datetime.fromisoformat(entry.timestamp)
        except ValueError:  # pragma: no cover - defensive
            return datetime.now()

    def _probe_primary_level(self, path: Path) -> Optional[str]:
        """Best effort: level of the first parseable line in *path*, else None."""
        try:
            with path.open("r", encoding="utf-8", errors="ignore") as handle:
                for raw in handle:
                    parsed = self._parse_log_line(raw.rstrip())
                    if parsed:
                        return parsed.level
        except OSError:  # pragma: no cover - best effort
            return None
        return None


# Process-wide singleton slot; ``log_service`` below eagerly instantiates it
# at import time so importers can use the attribute directly.
_log_service: Optional[LogService] = None


def get_log_service() -> LogService:
    """Return the shared :class:`LogService`, creating it on first call."""
    global _log_service
    service = _log_service
    if service is None:
        service = LogService()
        _log_service = service
    return service


log_service = get_log_service()
