import logging
from typing import Optional
import sys
from pathlib import Path
import threading
import time
import atexit
from datetime import datetime
import io
import zipfile

# Singleton console logger, created lazily by init_console_logger().
_logger: Optional[logging.Logger] = None


# 线程安全的按大小轮转文件写入器（文件名按秒级时间戳）
class RotatingTimestampFileWriter:
    def __init__(self, logs_dir: Path, max_bytes: int = 5 * 1024 * 1024, buffering: int = 64 * 1024):
        self.logs_dir = Path(logs_dir)
        self.max_bytes = int(max_bytes)
        self.buffering = int(buffering)
        self._lock = threading.RLock()
        self._file = None
        self._file_path = None
        self._bytes_written = 0
        self._encoding = "utf-8"
        self.logs_dir.mkdir(parents=True, exist_ok=True)
        self._open_new_file()

    def _timestamp_name(self) -> str:
        return datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".log"

    def _open_new_file(self) -> None:
        with self._lock:
            try:
                if self._file:
                    try:
                        self._file.flush()
                        self._file.close()
                    except Exception:
                        pass
                self._file_path = self.logs_dir / self._timestamp_name()
                # 二进制追加写入，带缓冲
                self._file = open(self._file_path, "ab", buffering=self.buffering)
                self._bytes_written = 0
            except Exception:
                # 如无法创建文件，降级为仅写控制台（保持程序可运行）
                self._file = None
                self._file_path = None
                self._bytes_written = 0

    @property
    def current_path(self) -> Path | None:
        return self._file_path

    def write(self, data: str | bytes) -> None:
        if not data:
            return
        encoded: bytes
        if isinstance(data, bytes):
            encoded = data
        else:
            encoded = data.encode(self._encoding, errors="replace")
        with self._lock:
            try:
                # 轮转检查
                if self._file and (self._bytes_written + len(encoded) > self.max_bytes):
                    self._open_new_file()
                if self._file:
                    self._file.write(encoded)
                    self._bytes_written += len(encoded)
            except Exception:
                # 写入失败不抛出，避免影响主流程
                pass

    def flush(self) -> None:
        with self._lock:
            try:
                if self._file:
                    self._file.flush()
            except Exception:
                pass

    def close(self) -> None:
        with self._lock:
            try:
                if self._file:
                    self._file.flush()
                    self._file.close()
            except Exception:
                pass

# Tee to the original terminal stream and the file writer (original format preserved).
class TeeStream(io.TextIOBase):
    """Text stream that duplicates every write to a terminal stream and a file."""

    def __init__(self, original_stream, file_writer: RotatingTimestampFileWriter):
        self._orig = original_stream
        self._writer = file_writer
        self._lock = threading.RLock()

    def write(self, s):
        # Mirror to both sinks; a failure in one must not disturb the other,
        # so each write is guarded independently.
        with self._lock:
            for sink in (self._orig, self._writer):
                try:
                    sink.write(s)
                except Exception:
                    pass
        return len(s) if isinstance(s, (str, bytes)) else 0

    def flush(self):
        with self._lock:
            for sink in (self._orig, self._writer):
                try:
                    sink.flush()
                except Exception:
                    pass

    def isatty(self):
        # Delegate TTY detection to the wrapped stream; False when unknown.
        try:
            return self._orig.isatty()
        except Exception:
            return False

# Log-capture initialization state (stdout/stderr replacement).
_teestream_installed = False  # True once sys.stdout/sys.stderr have been replaced
_teestream_stdout = None  # TeeStream wrapping the original stdout
_teestream_stderr = None  # TeeStream wrapping the original stderr
_rot_writer: RotatingTimestampFileWriter | None = None  # shared rotating file writer
_flush_thread = None  # daemon thread that flushes the writer periodically
_stop_flush = threading.Event()  # set at shutdown to stop the flush thread


def init_console_logger(level: int = logging.INFO) -> logging.Logger:
    """Initialize a singleton console logger with a standardized format.

    Format: "YYYY-MM-DD HH:MM:SS | LEVEL | <op_type> | <detail>"
    We embed op_type and detail into the log message to keep LogRecord simple.
    """
    global _logger
    if _logger is not None:
        return _logger

    logger = logging.getLogger("voice_pet")
    logger.setLevel(level)
    logger.propagate = False

    # Attach a console handler only if one is not already present, so
    # repeated initialization never duplicates output.
    has_stream_handler = any(
        isinstance(h, logging.StreamHandler) for h in logger.handlers
    )
    if not has_stream_handler:
        console = logging.StreamHandler()
        console.setFormatter(
            logging.Formatter(
                fmt="%(asctime)s | %(levelname)s | %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S",
            )
        )
        logger.addHandler(console)

    _logger = logger
    return logger


def init_terminal_log_capture(max_bytes: int = 5 * 1024 * 1024, flush_interval_sec: float = 1.0, logs_dir: Path | None = None) -> Path | None:
    """Capture terminal stdout/stderr into the logs directory with size rotation.

    - File name: YYYY-MM-DD_HH-MM-SS.log
    - Thread-safe, buffered writes
    - A daemon thread flushes the buffer periodically

    Returns the path of the active log file, or None when capture could not
    be installed (original streams are left untouched in that case).
    """
    global _teestream_installed, _teestream_stdout, _teestream_stderr, _rot_writer, _flush_thread
    if _teestream_installed:
        # Already installed: just report the currently active file.
        return _rot_writer.current_path if _rot_writer else None

    # Remember originals so a partial failure can restore them.
    orig_stdout, orig_stderr = sys.stdout, sys.stderr
    try:
        if logs_dir is None:
            # Default to <project root>/logs, one level above this package.
            logs_dir = Path(__file__).resolve().parents[1] / "logs"
        _rot_writer = RotatingTimestampFileWriter(logs_dir=logs_dir, max_bytes=max_bytes, buffering=64 * 1024)
        _teestream_stdout = TeeStream(sys.stdout, _rot_writer)
        _teestream_stderr = TeeStream(sys.stderr, _rot_writer)
        sys.stdout = _teestream_stdout
        sys.stderr = _teestream_stderr
        _teestream_installed = True

        # Periodic flush thread.  Event.wait (instead of time.sleep) wakes
        # immediately when _stop_flush is set, so shutdown is not delayed by
        # up to a full interval.
        _stop_flush.clear()
        interval = max(0.2, float(flush_interval_sec))

        def _flusher():
            while not _stop_flush.wait(interval):
                try:
                    if _rot_writer:
                        _rot_writer.flush()
                except Exception:
                    pass

        _flush_thread = threading.Thread(target=_flusher, name="log-flush-thread", daemon=True)
        _flush_thread.start()

        # Flush and close the writer when the process exits.
        def _cleanup():
            try:
                _stop_flush.set()
            except Exception:
                pass
            try:
                if _rot_writer:
                    _rot_writer.flush()
                    _rot_writer.close()
            except Exception:
                pass

        atexit.register(_cleanup)
        return _rot_writer.current_path if _rot_writer else None
    except Exception:
        # Initialization failed: restore the original streams so output is
        # never lost to a half-installed tee.
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        return None

# Structured event logging (message format kept as-is).
def log_event(op_type: str, detail: str, level: int = logging.INFO) -> None:
    """Log an operation with type and detail in standardized format.

    Example output:
    2025-10-20 09:30:01 | INFO | ASR_START | model=base, audio=data/audio/user_input.wav
    """
    init_console_logger().log(level, f"{op_type} | {detail}")


def get_logger() -> logging.Logger:
    """Return the shared console logger, creating it on first use."""
    return init_console_logger()

# Resource usage / current log status.
# Fallback stats returned by get_log_stats() when the writer is unavailable
# or querying it fails.
_def_stats = {
    "bytes_written": 0,
    "current_file": None,
}


def get_log_stats() -> dict:
    """Return the active log file path and the bytes written to it so far.

    Falls back to zeroed defaults when capture is not running or the query
    fails.
    """
    try:
        if _rot_writer:
            path = _rot_writer.current_path
            return {
                "current_file": str(path) if path else None,
                # Private counter read defensively in case the attribute is absent.
                "bytes_written": getattr(_rot_writer, "_bytes_written", 0),
            }
        return {"current_file": None, "bytes_written": 0}
    except Exception:
        return dict(_def_stats)

# Extensible: compress historical logs (excluding the currently active file).
def compress_logs(archive_name: str = "archive.zip") -> Path | None:
    """Zip every non-active ``.log`` file in the logs directory.

    Best effort: returns the archive path on success, or None when capture
    is not running or any error occurs.
    """
    try:
        active = _rot_writer.current_path if _rot_writer else None
        if active is None:
            return None
        logs_dir = active.parent
        archive_path = logs_dir / archive_name
        with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
            for log_file in logs_dir.glob("*.log"):
                # Skip the file still being written to.
                if log_file.name != active.name:
                    zf.write(log_file, arcname=log_file.name)
        return archive_path
    except Exception:
        return None