#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
日志查看器
提供实时日志查看、过滤、搜索等功能
"""

import os
import re
import time
import threading
import logging
from datetime import datetime
from typing import List, Dict, Any, Optional, Callable
from collections import deque
import queue


class LogLevel:
    """Log severity names understood by the viewer's level filter."""

    DEBUG, INFO, WARNING, ERROR, CRITICAL = (
        "DEBUG",
        "INFO",
        "WARNING",
        "ERROR",
        "CRITICAL",
    )

    # Ordered from least to most severe.
    ALL_LEVELS = [DEBUG, INFO, WARNING, ERROR, CRITICAL]


class LogEntry:
    """A single parsed log record.

    Holds the textual fields extracted from a log line plus a best-effort
    ``datetime`` parsed from the timestamp string (``None`` if unparseable).
    """

    def __init__(self, timestamp: str, level: str, logger: str, message: str, raw_line: str = ""):
        self.timestamp = timestamp
        self.level = level
        self.logger = logger
        self.message = message
        # Original unmodified line (used for export); may be empty.
        self.raw_line = raw_line
        self.datetime = self._parse_timestamp(timestamp)

    def _parse_timestamp(self, timestamp_str: str) -> Optional[datetime]:
        """Parse *timestamp_str* against common formats.

        Returns:
            A ``datetime``, or ``None`` when no known format matches.
        """
        # Common timestamp layouts, tried in order.
        formats = [
            "%Y-%m-%d %H:%M:%S,%f",  # 2025-09-28 10:30:15,123
            "%Y-%m-%d %H:%M:%S.%f",  # 2025-09-28 10:30:15.123
            "%Y-%m-%d %H:%M:%S",     # 2025-09-28 10:30:15
        ]

        for fmt in formats:
            try:
                return datetime.strptime(timestamp_str, fmt)
            # TypeError covers non-string input; previously a bare `except:`
            # around the whole loop swallowed everything, including
            # KeyboardInterrupt -- catch only what strptime can raise.
            except (ValueError, TypeError):
                continue

        return None

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-friendly dict representation of this entry."""
        return {
            "timestamp": self.timestamp,
            "level": self.level,
            "logger": self.logger,
            "message": self.message,
            "datetime": self.datetime.isoformat() if self.datetime else None
        }

    def __str__(self) -> str:
        return f"[{self.timestamp}] {self.level} - {self.logger} - {self.message}"


class LogParser:
    """Parses raw text lines into ``LogEntry`` objects.

    Uses named capture groups so that each supported layout maps its fields
    correctly. The previous implementation unpacked positional groups in a
    fixed (timestamp, logger, level, message) order, which swapped level and
    logger for the ``[ts] LEVEL - logger - message`` format.
    """

    def __init__(self):
        # Supported log layouts. NOTE(review): `[^-]+` means logger names
        # containing '-' will not match these patterns -- behavior kept as-is.
        self.log_patterns = [
            # Format: 2025-09-28 10:30:15,123 - logger_name - INFO - message
            re.compile(r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+) - (?P<logger>[^-]+) - (?P<level>\w+) - (?P<message>.+)'),
            # Format: 2025-09-28 10:30:15.123 - logger_name - INFO - message
            re.compile(r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+) - (?P<logger>[^-]+) - (?P<level>\w+) - (?P<message>.+)'),
            # Format: 2025-09-28 10:30:15 - logger_name - INFO - message
            re.compile(r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) - (?P<logger>[^-]+) - (?P<level>\w+) - (?P<message>.+)'),
            # Format: [2025-09-28 10:30:15] INFO - logger_name - message
            re.compile(r'\[(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\] (?P<level>\w+) - (?P<logger>[^-]+) - (?P<message>.+)'),
        ]

    def parse_line(self, line: str) -> Optional[LogEntry]:
        """Parse one raw line.

        Returns:
            ``None`` for blank lines; a fully parsed ``LogEntry`` when a
            pattern matches; otherwise a fallback entry with level
            ``UNKNOWN`` and the current wall-clock time.
        """
        line = line.strip()
        if not line:
            return None

        for pattern in self.log_patterns:
            match = pattern.match(line)
            if match:
                fields = match.groupdict()
                return LogEntry(
                    timestamp=fields["timestamp"],
                    level=fields["level"],
                    logger=fields["logger"].strip(),
                    message=fields["message"],
                    raw_line=line,
                )

        # Unparseable line: keep it, tagged UNKNOWN with the current time.
        return LogEntry(
            timestamp=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            level="UNKNOWN",
            logger="unknown",
            message=line,
            raw_line=line
        )


class LogViewer:
    """In-memory log aggregator with filtering, search, export, and live tailing.

    Entries are kept in a bounded deque (oldest dropped when full). Live
    monitoring runs two daemon threads: one polls registered files for
    appended bytes and pushes raw lines onto a queue; the other drains the
    queue in batches and parses the lines into ``LogEntry`` objects.
    """

    def __init__(self, max_entries: int = 10000):
        """
        Args:
            max_entries: capacity of the in-memory entry buffer.
        """
        self.max_entries = max_entries
        self.parser = LogParser()

        # Bounded entry storage; deque drops the oldest entry when full.
        self.log_entries = deque(maxlen=max_entries)

        # Filter state -- defaults show everything.
        self.level_filter = set(LogLevel.ALL_LEVELS)
        self.logger_filter = set()  # empty set means "all loggers"
        self.text_filter = ""  # substring filter on the message
        self.time_filter_start: Optional[datetime] = None
        self.time_filter_end: Optional[datetime] = None

        # Live-monitoring state.
        self.monitoring = False
        self.monitor_thread: Optional[threading.Thread] = None
        self.log_files: List[str] = []

        # Invoked with every newly added entry (e.g. to refresh a UI).
        self.new_log_callback: Optional[Callable[[LogEntry], None]] = None

        # Thread-safe hand-off from the file-poller thread to the parser thread.
        self.log_queue = queue.Queue()
        self.process_thread: Optional[threading.Thread] = None

        self.logger = logging.getLogger(__name__)

    def set_new_log_callback(self, callback: Callable[[LogEntry], None]):
        """Register a callback invoked for every newly added entry."""
        self.new_log_callback = callback

    def add_log_file(self, file_path: str):
        """Register an existing file for live monitoring (duplicates ignored)."""
        if os.path.exists(file_path) and file_path not in self.log_files:
            self.log_files.append(file_path)
            self.logger.info(f"添加日志文件: {file_path}")

    def remove_log_file(self, file_path: str):
        """Unregister a monitored file (no-op if not registered)."""
        if file_path in self.log_files:
            self.log_files.remove(file_path)
            self.logger.info(f"移除日志文件: {file_path}")

    def load_log_file(self, file_path: str, tail_lines: int = 1000) -> int:
        """Load existing content of a log file into the buffer.

        Args:
            file_path: path to the log file.
            tail_lines: only the last N lines are loaded; <= 0 loads all.

        Returns:
            Number of entries actually loaded (0 on error or missing file).
        """
        if not os.path.exists(file_path):
            self.logger.error(f"日志文件不存在: {file_path}")
            return 0

        loaded_count = 0
        try:
            with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                lines = f.readlines()

                # Keep only the tail of the file.
                if tail_lines > 0:
                    lines = lines[-tail_lines:]

                for line in lines:
                    entry = self.parser.parse_line(line)
                    if entry:
                        self.log_entries.append(entry)
                        loaded_count += 1

            self.logger.info(f"加载日志文件成功: {file_path}, 加载 {loaded_count} 条记录")

        except Exception as e:
            self.logger.error(f"加载日志文件失败: {file_path}, {e}")

        return loaded_count

    def add_log_entry(self, entry: LogEntry):
        """Append a parsed entry and notify the new-log callback, if any."""
        if entry:
            self.log_entries.append(entry)
            if self.new_log_callback:
                self.new_log_callback(entry)

    def add_log_line(self, line: str):
        """Parse a raw line and append it (blank lines are dropped)."""
        entry = self.parser.parse_line(line)
        if entry:
            self.add_log_entry(entry)

    def get_filtered_logs(self, limit: Optional[int] = None) -> List[LogEntry]:
        """Return entries matching the current filters.

        Args:
            limit: if truthy, only the most recent ``limit`` matches are kept.

        Returns:
            Matching entries, oldest first.
        """
        filtered_logs = []

        for entry in self.log_entries:
            # Level filter.
            if entry.level not in self.level_filter:
                continue

            # Logger filter (empty set disables it).
            if self.logger_filter and entry.logger not in self.logger_filter:
                continue

            # Case-insensitive substring filter on the message.
            if self.text_filter:
                if self.text_filter.lower() not in entry.message.lower():
                    continue

            # Time window; entries without a parsed datetime always pass.
            if self.time_filter_start and entry.datetime:
                if entry.datetime < self.time_filter_start:
                    continue

            if self.time_filter_end and entry.datetime:
                if entry.datetime > self.time_filter_end:
                    continue

            filtered_logs.append(entry)

        # Keep only the newest `limit` matches.
        if limit:
            filtered_logs = filtered_logs[-limit:]

        return filtered_logs

    def set_level_filter(self, levels: List[str]):
        """Show only entries whose level is in *levels*."""
        self.level_filter = set(levels)

    def set_logger_filter(self, loggers: List[str]):
        """Show only entries from *loggers* (empty list shows all)."""
        self.logger_filter = set(loggers)

    def set_text_filter(self, text: str):
        """Show only entries whose message contains *text* (case-insensitive)."""
        self.text_filter = text

    def set_time_filter(self, start_time: Optional[datetime], end_time: Optional[datetime]):
        """Restrict to entries within [start_time, end_time]; None disables a bound."""
        self.time_filter_start = start_time
        self.time_filter_end = end_time

    def clear_filters(self):
        """Reset every filter to its permissive default."""
        self.level_filter = set(LogLevel.ALL_LEVELS)
        self.logger_filter = set()
        self.text_filter = ""
        self.time_filter_start = None
        self.time_filter_end = None

    def clear_logs(self):
        """Drop all buffered entries."""
        self.log_entries.clear()

    def start_monitoring(self):
        """Start the file-poller and queue-consumer daemon threads (idempotent)."""
        if not self.monitoring:
            self.monitoring = True

            # Consumer: drains the raw-line queue and parses entries.
            self.process_thread = threading.Thread(target=self._process_log_queue, daemon=True)
            self.process_thread.start()

            # Poller: watches registered files for appended content.
            self.monitor_thread = threading.Thread(target=self._monitor_log_files, daemon=True)
            self.monitor_thread.start()

            self.logger.info("开始日志监控")

    def stop_monitoring(self):
        """Signal both worker threads to stop and wait briefly for them."""
        if self.monitoring:
            self.monitoring = False

            if self.monitor_thread:
                self.monitor_thread.join(timeout=5)

            if self.process_thread:
                self.process_thread.join(timeout=5)

            self.logger.info("停止日志监控")

    def _monitor_log_files(self):
        """Worker: poll registered files once a second for appended bytes."""
        file_positions = {}

        # Start from the current end of each file -- only new lines are tailed.
        for file_path in self.log_files:
            if os.path.exists(file_path):
                file_positions[file_path] = os.path.getsize(file_path)

        while self.monitoring:
            try:
                for file_path in self.log_files[:]:  # copy: list may change concurrently
                    if not os.path.exists(file_path):
                        continue

                    current_size = os.path.getsize(file_path)
                    last_position = file_positions.get(file_path, 0)

                    if current_size > last_position:
                        # File grew: read only the appended portion.
                        with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                            f.seek(last_position)
                            new_lines = f.readlines()

                            for line in new_lines:
                                self.log_queue.put(line.strip())

                        file_positions[file_path] = current_size

                    elif current_size < last_position:
                        # File was truncated or rotated: restart from the top.
                        file_positions[file_path] = 0

                time.sleep(1)  # poll interval

            except Exception as e:
                self.logger.error(f"监控日志文件异常: {e}")
                time.sleep(5)  # back off after an error

    def _process_log_queue(self):
        """Worker: drain queued raw lines in batches and parse them."""
        while self.monitoring:
            try:
                lines = []
                timeout = 0.1

                # Block briefly for the first line so the loop stays responsive
                # to `self.monitoring` being cleared.
                try:
                    line = self.log_queue.get(timeout=timeout)
                    lines.append(line)
                except queue.Empty:
                    continue

                # Greedily grab more lines without blocking, up to a batch cap.
                while len(lines) < 100:
                    try:
                        line = self.log_queue.get_nowait()
                        lines.append(line)
                    except queue.Empty:
                        break

                for line in lines:
                    if line:
                        self.add_log_line(line)

            except Exception as e:
                self.logger.error(f"处理日志队列异常: {e}")
                time.sleep(1)

    def search_logs(self, pattern: str, use_regex: bool = False) -> List[LogEntry]:
        """Search message and logger fields of all buffered entries.

        Args:
            pattern: substring, or a regular expression if *use_regex*.
            use_regex: treat *pattern* as a case-insensitive regex.

        Returns:
            Matching entries (empty on error, e.g. an invalid regex).
        """
        results = []

        try:
            if use_regex:
                regex = re.compile(pattern, re.IGNORECASE)
                for entry in self.log_entries:
                    if regex.search(entry.message) or regex.search(entry.logger):
                        results.append(entry)
            else:
                pattern_lower = pattern.lower()
                for entry in self.log_entries:
                    if (pattern_lower in entry.message.lower() or
                        pattern_lower in entry.logger.lower()):
                        results.append(entry)

        except Exception as e:
            self.logger.error(f"搜索日志失败: {e}")

        return results

    def export_logs(self, file_path: str, filtered: bool = True) -> bool:
        """Write entries (raw lines) to *file_path*.

        Args:
            file_path: destination path (overwritten).
            filtered: export only entries matching the current filters.

        Returns:
            True on success, False on any I/O error.
        """
        try:
            logs = self.get_filtered_logs() if filtered else list(self.log_entries)

            with open(file_path, 'w', encoding='utf-8') as f:
                for entry in logs:
                    f.write(entry.raw_line + '\n')

            self.logger.info(f"导出日志成功: {file_path}, 共 {len(logs)} 条")
            return True

        except Exception as e:
            self.logger.error(f"导出日志失败: {e}")
            return False

    def get_statistics(self) -> Dict[str, Any]:
        """Summarize the buffer: totals, per-level/per-logger counts, time range."""
        stats = {
            "total_entries": len(self.log_entries),
            "level_counts": {},
            "logger_counts": {},
            "time_range": {"start": None, "end": None}
        }

        # Pre-seed the known levels so they always appear in the output.
        for level in LogLevel.ALL_LEVELS:
            stats["level_counts"][level] = 0

        for entry in self.log_entries:
            # Per-level counts (unknown levels are added on the fly).
            stats["level_counts"][entry.level] = stats["level_counts"].get(entry.level, 0) + 1

            # Per-logger counts.
            stats["logger_counts"][entry.logger] = stats["logger_counts"].get(entry.logger, 0) + 1

            # Track earliest/latest parsed timestamps.
            if entry.datetime:
                if stats["time_range"]["start"] is None or entry.datetime < stats["time_range"]["start"]:
                    stats["time_range"]["start"] = entry.datetime
                # BUG FIX: original assigned end to itself, so it stayed None.
                if stats["time_range"]["end"] is None or entry.datetime > stats["time_range"]["end"]:
                    stats["time_range"]["end"] = entry.datetime

        return stats


if __name__ == "__main__":
    # Smoke-test the viewer with a few hand-written log lines.
    logging.basicConfig(level=logging.INFO)

    viewer = LogViewer()

    sample_lines = (
        "2025-09-28 10:30:15,123 - test_logger - INFO - 测试日志消息1",
        "2025-09-28 10:30:16,124 - test_logger - WARNING - 测试警告消息",
        "2025-09-28 10:30:17,125 - another_logger - ERROR - 测试错误消息",
        "2025-09-28 10:30:18,126 - test_logger - DEBUG - 调试消息",
    )

    for raw in sample_lines:
        viewer.add_log_line(raw)

    print("所有日志:")
    for item in viewer.get_filtered_logs():
        print(item)

    print("\n只显示ERROR级别:")
    viewer.set_level_filter([LogLevel.ERROR])
    for item in viewer.get_filtered_logs():
        print(item)

    print("\n统计信息:")
    print(viewer.get_statistics())