#!/usr/bin/env python3
import argparse
import configparser
import logging
import logging.handlers
import signal
import sys
import time
import json
import subprocess
import threading
from pathlib import Path
from typing import Optional, Dict, Any


class PacketCaptureService:
    """Capture MySQL traffic with tshark and stream it to Kafka via kcat.

    The service builds a ``tshark | kcat`` OS-level pipeline, monitors both
    child processes, and restarts the pipeline on abnormal exit up to a
    configurable number of retries.
    """

    def __init__(self, config_path: str = "capture.conf"):
        """Load configuration and prepare logging and runtime state.

        Args:
            config_path: Path to an INI-style config file with a
                ``[capture]`` section; defaults are used if it is missing.
        """
        # Basic (console-only) logging must exist before the config is
        # loaded, because load_config() itself logs.
        self._setup_basic_logging()

        self.config = self.load_config(config_path)

        # Re-configure logging with the level/file read from the config.
        self.setup_logging()

        # Handle of the pipeline's terminal process (kcat) once started.
        self.process: Optional[subprocess.Popen] = None
        self.running = False
        # NOTE(review): packets_captured / bytes_sent are never updated by
        # the visible code; only start_time / last_restart are used.
        self.stats = {
            "packets_captured": 0,
            "bytes_sent": 0,
            "start_time": None,
            "last_restart": None,
        }

    def _setup_basic_logging(self) -> None:
        """Attach a temporary console handler used until setup_logging()."""
        self.logger = logging.getLogger("PacketCapture")
        self.logger.setLevel(logging.INFO)

        # Temporary console handler; replaced by setup_logging() later.
        console_handler = logging.StreamHandler()
        formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
        console_handler.setFormatter(formatter)

        self.logger.addHandler(console_handler)

    def load_config(self, config_path: str) -> Dict[str, Any]:
        """Read the ``[capture]`` section of *config_path* into a dict.

        Missing file, missing keys, or bad numeric values all fall back to
        built-in defaults; numeric options are converted to int.

        Returns:
            Mapping of option name to value (strings, except max_retries /
            retry_delay / buffer_size which are ints).
        """
        config = configparser.ConfigParser()

        # Built-in defaults (all strings; numeric ones converted below).
        defaults = {
            "interface": "any",
            "kafka_broker": "localhost:9092",
            "kafka_topic": "mysql-packets",
            "capture_filter": "port 3306",  # BPF capture filter
            "display_filter": "mysql",  # Wireshark display filter
            "log_level": "INFO",
            "log_file": "capture.log",
            "max_retries": "3",
            "retry_delay": "5",
            "buffer_size": "10000",
            "tshark_path": "tshark",
            "kcat_path": "kcat",
        }

        if Path(config_path).exists():
            config.read(config_path)
            self.logger.info(f"加载配置文件: {config_path}")
        else:
            self.logger.warning(f"配置文件 {config_path} 不存在，使用默认配置")

        # Read each option from the [capture] section, keeping the default
        # on any failure so a partially-broken config never aborts startup.
        cfg = {}
        for key, default in defaults.items():
            try:
                cfg[key] = config.get("capture", key, fallback=default)
            except Exception as e:
                self.logger.warning(
                    f"读取配置 {key} 时出错: {e}，使用默认值: {default}"
                )
                cfg[key] = default

        # Convert numeric options; on any bad value reset all three to the
        # built-in defaults (matches the original all-or-nothing behavior).
        try:
            cfg["max_retries"] = int(cfg["max_retries"])
            cfg["retry_delay"] = int(cfg["retry_delay"])
            cfg["buffer_size"] = int(cfg["buffer_size"])
        except ValueError as e:
            self.logger.warning(f"配置类型转换错误: {e}，使用默认值")
            cfg["max_retries"] = 3
            cfg["retry_delay"] = 5
            cfg["buffer_size"] = 10000

        return cfg

    def setup_logging(self) -> None:
        """Replace the bootstrap handler with console + rotating-file handlers."""
        # Remove the bootstrap handler(s) installed by _setup_basic_logging.
        for handler in self.logger.handlers[:]:
            self.logger.removeHandler(handler)

        # Unknown level names fall back to INFO.
        log_level = getattr(logging, self.config["log_level"].upper(), logging.INFO)
        self.logger.setLevel(log_level)

        # Console handler first, so that a failure to create the file
        # handler below is actually visible somewhere (previously the
        # warning was emitted while the logger had no handlers at all).
        console_handler = logging.StreamHandler()
        console_formatter = logging.Formatter(
            "%(asctime)s - %(levelname)s - %(message)s"
        )
        console_handler.setFormatter(console_formatter)
        self.logger.addHandler(console_handler)

        # Rotating file handler: 10MB per file, 5 backups.
        try:
            file_handler = logging.handlers.RotatingFileHandler(
                self.config["log_file"],
                maxBytes=10 * 1024 * 1024,  # 10MB
                backupCount=5,
            )
            file_formatter = logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            )
            file_handler.setFormatter(file_formatter)
            self.logger.addHandler(file_handler)
        except Exception as e:
            self.logger.warning(f"无法创建文件日志: {e}")

    def validate_dependencies(self) -> bool:
        """Check that tshark and kcat are runnable.

        Returns:
            True if both tools respond successfully, False otherwise.
        """
        tools = [
            (self.config["tshark_path"], "tshark"),
            (self.config["kcat_path"], "kcat"),
        ]

        for tool_path, tool_name in tools:
            try:
                # tshark supports --version; kcat only has -h for a probe.
                result = subprocess.run(
                    (
                        [tool_path, "--version"]
                        if tool_name == "tshark"
                        else [tool_path, "-h"]
                    ),
                    capture_output=True,
                    text=True,
                    timeout=5,
                )
                if result.returncode == 0:
                    self.logger.info(f"{tool_name} 验证成功")
                else:
                    self.logger.error(f"{tool_name} 返回错误: {result.stderr}")
                    return False
            except subprocess.TimeoutExpired:
                self.logger.error(f"{tool_name} 验证超时")
                return False
            except FileNotFoundError:
                self.logger.error(f"{tool_name} 未找到，路径: {tool_path}")
                return False
            except Exception as e:
                self.logger.error(f"验证 {tool_name} 时发生错误: {e}")
                return False

        return True

    def check_kafka_connection(self) -> bool:
        """Probe the Kafka broker with a kcat metadata request.

        Returns:
            True if kcat exits 0 (broker reachable), False otherwise.
        """
        try:
            self.logger.info(f"检查 Kafka 连接: {self.config['kafka_broker']}")

            # -L lists metadata (requires a live broker connection).
            # NOTE(review): with auto.create.topics.enable the probe topic
            # may be created on the broker as a side effect — confirm.
            test_cmd = [
                self.config["kcat_path"],
                "-b",
                self.config["kafka_broker"],
                "-L",
                "-t",
                "test-connection-topic",  # arbitrary probe topic
                "-X",
                "socket.timeout.ms=5000",  # bound the broker wait
                "-X",
                "session.timeout.ms=5000",
            ]

            result = subprocess.run(
                test_cmd,
                capture_output=True,
                text=True,
                timeout=10,
            )

            if result.returncode == 0:
                self.logger.info("Kafka 连接成功")
                return True
            else:
                self.logger.error(f"Kafka 连接失败: {result.stderr}")
                return False

        except subprocess.TimeoutExpired:
            self.logger.error("Kafka 连接超时")
            return False
        except Exception as e:
            self.logger.error(f"检查 Kafka 连接时发生错误: {e}")
            return False

    def build_tshark_command(self) -> list:
        """Assemble the tshark argv from the current configuration.

        Filters set to an empty string or "none" (case-insensitive) are
        omitted. Output format is Elastic-compatible JSON (``-T ek``).
        """
        tshark_cmd = [
            self.config["tshark_path"],
            "-i",
            self.config["interface"],
            "-l",  # flush stdout after each packet (real-time pipe)
        ]

        # Capture (BPF) filter — skipped when empty or "none".
        capture_filter = self.config.get("capture_filter", "").strip().lower()
        if capture_filter and capture_filter != "none":
            tshark_cmd.extend(["-f", self.config["capture_filter"]])

        # Display (Wireshark) filter — skipped when empty or "none".
        display_filter = self.config.get("display_filter", "").strip().lower()
        if display_filter and display_filter != "none":
            tshark_cmd.extend(["-Y", self.config["display_filter"]])

        # Newline-delimited Elasticsearch-style JSON output.
        tshark_cmd.extend(["-T", "ek"])

        return tshark_cmd

    def start_capture(self) -> bool:
        """Run the tshark→kcat pipeline, restarting on failure.

        Returns:
            True on clean shutdown, False when dependencies/Kafka checks
            fail or the retry budget is exhausted.
        """
        # Fail fast if the external tools are missing.
        if not self.validate_dependencies():
            self.logger.error("依赖检查失败，无法启动服务")
            return False

        # Fail fast if the broker is unreachable.
        if not self.check_kafka_connection():
            self.logger.error("Kafka 连接失败，服务无法启动")
            return False

        self.running = True
        self.stats["start_time"] = time.time()
        self.stats["last_restart"] = time.time()

        # Graceful shutdown on Ctrl-C / service stop.
        signal.signal(signal.SIGINT, self.signal_handler)
        signal.signal(signal.SIGTERM, self.signal_handler)

        retries = 0

        while self.running and retries < self.config["max_retries"]:
            try:
                self.logger.info(
                    f"启动抓包进程 (尝试 {retries + 1}/{self.config['max_retries']})"
                )

                tshark_cmd = self.build_tshark_command()

                kcat_cmd = [
                    self.config["kcat_path"],
                    "-b",
                    self.config["kafka_broker"],
                    "-t",
                    self.config["kafka_topic"],
                    "-P",  # producer mode: stdin lines become messages
                ]

                self.logger.info(f"tshark命令: {' '.join(tshark_cmd)}")
                self.logger.info(f"kcat命令: {' '.join(kcat_cmd)}")

                # tshark's stdout feeds kcat's stdin (shell-pipe equivalent).
                tshark_process = subprocess.Popen(
                    tshark_cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    text=True,
                    bufsize=1,  # line-buffered
                )

                kcat_process = subprocess.Popen(
                    kcat_cmd,
                    stdin=tshark_process.stdout,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    text=True,
                    bufsize=1,
                )

                # Close the parent's copy of the pipe's write end so tshark
                # receives SIGPIPE if kcat exits (per the subprocess docs'
                # "replacing shell pipeline" recipe). Without this, tshark
                # keeps writing into a pipe nobody drains.
                tshark_process.stdout.close()

                # kcat is the pipeline's terminal process; track it so
                # stop() can terminate the pipeline.
                self.process = kcat_process

                # Block here until either child exits or stop() is called.
                self.monitor_processes(tshark_process, kcat_process)

                # Child exited while we were still supposed to be running:
                # count it as a failure and retry after a delay.
                if self.running:
                    retries += 1
                    if retries < self.config["max_retries"]:
                        self.logger.warning(
                            f"抓包进程异常退出，{self.config['retry_delay']}秒后重试"
                        )
                        time.sleep(self.config["retry_delay"])
                else:
                    break

            except Exception as e:
                self.logger.error(f"启动抓包进程时发生错误: {e}")
                retries += 1
                if self.running and retries < self.config["max_retries"]:
                    time.sleep(self.config["retry_delay"])

        if retries >= self.config["max_retries"]:
            self.logger.error("达到最大重试次数，服务停止")
            return False

        return True

    def monitor_processes(
        self, tshark_process: subprocess.Popen, kcat_process: subprocess.Popen
    ) -> None:
        """Watch both children; return when either dies or stop() is called.

        On return, any still-running child is terminated (killed after a
        5-second grace period).
        """

        def read_stderr(process, name):
            # Drain a child's stderr so the pipe never fills and blocks it;
            # forward non-empty lines to our log.
            try:
                for line in process.stderr:
                    if line.strip():
                        self.logger.warning(f"{name}: {line.strip()}")
            except Exception as e:
                self.logger.debug(f"读取 {name} stderr 时出错: {e}")

        # Daemon threads: they must not keep the interpreter alive.
        tshark_stderr_thread = threading.Thread(
            target=read_stderr, args=(tshark_process, "tshark"), daemon=True
        )
        kcat_stderr_thread = threading.Thread(
            target=read_stderr, args=(kcat_process, "kcat"), daemon=True
        )

        tshark_stderr_thread.start()
        kcat_stderr_thread.start()

        # Poll both children once a second until one exits or we are stopped.
        while self.running:
            if tshark_process.poll() is not None:
                self.logger.error("tshark进程已退出")
                break
            if kcat_process.poll() is not None:
                self.logger.error("kcat进程已退出")
                break
            time.sleep(1)

        # Tear down whichever children are still alive.
        for process, name in [(tshark_process, "tshark"), (kcat_process, "kcat")]:
            if process.poll() is None:
                self.logger.info(f"终止{name}进程")
                process.terminate()

                # Escalate to SIGKILL if the child ignores SIGTERM.
                try:
                    process.wait(timeout=5)
                except subprocess.TimeoutExpired:
                    self.logger.warning(f"{name}进程终止超时，强制杀死")
                    process.kill()

    def signal_handler(self, signum, frame) -> None:
        """SIGINT/SIGTERM handler: log the signal and stop the service."""
        signame = signal.Signals(signum).name
        self.logger.info(f"接收到信号 {signame}({signum})，准备停止服务")
        self.stop()

    def stop(self) -> None:
        """Stop the service: flag the loops to exit and terminate kcat.

        tshark is reaped by monitor_processes() once its loop observes
        ``running == False``.
        """
        self.running = False
        if self.process and self.process.poll() is None:
            self.logger.info("终止抓包进程")
            self.process.terminate()
        self.log_stats()

    def log_stats(self) -> None:
        """Log total uptime (if the service ever started) and shutdown."""
        if self.stats["start_time"]:
            uptime = time.time() - self.stats["start_time"]
            self.logger.info(f"服务运行时间: {uptime:.2f} 秒")
        self.logger.info("服务已停止")


#######################################
# 主函数
#######################################
def main():
    """CLI entry point: parse arguments, apply overrides, run the service."""
    parser = argparse.ArgumentParser(description="审计数据包捕获服务")
    parser.add_argument(
        "-c",
        "--config",
        default="capture.conf",
        help="配置文件路径 (默认: capture.conf)",
    )
    parser.add_argument("-i", "--interface", help="网络接口")
    parser.add_argument("-b", "--broker", help="Kafka Broker")
    parser.add_argument("-t", "--topic", help="Kafka Topic")
    parser.add_argument("--capture-filter", help="捕获过滤器 (BPF语法)")
    parser.add_argument("--display-filter", help="显示过滤器 (Wireshark语法)")
    parser.add_argument("-v", "--verbose", action="store_true", help="详细输出")

    args = parser.parse_args()

    service = PacketCaptureService(args.config)

    # Command-line arguments take precedence over the config file.
    overrides = {
        "interface": args.interface,
        "kafka_broker": args.broker,
        "kafka_topic": args.topic,
        "capture_filter": args.capture_filter,
        "display_filter": args.display_filter,
    }
    for key, value in overrides.items():
        if value:
            service.config[key] = value
    if args.verbose:
        service.config["log_level"] = "DEBUG"
        service.logger.setLevel(logging.DEBUG)

    service.logger.info("启动审计数据包捕获服务")
    service.logger.info(
        f"配置: 接口={service.config['interface']}, "
        f"Kafka={service.config['kafka_broker']}, "
        f"Topic={service.config['kafka_topic']}"
    )
    if service.config.get("capture_filter"):
        service.logger.info(f"捕获过滤器: {service.config['capture_filter']}")
    if service.config.get("display_filter"):
        service.logger.info(f"显示过滤器: {service.config['display_filter']}")

    try:
        succeeded = service.start_capture()
    except KeyboardInterrupt:
        service.logger.info("用户中断服务")
        service.stop()
        return
    except Exception as e:
        service.logger.error(f"服务运行错误: {e}")
        sys.exit(1)

    if succeeded:
        service.logger.info("服务正常退出")
    else:
        service.logger.error("服务异常退出")
        sys.exit(1)


# Run the service only when executed as a script, not on import.
if __name__ == "__main__":
    main()
