# -*- coding:utf-8 -*-
import datetime
import logging
import logging.handlers
import multiprocessing
import optparse
import os
import socket
import subprocess
import sys
import time

import dpkt
import pcap
import psutil

# Application identity used for the logger name and the --version string.
APP_NAME = "Redis-Sniffer"
APP_VERSION = "1.0"
# Module-level logger shared by every class in this file; handlers are
# attached later by LoggerHelper.init_logger() in main().
logger = logging.getLogger(APP_NAME)


class LoggerHelper(object):
    """Thin convenience wrapper over the stdlib logging module.

    One call (:meth:`init_logger`) wires a named logger to stdout and to a
    rotating log file; :meth:`get_logger` fetches it back by name.
    """

    # Fallback logger name when the caller passes None/blank.
    DEFAULT_LOGGER_NAME = "APP"
    # Fallback log directory: "logs" under this file's parent directory.
    DEFAULT_LOGGER_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs")

    @classmethod
    def init_logger(cls, logger_name=None, logger_dir_path=None, logger_level=logging.INFO):
        """Configure a logger that writes to stdout and a rotating file.

        :param logger_name: logger name; its log file becomes {logger_name}.log
        :param logger_dir_path: log directory, created if missing; defaults to
            the "logs" directory next to this file's parent
        :param logger_level: level applied to both handlers, INFO by default
        :return: None
        """
        if not logger_name or not logger_name.strip():
            logger_name = cls.DEFAULT_LOGGER_NAME
        if not logger_dir_path or not logger_dir_path.strip():
            logger_dir_path = cls.DEFAULT_LOGGER_PATH
        if not os.path.exists(logger_dir_path):
            os.makedirs(logger_dir_path)

        target_logger = logging.getLogger(logger_name)
        # The logger itself stays wide open at DEBUG; each handler filters
        # records down to logger_level.
        target_logger.setLevel(level=logging.DEBUG)
        formatter = logging.Formatter("[%(asctime)s]-[%(levelname)s]: %(message)s")

        console_handler = logging.StreamHandler(stream=sys.stdout)
        console_handler.setFormatter(formatter)
        console_handler.setLevel(logger_level)
        target_logger.addHandler(console_handler)

        log_file_path = os.path.join(logger_dir_path, "{}.log".format(logger_name))
        file_handler = logging.handlers.RotatingFileHandler(
            log_file_path, maxBytes=10 * 1024 * 1024, backupCount=5
        )
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logger_level)
        target_logger.addHandler(file_handler)

    @classmethod
    def get_logger(cls, logger_name=None):
        """Return the logger registered under logger_name (default name if blank)."""
        if not logger_name or not logger_name.strip():
            logger_name = cls.DEFAULT_LOGGER_NAME
        return logging.getLogger(logger_name)


class RedisPacket(object):
    """Helpers for reading dpkt Ethernet packets carrying Redis traffic and
    formatting the captured request/response information into log lines."""

    @classmethod
    def get_dst_client(cls, packet_info):
        """Return the packet's destination endpoint as "ip:port"."""
        # packet_info.data is the IP layer (dst is 4 raw bytes), .data.data the TCP layer.
        return "{}:{}".format("%d.%d.%d.%d" % tuple(packet_info.data.dst), packet_info.data.data.dport)

    @classmethod
    def get_src_client(cls, packet_info):
        """Return the packet's source endpoint as "ip:port"."""
        return "{}:{}".format("%d.%d.%d.%d" % tuple(packet_info.data.src), packet_info.data.data.sport)

    @classmethod
    def get_redis_cmd(cls, packet_info):
        """Extract "COMMAND first_arg" from a RESP request payload, or None.

        A RESP request looks like "*N\\r\\n$len\\r\\nCMD\\r\\n$len\\r\\narg\\r\\n...";
        after splitlines() the command name is at index 2 and its first
        argument at index 4. Returns None for non-RESP payloads, commands
        without an argument, or undecodable data.
        """
        try:
            packet_data = packet_info.data.data.data.decode("utf-8")
            if not packet_data.startswith("*"):
                return None
            # Only the first few lines are needed to identify the command.
            cmd_lines = packet_data.splitlines()[0:10]
            if len(cmd_lines) >= 5:
                return "{} {}".format(cmd_lines[2], cmd_lines[4])
            return None
        except Exception:
            return None

    @classmethod
    def get_datetime_string(cls, datetime_ts):
        """Format an epoch timestamp as a local-time string with microseconds.

        BUGFIX: the format used "%H:%m:%S" — %m is the MONTH; minutes are %M.
        """
        return datetime.datetime.fromtimestamp(datetime_ts).strftime("%Y-%m-%d %H:%M:%S.%f")

    @classmethod
    def get_redis_msg(cls, request_ts, request_client, request_cmd, request_data_size, response_ts, response_data_size):
        """Build one ":::"-separated log line for a matched request/response pair.

        :param request_ts: request capture timestamp (epoch seconds)
        :param request_client: requesting endpoint, "ip:port"
        :param request_cmd: command string from get_redis_cmd()
        :param request_data_size: request payload size in bytes
        :param response_ts: response capture timestamp (epoch seconds)
        :param response_data_size: response size from RedisSniffer.get_response_size()
        :return: formatted log line (no trailing newline)
        """
        # BUGFIX: the field is named *_ms but the raw difference is in
        # seconds; convert to milliseconds to match the name.
        request_used_ms = (response_ts - request_ts) * 1000
        message = """{request_time}:::{request_used_ms}:::{request_client}:::{request_data_size}:::{response_data_size}:::{request_cmd}""".format(
            request_time=cls.get_datetime_string(request_ts),
            request_used_ms=format(request_used_ms, '.4f'),
            request_client=request_client,
            request_data_size=request_data_size,
            response_data_size=response_data_size,
            request_cmd=request_cmd,
        )
        return message


class LinuxHelper(object):
    """OS-level helpers: interface lookup, host IP resolution, file append."""

    @classmethod
    def get_ip_bind_eth_name(cls, bind_ip):
        """Return the name of the network interface bound to bind_ip, or None.

        Builds an address -> interface map from psutil.net_if_addrs(); each
        entry is a snicaddr namedtuple whose .address field holds the address.
        """
        bind_ip_map = dict()
        net_info = psutil.net_if_addrs()
        for eth_name, addr_list in net_info.items():
            for sub_item in addr_list:
                bind_ip_map[sub_item.address] = eth_name
        return bind_ip_map.get(bind_ip)

    @classmethod
    def get_host_name_bind_ip(cls):
        """Best-effort primary IP of this host; falls back to loopback.

        BUGFIX: the bare ``except:`` also swallowed KeyboardInterrupt and
        SystemExit; narrowed to Exception while keeping the best-effort
        fallback behavior.
        """
        try:
            return socket.gethostbyaddr(socket.gethostname())[2][0]
        except Exception:
            return "127.0.0.1"

    @classmethod
    def append_file_data(cls, file_path, file_data):
        """Append file_data (a string or an iterable of strings) to file_path.

        BUGFIX: removed the redundant explicit close() — the with-block
        already closes the file on exit.
        """
        with open(file_path, 'a+') as fh:
            fh.writelines(file_data)


class RedisSniffer(object):
    """Capture Redis request/response packets on one interface and log
    per-request latency and size information.

    A single producer process sniffs packets and fans batches out round-robin
    to several consumer processes; each consumer matches requests to responses
    and appends result lines to its own log file, which are merged at the end.
    """

    def __init__(self, eth_name, redis_host, redis_port, consume_worker_size, batch_package_size=10000, run_seconds=10):
        """
        :param eth_name: network interface to sniff on (e.g. "eth0")
        :param redis_host: IP address the Redis server is bound to
        :param redis_port: Redis server port
        :param consume_worker_size: number of consumer processes (and queues)
        :param batch_package_size: packets collected before handing a batch off
        :param run_seconds: total capture duration in seconds
        """
        self.eth_name = eth_name
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.run_seconds = run_seconds
        self.batch_package_size = batch_package_size
        # BUGFIX: format was "%Y%m%d%H%m%S" — %m is the month; minutes are %M.
        self.work_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        # One bounded queue per consumer; the producer round-robins over them.
        self.consume_queues = [multiprocessing.Queue(10) for _ in range(consume_worker_size)]

    @classmethod
    def get_response_size(cls, response_data: bytes):
        """Best-effort size of a Redis response payload.

        Payloads longer than 20 bytes are reported by raw length; short ones
        are parsed: a leading "$N" bulk-string header yields N (so "$-1", the
        nil reply, yields -1). Undecodable (non-UTF-8) data yields -1.
        """
        if len(response_data) > 20:
            return len(response_data)
        try:
            response_lines = response_data.decode("utf-8").splitlines()
        except Exception:
            logger.warning("返回数据非utf-8格式,丢弃该数据")
            return -1
        if len(response_lines) == 0:
            return 0
        first_line = response_lines[0]
        if first_line.startswith("$"):
            return int(first_line.replace("$", ""))
        return len(response_data)

    @classmethod
    def consume_packages(cls, process_queue: multiprocessing.Queue, redis_client: str, consume_log_path):
        """Consumer loop: match request packets to responses, append one log
        line per completed request to consume_log_path.

        Exits when a batch with packet_list=None (the stop sentinel) arrives.

        :param process_queue: queue the producer feeds batches into
        :param redis_client: the Redis endpoint as "ip:port"
        :param consume_log_path: per-consumer output log file
        """
        while True:
            try:
                # NOTE(review): timeout is in SECONDS, so 10000 ≈ 2.8 hours —
                # possibly meant to be 10; kept as-is to preserve behavior.
                queue_data = process_queue.get(block=True, timeout=10000)
            except Exception:
                time.sleep(0.01)
                continue
            package_items = queue_data["packet_list"]
            if package_items is None:
                logger.info("consumer exit...")
                return
            # src endpoint -> (request_ts, request_size, request_cmd) awaiting a response
            request_match_dict = dict()
            request_messages = list()
            for package_item in package_items:
                packet_ts = package_item["packet_ts"]
                packet_data = dpkt.ethernet.Ethernet(package_item["packet_info"])
                src_client = RedisPacket.get_src_client(packet_data)
                dst_client = RedisPacket.get_dst_client(packet_data)
                if dst_client == redis_client:
                    # Request packet (client -> Redis): remember it until the response shows up.
                    packet_data_size = len(packet_data.data.data.data)
                    if packet_data_size > 0:
                        request_cmd = RedisPacket.get_redis_cmd(packet_data)
                        if request_cmd is not None:
                            request_match_dict[src_client] = (packet_ts, packet_data_size, request_cmd)
                elif src_client == redis_client:
                    # Response packet (Redis -> client): pair it with the pending request, if any.
                    request_info = request_match_dict.pop(dst_client, None)
                    if request_info is None:
                        continue
                    request_messages.append(RedisPacket.get_redis_msg(
                        request_ts=request_info[0],
                        request_client=dst_client,
                        request_data_size=request_info[1],
                        request_cmd=request_info[2],
                        response_ts=packet_ts,
                        response_data_size=cls.get_response_size(packet_data.data.data.data)
                    ))
            # BUGFIX: a bare "\n" used to be appended to request_messages BEFORE
            # the emptiness check, so the reported count was off by one and an
            # empty batch still wrote a blank line to the log file.
            if request_messages:
                logger.info("found {} redis requests".format(len(request_messages)))
                LinuxHelper.append_file_data(consume_log_path, "\n".join(request_messages) + "\n")

    @classmethod
    def product_packages(cls, eth_name: str, eth_filter: str, batch_package_size, consume_queues: list):
        """Producer loop: sniff packets matching eth_filter on eth_name and
        hand them to consumers in batches of batch_package_size, round-robin.

        Runs until the process is terminated by stop_producer().
        """
        sniffer = pcap.pcap(eth_name)
        sniffer.setfilter(eth_filter)
        packet_list = list()
        packet_count = 0
        consume_counter = 0
        consume_queue_size = len(consume_queues)
        if sniffer:
            for packet_ts, packet_info in sniffer:
                packet_list.append({"packet_ts": packet_ts, "packet_info": packet_info})
                packet_count += 1
                if packet_count == batch_package_size:
                    consume_queue = consume_queues[consume_counter % consume_queue_size]
                    # NOTE(review): full()-then-put() is racy, but losing a
                    # batch is acceptable — dropping beats blocking the sniffer.
                    if consume_queue.full():
                        logger.info("consume queue is full, drop this packages")
                    else:
                        consume_queue.put({"packet_list": packet_list})
                    packet_list = list()
                    consume_counter += 1
                    packet_count = 0

    def start_producer(self):
        """Start the sniffing producer as a daemon process and return it."""
        eth_filter = "tcp port {} and host {}".format(self.redis_port, self.redis_host)
        produce_process = multiprocessing.Process(
            target=self.product_packages,
            args=(self.eth_name, eth_filter, self.batch_package_size, self.consume_queues)
        )
        produce_process.daemon = True
        produce_process.start()
        return produce_process

    def get_consumer_log_path(self, consumer_index):
        """Return (and ensure the directory for) consumer N's log file path."""
        log_dir_path = os.path.join(os.path.dirname(__file__), "logs")
        if not os.path.exists(log_dir_path):
            os.makedirs(log_dir_path)
        log_file_name = "consume_{}_{}.log".format(self.work_time, consumer_index)
        return os.path.join(log_dir_path, log_file_name)

    def combine_consumer_log(self):
        """Concatenate all per-consumer log files into logs/consume_all.log.

        BUGFIX: this previously shelled out to ``cat`` via Popen(shell=True)
        and never waited for it, so the combined file could be missing or
        partial when the method returned (and absent files made cat complain).
        Now done synchronously in Python; missing per-consumer files are skipped.
        """
        log_dir_path = os.path.join(os.path.dirname(__file__), "logs")
        total_log_path = os.path.join(log_dir_path, "consume_all.log")
        with open(total_log_path, "wb") as total_fh:
            for consumer_index in range(len(self.consume_queues)):
                consumer_log_path = self.get_consumer_log_path(consumer_index)
                if not os.path.exists(consumer_log_path):
                    continue
                with open(consumer_log_path, "rb") as part_fh:
                    total_fh.write(part_fh.read())
        logger.info("捕获数据存放在文件：{}中".format(total_log_path))

    def start_consumers(self):
        """Start one daemon consumer process per queue; return the processes."""
        consumers = list()
        for consumer_index in range(len(self.consume_queues)):
            consume_queue = self.consume_queues[consumer_index]
            redis_client = "{}:{}".format(self.redis_host, self.redis_port)
            consumer_log_path = self.get_consumer_log_path(consumer_index)
            process = multiprocessing.Process(
                target=self.consume_packages,
                args=(consume_queue, redis_client, consumer_log_path)
            )
            consumers.append(process)
            process.daemon = True
            process.start()
        return consumers

    def stop_consumers(self, consumers):
        """Send one exit sentinel per queue, then block until all consumers die."""
        for consumer_queue in self.consume_queues:
            consumer_queue.put({"packet_list": None})
        while True:
            is_all_down = all(not consumer.is_alive() for consumer in consumers)
            if is_all_down:
                logger.info("All consumers has been stopped")
                break
            logger.info("One consumer has not been stopped,please wait!")
            time.sleep(3)

    @classmethod
    def stop_producer(cls, producer):
        """Terminate the producer process and wait until it is really gone."""
        logger.info("try to stop producer")
        if producer.is_alive():
            producer.terminate()
        while producer.is_alive():
            time.sleep(1)
        logger.info("producer has been stopped")

    def start(self):
        """Run the full capture: consumers + producer for run_seconds, then
        shut both down and merge the per-consumer logs.

        BUGFIX: stop_producer()/stop_consumers() existed but were never
        invoked, so combine_consumer_log() raced consumers that were still
        writing; the shutdown sequence is now explicit (producer first, so no
        new batches arrive after the sentinels).
        """
        logger.info("Sniffer start...")
        consumers = self.start_consumers()
        producer = self.start_producer()
        stop_ts = time.time() + self.run_seconds
        while time.time() <= stop_ts:
            logger.info("Sniffer is running")
            time.sleep(1)
        logger.info("Timeout, exit!")
        self.stop_producer(producer)
        self.stop_consumers(consumers)
        self.combine_consumer_log()
        logger.info("Sniffer exit...")


class RedisSnifferArgs(object):
    """Command-line argument handling for the sniffer script."""

    @classmethod
    def get_command_option(cls):
        """Parse sys.argv with optparse and return the parsed options object."""
        usage = """
#=======================================================#
# Usage: python redis_sniffer.py --redis_host=HOST --redis_port=PORT --run_seconds=10
#=======================================================#
"""
        default_host_ip = LinuxHelper.get_host_name_bind_ip()
        opt_parser = optparse.OptionParser(
            usage=usage,
            add_help_option=False,
            version="{} {}".format(APP_NAME, APP_VERSION)
        )
        # (flag, keyword arguments) pairs, registered in display order.
        option_specs = [
            ("--help", dict(action="help", help="show help")),
            ("--redis_host", dict(action="store", type="string", default=default_host_ip,
                                  dest="redis_host", help="redis host,default: %default")),
            ("--redis_port", dict(type="int", dest="redis_port", default=6379,
                                  help="redis port, default: %default")),
            ("--run_seconds", dict(dest="run_seconds", type="int", default=10,
                                   help="run seconds, default: %default")),
            ("--sniffer_works", dict(type="int", dest="sniffer_works", default=2,
                                     help="sniffer workers, default: %default")),
            ("--batch_package_size", dict(dest="batch_package_size", type="int", default=1000,
                                          help="handle packages every batch, default: %default")),
        ]
        for flag, kwargs in option_specs:
            opt_parser.add_option(flag, **kwargs)
        parsed_options, _unused_args = opt_parser.parse_args()
        return parsed_options


def main():
    """Script entry point: configure logging, parse the CLI options, resolve
    the interface bound to the target Redis IP, then run the sniffer."""
    LoggerHelper.init_logger(
        logger_dir_path=os.path.join(os.path.dirname(__file__), "logs"),
        logger_name=APP_NAME,
        logger_level=logging.INFO
    )
    opts = RedisSnifferArgs.get_command_option()
    eth_name = LinuxHelper.get_ip_bind_eth_name(opts.redis_host)
    # Without a local interface carrying that IP, sniffing cannot work.
    if eth_name is None:
        logger.warning("please input the correct ip address")
        exit(-1)
    logger.info("your options:")
    logger.info(opts)
    logger.info("use eth name：{}".format(eth_name))
    sniffer = RedisSniffer(
        eth_name=eth_name,
        redis_host=opts.redis_host,
        redis_port=opts.redis_port,
        consume_worker_size=opts.sniffer_works,
        batch_package_size=opts.batch_package_size,
        run_seconds=opts.run_seconds
    )
    sniffer.start()


# Run the sniffer only when executed as a script, not on import.
if __name__ == '__main__':
    main()
