# -*- coding: utf-8 -*-
# =============================================================================
#     FileName:
#         Desc:
#       Author: GGA
#        Email:
#     HomePage:
#      Version: 1.0.1
#   LastChange: 2020-12-20
#      History:
# =============================================================================
import os
import re
import json
import fire
import redis
import logging
import datetime
import pandas as pd
import hashlib
from decimal import Decimal
from utils.logger_helper import LoggerHelper

# Logger name under which LoggerHelper registers this tool's logger.
app_name = "redis_big_keys"
app_version = "1.0"
# Module-level logger; re-bound in main() after LoggerHelper.init_logger runs.
logger = LoggerHelper.get_logger()


class RedisRelayer(object):
    """
    Replay a Redis MONITOR log file against a Redis instance (preferably a
    slave node) and aggregate per-command statistics: execution count,
    approximate execution time, and network in/out traffic. The TOP entries
    for each metric are logged when the replay finishes.
    """

    # One MONITOR output line looks like:
    #   <timestamp> [<db> <ip>:<port>] "<COMMAND>" "<key>" <args...>
    # where the quoted key and the trailing args are both optional.
    MONITOR_LINE_RE = re.compile(
        r"""^(?P<timestamp>[\d\.]+)\s\[(?P<db>\d+)\s\d+\.\d+\.\d+\.\d+:\d+]\s"(?P<command>\w+)"(\s"(?P<key>[^(?<!\\)"]+)(?<!\\)")?(\s(?P<args>.+))?$""",
        re.VERBOSE)

    def __init__(self, redis_host, redis_port, redis_auth, monitor_log_file,
                 redis_db=0, top_item_size=20, force_on_slave=True):
        """
        :param redis_host: target Redis host to replay against
        :param redis_port: target Redis port
        :param redis_auth: target Redis password
        :param monitor_log_file: path of the MONITOR log file to replay
        :param redis_db: database index used for the initial connection
        :param top_item_size: number of TOP entries reported per metric
        :param force_on_slave: when True, refuse to run against a master
        """
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_auth = redis_auth
        self.redis_db = redis_db
        self.monitor_log_file = monitor_log_file
        self.top_item_size = top_item_size
        self.force_on_slave = force_on_slave
        self.redis_conn = self._get_connection()
        # Microsecond timestamp of the previously replayed command; used to
        # approximate each command's execution time from log timestamps.
        self.last_timestamp = None
        # Aggregated statistics keyed by "<COMMAND>_<sha256(key)>".
        self.query_info_dict = dict()

    def _get_connection(self):
        """Create a StrictRedis connection from the constructor settings."""
        return redis.StrictRedis(
            host=self.redis_host, port=self.redis_port,
            password=self.redis_auth, db=self.redis_db
        )

    @classmethod
    def _get_item_size(cls, item):
        """
        Recursively estimate the space an item occupies.

        None counts as 4 (a nil reply), str/bytes count their length, lists
        and dicts sum their elements/values recursively; any other type
        (e.g. int) counts as 0.

        :param item: value to measure
        :return: estimated size
        """
        if item is None:
            return 4
        if isinstance(item, (str, bytes)):
            return len(item)
        total_size = 0
        if isinstance(item, list):
            for sub_item in item:
                total_size += cls._get_item_size(item=sub_item)
        if isinstance(item, dict):
            for sub_item in item.values():
                total_size += cls._get_item_size(item=sub_item)
        return total_size

    @classmethod
    def _is_get_command(cls, command_name):
        """Return True when the command is a read-only GET-style command
        that is safe to replay against the server."""
        command_name = str(command_name).upper().strip()
        return command_name in ("GET", "MGET", "HGET", "HMGET", "HGETALL")

    def _check_slave_server(self):
        """
        Return True when replay is allowed on the connected instance:
        either the slave check is disabled, or the instance is not a master.
        """
        if not self.force_on_slave:
            return True
        info = self.redis_conn.info("Replication")
        return info["role"] != "master"

    def _get_command_net_out_size(self, command_info):
        """
        Re-execute a read-only command and measure its reply size, which
        approximates the network-out traffic of the original command.
        Commands that are not GET-style are not replayed and count as 0.

        :param command_info: groupdict() produced by MONITOR_LINE_RE
        :return: estimated reply size
        """
        command_name = command_info["command"]
        if not self._is_get_command(command_name=command_name):
            return 0
        command_args = [command_name]
        if command_info["key"] is not None:
            command_args.append(command_info["key"])
        if command_info["args"] is not None:
            if isinstance(command_info["args"], str):
                command_args.append(command_info["args"])
            if isinstance(command_info["args"], list):
                # Bug fix: the original code called isinstance() here and
                # silently dropped the extra arguments instead of appending.
                for item in command_info["args"]:
                    command_args.append(item)
        # Replay against the same DB index the command was captured on.
        self.redis_conn.execute_command("SELECT", command_info["db"])
        command_result = self.redis_conn.execute_command(*command_args)
        return self._get_item_size(command_result)

    @classmethod
    def _get_hash_value(cls, binary_data):
        """Return the hex SHA-256 digest of the given bytes."""
        hasher = hashlib.sha256()
        hasher.update(binary_data)
        return hasher.hexdigest()

    def _process_command(self, command_info):
        """
        Fold one parsed MONITOR line into the aggregated statistics.

        A command's execution time is approximated as the interval between
        its timestamp and the previous command's timestamp, in microseconds.
        Commands without a key are ignored.

        :param command_info: groupdict() produced by MONITOR_LINE_RE
        """
        # MONITOR timestamps are fractional seconds; convert to microseconds
        # via Decimal to avoid float rounding errors.
        current_timestamp = int(Decimal(command_info["timestamp"]) * 1000 * 1000)
        if self.last_timestamp is None:
            exec_time = 0
        else:
            exec_time = current_timestamp - self.last_timestamp
        self.last_timestamp = current_timestamp
        net_in_size = self._get_item_size(item=command_info["args"])
        net_out_size = self._get_command_net_out_size(command_info=command_info)
        command_name = command_info["command"]
        command_key = command_info["key"]
        if command_key is None:
            return
        item_key = command_name + "_" + str(
            self._get_hash_value(command_key.encode("utf-8", errors="ignore")))
        if item_key not in self.query_info_dict:
            self.query_info_dict[item_key] = {
                "command_name": command_name,
                "command_key": command_key,
                "exec_command": command_name + " " + command_key,
                "exec_time": 0,
                "exec_count": 0,
                "net_in_size": 0,
                "net_out_size": 0
            }
        item_stats = self.query_info_dict[item_key]
        item_stats["exec_count"] += 1
        item_stats["exec_time"] += exec_time
        item_stats["net_in_size"] += net_in_size
        item_stats["net_out_size"] += net_out_size

    def _get_top_item_info(self, item_key):
        """
        Build report lines for the TOP entries sorted by the given metric.

        :param item_key: one of "exec_time", "exec_count", "net_in_size",
                         "net_out_size"
        :return: list of formatted report lines (banner, title, header, rows)
        """
        total_value = sum(item[item_key] for item in self.query_info_dict.values())
        msg_list = list()
        msg_list.append("*" * 100)
        msg_list.append("sorted by {}".format(item_key))
        # Column headers ("net_in" fixes the original "net_int" typo).
        msg = "{:>15} {:>15} {:>15} {:>15} {:>15} {:>15}   {}".format(
            "exec_rate", "exec_count", "exec_time",
            "net_in", "net_out", "command_name", "command_key"
        )
        msg_list.append(msg)
        top_items = sorted(
            self.query_info_dict.values(),
            key=lambda x: x[item_key],
            reverse=True
        )[:self.top_item_size]
        for top_item in top_items:
            # Guard against division by zero when every counter is 0.
            if total_value == 0:
                rate = "{:.3f}%".format(0)
            else:
                rate = "{:.3f}%".format(round(top_item[item_key] * 100 / total_value, 2))
            msg = "{:>15} {:>15} {:>15} {:>15} {:>15} {:>15}   {}".format(
                rate, top_item["exec_count"], top_item["exec_time"],
                top_item["net_in_size"], top_item["net_out_size"], top_item["command_name"], top_item["command_key"]
            )
            msg_list.append(msg)
        return msg_list

    def _show_monitor_status(self):
        """Log the TOP report for every tracked metric."""
        msg_list = list()
        for metric in ("exec_time", "exec_count", "net_in_size", "net_out_size"):
            msg_list.extend(self._get_top_item_info(item_key=metric))
        logger.info("\r\n".join(msg_list))

    def replay_monitor_logs(self):
        """
        Replay every line of the MONITOR log file, then log the aggregated
        TOP reports. Exits the process when the log file does not exist or
        when the target instance is a master while force_on_slave is set.
        """
        if not os.path.exists(self.monitor_log_file):
            logger.warning("指定的日志文件({})不存在".format(self.monitor_log_file))
            exit(1)
        if not self._check_slave_server():
            # The original no-op .format() call on this placeholder-free
            # message was dropped; message text is unchanged.
            logger.warning("指定的Redis实例不是从节点")
            exit(1)
        line_counter = 0
        failed_counter = 0
        success_counter = 0
        with open(self.monitor_log_file, "r") as fh:
            for current_line in fh:
                line_counter += 1
                current_line = current_line.strip()
                if line_counter % 1000 == 0:
                    logger.info("处理到第{}行".format(line_counter))
                # MONITOR prints a bare "OK" acknowledgement line; skip it.
                if current_line == "OK":
                    continue
                match_result = self.MONITOR_LINE_RE.match(current_line)
                if match_result is None:
                    failed_counter += 1
                    continue
                try:
                    self._process_command(command_info=match_result.groupdict())
                    success_counter += 1
                except Exception:
                    logger.debug("处理命令失败，命令为:{}".format(current_line))
                    failed_counter += 1
        logger.info("处理完成,成功{}条,异常{}条.".format(success_counter, failed_counter))
        self._show_monitor_status()


def main(redis_host="127.0.0.1", redis_port=6389, redis_auth="",
         monitor_log_file="./logs/monitor.log", top_item_size=20, force_on_slave=True, with_debug=False):
    """
    Replay a Redis MONITOR log. It is recommended to run this only against
    a cluster slave node, so that an accidental write cannot corrupt data.

    :param redis_host: target Redis host to replay against
    :param redis_port: target Redis port
    :param redis_auth: target Redis password
    :param monitor_log_file: MONITOR log file to replay
    :param top_item_size: number of TOP entries to display, default 20
    :param force_on_slave: require the target to be a slave, default True
    :param with_debug: enable DEBUG-level logging
    :return:
    """
    log_level = logging.DEBUG if with_debug else logging.INFO
    LoggerHelper.init_logger(logger_name=app_name, logger_level=log_level)
    global logger
    logger = LoggerHelper.get_logger(logger_name=app_name)
    relayer = RedisRelayer(
        redis_host=redis_host, redis_port=redis_port,
        redis_auth=redis_auth,
        monitor_log_file=monitor_log_file,
        top_item_size=top_item_size,
        force_on_slave=force_on_slave
    )
    relayer.replay_monitor_logs()


if __name__ == '__main__':
    # Expose main() as a command-line interface via python-fire.
    fire.Fire(main)
