# -*- coding: utf-8 -*-
# =============================================================================
#         Desc: 格式化MySQL慢日志并按照各维度进行排序输出。
#       Author: GGA
#        Email:
#     HomePage:
#      Version: 1.0.0
#   LastChange: 2020-12-20
#      History:
# =============================================================================
import datetime
import sys
import re
import os
import hashlib
import argparse
import logging
import heapq
from utils.sqlpf_helper import SQLFingerPrint
from utils.logger_helper import LoggerHelper
from utils.top_max_heap import TopMaxHeapItem, TopMaxHeap

default_datetime_format = "%Y-%m-%d %H:%M:%S"

logger = logging.getLogger()


class SlowLogConfig(object):
    """Constants that control slow-log parsing, filtering and reporting."""
    # Tab-separated "Query" marker inside a log line.
    # NOTE(review): not referenced elsewhere in this file chunk — confirm external use.
    QUERY_START_TAG = " Query\t"
    # Legacy MySQL timestamp header, e.g. "201220 10:11:12".
    QUERY_TIME_PATTERN = r"^[0-9]{6} [0-9]{2}:[0-9]{2}:[0-9]{2}"
    # ISO-8601 style timestamp header used by newer MySQL versions.
    QUERY_TIME_PATTERN2 = r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}"
    # Matches "use <db>;" lines, which are skipped during scanning.
    USE_DB_PATTERN = r"^use.*;"
    # Housekeeping statements treated as driver/connection noise (substring match).
    CHECK_SQL_LIST = [
        "select @@session.tx_read_only",
        "select 1",
        "set autocommit=1",
        "set autocommit=0",
        "commit"
    ]
    # Metrics for which a TOP-N heap is maintained (one report file each).
    SQL_TOP_TYPES = [
        "query_time_ms", "examined_rows", "affected_rows",
        "lock_time_ms", "sent_rows", "sent_bytes"
    ]
    # Metric key -> Chinese display label used in the stats report files.
    SQL_STATS_TYPES = {
        "query_time_ms": "查询时间",
        "examined_rows": "预估行数",
        "affected_rows": "影响行数",
        "lock_time_ms": "阻塞时间",
        "sent_rows": "返回行数",
        "sent_bytes": "返回字节"
    }
    # Flush the in-memory formatted-log cache to disk past this many entries.
    TMP_CACHE_LOG_NUMBER = 10000


class SlowLogItem(object):
    """Plain value object describing a single parsed slow-log entry.

    NOTE(review): not referenced elsewhere in this file chunk — presumably
    kept for external callers; confirm before removing.
    """

    def __init__(self):
        # Text fields (session context and SQL text) default to "".
        for text_attr in ("login_user", "login_database", "raw_sql", "sql_print"):
            setattr(self, text_attr, "")
        # Numeric counters (timings and row/byte counts) default to 0.
        for num_attr in ("query_time", "lock_time", "sent_rows",
                         "examined_rows", "affected_rows", "sent_bytes"):
            setattr(self, num_attr, 0)
        # Execution window is unknown until the entry is parsed.
        self.start_time = None
        self.stop_time = None


class SlowLogFilter(object):
    """Threshold container used to decide which slow-log entries to keep."""

    def __init__(self, start_time, stop_time, min_examined_rows, min_affect_rows,
                 min_query_seconds, min_sent_rows, min_sent_bytes):
        """Store the filter bounds; entries below any minimum are dropped."""
        vars(self).update(
            start_time=start_time,
            stop_time=stop_time,
            min_examined_rows=min_examined_rows,
            min_affect_rows=min_affect_rows,
            min_query_seconds=min_query_seconds,
            min_sent_rows=min_sent_rows,
            min_sent_bytes=min_sent_bytes,
        )


class SlowLogHelper(object):
    """Parse a MySQL slow-query log and emit report files.

    The log is streamed once; per-fingerprint aggregate statistics and
    per-metric TOP-N heaps are maintained in memory, and the formatted
    entries plus the summary reports are written into a local ``logs``
    directory, one set of files per run.
    """

    def __init__(self, slow_log_path, sql_top_number, log_filter, keep_database_name=1):
        """
        :param slow_log_path: path of the raw slow log file to parse
        :param sql_top_number: size of each TOP-N report
        :param log_filter: SlowLogFilter holding the keep/drop thresholds
        :param keep_database_name: 1 -> prefix "use <db>; " before fingerprinting
        """
        self.slow_log_path = slow_log_path
        self.sql_top_number = sql_top_number
        self.log_filter = log_filter
        self.keep_database_name = keep_database_name
        # Timestamp used to name every output file of this run.
        self.check_time_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
        self.sql_stats_map = {}
        self.sql_top_map = {}
        self.cache_log_items = []
        self.result_files = []
        self.result_path = self.get_result_dir_path()
        self.format_log_path = os.path.join(self.result_path, self.check_time_str + "_format.txt")
        # Statements longer than this (1 MiB) are only counted, not analyzed.
        self.big_sql_size = 1 * 1024 * 1024
        self.big_sql_list = []
        self.init_sql_top_map()

    @classmethod
    def append_file_content(cls, file_path, file_content):
        """Append *file_content* to *file_path* (UTF-8, created if missing)."""
        with open(file=file_path, encoding="utf-8", mode="a+") as fw:
            fw.write(file_content)

    @classmethod
    def spilt_sql_line_to_dict(cls, sql_line: str, log_item: dict):
        """Parse one "# key: value  key: value" header line into *log_item*.

        "# User@Host: user[user] @  [ip]" is first normalized into separate
        "User:" and "Host:" fields; values wrapped in "[...]" are unwrapped.
        Lines not starting with "# " are ignored.
        """
        if not sql_line.startswith("# "):
            return
        if sql_line.startswith("# User@Host"):
            sql_line = sql_line.replace("User@Host:", "User:")
            sql_line = sql_line.replace(" @  ", "  Host: ")
        sql_line = sql_line.replace("# ", "")
        # Fields are separated by two spaces; each field is "key: value".
        tmp_list = sql_line.split(sep="  ")
        for tmp_item in tmp_list:
            if tmp_item.find(":") >= 0:
                item_key = tmp_item.split(":")[0].strip()
                item_value = tmp_item.split(":")[1].strip()
                if item_value.find("[") >= 0:
                    # e.g. "user[user]" or "[10.0.0.1]" -> keep the bracketed part.
                    item_value = item_value.split("[")[1].replace("]", "").strip()
                if item_key != "":
                    log_item[item_key] = item_value

    def format_sql_text(self, sql_text, database_name):
        """Collapse whitespace, lower-case the SQL and optionally prefix
        "use <db>; " so fingerprints distinguish databases."""
        tmp_sql_text = re.sub(
            r"\s+", " ",
            str(sql_text).replace("\r", " ").replace("\n", " ").replace("\t", " ")
        )
        tmp_sql_text = tmp_sql_text.strip().lower()
        is_valid_db_name = database_name.strip() != ""
        if is_valid_db_name and self.keep_database_name:
            tmp_sql_text = "use {}; ".format(database_name) + tmp_sql_text
        return tmp_sql_text

    @classmethod
    def get_log_item_print_info(cls, log_item: dict):
        """Render one formatted log entry as a multi-line text block."""
        return """
# start_time: {start_time}, stop_time: {stop_time}, query_time_ms: {query_time_ms}, lock_time_ms: {lock_time_ms}, is_killed: {is_killed}
# examined_rows: {examined_rows}, affected_rows: {affected_rows}, sent_rows: {sent_rows}, sent_bytes: {sent_bytes}, 
# database_name: {database_name}, login_user: {login_user}, login_host: {login_host}
# sql_pf_md5: {sql_pf_md5}
# sql_txt_md5: {sql_txt_md5}
# sql_text: {sql_text}
""".format(
            start_time=log_item["start_time"].strftime(default_datetime_format),
            stop_time=log_item["stop_time"].strftime(default_datetime_format),
            sql_txt_md5=hashlib.md5(log_item["sql_text"].encode(encoding='UTF-8')).hexdigest(),
            sql_text=log_item["sql_text"],
            sql_pf_md5=log_item["sql_pf_md5"],
            query_time_ms=log_item["query_time_ms"],
            login_user=log_item["login_user"],
            login_host=log_item["login_host"],
            is_killed=log_item["is_killed"],
            database_name=log_item["database_name"],
            lock_time_ms=log_item["lock_time_ms"],
            sent_rows=log_item["sent_rows"],
            examined_rows=log_item["examined_rows"],
            affected_rows=log_item["affected_rows"],
            sent_bytes=log_item["sent_bytes"]
        )

    @classmethod
    def get_result_dir_path(cls):
        """Return the output directory ("./logs"), creating it if needed."""
        result_path = os.path.join(os.path.curdir, "logs")
        if not os.path.exists(result_path):
            os.makedirs(result_path)
        return result_path

    @classmethod
    def is_check_sql(cls, sql_text: str):
        """Return True when *sql_text* contains any known housekeeping
        statement (case-insensitive substring match)."""
        sql_lower = sql_text.lower()
        for tmp_sql in SlowLogConfig.CHECK_SQL_LIST:
            if sql_lower.find(tmp_sql.lower()) >= 0:
                return True
        return False

    def check_env(self):
        """Return True when the slow-log source file exists."""
        if not os.path.exists(self.slow_log_path):
            print("源文件{}不存在，请检查".format(self.slow_log_path))
            return False
        return True

    def init_sql_top_map(self):
        """Create one bounded max-heap per TOP metric."""
        self.sql_top_map = {}
        for sql_top_type in SlowLogConfig.SQL_TOP_TYPES:
            self.sql_top_map[sql_top_type] = TopMaxHeap(top_size=self.sql_top_number)

    def update_sql_top_map(self, log_item: dict):
        """Push *log_item* into every per-metric TOP-N heap.

        :param log_item: newly parsed log record
        """
        for sql_top_type in SlowLogConfig.SQL_TOP_TYPES:
            top_item = TopMaxHeapItem(item_key=log_item[sql_top_type], item_data=log_item)
            self.sql_top_map[sql_top_type].push_item(top_item)

    def update_sql_stats_map(self, log_item: dict):
        """Accumulate *log_item* into the per-fingerprint sum/avg statistics.

        :param log_item: newly parsed log record
        """
        sql_stats_map = self.sql_stats_map
        sql_pf_md5 = log_item["sql_pf_md5"]
        if sql_pf_md5 not in sql_stats_map.keys():
            # First occurrence of this fingerprint: seed a zeroed stats row.
            stats_item = dict()
            stats_item["sum_exec_count"] = 0
            stats_item["sql_pf"] = log_item["sql_pf"]
            stats_item["sql_pf_md5"] = log_item["sql_pf_md5"]
            stats_item["sql_sample"] = log_item["sql_text"]
            for status_type in SlowLogConfig.SQL_STATS_TYPES.keys():
                stats_item["sum_" + status_type] = 0
                stats_item["avg_" + status_type] = 0
            sql_stats_map[sql_pf_md5] = stats_item
        stats_item = sql_stats_map[sql_pf_md5]
        sum_exec_count = stats_item["sum_exec_count"] + 1
        stats_item["sum_exec_count"] = sum_exec_count
        for status_type in SlowLogConfig.SQL_STATS_TYPES.keys():
            stats_item["sum_" + status_type] += log_item[status_type]
            stats_item["avg_" + status_type] = stats_item["sum_" + status_type] / sum_exec_count

    def get_sql_stats_print_info(self, sql_status_type):
        """Render the fingerprint statistics sorted by *sql_status_type*
        (descending) as a list of report lines; empty if the key is unknown."""
        print_info = []
        sql_map_items = list(self.sql_stats_map.values())
        if len(sql_map_items) == 0:
            return print_info
        if sql_status_type not in sql_map_items[0].keys():
            return print_info
        sql_items = sorted(sql_map_items, key=lambda item: item[sql_status_type], reverse=True)
        total_value = sum(sql_item[sql_status_type] for sql_item in sql_items)
        for sql_item in sql_items:
            print_info.append("*" * 100)
            if total_value != 0:
                # Share of this fingerprint in the grand total, in percent.
                exec_rate = round(sql_item[sql_status_type] * 100.0 / total_value, 2)
            else:
                exec_rate = ""
            print_info.append("执行占比:    {0}% (按{1})".format(exec_rate, sql_status_type))
            print_info.append("指纹SQL:    {0}".format(sql_item["sql_pf"]))
            print_info.append("指纹MD5:    {0}".format(sql_item["sql_pf_md5"]))
            print_info.append("示例SQL:    {0}".format(sql_item["sql_sample"]))
            print_info.append("执行次数:    {0}".format(sql_item["sum_exec_count"]))
            for status_type in SlowLogConfig.SQL_STATS_TYPES.keys():
                print_info.append("平均/汇总{}:    {} / {}".format(
                    SlowLogConfig.SQL_STATS_TYPES[status_type],
                    int(sql_item["avg_" + status_type]),
                    int(sql_item["sum_" + status_type])
                ))
        return print_info

    def check_log_filter(self, log_item):
        """Return True when *log_item* passes every configured threshold."""
        lf = self.log_filter
        if lf.start_time <= log_item["stop_time"] <= lf.stop_time \
                and log_item["query_time_ms"] >= lf.min_query_seconds * 1000 \
                and log_item["affected_rows"] >= lf.min_affect_rows \
                and log_item["examined_rows"] >= lf.min_examined_rows \
                and log_item["sent_rows"] >= lf.min_sent_rows \
                and log_item["sent_bytes"] >= lf.min_sent_bytes:
            return True
        return False

    def write_cache_logs(self):
        """Append the cached formatted entries to the format log file."""
        cache_logs = self.get_cache_logs_print_info()
        self.append_file_content(
            file_path=self.format_log_path,
            file_content=cache_logs
        )

    def _flush_pending_item(self, log_item, sql_text_lines):
        """Finalize one accumulated slow-log entry.

        Joins the SQL text, skips (but counts) oversized statements, then
        formats the record and — when it passes the filter — updates the
        stats map, the TOP-N heaps and the in-memory cache.
        """
        log_item["sql_text"] = " ".join(sql_text_lines)
        sql_text_len = len(log_item["sql_text"])
        if sql_text_len > self.big_sql_size:
            # Too large to fingerprint; only record its length.
            self.big_sql_list.append(sql_text_len)
            return
        log_item = self.format_log_item(log_item)
        if (log_item is not None) \
                and self.check_log_filter(log_item):
            self.update_sql_stats_map(log_item)
            self.update_sql_top_map(log_item)
            self.cache_log_items.append(log_item)

    def scan_slow_logs(self):
        """Stream the slow log line by line, accumulating one entry at a time.

        An entry starts at a "# User@Host" line; its SQL text is every
        non-comment line after the matching "SET timestamp=" line.  The
        previous entry is flushed whenever a new one starts, and once more
        after the last line (previously the final entry was silently lost).
        """
        loop_index = 0
        log_item = {}
        sql_text_lines = []

        with open(file=self.slow_log_path, mode="rb") as slh:
            for line in slh:
                try:
                    # The log may contain non-UTF-8 bytes; skip such lines.
                    line = line.decode("utf-8")
                except Exception as ex:
                    logger.info(str(ex))
                    continue
                loop_index += 1
                if loop_index % 100 == 0:
                    logger.info("处理第{}行记录".format(loop_index))
                # Periodically spill the in-memory cache to disk.
                if len(self.cache_log_items) > SlowLogConfig.TMP_CACHE_LOG_NUMBER:
                    logger.info("write to format log")
                    self.write_cache_logs()
                    self.cache_log_items = []
                if line.startswith("SET timestamp="):
                    log_item["timestamp"] = line.replace("SET timestamp=", "")
                    sql_text_lines = []
                    continue
                if re.match(SlowLogConfig.USE_DB_PATTERN, line.strip(), re.IGNORECASE):
                    continue
                if line.startswith("# User@Host"):
                    # A new slow SQL begins: flush the previous record first.
                    self._flush_pending_item(log_item, sql_text_lines)
                    # Start accumulating the new record.
                    log_item = {}
                    sql_text_lines = []
                    self.spilt_sql_line_to_dict(sql_line=line, log_item=log_item)
                    continue
                if line.startswith("# Time:"):
                    continue
                if line.startswith("# Schema:") or line.startswith("# Query_time:") or line.startswith("# Bytes_sent:"):
                    self.spilt_sql_line_to_dict(sql_line=line, log_item=log_item)
                    continue
                if not line.startswith("#"):
                    sql_text_lines.append(line)
        # BUGFIX: flush the last record of the file; it used to be dropped.
        self._flush_pending_item(log_item, sql_text_lines)
        self.write_cache_logs()

    def create_status_files(self):
        """Write one statistics report file per sum_/avg_ metric."""
        tmp_stats_list = ["sum_exec_count"]
        for sql_status_type in SlowLogConfig.SQL_STATS_TYPES.keys():
            tmp_stats_list.append("sum_" + sql_status_type)
            tmp_stats_list.append("avg_" + sql_status_type)

        for sql_status_type in tmp_stats_list:
            print_info = self.get_sql_stats_print_info(
                sql_status_type=sql_status_type
            )
            static_log_path = os.path.join(
                self.result_path,
                self.check_time_str + "_stats_by_{}.txt".format(sql_status_type))
            self.result_files.append(static_log_path)
            self.append_file_content(
                file_path=static_log_path,
                file_content="\n".join(print_info)
            )

    def create_top_file(self):
        """Write one TOP-N report file per TOP metric."""
        for sql_top_type in SlowLogConfig.SQL_TOP_TYPES:
            top_log_path = os.path.join(
                self.result_path,
                self.check_time_str + "_top_sql_by_{}.txt".format(sql_top_type))
            top_logs = self.get_top_log_print_info(sql_top_type)
            self.append_file_content(
                file_path=top_log_path,
                file_content=top_logs
            )
            self.result_files.append(top_log_path)

    def get_top_log_print_info(self, sql_top_type):
        """Render the TOP-N entries of *sql_top_type* as one text blob."""
        cache_strings = []
        top_item_heap = self.sql_top_map[sql_top_type]
        top_items = top_item_heap.get_top_items()
        for top_item in top_items:
            log_item = top_item.item_data
            cache_strings.append(self.get_log_item_print_info(log_item))
            cache_strings.append("\r")
        return "".join(cache_strings)

    def format_log_item(self, log_item: dict):
        """Convert a raw header dict into a normalized record dict.

        Returns None when no "SET timestamp=" line was seen for the entry
        (e.g. the synthetic empty record before the first real one).
        start_time is derived by subtracting Query_time from the timestamp.
        """
        database_name = log_item.get("Schema", "")
        sql_text = self.format_sql_text(log_item["sql_text"], database_name)
        sql_pf = SQLFingerPrint.get_finger_print(sql_text)
        sql_pf_md5 = hashlib.md5(sql_pf.encode(encoding='UTF-8')).hexdigest()
        if "timestamp" in log_item.keys():
            stop_time = datetime.datetime.fromtimestamp(int(log_item["timestamp"].replace(";", "").replace("\n", "")))
            query_seconds = float(log_item["Query_time"])
            start_time = stop_time - datetime.timedelta(seconds=query_seconds)
            return {
                "start_time": start_time,
                "stop_time": stop_time,
                "sql_text": sql_text,
                "sql_pf": sql_pf,
                "sql_pf_md5": sql_pf_md5,
                "login_user": log_item.get("User", "").strip(),
                "login_host": log_item.get("Host", "").strip(),
                "is_killed": log_item.get("Killed", "0").strip(),
                "database_name": database_name,
                "query_time_ms": int(query_seconds * 1000),
                "lock_time_ms": int(float(log_item.get("Lock_time", "0")) * 1000),
                "sent_rows": float(log_item.get("Rows_sent", "0")),
                "examined_rows": float(log_item.get("Rows_examined", "0")),
                "affected_rows": float(log_item.get("Rows_affected", "0")),
                "sent_bytes": float(log_item.get("Bytes_sent", "0")),
            }
        return None

    def get_cache_logs_print_info(self):
        """Render all cached entries as one text blob."""
        cache_strings = []
        for log_item in self.cache_log_items:
            cache_strings.append(self.get_log_item_print_info(log_item))
            cache_strings.append("\r")
        return "".join(cache_strings)

    def show_result_files(self):
        """Log the paths of every report file produced by this run."""
        logger.info("*" * 40)
        logger.info("处理结果存入下列文件")
        for result_file in self.result_files:
            logger.info(result_file)
        logger.info("*" * 40)

    def show_big_sql_info(self):
        """Log how many oversized statements were skipped and the longest one."""
        big_sql_list_len = len(self.big_sql_list)
        if big_sql_list_len > 0:
            # BUGFIX: the third argument (max length) had no placeholder
            # in the format string and was never printed.
            logger.info(
                "共找到{}个长度大于{}的慢SQL,最长SQL长度为：{}".format(
                    big_sql_list_len,
                    self.big_sql_size,
                    max(self.big_sql_list)
                )
            )

    def check_logs(self):
        """Entry point: validate the input file, scan it and write reports."""
        if not self.check_env():
            return
        self.scan_slow_logs()
        self.result_files.append(self.format_log_path)
        self.create_status_files()
        self.create_top_file()
        self.show_result_files()
        self.show_big_sql_info()


class CommandParser(object):
    """Build and evaluate the command line interface of the tool."""

    @classmethod
    def get_parser(cls):
        """Return the ArgumentParser describing every supported option."""
        parser = argparse.ArgumentParser(
            description='格式化MySQL慢日志并汇总SQL执行情况',
            add_help=False)
        connect_setting = parser.add_argument_group('参数信息')
        # Custom --help flag because add_help=False disables the built-in one.
        parser.add_argument(
            '--help',
            dest='help',
            action='store_true',
            help='获取帮助信息',
            default=False
        )
        connect_setting.add_argument(
            '--slow_log_path',
            dest='slow_log_path',
            type=str,
            default="",
            help='慢日志文件路径，无默认值，需指定。'
        )
        connect_setting.add_argument(
            '--min_examined_rows',
            dest='min_examined_rows',
            type=float,
            default=0,
            help='最小预估行数,默认为0,对应慢日志中Rows_examined变量,低于该值的慢SQL将被忽略。'
        )
        connect_setting.add_argument(
            '--min_affect_rows',
            dest='min_affect_rows',
            type=float,
            default=0,
            help='最小影响行数,默认为0,对应慢日志中Rows_affected变量,低于该值的慢SQL将被忽略。'
        )
        connect_setting.add_argument(
            '--min_query_seconds',
            dest='min_query_seconds',
            type=float,
            default=0,
            help='最小执行时间(单位秒),默认为0,对应慢日志中Query_time变量,低于该值的慢SQL将被忽略。'
        )
        connect_setting.add_argument(
            '--min_sent_rows',
            dest='min_sent_rows',
            type=float,
            default=0,
            help='最小返回行数(单位行),默认为0,对应慢日志中Rows_sent变量,低于该值的慢SQL将被忽略。'
        )
        connect_setting.add_argument(
            '--min_sent_bytes',
            dest='min_sent_bytes',
            type=float,
            default=0,
            help='最小返回长度(单位字节),默认为0,对应慢日志中Bytes_sent变量,低于该值的慢SQL将被忽略。'
        )
        connect_setting.add_argument(
            '--start_time',
            dest='start_time',
            type=str,
            default="1970-01-01 00:00:00",
            help='慢日志开始时间,默认为1970-01-01 00:00:00,在该时间点前执行完的慢SQL将被忽略'
        )
        connect_setting.add_argument(
            '--stop_time',
            dest='stop_time',
            type=str,
            default="2199-01-01 00:00:00",
            help='慢日志结束时间,默认为2199-01-01 00:00:00,在该时间点后执行完的慢SQL将被忽略'
        )
        connect_setting.add_argument(
            '--sql_top_number',
            dest='sql_top_number',
            type=int,
            default=200,
            help='指定TOP慢日志的数量,默认为200,按照慢SQL执行时间/扫描行数排序'
        )
        connect_setting.add_argument(
            '--keep_database_name',
            dest='keep_database_name',
            type=int,
            default=1,
            help='在计算SQL指纹时是否包含数据库名称,1表示包含，0表示忽略，默认值为1，当使用分库分表时可以选择忽略库名。'
        )
        return parser

    @classmethod
    def parse_args(cls, command_args):
        """Parse *command_args* and return the argparse namespace.

        start_time/stop_time are converted in place to datetime objects.
        Prints help and exits when no arguments (or --help) are given;
        on other validation problems it warns and continues (legacy behavior).
        """
        need_print_help = not command_args
        parser = cls.get_parser()
        args = parser.parse_args(command_args)

        if args.help or need_print_help:
            parser.print_help()
            sys.exit(1)
        # BUGFIX: the option defaults to "" (not None), so the previous
        # "is None" check could never fire; test for an empty path instead.
        if not args.slow_log_path:
            logger.warning("请输入慢日志文件路径")
            parser.print_help()
        try:
            args.stop_time = datetime.datetime.strptime(str(args.stop_time), default_datetime_format)
            args.start_time = datetime.datetime.strptime(str(args.start_time), default_datetime_format)
        except Exception as ex:
            logger.debug(str(ex))
            # BUGFIX: the warning text was missing its verb ("请输入...").
            logger.warning("请输入正确的开始时间和结束时间")
            parser.print_help()
        return args


def init_logger():
    """Initialize the shared LoggerHelper and rebind this module's logger."""
    global logger
    LoggerHelper.init_logger(logger_level=logging.INFO)
    logger = LoggerHelper.get_logger()


def main(command_args):
    """CLI entry: parse arguments, build the filter and run the analysis.

    :param command_args: argument list, typically sys.argv[1:]
    """
    logger.info("处理开始。。。")
    args = CommandParser.parse_args(command_args)
    slow_filter = SlowLogFilter(
        start_time=args.start_time,
        stop_time=args.stop_time,
        min_examined_rows=args.min_examined_rows,
        min_affect_rows=args.min_affect_rows,
        min_query_seconds=args.min_query_seconds,
        min_sent_rows=args.min_sent_rows,
        min_sent_bytes=args.min_sent_bytes,
    )
    helper = SlowLogHelper(
        slow_log_path=args.slow_log_path,
        log_filter=slow_filter,
        sql_top_number=args.sql_top_number,
    )
    helper.check_logs()
    logger.info("处理完成。。。")


if __name__ == '__main__':
    # Script entry point: set up logging first, then run with the CLI
    # arguments (argv[0], the script name, stripped).
    init_logger()
    main(sys.argv[1:])
