# -*- coding: utf-8 -*-
# =============================================================================
#     FileName:
#         Desc:
#       Author: GGA
#        Email:
#     HomePage:
#      Version: 1.0.1
#   LastChange: 2020-12-20
#      History:
# =============================================================================
import datetime
import logging
import os
import re
import sys
import time
import traceback
from logging.handlers import RotatingFileHandler

import fire

from utils.mysql_helper import MySQLClient
from utils.logger_helper import LoggerHelper
from utils.linux_helper import LocalLinuxHelper

# strftime pattern stamped onto every monitoring sample
# (month-day hour:minute:second:microsecond; note ':' before microseconds).
DEFAULT_DATETIME_FORMAT = "%m-%d %H:%M:%S:%f"

# Module-level logger; re-initialised by init_logger() when run as a script.
logger = LoggerHelper.get_logger()


class MySQLProcessMonitor(object):
    """Samples MySQL active sessions in a loop and logs them for DBA troubleshooting.

    Every ``check_interval`` seconds the monitor reads
    ``information_schema.processlist``; when at least ``min_process_count``
    active sessions are found they are appended to a per-instance log file.
    When more than ``max_process_count`` sessions are active it additionally
    captures ``SHOW ENGINE INNODB STATUS`` output and, if ``mysql_process_id``
    is given, a pstack dump of the mysqld process.
    """

    def __init__(self, mysql_host, mysql_port,
                 mysql_user, mysql_pass,
                 check_interval=1,
                 check_count=10000,
                 min_process_count=5,
                 max_process_count=5,
                 max_sql_size=2000,
                 mysql_process_id=0):
        """Store connection/threshold settings and open the client and log files.

        :param mysql_host: MySQL instance host
        :param mysql_port: MySQL instance port
        :param mysql_user: MySQL account
        :param mysql_pass: MySQL password
        :param check_interval: seconds to sleep between samples
        :param check_count: number of samples to take before stopping
        :param min_process_count: write sessions to the log only when at least
            this many active sessions are seen
        :param max_process_count: above this many active sessions, also dump
            InnoDB engine status (and pstack when mysql_process_id is set)
        :param max_sql_size: truncate each logged SQL text to this many chars
        :param mysql_process_id: OS pid of mysqld to pstack; <2 disables it
        """
        self.mysql_host = mysql_host
        self.mysql_port = mysql_port
        self.mysql_user = mysql_user
        self.mysql_pass = mysql_pass
        self.check_interval = check_interval
        self.check_count = check_count
        self.max_sql_size = max_sql_size
        self.min_process_count = min_process_count
        self.max_process_count = max_process_count
        self.mysql_process_id = mysql_process_id
        # Rows written so far; used to re-emit the column header every 10 rows.
        self.write_process_count = 0
        self.mysql_client = self._get_mysql_client()
        self.process_list_logger = self._get_instance_logger(logger_type="process_list")
        self.process_info_logger = self._get_instance_logger(logger_type="process_info")
        self.innodb_status_logger = self._get_instance_logger(logger_type="innodb_status")

    def _get_mysql_client(self):
        """Build a MySQLClient bound to the target instance ('mysql' schema)."""
        server_config = {
            "mysql_host": self.mysql_host,
            "mysql_port": self.mysql_port,
            "mysql_user": self.mysql_user,
            "mysql_password": self.mysql_pass,
            "database_name": "mysql"
        }
        return MySQLClient(**server_config)

    @classmethod
    def _get_logger(cls, logger_name):
        """Return a message-only file logger writing to ./logs/<logger_name>.log.

        The file rotates at 50 MB and keeps at most 5 backups.
        """
        curr_logger = logging.getLogger(logger_name)
        curr_logger.setLevel(logging.DEBUG)
        base_dir = os.path.dirname(__file__)
        log_dir = os.path.join(base_dir, "logs")
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        log_file_name = "{}.log".format(logger_name)
        log_file_path = os.path.join(log_dir, log_file_name)
        # logging.getLogger() caches loggers by name: attach the handler only
        # on first use, otherwise each extra call would duplicate every line.
        if not curr_logger.handlers:
            # Rotate at 50 MB per file, keeping at most 5 backup files.
            handler = RotatingFileHandler(log_file_path, maxBytes=50 * 1024 * 1024, backupCount=5)
            # Raw message only -- callers format their own lines.
            handler.setFormatter(logging.Formatter('%(message)s'))
            curr_logger.addHandler(handler)
        return curr_logger

    def _get_instance_logger(self, logger_type: str):
        """Return a logger namespaced by host/port so instances never share files."""
        logger_name = "{}_{}_{}".format(
            str(self.mysql_host).replace(".", "_").replace("-", "_"),
            self.mysql_port,
            logger_type
        )
        return self._get_logger(logger_name=logger_name)

    def _get_process_list(self):
        """Return active (non-sleep, non-replication, non-self) sessions as dict rows.

        Each row is enriched in place: MONITOR_TIME is stamped, INFO is
        whitespace-normalised and truncated to ``max_sql_size`` characters,
        and STATE is capped at 30 characters.
        """
        sql_script = """
SELECT * FROM information_schema.processlist 
WHERE COMMAND NOT IN('sleep') AND ID<>CONNECTION_ID() 
AND USER NOT IN ('system user','replication')
"""
        query_rows = self.mysql_client.mysql_query(sql_script=sql_script, return_dict=True)
        monitor_time = datetime.datetime.now().strftime(DEFAULT_DATETIME_FORMAT)
        self.process_info_logger.info("{} >> {}".format(monitor_time, len(query_rows)))
        for query_row in query_rows:
            # Collapse every whitespace run to one space so a statement stays
            # on one log line, then truncate so huge SQL cannot flood the log.
            # NOTE: INFO may be NULL, which str() renders as "None" (kept from
            # the original behaviour).
            raw_sql = re.sub(r"\s+", " ", str(query_row["INFO"]))[0:self.max_sql_size]
            query_row["INFO"] = raw_sql
            query_row["STATE"] = self.if_null(query_row["STATE"], "")[0:30]
            query_row["MONITOR_TIME"] = monitor_time
        return query_rows

    def _get_tran_list(self):
        """Return open InnoDB transactions joined with session and lock-wait info.

        Fixes vs. the previous version:
        - DATE_FORMAT minutes use ``%i`` (``%m`` is the MONTH specifier, so
          timestamps previously printed hour:month:second).
        - The self-connection filter is a WHERE clause; attached to a LEFT
          JOIN ``ON`` it could not exclude the monitor's own session.
        """
        sql_script = """
SELECT 
pl.ID AS process_id, 
pl.USER AS login_user,
pl.HOST AS login_host,
pl.DB AS database_name,
pl.COMMAND AS command_type,
pl.TIME AS process_time,
pl.STATE AS process_state,
pl.INFO AS process_info,
tx.trx_id,
tx.trx_state,
DATE_FORMAT(tx.trx_started,'%Y-%m-%d %H:%i:%s') AS trx_started,
DATE_FORMAT(tx.trx_wait_started,'%Y-%m-%d %H:%i:%s') AS trx_wait_started,
tx.trx_query,
tx.trx_tables_locked,
tx.trx_rows_locked,
tx.trx_isolation_level,
tx.trx_is_read_only,
tx.trx_autocommit_non_locking,
CONCAT('lock_mode: ',IFNULL(lc.lock_mode,'NULL'),
', lock_type: ',IFNULL(lc.lock_type,'NULL'),
', lock_table: ',IFNULL(lc.lock_table,'NULL'),
', lock_index: ',IFNULL(lc.lock_index,'NULL'),
', lock_space: ',IFNULL(lc.lock_space,'NULL'),
', lock_page: ',IFNULL(lc.lock_page,'NULL'),
', lock_rec: ',IFNULL(lc.lock_rec,'NULL'),
', lock_data: ',IFNULL(lc.lock_data,'NULL')) AS trx_wait_lock_info,
lw.blocking_trx_id
FROM information_schema.INNODB_TRX AS tx 
INNER JOIN information_schema.PROCESSLIST AS pl
ON tx.trx_mysql_thread_id = pl.ID
LEFT JOIN information_schema.INNODB_LOCK_WAITS AS lw 
ON tx.trx_id = lw.requesting_trx_id
LEFT JOIN information_schema.INNODB_LOCKS lc
ON lw.requested_lock_id = lc.lock_id
AND lw.`requesting_trx_id`=lc.`lock_trx_id`
WHERE pl.ID<>CONNECTION_ID();
"""
        query_rows = self.mysql_client.mysql_query(sql_script=sql_script, return_dict=True)
        return query_rows

    @classmethod
    def if_null(cls, object1, object2):
        """Return object1 unless it is None, else object2 (SQL IFNULL semantics).

        Only None triggers the fallback -- falsy values such as 0 or "" are
        returned unchanged.
        """
        if object1 is None:
            return object2
        return object1

    def _write_process_list(self, process_list):
        """Append the sampled sessions to the process-list log as an ASCII table.

        Writes nothing when fewer than ``min_process_count`` sessions were
        sampled; re-emits the header row every 10 data rows.
        """
        if len(process_list) < self.min_process_count:
            return
        log_temp = "| {:<30} | {:<30} | {:<30} | {:<30} | {:<15} | {:<30} | \r\n {} |"
        log_keys = ["MONITOR_TIME", "USER", "HOST", "DB", "TIME", "STATE", "INFO"]
        # Percona/MariaDB processlist exposes extra columns; widen the table
        # when they are present.
        if "TIME_MS" in process_list[0].keys():
            log_temp = "| {:<30} | {:<30} | {:<30} | {:<30} | {:<15} | {:<30} | {:<15} | {:<15} | {:<15} | \r\n {} |"
            log_keys = ["MONITOR_TIME", "USER", "HOST", "DB", "TIME", "STATE", "TIME_MS", "ROWS_SENT",
                        "ROWS_EXAMINED", "INFO"]
        log_header = log_temp.format(*log_keys)
        for process_item in process_list:
            if self.write_process_count % 10 == 0:
                self.process_list_logger.info(log_header)
            log_values = [self.if_null(process_item[key_name], "") for key_name in log_keys]
            self.process_list_logger.info(log_temp.format(*log_values))
            self.write_process_count += 1

    def check_server(self):
        """Exit the process with status -1 when the target database is unreachable."""
        if not self.mysql_client.test_connection():
            logger.info("目标数据库无法正常连接")
            sys.exit(-1)

    def _write_innodb_status(self):
        """Capture SHOW ENGINE INNODB STATUS into its own log; never raises."""
        try:
            sql_script = """ SHOW ENGINE INNODB STATUS; """
            query_rows = self.mysql_client.mysql_query(sql_script=sql_script, return_dict=True)
            self.innodb_status_logger.info(query_rows[0]["Status"])
        except Exception as ex:
            # Best-effort diagnostics: log and keep monitoring.
            logger.warning(
                "执行_write_innodb_status时出错,\n错误异常为:{},\n堆栈信息为:{}".format(str(ex), traceback.format_exc())
            )

    def _write_process_stack(self):
        """Dump the mysqld process stack via pstack into a timestamped log file.

        Does nothing unless ``mysql_process_id`` was set to a real pid (>= 2).
        Never raises; failures are logged and monitoring continues.
        """
        try:
            if self.mysql_process_id < 2:
                return
            base_dir = os.path.dirname(__file__)
            log_dir = os.path.join(base_dir, "logs")
            file_name = "ps_{}_{}_{}.log".format(
                self.mysql_host,
                self.mysql_port,
                datetime.datetime.now().strftime("%m%d%H%M%S%f")
            )
            log_file = os.path.join(log_dir, file_name)
            # Shell redirection is required for the >> append; inputs are
            # operator-supplied (pid/host/port), not untrusted user data.
            cmd = "/usr/bin/pstack {} >> '{}'".format(self.mysql_process_id, log_file)
            LocalLinuxHelper.exec_cmd(cmd=cmd)
        except Exception as ex:
            logger.warning(
                "执行_write_process_stack时出错,\n错误异常为:{},\n堆栈信息为:{}".format(str(ex), traceback.format_exc())
            )

    def start_monitor(self):
        """Run the sampling loop: check_count iterations, check_interval apart."""
        logger.info("开始监控")
        self.check_server()
        while self.check_count > 0:
            process_list = self._get_process_list()
            self._write_process_list(process_list=process_list)
            # Under high concurrency also record InnoDB engine status (and a
            # pstack dump when configured) to ease后续 troubleshooting.
            if len(process_list) > self.max_process_count:
                self._write_process_stack()
                self._write_innodb_status()
            time.sleep(self.check_interval)
            self.check_count -= 1
        logger.info("完成监控")


def init_logger():
    """Initialise the shared helper at INFO level and rebind the module logger."""
    global logger
    LoggerHelper.init_logger(logger_level=logging.INFO)
    logger = LoggerHelper.get_logger()


def main(mysql_host, mysql_port,
         mysql_user, mysql_pass,
         check_interval=1,
         check_count=10000,
         max_sql_size=2000,
         min_process_count=5,
         max_process_count=100,
         mysql_process_id=0
         ):
    """
    Monitor active MySQL threads and, once their number exceeds a threshold,
    record them to a log file to help DBAs diagnose problems.
    :param mysql_host: MySQL instance host
    :param mysql_port: MySQL instance port
    :param mysql_user: MySQL instance account
    :param mysql_pass: MySQL instance password
    :param check_interval: seconds between checks
    :param check_count: number of checks to perform
    :param max_sql_size: maximum length of a single SQL text, so oversized SQL stays readable
    :param min_process_count: minimum active-thread count; only samples above it are written to the log
    :param max_process_count: maximum active-thread count; above it, innodb engine status is also collected
    :param mysql_process_id: mysqld process id to sample; when set, pstack captures the process once the
        active-thread count exceeds max_process_count
    :return:
    """
    monitor_kwargs = dict(
        mysql_host=mysql_host,
        mysql_port=mysql_port,
        mysql_user=mysql_user,
        mysql_pass=mysql_pass,
        check_interval=check_interval,
        check_count=check_count,
        min_process_count=min_process_count,
        max_process_count=max_process_count,
        max_sql_size=max_sql_size,
        mysql_process_id=mysql_process_id,
    )
    MySQLProcessMonitor(**monitor_kwargs).start_monitor()


if __name__ == '__main__':
    # Configure the global logger first, then let python-fire map CLI
    # arguments onto main()'s parameters.
    init_logger()
    fire.Fire(main)
