#!/usr/bin/env python 
# -*- coding:utf-8 -*-
"""
Description:    收集innodb_cluster集群的信息：节点，端口，模式，状态，ID
                写入InfluxDB
"""

import time

import ruamel.yaml as yaml

from db.query import MySQLBase, InfluxBase
from dingTalk.dingTalkMessage import jdjc
from dingTalk.sendDingTalk import DingTalkAPI
from logger import logger


class GetData:
    """Fetch InnoDB Cluster group-replication membership info from a MySQL node.

    On query failure, pushes a DingTalk alert instead of raising.
    """

    def __init__(self):
        """Set up the MySQL query helper and the DingTalk notifier."""
        self.mysql = MySQLBase()
        self.dta = DingTalkAPI()

    def get_from_mysql(self, db):
        """
        Query one cluster node for member id/host/port/state/mode/status,
        GTID sets and the applier queue depth.

        :param db: connection-settings dict; must contain 'host'
        :return: query result rows on success, None after an alert on failure
        """
        logger.info(f"通过{db['host']}获取集群信息")
        sql_str = """
            SELECT
              @@hostname as server,
              rgm.member_id, 
              rgm.member_host,
              rgm.member_port, 
              rgm.member_state, 
              IF( gs.variable_name= 'group_replication_primary_member', 'R/W', 'R/O') as  member_mode,
              IF( rgm.member_state = 'ONLINE', 1, 0) as member_status,
			  @@global.gtid_executed as ServerGTID,
			  (SELECT Received_transaction_set FROM performance_schema.replication_connection_status WHERE Channel_name = 'group_replication_applier') as GroupGTID,
			  rgms.COUNT_TRANSACTIONS_IN_QUEUE as ServerQueue
            FROM  
              performance_schema.replication_group_members  as  rgm  
              LEFT JOIN  performance_schema.global_status  as  gs  on  rgm.member_id= gs.variable_value
			  LEFT JOIN  performance_schema.replication_group_member_stats as rgms on rgm.member_id=rgms.member_id
            """
        rows = self.mysql.query(db, sql_str)
        if rows != 0:
            return rows
        # query() signalled failure with 0: alert operators via DingTalk
        # rather than raising, so the collector keeps running.
        logger.warning("发送钉钉信息：MySQL查询失败")
        nowtime = time.strftime("%Y-%m-%d %H:%M:%S")
        self.dta.dingtalk(jdjc.format("{} 查询失败。".format(db['host']), nowtime))
        logger.info("钉钉发送成功")


class DataAnalysis:
    """Shape cluster rows into InfluxDB points and write them, alerting on failure."""

    def __init__(self):
        """Read the InfluxDB connection settings from ./conf/dbinfo.yaml."""
        self.dta = DingTalkAPI()
        # Use a context manager so the config file handle is closed
        # deterministically (the original relied on garbage collection).
        with open('./conf/dbinfo.yaml', 'r') as f:
            ys = yaml.load(f.read(), Loader=yaml.Loader)
        self.influx = InfluxBase(ys['DBDIC_INFO']['influxdb'])

    def set_influxdb(self, points):
        """
        Write a batch of points to InfluxDB; alert via DingTalk on failure.

        :param points: list of point dicts as built by composite_data_for_influx
        """
        logger.info("将数据写入influx")
        res = self.influx.insert(points)
        if res == 1:
            logger.info("influx数据写入成功")
        else:
            # Log failures at warning level, matching the failure path
            # used for MySQL query errors elsewhere in this file.
            logger.warning("发送钉钉信息：influx写入失败")
            nowtime = time.strftime("%Y-%m-%d %H:%M:%S")
            self.dta.dingtalk(jdjc.format("influx写入失败。", nowtime))
            logger.info("钉钉发送成功")

    @staticmethod
    def composite_data_for_influx(data):
        """
        Convert MySQL result rows into InfluxDB point dicts.

        Target database setup (run once):
            CREATE DATABASE "mysql_innodb_cluster"
            CREATE RETENTION POLICY "mysql_info_schema" ON "mysql_innodb_cluster" DURATION 730d REPLICATION 1 DEFAULT

        :param data: iterable of rows in the column order selected by
                     GetData.get_from_mysql (server, member_id, member_host,
                     member_port, member_state, member_mode, member_status,
                     ServerGTID, GroupGTID, ServerQueue)
        :return: list of point dicts ready for InfluxBase.insert
        """
        logger.info("开始组合influx数据")
        metric = "mysql_innodb_cluster"
        # One shared epoch-seconds timestamp for the whole batch.
        nowtime = int(time.time())
        series = []
        for row in data:
            series.append({
                "time": nowtime,
                "measurement": metric,
                'tags': {
                    'server': row[0],
                    'member_uuid': row[1],
                },
                'fields': {
                    'server': row[0],
                    'member_host': row[2],
                    'member_state': row[4],
                    'member_mode': row[5],
                    'member_port': row[3],
                    'member_status': row[6],
                    'server_gtid': row[7],
                    'group_gtid': row[8],
                    'server_queue': row[9],
                },
            })
        logger.info("数据组装完成")
        return series
