"""
同步sql 连接池
由于MySQL具有最大socket连接数  因此使用连接池节省资源
"""
import pymysql
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
import time
import logging
import threading
import pandas as pd
import math


class QuerySqlByThreadPool:
    """Page large MySQL queries across a thread pool of per-thread connections.

    MySQL caps the number of client sockets, so each worker thread lazily
    opens one connection and reuses it, keyed by the thread's name.

    NOTE: ``db_info``, ``pool`` and ``data`` are class-level attributes and
    therefore shared by every instance (kept for backward compatibility).
    """

    db_info = {'host': 'db', 'port': 3306, 'user': "root", 'password': 'root', 'database': 'emos',
               'charset': 'utf8'}
    step = 100000   # rows per page == rows handled by one worker task
    pool = {}       # thread name -> open pymysql connection
    data = []       # accumulated result rows; reset at the start of thread_pool()

    @property
    def set_info(self):
        """Return the (shared, class-level) connection settings dict."""
        return self.db_info

    @set_info.setter
    def set_info(self, db_info: dict):
        """Merge ``db_info`` overrides into the shared connection settings."""
        self.db_info.update(db_info)

    def get_instance(self):
        """Return (creating on first use) the connection owned by the calling thread."""
        name = threading.current_thread().name
        # Keys are unique per thread, so concurrent first calls cannot collide.
        if name not in self.pool:
            self.pool[name] = pymysql.connect(**self.db_info)
        return self.pool[name]

    def get_cursor(self):
        """Return an unbuffered dict cursor so huge result sets stream row by row."""
        return self.get_instance().cursor(cursor=pymysql.cursors.SSDictCursor)

    def get_count(self, nename=None) -> int:
        """Return max(id) of workflow_hugealarm, optionally filtered by ``nename``.

        Bug fixes vs. the original: the WHERE clause used to be appended
        *after* the terminating ';' (invalid SQL) and interpolated the value
        directly into the string (SQL injection). The value is now bound as a
        query parameter, and the cursor is always closed.
        """
        cursor = self.get_cursor()
        try:
            if nename:
                cursor.execute(
                    'select max(id) as counts from workflow_hugealarm where nename = %s',
                    (nename,))
            else:
                cursor.execute('select max(id) as counts from workflow_hugealarm')
            res = cursor.fetchone()
            return int(res['counts'])
        finally:
            cursor.close()

    @staticmethod
    def datetime2timestamp(dt: datetime) -> int:
        """Convert a datetime (or its 'YYYY-MM-DD HH:MM:SS' string form) to a
        local-time Unix timestamp.

        Raises ValueError if str(dt) carries extra precision (e.g. microseconds).
        """
        parsed = time.strptime(str(dt), "%Y-%m-%d %H:%M:%S")
        return int(time.mktime(parsed))

    def _query(self, n, **kwargs):
        """Fetch page ``n`` for the given query_type and append rows to ``self.data``.

        kwargs: query_type ('alarm' | 'sgw' | 'mme' | 'new'), starttime,
        endtime (datetime or string), optional nename.

        Fixes vs. the original:
        - every value is bound as a query parameter instead of interpolated
          into the SQL string (the alarm branch even produced unquoted —
          i.e. broken — SQL for string ``nename`` values);
        - the 'sgw' + nename branch queried the misspelled table SWG_ALL;
        - the non-alarm branches ignored paging, so every worker fetched the
          *entire* result set, duplicating rows once per page. LIMIT is now
          applied uniformly to all branches.
        """
        starttime = self.datetime2timestamp(kwargs.get('starttime'))
        endtime = self.datetime2timestamp(kwargs.get('endtime'))
        offset = (n - 1) * self.step   # first row of this page
        query_type = kwargs.get('query_type')
        nename = kwargs.get('nename')

        if query_type == 'alarm':
            if nename:
                sql = ('select * from workflow_hugealarm '
                       'where nename = %s and eventtime > %s and eventtime < %s')
                params = [nename, starttime, endtime]
            else:
                sql = ('select * from workflow_hugealarm '
                       'where eventtime > %s and eventtime < %s')
                params = [starttime, endtime]
        elif query_type == 'sgw':
            columns = ('NE_NAME,DATETIME_KEY,HTTP_P5_SESSION_RESPONSE_SUCC_RATE,'
                       'HTTP_P5_SESSION_LATENCYD,HTTP_P5_DOWN_RATE,HTTP_P1_P4_DOWN_FLOW')
            if nename:
                # Table name fixed: the original queried the misspelled SWG_ALL here.
                sql = ('select ' + columns + ' from SGW_ALL '
                       'where NE_NAME = %s and %s < DATETIME_KEY and DATETIME_KEY < %s')
                params = [nename, starttime, endtime]
            else:
                sql = ('select ' + columns + ' from SGW_ALL '
                       'where %s < DATETIME_KEY and DATETIME_KEY < %s')
                params = [starttime, endtime]
        elif query_type == 'mme':
            columns = ('NE_NAME,DATETIME_KEY,TAU_SUC_RATE,ATTACH_SUCC_RATE,'
                       'SERVICE_REQ_SUCC_RATE,HO_INTER_ENB_X2_SUCC_RATE,PAGING_SUCC_RATE')
            if nename:
                sql = ('select ' + columns + ' from MME '
                       'where NE_NAME = %s and %s < DATETIME_KEY and DATETIME_KEY < %s')
                params = [nename, starttime, endtime]
            else:
                sql = ('select ' + columns + ' from MME '
                       'where %s < DATETIME_KEY and DATETIME_KEY < %s')
                params = [starttime, endtime]
        elif query_type == 'new':
            table_name = 'mme_save'   # identifiers cannot be bound as parameters
            if nename:
                sql = ('select * from ' + table_name +
                       ' where %s < DATETIME_KEY and DATETIME_KEY < %s and NE_NAME = %s')
                params = [starttime, endtime, nename]
            else:
                sql = ('select * from ' + table_name +
                       ' where %s < DATETIME_KEY and DATETIME_KEY < %s')
                params = [starttime, endtime]
        else:
            # NOTE(review): SqlException is not defined in this file — confirm
            # it is defined/imported elsewhere, otherwise this raises NameError.
            raise SqlException(query_type)

        # Apply paging uniformly so each worker gets a disjoint slice.
        sql += ' limit %s, %s'
        params.extend([offset, self.step])

        cursor = self.get_cursor()
        try:
            cursor.execute(sql, params)
            while True:
                row = cursor.fetchone()
                if not row:
                    break
                # list.append is atomic in CPython, safe across worker threads.
                self.data.append(row)
        finally:
            cursor.close()

    def thread_pool(self, **kwargs):
        """Run the paged query concurrently and return all rows as a DataFrame.

        kwargs are forwarded to _query (query_type, starttime, endtime,
        optional nename). Worker exceptions are logged, not raised.
        """
        # Reset so repeated calls do not accumulate duplicate rows.
        self.data = []
        pages = math.ceil(self.get_count() / self.step)
        try:
            with ThreadPoolExecutor(100) as executor:
                futures = [executor.submit(self._query, page, **kwargs)
                           for page in range(1, pages + 1)]
                for future in futures:
                    try:
                        # submit() never raises worker errors; result() does.
                        # The original try/except around submit() silently
                        # discarded every exception raised inside _query.
                        future.result()
                    except Exception as exc:
                        logging.error(exc)
        finally:
            # A new executor uses fresh thread names, so cached connections
            # would never be reused — close them to avoid leaking sockets.
            for conn in self.pool.values():
                try:
                    conn.close()
                except Exception:
                    pass
            self.pool.clear()
        return pd.DataFrame(self.data)
