from collections import defaultdict
from abc import ABC, abstractmethod

import pymysql
import psycopg2
import pandas as pd
from sqlalchemy import create_engine
from pyhive import presto,hive
from math import ceil
import time
from src.utils.config import logger, config

# Maximum SQL statement length included in log output
MAX_SQL_LOG_LENGTH = 20000


class DatabaseProcessor(ABC):
    """Abstract base for SQL database access shared by the MySQL, PostgreSQL,
    Presto and Hive processors.

    Subclasses provide configuration loading, raw DB-API connections and a
    SQLAlchemy engine; this base implements the common execute / read /
    bulk-insert / DataFrame helpers on top of them.
    """

    def __init__(self, db_config, config_section='db', charset='utf8'):
        """
        :param db_config: config object exposing get_config(section, key[, default])
        :param config_section: config section holding host/port/user/password keys
        :param charset: connection charset (used by the MySQL subclass)
        """
        self.config = db_config
        self.config_section = config_section
        self.charset = charset
        self.log_db_name = None  # human-readable backend label, set by _config_db()
        self._config_db()

    @abstractmethod
    def _config_db(self):
        """Read host/port/user/password/database name from the config object."""

    @abstractmethod
    def _get_db_connect(self):
        """Return a fresh DB-API connection."""

    @abstractmethod
    def _get_db_engine(self):
        """Return a SQLAlchemy engine for this database."""

    def execute_sql(self, sql):
        """Execute a single statement and commit.

        Errors are logged, never raised (best-effort semantics).
        """
        conn = None
        cur = None

        try:
            conn = self._get_db_connect()
            cur = conn.cursor()

            logger.info("executing {} sql@{}".format(self.log_db_name, self._truncate_sql_log(sql)))

            cur.execute(sql)
            conn.commit()
            logger.info('effect rowcount={}'.format(cur.rowcount))
        # This base class serves several drivers (pymysql, psycopg2, pyhive),
        # so catch broadly rather than pymysql-only errors.
        except Exception as error:
            logger.critical("execute error {} sql@{} error@{}".format(self.log_db_name, sql, error))
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

    def execute_read_sql(self, sql, keys):
        """Run a query and return a column-oriented dict, or None on failure.

        ``keys`` are positional column labels: keys[n] collects column n of
        every fetched row.
        """
        conn = None
        cur = None
        results = defaultdict(list)

        try:
            conn = self._get_db_connect()
            cur = conn.cursor()

            logger.info("executing {} sql@{}".format(self.log_db_name, self._truncate_sql_log(sql)))

            cur.execute(sql)
            for record in cur.fetchall():
                for n, key in enumerate(keys):
                    results[key].append(record[n])
        # Broadened from pymysql.Error: the subclasses use different drivers.
        except Exception as error:
            logger.critical("execute error {} sql@{} error@{}".format(self.log_db_name, sql, error))
            return None
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

        return dict(results)

    def execute_insert_sql(self, sql, data, decide=True):
        """Bulk-insert ``data`` via executemany() in chunks of 10000 rows.

        :param sql: parameterized INSERT statement
        :param data: sequence of parameter tuples
        :param decide: when True, raise if the inserted row count differs
            from len(data)
        :return: (True, 'OK') on success
        :raises TypeError: on driver error, or on row-count mismatch when
            ``decide`` is True
        """
        conn = None
        cur = None

        try:
            conn = self._get_db_connect()
            cur = conn.cursor()
            batch_size = 10000
            logger.info("inserting {} sql@{}".format(self.log_db_name, self._truncate_sql_log(sql)))
            data_size = len(data)
            # Sum the per-batch rowcounts instead of reconstructing the total
            # from the last batch only (the old estimate assumed every earlier
            # batch inserted exactly batch_size rows).
            result = 0
            for start in range(0, data_size, batch_size):
                cur.executemany(sql, data[start:start + batch_size])
                result += cur.rowcount
            conn.commit()
            logger.info("inserted {} rows into mysql,success rows={}".format(data_size, result))
            if decide and data_size > 0 and data_size != result:
                raise TypeError('插入与源数据量不一致：计算报错:')
        except TypeError:
            # Preserve the mismatch error above unchanged.
            raise
        except Exception as error:
            logger.critical("insert error {} sql@{} error@{}".format(self.log_db_name, sql, error))
            # The original also had an unreachable `return False, ...` after
            # this raise; removed as dead code.
            raise TypeError('sql执行失败:') from error
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

        return True, 'OK'

    def execute_insert_postgresql(self, sql, data):
        """Bulk-insert ``data`` into PostgreSQL/Greenplum in 2.5M-row chunks.

        :return: (True, 'OK') on success, (False, error_message) on failure.
        """
        conn = None
        cur = None

        try:
            conn = self._get_db_connect()
            cur = conn.cursor()

            logger.info("inserting {} sql@{}".format(self.log_db_name, self._truncate_sql_log(sql)))
            batch_size = 2500000
            for start in range(0, len(data), batch_size):
                cur.executemany(sql, data[start:start + batch_size])
            conn.commit()
            logger.info("inserted {} rows into gp".format(len(data)))
        # Bug fix: this path runs over psycopg2, whose exceptions are not
        # pymysql.Error subclasses, so the original handler could never fire.
        except Exception as error:
            logger.critical("insert error {} sql@{} error@{}".format(self.log_db_name, sql, error))
            return False, str(error)
        finally:
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

        return True, 'OK'

    def load_sql(self, sql=None, parse_dates=None):
        """Read ``sql`` into a DataFrame, with a single retry for errors that
        carry a positive Presto-style errorCode.

        :return: the DataFrame, or None if both attempts fail.
        """
        conn = None

        try:
            logger.info("read data frame database@{} sql@{}".format(self.log_db_name, self._truncate_sql_log(sql)))
            started = time.time()
            conn = self._get_db_connect()
            df = pd.read_sql(sql, con=conn, parse_dates=parse_dates)
            logger.info("read data frame database@{}  done shape={},cost time={} ".format(self.log_db_name, str(df.shape), round(time.time() - started, 0)))
            return df
        except Exception as error:
            try:
                # Presto errors carry a dict payload with an errorCode;
                # presumably positive codes are retryable — TODO confirm.
                error_code = error.args[0]['errorCode']
                if error_code > 0:
                    time.sleep(10)
                    df = pd.read_sql(sql, con=conn, parse_dates=parse_dates, params={'timeout': 1000})
                    return df
            except Exception as retry_error:  # renamed: no longer shadows `error`
                logger.critical("read data frame error! sql@{} error@{}".format(sql, retry_error))

            return None
        finally:
            if conn is not None:
                conn.close()

    def insert_data_frame(self, df: pd.DataFrame, table_name, if_exists='append', is_index=False):
        """Write ``df`` to ``table_name`` via the SQLAlchemy engine.

        Errors are logged, never raised (best-effort semantics).
        """
        logger.info("inserting data into data frame to {} table_name@{}".format(self.log_db_name, table_name))
        try:
            engine = self._get_db_engine()
            df.to_sql(name=table_name, con=engine, if_exists=if_exists, index=is_index)
        except Exception as error:
            logger.critical('inserting data into data frame error@{}'.format(error))

    def load_data_table(self, table_name, columns=None):
        """Read a whole table into a DataFrame; None on failure."""
        logger.info('load data into data frame from {} table_name@{}'.format(self.log_db_name, table_name))
        try:
            engine = self._get_db_engine()
            return pd.read_sql_table(table_name, con=engine, columns=columns)
        except Exception as error:
            logger.info("load data into data frame from {} error! table_name@{} error@{}".format(self.log_db_name,
                                                                                                 table_name, error))
            return None

    @staticmethod
    def _truncate_sql_log(sql):
        """Return ``sql`` capped at MAX_SQL_LOG_LENGTH characters for logging;
        a trailing '...' marks an actual truncation.
        """
        if len(sql) <= MAX_SQL_LOG_LENGTH:
            # Bug fix: the original appended '...' even when nothing was cut.
            return sql
        return '{}...'.format(sql[0:MAX_SQL_LOG_LENGTH])


class MySQLProcessor(DatabaseProcessor):
    """MySQL-backed processor using pymysql and a mysql+pymysql engine."""

    def _config_db(self):
        # Pull all connection settings from the configured section.
        cfg, section = self.config, self.config_section
        self.host = cfg.get_config(section, 'db_host')
        self.port = int(cfg.get_config(section, 'db_port', 3306))
        self.db_name = cfg.get_config(section, 'db_database_name')
        self.user = cfg.get_config(section, 'db_user')
        self.password = cfg.get_config(section, 'db_password')
        self.log_db_name = 'mysql'

    def _get_db_connect(self):
        """Open a new pymysql connection."""
        return pymysql.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.db_name,
            charset=self.charset,
        )

    def _get_db_engine(self):
        """Build a SQLAlchemy engine on the mysql+pymysql driver."""
        url = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(
            self.user, self.password, self.host, self.port, self.db_name)
        return create_engine(url)


class PostgreSQLProcessor(DatabaseProcessor):
    """PostgreSQL-backed processor using psycopg2 and a postgresql+psycopg2 engine."""

    def _config_db(self):
        # Pull all connection settings from the configured section.
        cfg, section = self.config, self.config_section
        self.host = cfg.get_config(section, 'db_host')
        # NOTE(review): default 3432 differs from the standard 5432 —
        # presumably a site-specific port mapping; confirm.
        self.port = int(cfg.get_config(section, 'db_port', 3432))
        self.db_name = cfg.get_config(section, 'db_database_name')
        self.user = cfg.get_config(section, 'db_user')
        self.password = cfg.get_config(section, 'db_password')
        self.log_db_name = 'postgre'

    def _get_db_connect(self):
        """Open a new psycopg2 connection."""
        return psycopg2.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.db_name,
        )

    def _get_db_engine(self):
        """Build a SQLAlchemy engine on the postgresql+psycopg2 driver."""
        url = 'postgresql+psycopg2://{}:{}@{}:{}/{}'.format(
            self.user, self.password, self.host, self.port, self.db_name)
        return create_engine(url)

class PrestoProcessor(DatabaseProcessor):
    """Presto-backed processor using pyhive.presto."""

    def _config_db(self):
        # Pull all connection settings from the configured section.
        cfg, section = self.config, self.config_section
        self.host = cfg.get_config(section, 'db_host')
        # NOTE(review): default 3306 is the MySQL port, unusual for Presto —
        # confirm against the deployed config.
        self.port = int(cfg.get_config(section, 'db_port', 3306))
        self.db_name = cfg.get_config(section, 'db_database_name')
        self.user = cfg.get_config(section, 'db_user')
        self.password = cfg.get_config(section, 'db_password')
        self.log_db_name = 'presto'
        self.session_params = {'password': self.password}

    def _get_db_connect(self):
        """Open a Presto connection with a 1000-second request timeout."""
        return presto.connect(
            host=self.host,
            port=self.port,
            username=self.user,
            requests_kwargs={'timeout': 1000},
            session_props=self.session_params,
        )

    def _get_db_engine(self):
        """Build a SQLAlchemy engine pointed at hive/default via the presto dialect."""
        url = 'presto://{}@{}:{}/hive/default?charset=utf8'.format(
            self.user, self.host, self.port)
        return create_engine(url)

class HiveProcessor(DatabaseProcessor):
    """Hive-backed processor using pyhive.hive for connections."""

    def _config_db(self):
        """Read HiveServer2 connection settings from the configured section."""
        self.host = self.config.get_config(self.config_section, 'db_host')
        # NOTE(review): default 3306 is the MySQL port, unusual for Hive — confirm.
        self.port = int(self.config.get_config(self.config_section, 'db_port', 3306))
        self.db_name = self.config.get_config(self.config_section, 'db_database_name')
        self.user = self.config.get_config(self.config_section, 'db_user')
        self.password = self.config.get_config(self.config_section, 'db_password')
        # Bug fix: this class previously labelled itself 'presto' in every log
        # line, making Hive and Presto traffic indistinguishable in the logs.
        self.log_db_name = 'hive'
        self.session_params = {
            'password': self.password
        }

    def _get_db_connect(self):
        """Open a HiveServer2 connection; session_params are passed as configuration."""
        conn = hive.connect(host=self.host, port=self.port, username=self.user,
                            configuration=self.session_params)
        return conn

    def _get_db_engine(self):
        # NOTE(review): the engine uses the presto dialect although connections
        # go through hive — looks copy-pasted from PrestoProcessor; confirm
        # whether DataFrame reads/writes are intentionally routed via Presto.
        engine = create_engine('presto://{}@{}:{}/hive/default?charset=utf8'.format(self.user, self.host,
                                                                                    self.port))
        return engine

# Shared processor singletons, wired to sections of the global config object.
# NOTE(review): mysql_processor, mysql_prediction_processor and
# mysql_price_model all read the same 'db_price_prediction' section —
# confirm the triplication is intentional.
mysql_processor = MySQLProcessor(config, 'db_price_prediction')
mysql_prediction_processor = MySQLProcessor(config, 'db_price_prediction')
mysql_prediction_processorfat = MySQLProcessor(config, 'price_predictionfat')
mysql_price_model = MySQLProcessor(config, 'db_price_prediction')

# NOTE(review): the PostgreSQL processor reads the 'presto' config section —
# verify this is the intended section name.
postgre_processor = PostgreSQLProcessor(config, 'presto')
presto_processor = PrestoProcessor(config, 'presto')
hive_processor = HiveProcessor(config, 'hive')
