import os
from typing import Dict, Any, List, Union, Tuple

import pandas as pd
import psycopg2
import pymysql
from _decimal import Decimal
from dbutils.pooled_db import PooledDB
from loguru import logger

from config import DATABASE, ROOT_DIR, DATA_BUFFER_SIZE
from utils.data.loader import BaseLoader, LoaderError


class MysqlLoader(BaseLoader):
    """K-line (candlestick) loader backed by MySQL.

    Reads candle rows and funding rates from the tables in
    ``period_table_map``, optionally merging ``batch_size`` consecutive
    candles into one and casting ``Decimal`` columns to ``float``.
    The connection pool is a class attribute shared by every instance.
    """

    # candle period in seconds -> table holding candles at that granularity
    period_table_map = {
        60: 'kline_minute',
        3600: 'kline_hour',
        86400: 'kline_day',
    }
    db_conf: Dict[str, Any] = DATABASE.get('mysql')
    pool = PooledDB(pymysql, maxconnections=2, host=db_conf.get('host'), port=db_conf.get('port'),
                    user=db_conf.get('username'), passwd=db_conf.get('password'),
                    db=db_conf.get('db', db_conf.get('database')))

    # the config dict is only needed while building the pool
    del db_conf

    def __init__(self, start: int, end: int, period: int = 3600, symbols: List[str] = None,
                 with_decimal: bool = False, batch_size: int = 1):
        """
        :param start: start timestamp (seconds)
        :param end: end timestamp (seconds)
        :param period: candle period in seconds; must be a key of ``period_table_map``
        :param symbols: optional symbol whitelist; ``None`` or empty means all symbols
        :param with_decimal: keep ``Decimal`` columns instead of casting to float
        :param batch_size: number of consecutive candles merged into one output row
        """
        super().__init__(start, end, period, with_decimal, batch_size=batch_size)
        self.table: str = self.period_table_map[period]
        # timestamp (seconds) up to which `buffer_data` remains valid
        self.buffer: int = 0
        self.buffer_data: pd.DataFrame = pd.DataFrame()
        # NOTE(review): symbols are interpolated into SQL directly; they are
        # assumed to come from trusted configuration, not user input — confirm.
        self.condition: str = '' if symbols is None or len(symbols) == 0 else ' and symbol in (%s)' % ','.join(
            [f'\'{x}\'' for x in symbols])

    @classmethod
    def _execute(cls, sql, params: Union[List[tuple], tuple] = None, result: bool = True) \
            -> Union[Tuple[Tuple[Tuple[Any, ...], ...], List[str]], None]:
        """Run one statement on a pooled connection.

        :param sql: statement with ``%s`` placeholders
        :param params: a tuple for a single execution, or a list of tuples
            for ``executemany``
        :param result: when True, fetch and return ``(rows, column_names)``;
            when False, return ``None``
        :raises LoaderError: when execution fails (the transaction is rolled back)
        """
        conn: pymysql.Connection = cls.pool.connection()
        try:
            with conn.cursor() as cursor:
                if isinstance(params, list):
                    cursor.executemany(sql, params)
                else:
                    cursor.execute(sql, params)
                if result:
                    rows = cursor.fetchall()
                    cols = [x[0] for x in cursor.description] if cursor.description else []
                else:
                    rows, cols = None, None
            # commit only on the success path (previously the commit also ran
            # in `finally`, i.e. even right after a rollback)
            conn.commit()
            return (rows, cols) if result else None
        except Exception as e:
            conn.rollback()
            raise LoaderError(f'数据库执行失败：{e.__str__()}')
        finally:
            conn.close()

    @classmethod
    def _init_db(cls):
        """Create the schema by running every statement in initMysql.sql."""
        path = os.path.join(ROOT_DIR, 'utils/data/initMysql.sql')
        with open(path, mode='r', encoding='utf-8') as f:
            query = f.read()
        conn: pymysql.Connection = cls.pool.connection()
        try:
            with conn.cursor() as cursor:
                # the script is a ';'-separated list of statements; strip so a
                # trailing newline chunk is not sent as an empty query
                for statement in (s.strip() for s in query.split(';')):
                    if statement:
                        cursor.execute(statement)
                logger.info('mysql初始化完成')
            conn.commit()
        except Exception as e:
            conn.rollback()
            raise LoaderError(f'初始化数据库失败：{e.__str__()}')
        finally:
            conn.close()

    def check(self):
        """Verify that the target table, the time column and its index exist.

        :raises LoaderError: when the table or column is missing, or the
            time column carries no index
        """
        data, _ = self._execute("""show tables;""")
        if self.table not in [x[0] for x in data]:
            raise LoaderError('数据表不存在')
        data, _ = self._execute(f"""show columns from {self.table};""")
        if self.date_column not in [x[0] for x in data]:
            raise LoaderError(f'指定的时间列不存在：{self.date_column}')
        # `show columns` rows: (Field, Type, Null, Key, Default, Extra);
        # an empty Key entry means the column is not indexed
        for item in data:
            if item[0] == self.date_column and not item[3]:
                raise LoaderError('指定的时间列不存在索引，请添加索引')
        logger.info('数据库检查完成')

    def store(self, data: List[Tuple[Any]]):
        """Bulk-insert candle rows.

        ``result=False``: inserts produce no result set (consistent with
        ``PgLoader.store``).
        """
        self._execute(f"""insert into {self.table} 
        (symbol, open_time, open, high, low, close, volume, close_time, quote_asset_volume,
        number_of_trades, taker_buy_base_asset_volume, taker_buy_quote_asset_volume, `ignore`,
        trade_type) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s)""", data, result=False)

    def _assemble(self, rows, columns: List[str], funding_rate: pd.DataFrame) -> pd.DataFrame:
        """Build the output frame shared by :meth:`read` and :meth:`fetch`:
        dedupe, select columns, merge ``batch_size`` candles into one and
        attach funding rates.
        """
        df = pd.DataFrame(rows, columns=list(columns))
        df.drop_duplicates(subset=['symbol', 'open_time', 'trade_type'], keep='last', inplace=True)
        df = df.loc[:, ['symbol', 'open_time', 'open', 'high', 'low', 'close', 'volume', 'close_time',
                        'quote_asset_volume', 'number_of_trades', 'taker_buy_base_asset_volume',
                        'taker_buy_quote_asset_volume', 'trade_type']]
        # merge `batch_size` consecutive candles into a single row
        if self.batch_size != 1:
            df['_time_id'] = df['open_time'] // (self.period * self.batch_size * 1000)
            df = df.groupby(['symbol', '_time_id', 'trade_type'], as_index=False).agg({
                'open_time': 'first', 'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last',
                'volume': 'sum', 'close_time': 'last', 'quote_asset_volume': 'sum', 'number_of_trades': 'sum',
                'taker_buy_base_asset_volume': 'sum',
                'taker_buy_quote_asset_volume': 'sum',
            })
            df.drop(columns=['_time_id'], inplace=True)
        # left join on the common columns (symbol, open_time)
        df = pd.merge(df, funding_rate, how='left')
        # cast Decimal columns to float unless the caller asked to keep them;
        # guard against an empty frame (indexing row 0 would raise KeyError)
        if not self.with_decimal and not df.empty:
            for column in df.columns:
                if isinstance(df[column].iloc[0], Decimal):
                    df[column] = df[column].astype(float)
        return df

    def read(self) -> pd.DataFrame:
        """Return the candles at the current cursor, refilling the buffer
        from the database when it is exhausted.

        :raises LoaderError: when no row exists for the current cursor
        """
        if self.buffer < self.cursor:
            # seconds covered by one buffer refill
            window = DATA_BUFFER_SIZE * self.period * self.batch_size
            sql = f"""select * from {self.table} where {self.date_column} >= %s and 
                {self.date_column} < %s"""
            temp, col = self._execute(sql + self.condition,
                                      (self.cursor * 1000, (self.cursor + window) * 1000))
            # BUG FIX: funding_rate() takes second-based bounds (it scales by
            # 1000 itself); the end was previously passed already multiplied
            # by 1000, inflating the funding window 1000-fold
            funding_rate = self.funding_rate(self.cursor, self.cursor + window)
            self.buffer_data = self._assemble(temp, col, funding_rate)
            self.buffer = self.cursor + (DATA_BUFFER_SIZE - 1) * self.period * self.batch_size

        data = self.buffer_data[self.buffer_data[self.date_column] == self.cursor * 1000].copy()
        if data.empty:
            raise LoaderError('buffer数据错误')
        self.cursor += self.period * self.batch_size
        return data

    def funding_rate(self, start: int, end: int) -> pd.DataFrame:
        """Load funding rates in ``[start, end)`` (seconds), summed per
        batch window.

        :return: frame with columns symbol, open_time, funding_rate
        """
        sql = f"""select symbol, funding_time as open_time, funding_rate
         from funding_rate where funding_rate.funding_time >= %s and funding_time < %s"""
        data, col = self._execute(sql + self.condition, (start * 1000, end * 1000))
        df = pd.DataFrame(data, columns=col)
        df.drop_duplicates(subset=['symbol', 'open_time'], keep='last', inplace=True)
        df['_time_id'] = df['open_time'] // (self.period * self.batch_size * 1000)
        df = df.groupby(['symbol', '_time_id'], as_index=False).agg({'open_time': 'first', 'funding_rate': 'sum'})
        # BUG FIX: drop the internal grouping key so it does not leak into
        # the caller's merged result as an extra column
        df.drop(columns=['_time_id'], inplace=True)
        return df

    def fetch(self, start: int = None, count: int = 1, end: int = None) -> pd.DataFrame:
        """Load candles in one shot.

        :param start: start timestamp (seconds); defaults to 0
        :param count: number of (batched) candles when ``end`` is omitted
        :param end: end timestamp (seconds, exclusive)
        :return: assembled frame including funding rates
        :raises LoaderError: when the resulting range is empty or inverted
        """
        if start is None:
            start = 0
        if end is None:
            end = start + count * self.period * self.batch_size
        if end <= start:
            raise LoaderError('时间范围错误')
        sql = f"""select * from {self.table} where {self.date_column} >= %s and {self.date_column} < %s"""
        data, col = self._execute(
            sql + self.condition,
            (start * 1000, end * 1000)
        )
        # BUG FIX: the funding-rate range must follow the requested `start`,
        # not the loader's configured self.start
        funding_rate = self.funding_rate(start, end)
        return self._assemble(data, col, funding_rate)

    def __del__(self):
        # NOTE(review): `pool` is a class attribute shared by all instances;
        # closing it here tears it down for every other live loader. Guarded
        # so repeated deletion / interpreter shutdown does not raise.
        try:
            self.pool.close()
        except Exception:
            pass


class PgLoader(MysqlLoader):
    """PostgreSQL-backed variant of :class:`MysqlLoader`.

    Only the connection pool and the insert statement differ (PostgreSQL
    does not use backtick identifier quoting); all reading/checking logic
    is inherited. The redundant ``period_table_map`` copy was removed — the
    inherited mapping is identical.
    """
    db_conf: Dict[str, Any] = DATABASE.get('postgresql')
    pool = PooledDB(psycopg2, maxconnections=2, host=db_conf.get('host'), port=db_conf.get('port'),
                    user=db_conf.get('username'), password=db_conf.get('password'),
                    database=db_conf.get('db', db_conf.get('database')))

    # the config dict is only needed while building the pool
    del db_conf

    def store(self, data: List[Tuple[Any]]):
        """Bulk-insert candle rows without fetching a result set."""
        self._execute(f"""insert into {self.table} 
        (symbol, open_time, open, high, low, close, volume, close_time, quote_asset_volume,
        number_of_trades, taker_buy_base_asset_volume, taker_buy_quote_asset_volume, ignore,
        trade_type) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s)""", data, result=False)


# Registry used to pick a loader implementation by the configured backend name.
loader_mapper: Dict[str, type] = dict(
    mysql=MysqlLoader,
    postgresql=PgLoader,
)
