import os
import sys
import time
import datetime
import warnings

import pandas as pd

sys.path.append('..')
warnings.filterwarnings('ignore')

from tools.setting import DATA_DIR
from tools.time_tool import get_dates_to_update
from API.coingecko import CoinGeckoAPI
from tools.logger import logger

# Output directory for all volume CSVs produced by this script; created eagerly
# at import time so the reload/update functions can write without checking.
AMOUNT_DATA_DIR = os.path.join(DATA_DIR, 'amount_data_v2')
os.makedirs(AMOUNT_DATA_DIR, exist_ok=True)


def reload_cex_spot_amount():
    """Rebuild a full year of CEX spot volume history from CoinGecko.

    Fetches 365 days of spot trading volume for each major centralized
    exchange, joins them into one table, and writes the result to
    ``cex_spot_amount_coingecko_reload.csv``.

    If a full year is present and every timestamp falls exactly on
    midnight, rows are relabelled with the previous calendar date — a
    00:00 data point summarises the day that just ended.
    """
    exchanges = ['binance', 'gdax', 'okex', 'huobi', 'gate', 'kucoin',
                 'crypto_com', 'bitfinex', 'bybit_spot']
    api = CoinGeckoAPI()
    series_list = []
    for ex in exchanges:
        logger.info(f'开始获取{ex}现货交易额数据')
        volume = api.get_exchanges_volume_chart_by_id_v2(id=ex, days=365)
        volume.name = ex
        series_list.append(volume)
        logger.info(f'{ex}现货交易额数据获取成功')

    df = pd.concat(series_list, axis=1)
    df.index = df.index.astype(str)
    distinct_times = {idx[11:16] for idx in df.index}
    if len(df) >= 365 and distinct_times == {'00:00'}:
        df.index = [
            (datetime.datetime.strptime(idx[:10], '%Y-%m-%d')
             - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
            for idx in df.index
        ]
    df.to_csv(os.path.join(AMOUNT_DATA_DIR, 'cex_spot_amount_coingecko_reload.csv'))

def reload_cex_deri_amount():
    """Rebuild a full year of CEX derivatives volume history from CoinGecko.

    Fetches 365 days of derivatives trading volume for each major
    centralized exchange (CoinGecko addresses derivative venues by numeric
    id, hence ``deri_cex_dict``), joins them into one table, and writes
    ``cex_deri_amount_coingecko_reload.csv``.

    If a full year is present and every timestamp falls exactly on
    midnight, rows are relabelled with the previous calendar date — a
    00:00 data point summarises the day that just ended.
    """
    cg = CoinGeckoAPI()
    days = 365
    deri_cex = ['binance', 'okex', 'bybit', 'kucoin', 'crypto_com', 'huobi', 'gate', 'bitfinex', 'kraken', 'bitmex']
    # Maps the human-readable exchange name to CoinGecko's numeric derivatives id.
    deri_cex_dict = {'binance': 466, 'okex': 379, 'bybit': 460, 'ftx': 443, 'kucoin': 471, 'bitmex': 378,
                     'crypto_com': 669, 'huobi': 433, 'gate': 403, 'bitfinex': 486, 'kraken': 426}
    results = []
    for exchange_id in deri_cex:
        # BUG FIX: the original log messages said 现货 (spot); this function
        # fetches 衍生品 (derivatives) volume.
        logger.info(f'开始获取{exchange_id}衍生品交易额数据')
        amount = cg.get_exchanges_volume_chart_by_id_v2(id=deri_cex_dict[exchange_id], days=days)
        amount.name = exchange_id
        results.append(amount)
        logger.info(f'{exchange_id}衍生品交易额数据获取成功')
    df = pd.concat(results, axis=1)
    df.index = df.index.astype(str)
    check = pd.Series([i[11:16] for i in df.index]).value_counts()
    # A 00:00 timestamp summarises the *previous* day, so shift the label back one day.
    if len(df) >= 365 and len(check) == 1 and check.index[0] == '00:00':
        df.index = [(datetime.datetime.strptime(i[:10], '%Y-%m-%d') - datetime.timedelta(days=1)).strftime('%Y-%m-%d') for i in df.index]
    df.to_csv(os.path.join(AMOUNT_DATA_DIR, 'cex_deri_amount_coingecko_reload.csv'))

def reload_dex_amount():
    """Rebuild a full year of DEX spot volume history from CoinGecko.

    Fetches 365 days of trading volume for each tracked decentralized
    exchange/pool deployment, joins them into one table, and writes
    ``dex_spot_amount_coingecko_reload.csv``.

    If a full year is present and every timestamp falls exactly on
    midnight, rows are relabelled with the previous calendar date — a
    00:00 data point summarises the day that just ended.
    """
    dex_ids = ['uniswap-bsc', 'uniswap_v2', 'uniswap_v3', 'uniswap_v3_arbitrum', 'uniswap-v3-avalanche',
               'uniswap_v3_optimism', 'uniswap_v3_polygon_pos',
               'curve_arbitrum', 'curve_ethereum', 'curve_optimism',
               'apex_pro', 'maverick_protocol', 'ferro-protocol', 'openocean_finance', 'jupiter', 'orca', 'thorwallet',
               'thorswap', 'woofi', 'pulsex', 'syncswap',
               'balancer-v1', 'balancer-v2', 'balancer_arbitrum',
               'sushiswap', 'sun_io',
               'pancakeswap_aptos', 'pancakeswap_ethereum', 'pancakeswap_new', 'pancakeswap_stableswap', 'pancakeswap-v3-bsc',
               'pancakeswap-v3-ethereum',
               'dodo', 'dodo_arbitrum', 'dodo_bsc', 'dodo_polygon',
               'traderjoe', 'traderjoe-v2-1-arbitrum', 'traderjoe-v2-1-avalanche', 'traderjoe-v2-1-bsc',
               'traderjoe-v2-arbitrum', 'traderjoe-v2-avalanche', 'traderjoe-v2-bsc',
               'quickswap', 'quickswap-polygon-zkevm', 'quickswap_v3',
               ]
    api = CoinGeckoAPI()
    series_list = []
    for dex_id in dex_ids:
        logger.info(f'开始获取{dex_id}现货交易额数据')
        volume = api.get_exchanges_volume_chart_by_id_v2(id=dex_id, days=365)
        volume.name = dex_id
        series_list.append(volume)
        logger.info(f'{dex_id}现货交易额数据获取成功')

    df = pd.concat(series_list, axis=1)
    df.index = df.index.astype(str)
    distinct_times = {idx[11:16] for idx in df.index}
    if len(df) >= 365 and distinct_times == {'00:00'}:
        df.index = [
            (datetime.datetime.strptime(idx[:10], '%Y-%m-%d')
             - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
            for idx in df.index
        ]
    df.to_csv(os.path.join(AMOUNT_DATA_DIR, 'dex_spot_amount_coingecko_reload.csv'))


def update_exchange_amount_v2(exchange_list, data_file, amount_type='spot', exchange_id_dict=None):
    """Incrementally update a CSV of daily exchange volumes from CoinGecko.

    Reads the existing CSV at *data_file*, determines which dates are missing
    up to today, fetches the missing days and appends them. Requires the CSV
    to already exist (run the matching ``reload_*`` function first) and
    refuses to backfill more than 14 days.

    Args:
        exchange_list: exchange identifiers to fetch (become CSV columns;
            'gdax' is stored under the column name 'coinbase').
        data_file: path of the CSV being maintained (index column 'date').
        amount_type: label used in log messages only ('spot' / 'deri').
        exchange_id_dict: optional mapping from the names in *exchange_list*
            to the ids the CoinGecko API expects (derivative exchanges use
            numeric ids). Defaults to the identity mapping.
    """
    dt = datetime.datetime.now()
    cg = CoinGeckoAPI()
    results = []
    # BUG FIX (idiom): the original used a mutable default argument ({}).
    # None keeps the call interface backward-compatible without the
    # shared-default pitfall.
    if not exchange_id_dict:
        exchange_id_dict = {i: i for i in exchange_list}
    if not os.path.exists(data_file):
        logger.info('未找到历史数据文件，请先调用reload函数')
        return
    df = pd.read_csv(data_file, index_col='date')
    last_update = datetime.datetime.strptime(str(df.index[-1])[:10], '%Y-%m-%d')
    days_to_update = get_dates_to_update(last_update.strftime('%Y-%m-%d'), dt.strftime('%Y-%m-%d'))
    if len(days_to_update) == 0:
        logger.info('数据已更新')
        return
    elif len(days_to_update) == 1:
        for exchange_id in exchange_list:
            logger.info(f'开始获取{exchange_id}_{amount_type}交易额数据')
            res = cg.get_exchanges_volume_chart_by_id_v2(id=exchange_id_dict[exchange_id], days=1)
            if res.empty:
                logger.info(f'{exchange_id}_{amount_type}交易额数据获取失败,数据为空')
                continue
            if exchange_id == 'gdax':
                # Stored column name differs from the CoinGecko API id.
                exchange_id = 'coinbase'
            res.name = exchange_id
            results.append(res)
            logger.info(f'{exchange_id}_{amount_type}交易额数据获取成功')
        update = pd.concat(results, axis=1)
        update['time'] = [i[11:16] for i in update.index.astype(str)]
        # A 00:00 timestamp summarises the *previous* day; subtracting one
        # second before truncating to the date yields that day's label.
        update['date_flag'] = [i - datetime.timedelta(seconds=1) for i in update.index]
        update['date_flag'] = [i[:10] for i in update['date_flag'].astype(str)]
        update = update[update['time'] == '00:00']
        if days_to_update[0] not in update['date_flag'].values:
            logger.info(f'所需更新日期{days_to_update[0]}的数据未更新')
            return
        # BUG FIX: drop_duplicates returns a new frame; the original
        # discarded the result, so duplicates were never removed.
        update = update.drop_duplicates(subset='date_flag', keep='last')
        update.index = update['date_flag']
        del update['time']
        del update['date_flag']
        df_all = pd.concat([df, update])
        df_all.sort_index(inplace=True)
        df_all.index.name = 'date'
        df_all.to_csv(data_file)
    elif len(days_to_update) <= 14:
        results = []
        for exchange_id in exchange_list:
            logger.info(f'开始获取{exchange_id}_{amount_type}交易额数据')
            # BUG FIX: the original passed exchange_id directly here,
            # bypassing exchange_id_dict — derivative exchanges (numeric API
            # ids) could never be fetched on this multi-day path.
            res = cg.get_exchanges_volume_chart_by_id_v2(id=exchange_id_dict[exchange_id], days=14)
            if res.empty:
                logger.info(f'{exchange_id}_{amount_type}交易额数据获取失败,数据为空')
                continue
            # Minute component of the newest timestamp; a value other than
            # '00' means CoinGecko has not finalised the midnight node yet.
            flag = res.index.astype(str)[-1][14:16]
            while flag != '00':
                if int(flag) < 50:
                    time_to_wait = 60 - datetime.datetime.now().minute
                    logger.info(f'获取数据节点超时，等待{time_to_wait}分钟后继续获取')
                    time.sleep(60 * time_to_wait)
                else:
                    time.sleep(60)
                print(exchange_id, flag)
                res = cg.get_exchanges_volume_chart_by_id_v2(id=exchange_id_dict[exchange_id], days=14)
                flag = res.index.astype(str)[-1][14:16]
            if exchange_id == 'gdax':
                exchange_id = 'coinbase'
            res.name = exchange_id
            results.append(res)
            logger.info(f'{exchange_id}_{amount_type}交易额数据获取成功')
        update = pd.concat(results, axis=1)
        update['time'] = [i[11:16] for i in update.index.astype(str)]
        update['date_flag'] = [i - datetime.timedelta(seconds=1) for i in update.index]
        update['date_flag'] = [i[:10] for i in update['date_flag'].astype(str)]
        update = update[update['time'] == '00:00']
        # BUG FIX: drop_duplicates result was discarded in the original.
        update = update.drop_duplicates(subset='date_flag', keep='last')
        # BUG FIX: the original indexed a nonexistent 'date' column here
        # (KeyError at runtime); the date labels live in 'date_flag'.
        update = update[update['date_flag'].isin(days_to_update)]
        if set(days_to_update) == set(update['date_flag']):
            update.index = update['date_flag']
            del update['time']
            del update['date_flag']
            df_all = pd.concat([df, update])
            df_all.sort_index(inplace=True)
            df_all.index.name = 'date'
            df_all.to_csv(data_file)
        else:
            for date in days_to_update:
                # BUG FIX: same 'date' → 'date_flag' column-name error.
                if date not in update['date_flag'].values:
                    logger.info(f'所需更新日期{date}的数据未更新')
            return
    else:
        logger.info('数据过长时间未更新，请先调用reload函数拼接历史数据')
        return


def update_exchange_amount_all_type():
    """Run the incremental daily update for every tracked venue class:
    CEX spot, CEX derivatives, and DEX spot volumes."""
    # CEX spot volumes.
    logger.info('更新中心化交易所现货交易额')
    spot_cex = ['binance', 'gdax', 'okex', 'huobi', 'gate', 'kucoin', 'crypto_com', 'bitfinex', 'bybit_spot']
    update_exchange_amount_v2(
        spot_cex,
        os.path.join(AMOUNT_DATA_DIR, 'cex_spot_amount_coingecko.csv'),
    )

    # CEX derivatives volumes — CoinGecko addresses these by numeric id.
    logger.info('更新中心化交易所衍生品交易额')
    deri_cex = ['binance', 'okex', 'bybit', 'kucoin', 'crypto_com', 'huobi', 'gate', 'bitfinex', 'kraken', 'bitmex']
    deri_cex_dict = {'binance': 466, 'okex': 379, 'bybit': 460, 'ftx': 443, 'kucoin': 471, 'bitmex': 378,
                     'crypto_com': 669, 'huobi': 433, 'gate': 403, 'bitfinex': 486, 'kraken': 426}
    update_exchange_amount_v2(
        deri_cex,
        os.path.join(AMOUNT_DATA_DIR, 'cex_deri_amount_coingecko.csv'),
        amount_type='deri',
        exchange_id_dict=deri_cex_dict,
    )

    # DEX spot volumes.
    logger.info('更新去中心化交易所现货交易额')
    dex = ['uniswap-bsc', 'uniswap_v2', 'uniswap_v3', 'uniswap_v3_arbitrum', 'uniswap-v3-avalanche',
           'uniswap_v3_optimism', 'uniswap_v3_polygon_pos',
           'curve_arbitrum', 'curve_ethereum', 'curve_optimism',
           'apex_pro', 'maverick_protocol', 'ferro-protocol', 'openocean_finance', 'jupiter', 'orca', 'thorwallet',
           'thorswap', 'woofi', 'pulsex', 'syncswap',
           'balancer-v1', 'balancer-v2', 'balancer_arbitrum',
           'sushiswap', 'sun_io',
           'pancakeswap_aptos', 'pancakeswap_ethereum', 'pancakeswap_new', 'pancakeswap_stableswap', 'pancakeswap-v3-bsc',
           'pancakeswap-v3-ethereum',
           'dodo', 'dodo_arbitrum', 'dodo_bsc', 'dodo_polygon',
           'traderjoe', 'traderjoe-v2-1-arbitrum', 'traderjoe-v2-1-avalanche', 'traderjoe-v2-1-bsc',
           'traderjoe-v2-arbitrum', 'traderjoe-v2-avalanche', 'traderjoe-v2-bsc',
           'quickswap', 'quickswap-polygon-zkevm', 'quickswap_v3',
           ]
    update_exchange_amount_v2(
        dex,
        os.path.join(AMOUNT_DATA_DIR, 'dex_spot_amount_coingecko.csv'),
    )


if __name__ == '__main__':
    # Default entry point: incremental daily update of all three CSVs.
    # To rebuild a full year of history from scratch, call the reload
    # functions instead: reload_cex_spot_amount(), reload_cex_deri_amount(),
    # reload_dex_amount().
    update_exchange_amount_all_type()