import os
import sys
import time
import datetime
import warnings

import pandas as pd

sys.path.append('..')
warnings.filterwarnings('ignore')

from tools.setting import DATA_DIR
from tools.time_tool import str_to_timestamp
from API.coinmarketcap import get_total_marketcap_and_amount_via_http
from API.coingecko import CoinGeckoAPI
from API.binance import fetch_binance_kline
from tools.logger import logger
# Directory holding all volume ("amount") CSV outputs; created eagerly at import time.
AMOUNT_DATA_DIR = os.path.join(DATA_DIR, 'amount_data')
os.makedirs(AMOUNT_DATA_DIR, exist_ok=True)

# Stablecoins whose BTC pairs are summed when computing BTC-to-stablecoin volume.
STABLECOINS_COUNTED = ['USDT', 'USDC']


def get_exchanges_detail():
    """Fetch detailed information for every exchange listed on CoinGecko and save it as CSV.

    Retries indefinitely on API errors, resuming from the last exchange that
    was fetched successfully, and sleeps between requests to stay under the
    CoinGecko rate limit. Output:
    ``DATA_DIR/exchange_information/exchanges_detail_information.csv``.
    """
    logger.info('更新交易所详细信息')
    cg = CoinGeckoAPI()
    exchanges = cg.get_exchanges_list()
    exchange_ids = list(exchanges['id'])
    exchanges_detail_info = []
    while len(exchanges_detail_info) < len(exchange_ids):
        try:
            # Resume from where the previous attempt stopped rather than restarting.
            for i in range(len(exchanges_detail_info), len(exchange_ids)):
                exchange_id = exchange_ids[i]  # renamed from `id` (shadowed the builtin)
                logger.info(exchange_id)
                res = cg.get_exchanges_by_id(exchange_id)
                exchanges_detail_info.append(res)
                time.sleep(3)  # rate-limit friendly pause between requests
        except Exception as e:
            # Most likely a rate limit or transient network error: log it
            # (previously discarded silently), back off, then retry.
            logger.warning(f'fetch exchange detail failed, retry in 30s: {e}')
            time.sleep(30)
    df = pd.DataFrame(exchanges_detail_info)
    # Build the path with os.path.join instead of a hard-coded '\\' separator,
    # which broke on non-Windows systems; also make sure the directory exists.
    out_dir = os.path.join(DATA_DIR, 'exchange_information')
    os.makedirs(out_dir, exist_ok=True)
    df.to_csv(os.path.join(out_dir, 'exchanges_detail_information.csv'), index=False)


def update_total_marketcap_and_amount():
    """Refresh the market-wide total market-cap / volume CSV.

    Loads the existing file when present, fetches new rows starting from the
    last stored ``end_date`` (otherwise from 2013-04-28, the earliest date the
    source provides), merges, de-duplicates, sorts by date, and writes back.
    """
    logger.info('更新全市场总市值和交易量')
    file_name = os.path.join(AMOUNT_DATA_DIR, 'total_marketcap_and_amount.csv')
    if not os.path.exists(file_name):
        # First run: no history yet, pull the full series.
        historical_data = pd.DataFrame()
        start_date = '2013-04-28'
    else:
        historical_data = pd.read_csv(file_name, index_col='end_date')
        start_date = historical_data.index[-1]
    update = get_total_marketcap_and_amount_via_http(start_date=start_date)
    combined = pd.concat([historical_data, update]).drop_duplicates().sort_index()
    combined.to_csv(file_name)


def update_binance_btc_to_stablecoin_amount(stablecoins=None):
    """Update the daily BTC-to-stablecoin traded amount pulled from Binance klines.

    Args:
        stablecoins: stablecoin symbols whose BTC pairs are summed; defaults
            to ``STABLECOINS_COUNTED``.

    Writes one column per pair plus a summed ``amount`` column to
    ``AMOUNT_DATA_DIR/binance_btc_to_stablecoin_amount.csv``.
    """
    if stablecoins is None:
        stablecoins = STABLECOINS_COUNTED
    logger.info('更新BTC兑稳定币交易额')
    file_name = os.path.join(AMOUNT_DATA_DIR, 'binance_btc_to_stablecoin_amount.csv')
    end_date = datetime.datetime.now().strftime('%Y-%m-%d')
    if os.path.exists(file_name):
        historical_data = pd.read_csv(file_name, index_col='date')
        # Re-fetch from the last stored day so a previously partial day is refreshed.
        start_date = historical_data.index[-1]
    else:
        historical_data = pd.DataFrame()
        start_date = '2017-01-01'
    start = str_to_timestamp(start_date, tz_str='+0800')
    end = str_to_timestamp(end_date, tz_str='+0800')
    amount_list = {}
    for coin in stablecoins:
        pair = 'BTC' + coin
        amount_list[pair] = fetch_binance_kline(pair, start, end, '1d')
        logger.info(f'从binance获取{pair}交易额成功')
    df = pd.DataFrame()
    for pair, data in amount_list.items():
        df[pair] = data['amount']
    df['amount'] = df.sum(axis=1)
    amount = pd.concat([historical_data, df], axis=0)
    # Deduplicate on the date index, keeping the freshly fetched row. The old
    # whole-row drop_duplicates() failed to collapse a re-fetched day whose
    # values had changed since the last run, leaving two rows for one date.
    amount = amount[~amount.index.duplicated(keep='last')]
    amount.to_csv(file_name)


def update_cex_spot_amount():
    """Update daily spot trading volume for major centralized exchanges via CoinGecko.

    Fetches a 365-day history on first run, then a 30-day incremental window,
    merges with the existing CSV, and de-duplicates by date (keeping the first
    occurrence, i.e. the historical row).
    """
    logger.info('更新中心化交易所现货交易额')
    cg = CoinGeckoAPI()
    spot_exchange_id_list = ['binance', 'gdax', 'okex', 'huobi', 'gate', 'kucoin', 'crypto_com', 'bitfinex', 'bybit_spot']
    # CoinGecko ids mapped to the column name stored in the CSV; the previous
    # if/elif chain also handled 'ftx_spot', which was unreachable because it
    # never appears in the id list above.
    display_name = {'gdax': 'coinbase', 'ftx_spot': 'ftx'}
    exchange_volume_df_list = []
    file_name = os.path.join(AMOUNT_DATA_DIR, 'cex_spot_amount_coingecko.csv')
    # Incremental update: only fetch the recent window when history exists.
    days = 30 if os.path.exists(file_name) else 365
    for exchange_id in spot_exchange_id_list:
        logger.info(f'开始获取{exchange_id}现货交易额数据')
        exchange_volume = cg.get_exchanges_volume_chart_by_id(id=exchange_id, days=days)['amount']
        logger.info(f'{exchange_id}现货交易额数据获取成功')
        # The API occasionally returns repeated timestamps; keep the first.
        exchange_volume = exchange_volume[~exchange_volume.index.duplicated(keep="first")]
        exchange_volume.name = display_name.get(exchange_id, exchange_id)
        exchange_volume_df_list.append(exchange_volume)
        time.sleep(1)  # rate-limit friendly pause
    cex_spot_amount = pd.concat(exchange_volume_df_list, axis=1)
    if os.path.exists(file_name):
        hist_df = pd.read_csv(file_name, index_col='date')
        cex_spot_amount = pd.concat([hist_df, cex_spot_amount], axis=0)
        cex_spot_amount.drop_duplicates(inplace=True)
    cex_spot_amount.reset_index(inplace=True)
    cex_spot_amount.drop_duplicates(subset='date', inplace=True)
    cex_spot_amount.to_csv(file_name, index=False)


def update_cex_deri_amount():
    """Update daily derivatives trading volume for major centralized exchanges via CoinGecko.

    Fetches a 365-day history on first run, then a 30-day incremental window.
    Values more than 100x the series median are treated as API glitches and
    dropped. Merges with the existing CSV, de-duplicating by date (first wins).
    """
    logger.info('更新中心化交易所衍生品交易额')
    cg = CoinGeckoAPI()
    deri_exchange_id_list = ['binance', 'okex', 'bybit', 'kucoin', 'crypto_com', 'huobi', 'gate', 'bitfinex', 'kraken',
                             'bitmex']
    deri_exchange_volume_df_list = []
    file_name = os.path.join(AMOUNT_DATA_DIR, 'cex_deri_amount_coingecko.csv')
    # Incremental update: only fetch the recent window when history exists.
    days = 30 if os.path.exists(file_name) else 365
    for deri_exchange_id in deri_exchange_id_list:
        logger.info(f'开始获取{deri_exchange_id}衍生品交易额数据')
        deri_exchange_data = cg.get_derivative_exchanges_volume_by_id(id=deri_exchange_id, days=days)
        deri_exchange_volume = deri_exchange_data['amount']
        logger.info(f'{deri_exchange_id}衍生品交易额数据获取成功')
        # The API occasionally returns repeated timestamps; keep the first.
        deri_exchange_volume = deri_exchange_volume[~deri_exchange_volume.index.duplicated(keep="first")]
        # Drop outliers: anything above 100x the median is assumed to be bad data.
        deri_exchange_volume[deri_exchange_volume > 100 * deri_exchange_volume.median()] = None
        deri_exchange_volume.name = deri_exchange_id
        deri_exchange_volume_df_list.append(deri_exchange_volume)
        time.sleep(1)  # rate-limit friendly pause

    cex_deri_amount = pd.concat(deri_exchange_volume_df_list, axis=1)
    if os.path.exists(file_name):
        # Renamed from hist_spot_exchange_volume_df: this is derivatives history.
        hist_deri_df = pd.read_csv(file_name, index_col='date')
        cex_deri_amount = pd.concat([hist_deri_df, cex_deri_amount], axis=0)
        cex_deri_amount.drop_duplicates(inplace=True)
    cex_deri_amount.reset_index(inplace=True)
    cex_deri_amount.drop_duplicates(subset='date', inplace=True)
    cex_deri_amount.to_csv(file_name, index=False)


def update_dex_amount():
    """Update daily spot trading volume across decentralized exchanges via CoinGecko.

    Fetches a 365-day history on first run, then a 30-day incremental window,
    sums all venues into a ``total`` column, and merges with the existing CSV,
    de-duplicating by date (the stored historical row wins).
    """
    logger.info('更新去中心化交易所现货交易额')
    cg = CoinGeckoAPI()
    dex_exchange_id_list = \
        ['uniswap-bsc', 'uniswap_v2', 'uniswap_v3', 'uniswap_v3_arbitrum', 'uniswap-v3-avalanche',
         'uniswap_v3_optimism', 'uniswap_v3_polygon_pos',
         'curve_arbitrum', 'curve_ethereum', 'curve_optimism',
         'apex_pro', 'maverick_protocol', 'ferro-protocol', 'openocean_finance', 'jupiter', 'orca', 'thorwallet',
         'thorswap', 'woofi', 'pulsex', 'syncswap',
         'balancer-v1', 'balancer-v2', 'balancer_arbitrum',
         'sushiswap', 'sun_io',
         'pancakeswap_aptos', 'pancakeswap_ethereum', 'pancakeswap_new', 'pancakeswap_stableswap', 'pancakeswap-v3-bsc',
         'pancakeswap-v3-ethereum',
         'dodo', 'dodo_arbitrum', 'dodo_bsc', 'dodo_polygon',
         'traderjoe', 'traderjoe-v2-1-arbitrum', 'traderjoe-v2-1-avalanche', 'traderjoe-v2-1-bsc',
         'traderjoe-v2-arbitrum', 'traderjoe-v2-avalanche', 'traderjoe-v2-bsc',
         'quickswap', 'quickswap-polygon-zkevm', 'quickswap_v3',
         ]
    dex_exchange_volume_list = []
    file_name = os.path.join(AMOUNT_DATA_DIR, 'dex_spot_amount_coingecko.csv')
    # Incremental update: only fetch the recent window when history exists.
    days = 30 if os.path.exists(file_name) else 365
    for dex_exchange_id in dex_exchange_id_list:
        logger.info(f'开始获取{dex_exchange_id}的交易额数据')
        exchange_volume_df = cg.get_exchanges_volume_chart_by_id(id=dex_exchange_id, days=days)
        # The API occasionally returns repeated timestamps; keep the first.
        exchange_volume_df = exchange_volume_df[~exchange_volume_df.index.duplicated(keep='first')]
        exchange_volume_series = exchange_volume_df['amount']
        exchange_volume_series.name = dex_exchange_id
        dex_exchange_volume_list.append(exchange_volume_series)
        logger.info(f'获取{dex_exchange_id}的交易额数据成功')
        time.sleep(1)  # rate-limit friendly pause
    dex_spot_amount = pd.concat(dex_exchange_volume_list, axis=1)
    dex_spot_amount.sort_index(inplace=True)
    dex_spot_amount['total'] = dex_spot_amount.sum(axis=1)
    if os.path.exists(file_name):
        historical_data = pd.read_csv(file_name, index_col='date')
        # iloc[:-1] drops the most recent (still in-progress) day before appending.
        dex_spot_amount = pd.concat([historical_data, dex_spot_amount.iloc[:-1, ]])
    dex_spot_amount.reset_index(inplace=True)
    dex_spot_amount.drop_duplicates(subset='date', inplace=True)
    dex_spot_amount.to_csv(file_name, index=False)


if __name__ == '__main__':
    # Entry point: only the exchange-detail refresh is currently enabled;
    # the remaining updaters are kept here for manual/ad-hoc runs.
    get_exchanges_detail()
    # update_total_marketcap_and_amount()
    # update_binance_btc_to_stablecoin_amount()
    # update_cex_spot_amount()
    # update_cex_deri_amount()
    # update_dex_amount()
