import os
import sys
import datetime
import warnings

import psutil
import pandas as pd
from multiprocessing import Pool

sys.path.append('..')
warnings.filterwarnings('ignore')

from tools.setting import DATA_DIR
from tools.tools import filter_coins_not_updated_over_1year
from API.coinmarketcap import get_daily_ohlcvm_by_id_via_http
from API.glassnode import get_prices
from tools.logger import logger
# Root directory for all OHLCVM (open/high/low/close/volume/market-cap) CSV
# output; created eagerly so downstream writers can assume it exists.
OHLCVM_DATA_DIR = os.path.join(DATA_DIR, 'ohlcvm_data')
os.makedirs(OHLCVM_DATA_DIR, exist_ok=True)


def update_daily_ohlcvm_one_coin(coin_info):
    """Fetch/refresh the daily OHLCVM CSV for one coin.

    The upstream endpoint only returns up to one year of daily bars, so a
    coin whose local file is more than a year stale cannot be bridged here
    and is skipped.

    :param coin_info: ``(coin_id, coin_slug)`` pair; the slug names the CSV.
    """
    coin_id, coin_name = coin_info
    # Portable path (the original hard-coded a Windows '\\' separator) and
    # make sure the per-frequency subdirectory exists before writing.
    daily_dir = os.path.join(OHLCVM_DATA_DIR, 'daily')
    os.makedirs(daily_dir, exist_ok=True)
    file_name = os.path.join(daily_dir, f'{coin_name}.csv')
    yesterday = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    # Sentinel earlier than any real data; overwritten if a non-empty file exists.
    historical_df = pd.DataFrame()
    last_update = '2010-01-01'
    if os.path.exists(file_name):
        existing = pd.read_csv(file_name, index_col='end_date')
        if not existing.empty:
            historical_df = existing
            last_update = existing.index[-1]
    if last_update == yesterday:
        return  # already current through yesterday
    update = get_daily_ohlcvm_by_id_via_http(coin_id, range='1Y')
    if update.empty:
        return
    if len(update) < 360:
        # Coin is younger than ~1 year: this fetch already covers its full history.
        df = update
    elif update.index[0] <= last_update < update.index[-1]:
        # Overlap: drop the (possibly partial) last stored row and splice in
        # the fresh rows from last_update onward (label slice re-includes it).
        df = pd.concat([historical_df[:-1], update[last_update:]], axis=0)
    elif update.index[0] > last_update:
        # Local data is more than a year stale (or the file is new); this
        # endpoint cannot bridge the gap — another API must backfill first.
        # NOTE(review): this also skips brand-new coins with >=360 bars;
        # presumably intentional (full backfill comes from elsewhere) — verify.
        return
    else:
        df = historical_df
    df.to_csv(file_name)

def update_daily_ohlcvm_all_coins():
    """Refresh daily OHLCVM files for every tracked, recently-updated coin in parallel."""
    logger.info('开始更新所有币的行情数据')
    # Portable path (the original hard-coded a Windows '\\' separator).
    coins = pd.read_csv(os.path.join(DATA_DIR, 'coins_information', 'coins_information.csv'))
    coins_not_updating = filter_coins_not_updated_over_1year()
    # Vectorized membership test instead of a Python-level list comprehension.
    coins['updated_nearly'] = ~coins['slug'].isin(coins_not_updating)
    coins_available = coins[(coins['status'] != 'untracked') & coins['updated_nearly']]
    coin_info_list = list(zip(coins_available['id'], coins_available['slug']))
    # Work is I/O-bound (HTTP fetches), so the pool is deliberately larger than
    # the CPU count; the context manager guarantees the pool is torn down.
    # map() blocks until all results are in, so exiting the `with` is safe.
    with Pool(60) as pool:
        pool.map(update_daily_ohlcvm_one_coin, coin_info_list)


def update_ohlc_one_coin_and_frequency(asset='BTC', interval='24h', all_data=False):
    """Update the kline (candlestick) price CSV for one asset at one interval.

    :param asset: asset symbol, e.g. ``'BTC'``.
    :param interval: sampling interval, e.g. ``'10m'``, ``'1h'``, ``'24h'``.
    :param all_data: force a full re-download; implied when no local file exists.
    """
    # Portable path (the original hard-coded a Windows '\\' separator) and
    # make sure the per-kind subdirectory exists before writing.
    kline_dir = os.path.join(OHLCVM_DATA_DIR, 'kline')
    os.makedirs(kline_dir, exist_ok=True)
    file_name = os.path.join(kline_dir, f'{asset}_{interval}.csv')
    if not os.path.exists(file_name):
        all_data = True
    if all_data:
        # NOTE(review): written with index=False below — assumes get_prices
        # returns 'end_date' as a regular column here, not the index; verify.
        df = get_prices(asset, interval)
    else:
        historical_df = pd.read_csv(file_name, index_col='end_date')
        last_update = historical_df.index[-1]
        # Daily/weekly/monthly timestamps are stored date-only; intraday rows
        # carry a time component.
        fmt = '%Y-%m-%d' if interval in ['24h', '1w', '1month'] else '%Y-%m-%d %H:%M:%S'
        # Re-fetch a 45-day overlap window so revised upstream bars replace
        # the stale local ones.
        start_date = (datetime.datetime.strptime(last_update, fmt)
                      - datetime.timedelta(days=45)).strftime("%Y-%m-%d")
        update = get_prices(asset, interval, start_date=start_date)
        df = pd.concat([historical_df, update], axis=0)
        df.reset_index(inplace=True)
        # On timestamp collisions keep the freshly fetched row.
        df.drop_duplicates(subset='end_date', inplace=True, keep='last')
    df.to_csv(file_name, index=False)


def update_ohlc_btc_and_eth():
    """Refresh the 10-minute, hourly and daily kline price data for BTC and ETH."""
    logger.info('开始更新K线数据')
    intervals = ('10m', '1h', '24h')
    assets = ('BTC', 'ETH')
    for interval in intervals:
        for asset in assets:
            logger.info(f"asset:{asset}, interval:{interval}")
            update_ohlc_one_coin_and_frequency(asset, interval)


if __name__ == '__main__':
    # Script entry point: only the BTC/ETH kline refresh runs by default;
    # update_daily_ohlcvm_all_coins() must be invoked separately.
    update_ohlc_btc_and_eth()
