#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Datetime: 2021/10/30 15:09
# @Author  : CHEN Wang
# @Site    :
# @File    : trading_data_update.py
# @Software: PyCharm

"""
脚本说明: 计算与价量相关衍生指标， 每天更新计算
"""

import os
import time
import datetime
import numpy as np
import pandas as pd
from dateutil.tz import tzutc
import yfinance as yf
from quant_researcher.quant.project_tool import hammer
from quant_researcher.quant.project_tool.localize import DATA_DIR
from quant_researcher.quant.project_tool.wrapper_tools.common_wrappers import deco_retry
from quant_researcher.quant.project_tool.time_tool import get_yesterday, get_today, str_to_timestamp, date_shifter, calc_date_diff, \
    get_specific_weekday_in_range, get_specific_weekday_of_each_quarter_last_month, get_quarter_end, get_the_end_of_this_month
from quant_researcher.quant.project_tool.file_tool import copy_file, get_all_filename_path
from quant_researcher.quant.datasource_fetch.crypto_api.glassnode import get_prices, get_ret, get_indicators
from quant_researcher.quant.datasource_fetch.crypto_api.binance import fetch_binance_kline, fetch_binance_margin_interest_rate, fetch_all_binance_funding_rate, fetch_binance_perpetual_leverage_margin
from quant_researcher.quant.datasource_fetch.crypto_api.bitfinex import fetch_bitfinex_kline
from quant_researcher.quant.datasource_fetch.crypto_api.okex.okex import fetch_okex_margin_interest_rate, fetch_okex_usdtusd_premium, fetch_okex_usdtcny_premium
from quant_researcher.quant.datasource_fetch.crypto_api.okex.okx.Market_api import marketAPI
from quant_researcher.quant.datasource_fetch.crypto_api.loanscan import get_lend_borrow_fee_rate
from quant_researcher.quant.datasource_fetch.crypto_api.aave import get_aave_borrow_fee_rate, get_aave_borrow_amount
from quant_researcher.quant.datasource_fetch.crypto_api.others import get_btc126_usdt_premium, get_feixiaohao_usdt_premium
from quant_researcher.quant.datasource_fetch.crypto_api.coingecko.coingecko import CoinGeckoAPI
from quant_researcher.quant.datasource_fetch.crypto_api.coinmarketcap import get_asset_ohlcvm_via_http, get_total_marketcap_amount_via_http, get_marketpair_amount_via_http
from quant_researcher.quant.datasource_fetch.crypto_api.self_defined import true_stalecoin_crypto
from quant_researcher.quant.datasource_fetch.crypto_api.coinglass import get_recent_funding_rate, get_gbtc_premium, get_exchange_open_interest_chart
from quant_researcher.quant.datasource_fetch.currency_api.currency_price import get_currency_historical_price, get_currency_latest_price
from quant_researcher.quant.project_tool.db_operator.my_excel import df_list_2_excel_sheets
from quant_researcher.quant.factors.indicator_tools.indicators import indicator_BOP, indicator_RSI, indicator_ROC, indicator_KC, indicator_CV, indicator_PPO_revised, indicator_MA, indicator_BOLL
from logger import logger
import os
import traceback
from task_monitor import task_to_db, send_error_to_email


def get_trading_data(asset, start_date, end_date=None):
    """
    Compute price/volume derived indicators for *asset* and persist each
    series as a CSV file under ``DATA_DIR`` / ``DATA_DIR/trading_data``.

    :param str asset: asset symbol, e.g. 'BTC'
    :param str start_date: inclusive start date, e.g. '2014-01-01'
    :param str end_date: inclusive end date, e.g. '2014-01-01';
        defaults to yesterday (compute up to yesterday's close)
    :return: None — results are written to disk only
    """

    if end_date is None:
        end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    file_path = os.path.join(DATA_DIR, 'trading_data')
    # Robustness: create the output directory, consistent with the other
    # writer functions in this module.
    os.makedirs(file_path, exist_ok=True)

    # Daily refresh of price and return data
    price_log_price = get_prices(ohlc=False, asset=asset, start_date=start_date, end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])
    file_name = os.path.join(DATA_DIR, f'{asset}_price_log_price')
    price_log_price.to_csv(f'{file_name}.csv')

    all_asset_ret = get_ret([asset, 'USDT'], start_date, end_date)  # returns for the asset and for USDT
    file_name = os.path.join(DATA_DIR, f'{asset}_USDT_ret')
    all_asset_ret.to_csv(f'{file_name}.csv')

    # Load BTC OHLCV(+marketcap) history
    file_name = os.path.join(DATA_DIR, 'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')

    # Close / VWAP120 and Close / VWAP140 ratios.
    # .copy() fixes the SettingWithCopy hazard: the original selected a view
    # of ohlcv_data and then assigned new columns into it.
    prices_df = ohlcv_data[['open', 'high', 'low', 'close']].copy()
    volume_series = ohlcv_data.loc[start_date:end_date, 'volume']
    weighted_close = prices_df['close'].astype(float) * volume_series.astype(float)
    weighted_close.dropna(inplace=True)
    for window in (140, 120):
        rolling_pv = weighted_close.rolling(window=window, min_periods=window).sum()
        rolling_v = volume_series.rolling(window=window, min_periods=window).sum()
        vwap = rolling_pv / rolling_v
        prices_df[f'Close VWAP{window} ratio'] = prices_df['close'] / vwap
    prices_df1 = prices_df.loc['2015-01-01':, :]
    for window in (140, 120):
        file_name = os.path.join(file_path, f'Close VWAP{window} ratio')
        prices_df1[f'Close VWAP{window} ratio'].to_csv(f'{file_name}.csv')

    # Close / MA140 and Close / MA120 ratios
    for window in (140, 120):
        prices_df[f'ma{window}'] = prices_df['close'].rolling(window=window, min_periods=window).mean()
        prices_df[f'Close MA{window} ratio'] = prices_df['close'] / prices_df[f'ma{window}']
    prices_df1 = prices_df.loc['2015-01-01':, :]
    for window in (140, 120):
        file_name = os.path.join(file_path, f'Close MA{window} ratio')
        prices_df1[f'Close MA{window} ratio'].to_csv(f'{file_name}.csv')

    # BTC OSC: relative deviation of close from several moving averages
    column_list = ['close']
    for ma_period in [30, 60, 90, 120, 150, 200, 300]:
        prices_df[f'ma{ma_period}'] = prices_df['close'].rolling(window=ma_period, min_periods=ma_period).mean()
        prices_df[f'osc{ma_period}'] = (prices_df['close'] - prices_df[f'ma{ma_period}']) / prices_df[f'ma{ma_period}']
        column_list.append(f'osc{ma_period}')

    # .copy(): assigning 'log_prices' below would otherwise write into a slice
    prices_df1 = prices_df.loc['2015-01-01':, column_list].copy()
    prices_df1['log_prices'] = np.log10(prices_df1['close'])
    file_name = os.path.join(file_path, 'BTC OSC Ratio')
    prices_df1.to_csv(f'{file_name}.csv')

    # Bitcoin Price Temperature (BPT): an oscillator that models the number of
    # standard deviations that price has moved away from the 4-yr moving average
    prices_df['ma1460'] = prices_df['close'].rolling(window=1460, min_periods=1460).mean()
    prices_df['std1460'] = prices_df['close'].rolling(window=1460, min_periods=1460).std()
    prices_df['Bitcoin Price Temperature'] = (prices_df['close'] - prices_df['ma1460']) / prices_df['std1460']
    prices_df1 = prices_df.loc['2015-01-01':, :]
    file_name = os.path.join(file_path, 'Bitcoin Price Temperature')
    prices_df1['Bitcoin Price Temperature'].to_csv(f'{file_name}.csv')

    # Mayer Multiple: ratio between price and the 200-day moving average
    prices_df['ma200'] = prices_df['close'].rolling(window=200, min_periods=200).mean()
    prices_df['Mayer Multiple'] = prices_df['close'] / prices_df['ma200']
    prices_df1 = prices_df.loc['2015-01-01':, :]
    file_name = os.path.join(file_path, 'Mayer Multiple')
    prices_df1['Mayer Multiple'].to_csv(f'{file_name}.csv')

    # Stablecoin Supply Ratio (SSR) = BTC marketcap / stablecoin marketcap
    btc_marketcap = ohlcv_data['market_cap']
    file_path_ssr = os.path.join(DATA_DIR, 'all_history_ohlcvm_coinmarketcap')
    file_name_ssr = os.path.join(file_path_ssr, 'stablecoin_coinmarketcap')
    stablecoin_marketcap = pd.read_excel(f'{file_name_ssr}.xlsx', sheet_name='stablecoin_marketcap', index_col='end_date')
    stablecoin_marketcap = stablecoin_marketcap['stablecoin_market_cap']
    SSR = btc_marketcap / stablecoin_marketcap
    SSR.name = 'DIY Stablecoin Supply Ratio (SSR)'
    SSR = SSR.loc[SSR.first_valid_index():]  # trim leading NaNs from the index mismatch
    file_name = os.path.join(file_path, 'DIY Stablecoin Supply Ratio (SSR)')
    SSR.to_csv(f'{file_name}.csv')

    # SSR oscillators: z-score of SSR against its rolling mean, for several
    # look-back windows (previously three copy-pasted blocks).
    for window in (120, 140, 200):
        ma = SSR.rolling(window).mean()
        std = SSR.rolling(window).std()
        ssr_osc = (SSR - ma) / std
        ssr_osc.name = f'DIY Stablecoin Supply Ratio (SSR) Oscillator {window}'
        ssr_osc = ssr_osc.loc[ssr_osc.first_valid_index():]
        file_name = os.path.join(file_path, f'DIY Stablecoin Supply Ratio (SSR) Oscillator {window}')
        ssr_osc.to_csv(f'{file_name}.csv')


def get_composite_funding_rate():
    """
    Build daily composite perpetual funding-rate series for BTC-USDT and
    BTC-USD, plus their difference and their average, and save each as CSV
    under ``DATA_DIR/trading_data``.
    """
    file_path = os.path.join(DATA_DIR, 'trading_data')
    funding_rate_list = []
    for symbol in ['BTC-USDT', 'BTC-USD']:
        file_path1 = os.path.join(DATA_DIR, 'funding_rate')
        file_name = os.path.join(file_path1, f'{symbol}_composite_funding_rate')
        funding_rate_data = pd.read_excel(f'{file_name}.xlsx')
        funding_rate_data.sort_values(by='timestamp', inplace=True)
        # exchange_list = ['Okex', 'Binance', 'Huobi', 'Bitmex']  # composite rate = mean over these exchanges
        # BUG FIX: the composite must average only the per-exchange rate
        # columns. The previous mean(axis=1) also included the numeric
        # 'timestamp' column (epoch seconds, ~1e9), which swamped the rates.
        funding_rate_data['composite_funding_rate'] = funding_rate_data.drop(columns=['timestamp']).mean(axis=1)

        # Map each funding event to its UTC calendar day; `timestamp - 1`
        # keeps midnight settlements on the previous day (original behavior).
        timezone = '+0000'
        event_time = pd.to_datetime(funding_rate_data['timestamp'] - 1, unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
        funding_rate_data['date'] = event_time.dt.strftime('%Y-%m-%d')
        funding_rate_data['datetime'] = event_time.dt.strftime('%Y-%m-%d %H:%M:%S')
        funding_rate_data['time'] = event_time.dt.strftime('%H:%M:%S')

        # Daily composite = mean of the (usually several) settlements per day
        funding_rate = funding_rate_data.groupby('date')['composite_funding_rate'].mean()
        funding_rate.index.name = 'end_date'
        funding_rate.name = f'DIY Futures Perpetual Funding Rate {symbol}'
        funding_rate_list.append(funding_rate)
        file_name = os.path.join(file_path, f'DIY Futures Perpetual Funding Rate {symbol}')
        funding_rate.to_csv(f'{file_name}.csv')

    # USDT-margined minus coin-margined funding rate
    funding_rate_diff = funding_rate_list[0] - funding_rate_list[1]
    # Trim the leading NaNs produced by the index mismatch of the two series
    funding_rate_diff = funding_rate_diff.loc[funding_rate_diff.first_valid_index():]
    funding_rate_diff.name = f'DIY Futures Perpetual Funding Rate Diff BTC-USDT - BTC-USD'
    file_name = os.path.join(file_path, f'DIY Futures Perpetual Funding Rate Diff BTC-USDT - BTC-USD')
    funding_rate_diff.to_csv(f'{file_name}.csv')

    # Mean of the two contracts' funding rates
    funding_rate_plus = pd.concat(funding_rate_list, axis=1)
    funding_rate_plus.sort_index(inplace=True)
    funding_rate_plus['plus'] = funding_rate_plus.mean(axis=1)
    funding_rate_plus = funding_rate_plus['plus']
    funding_rate_plus.name = f'DIY Futures Perpetual Funding Rate Plus BTC-USDT + BTC-USD'
    file_name = os.path.join(file_path, f'DIY Futures Perpetual Funding Rate Plus BTC-USDT + BTC-USD')
    funding_rate_plus.to_csv(f'{file_name}.csv')

@deco_retry(retry=50, retry_sleep=15)
def get_cex_spot_volume_maxdrawdown():
    """
    Analyse CEX spot trading amounts and the drawdown of their 30-day moving
    average relative to its running high-water mark, writing one CSV per data
    set under ``DATA_DIR/trading_data``.
    """
    file_path = os.path.join(DATA_DIR, 'trading_data')
    # Load BTC OHLCV(+marketcap) history
    file_name = os.path.join(DATA_DIR, 'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')

    end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2010-01-01', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])

    # Assemble the spot trading-amount data sets, each joined with BTC log price
    trading_amount_BTC = pd.concat([ohlcv_data['amount'], price_log_price['log_prices']], axis=1)
    trading_amount_BTC.sort_index(inplace=True)
    file_name_temp = os.path.join(DATA_DIR, 'ETH_history_ohlcvm')
    trading_amount_ETH = pd.read_excel(f'{file_name_temp}.xlsx', index_col='end_date')
    trading_amount_ETH.sort_index(inplace=True)
    file_path_temp = os.path.join(DATA_DIR, 'trading_data')
    file_name_temp = os.path.join(file_path_temp, 'cex_total_spot_amount_coinmarketcap')
    trading_amount_ALL = pd.read_excel(f'{file_name_temp}.xlsx', index_col='end_date')
    trading_amount_ALL = pd.concat([trading_amount_ALL, price_log_price['log_prices']], join='inner', axis=1)
    trading_amount_ALL.sort_index(inplace=True)
    file_name_temp = os.path.join(file_path_temp, 'cex_total_spot_amount_biance_BTCUSD')
    binance_trading_amount_BTC = pd.read_excel(f'{file_name_temp}.xlsx', index_col='date')
    binance_trading_amount_BTC = pd.concat([binance_trading_amount_BTC, price_log_price['log_prices']], join='inner', axis=1)
    binance_trading_amount_BTC.sort_index(inplace=True)
    file_name_temp = os.path.join(file_path_temp, 'cex_total_spot_amount_each_exchange_coingecko')
    nine_cex_trading_amount_ALL = pd.read_excel(f'{file_name_temp}.xlsx', index_col='date')
    nine_cex_trading_amount_ALL['amount'] = nine_cex_trading_amount_ALL.sum(axis=1)
    nine_cex_trading_amount_ALL = nine_cex_trading_amount_ALL[['binance', 'coinbase', 'ftx', 'okex', 'huobi', 'amount']]
    nine_cex_trading_amount_ALL = pd.concat([nine_cex_trading_amount_ALL, price_log_price['log_prices']], join='inner', axis=1)
    nine_cex_trading_amount_ALL.sort_index(inplace=True)

    # Explicit (frame, output name) pairs replace the former i-indexed if/elif chain
    datasets = [
        (trading_amount_BTC, 'DIY BTC Trading Amount'),
        (trading_amount_ETH, 'DIY ETH Trading Amount'),
        (trading_amount_ALL, 'DIY All Trading Amount'),
        (binance_trading_amount_BTC, 'DIY BTC Trading Amount - Binance'),
        (nine_cex_trading_amount_ALL, 'DIY All Trading Amount - 9CEX'),
    ]
    for trading_amount, output_name in datasets:
        trading_amount['amount_ma7'] = trading_amount['amount'].rolling(7).mean()
        trading_amount['amount_ma30'] = trading_amount['amount'].rolling(30).mean()
        # .copy() fixes the SettingWithCopy hazard of assigning into a .loc slice
        trading_amount = trading_amount.loc['2017-01-01':, :].copy()
        trading_amount['amount_ma30_hwm'] = trading_amount['amount_ma30'].expanding().max()
        trading_amount['amount_ma30_drawdown'] = (trading_amount['amount_ma30'] - trading_amount['amount_ma30_hwm']) / trading_amount['amount_ma30_hwm']
        file_name = os.path.join(file_path, output_name)
        trading_amount.to_csv(f'{file_name}.csv')


@deco_retry(retry=50, retry_sleep=15)
def get_cex_spot_amount_dominance():
    """
    Compute BTC and ETH spot trading-amount dominance versus total CEX spot
    volume (7- and 30-day moving averages) plus the BTC/ETH amount ratio,
    saving the results as CSV under ``DATA_DIR/trading_data``.
    """
    file_path = os.path.join(DATA_DIR, 'trading_data')
    # Load BTC OHLCV(+marketcap) history
    file_name = os.path.join(DATA_DIR, 'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')

    end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2010-01-01', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])

    # Spot trading amounts, each joined with BTC log price
    trading_amount_BTC = pd.concat([ohlcv_data['amount'], price_log_price['log_prices']], axis=1)
    trading_amount_BTC.sort_index(inplace=True)
    file_name_temp = os.path.join(DATA_DIR, 'ETH_history_ohlcvm')
    trading_amount_ETH = pd.read_excel(f'{file_name_temp}.xlsx', index_col='end_date')
    trading_amount_ETH = pd.concat([trading_amount_ETH['amount'], price_log_price['log_prices']], axis=1)
    trading_amount_ETH.sort_index(inplace=True)
    file_path_temp = os.path.join(DATA_DIR, 'trading_data')
    file_name_temp = os.path.join(file_path_temp, 'cex_total_spot_amount_coinmarketcap')
    trading_amount_ALL = pd.read_excel(f'{file_name_temp}.xlsx', index_col='end_date')
    trading_amount_ALL['amount_ma7'] = trading_amount_ALL['amount'].rolling(7).mean()
    trading_amount_ALL['amount_ma30'] = trading_amount_ALL['amount'].rolling(30).mean()
    trading_amount_ALL = trading_amount_ALL.loc['2017-01-01':, :]

    # .copy() after the .loc slices fixes the SettingWithCopy hazard on the
    # dominance-column assignments below.
    trading_amount_BTC['amount_ma7'] = trading_amount_BTC['amount'].rolling(7).mean()
    trading_amount_BTC['amount_ma30'] = trading_amount_BTC['amount'].rolling(30).mean()
    trading_amount_BTC = trading_amount_BTC.loc['2017-01-01':, :].copy()
    trading_amount_ETH['amount_ma7'] = trading_amount_ETH['amount'].rolling(7).mean()
    trading_amount_ETH['amount_ma30'] = trading_amount_ETH['amount'].rolling(30).mean()
    trading_amount_ETH = trading_amount_ETH.loc['2017-01-01':, :].copy()

    # Dominance = asset volume MA / total market volume MA, by window
    trading_amount_BTC['BTC_ALL_amount_dominance_ma7'] = trading_amount_BTC['amount_ma7'] / trading_amount_ALL['amount_ma7']
    trading_amount_BTC['BTC_ALL_amount_dominance_ma30'] = trading_amount_BTC['amount_ma30'] / trading_amount_ALL['amount_ma30']
    trading_amount_ETH['ETH_ALL_amount_dominance_ma7'] = trading_amount_ETH['amount_ma7'] / trading_amount_ALL['amount_ma7']
    trading_amount_ETH['ETH_ALL_amount_dominance_ma30'] = trading_amount_ETH['amount_ma30'] / trading_amount_ALL['amount_ma30']
    trading_amount_BTC['BTCETH_ALL_amount_dominance_ma7'] = (trading_amount_BTC['amount_ma7'] + trading_amount_ETH['amount_ma7']) / trading_amount_ALL['amount_ma7']
    trading_amount_BTC['BTCETH_ALL_amount_dominance_ma30'] = (trading_amount_BTC['amount_ma30'] + trading_amount_ETH['amount_ma30']) / trading_amount_ALL['amount_ma30']
    trading_amount_BTC['BTC_ETH_amount_ratio_ma7'] = trading_amount_BTC['amount_ma7'] / trading_amount_ETH['amount_ma7']
    trading_amount_BTC['BTC_ETH_amount_ratio_ma30'] = trading_amount_BTC['amount_ma30'] / trading_amount_ETH['amount_ma30']

    file_name = os.path.join(file_path, 'DIY BTC Trading Amount Dominance')
    trading_amount_BTC.to_csv(f'{file_name}.csv')
    file_name = os.path.join(file_path, 'DIY ETH Trading Amount Dominance')
    trading_amount_ETH.to_csv(f'{file_name}.csv')

@deco_retry(retry=50, retry_sleep=15)
def get_lend_borrow_fee_rate_v1():
    """
    Fetch DeFi lend/borrow rates from loanscan for several protocols and
    assets, join each protocol's rates with the BTC log price, and save one
    Excel file per protocol under ``DATA_DIR/margin_interest_rate``.
    """
    end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2018-01-01', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])
    # Hoisted out of the loop: output directory is loop-invariant
    file_path = os.path.join(DATA_DIR, 'margin_interest_rate')
    os.makedirs(file_path, exist_ok=True)
    for protocol in ['aave', 'compound', 'dydx', 'makerdao']:
        df_list = []
        for lend_or_borrow in ['lend', 'borrow']:
            for asset in ['USDC', 'DAI', 'ETH', 'WBTC']:
                df = get_lend_borrow_fee_rate(protocol=protocol, lend_or_borrow=lend_or_borrow, asset=asset)
                if df is not None:
                    df.rename(columns={f'{lend_or_borrow}_rate': f'{protocol}_{asset}_{lend_or_borrow}_rate'}, inplace=True)
                    df_list.append(df)
        # Robustness: pd.concat([]) raises ValueError when every fetch returned None
        if not df_list:
            logger.info(f'no lend/borrow rate data returned for {protocol}, skipping')
            continue
        all_df = pd.concat(df_list, axis=1)
        all_frame = all_df.merge(price_log_price, left_index=True, right_index=True)
        file_name = os.path.join(file_path, f'loanscan_{protocol}_margin_interest_rate')
        all_frame.to_excel(f'{file_name}.xlsx')


@deco_retry(retry=50, retry_sleep=15)
def get_aave_borrow_fee_rate_v1():
    """
    Fetch aave borrow fee rates per asset (daily mean) and total TVL/borrowed
    amounts, join each with the BTC log price, and save Excel files under
    ``DATA_DIR/margin_interest_rate``.
    """
    end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2018-01-01', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])
    # Hoisted out of the loop: the original relied on `file_path` leaking out
    # of the for loop for the TVL write below; also the makedirs is loop-invariant.
    file_path = os.path.join(DATA_DIR, 'margin_interest_rate')
    os.makedirs(file_path, exist_ok=True)
    for asset in ['USDC', 'DAI', 'ETH', 'WBTC']:
        df = get_aave_borrow_fee_rate(asset=asset)
        # Daily mean of the first four (rate) columns
        df = df.groupby(['date'])[df.columns[:4]].mean()
        all_frame = df.merge(price_log_price, left_index=True, right_index=True)
        file_name = os.path.join(file_path, f'aave_{asset}_margin_interest_rate')
        all_frame.to_excel(f'{file_name}.xlsx')
        time.sleep(5)  # throttle requests to the aave API

    df = get_aave_borrow_amount()
    all_frame = df.merge(price_log_price, left_index=True, right_index=True)
    all_frame.sort_index(inplace=True)
    file_name = os.path.join(file_path, 'aave_margin_tvl_borrowed')
    all_frame.to_excel(f'{file_name}.xlsx')

@deco_retry(retry=50, retry_sleep=15)
def get_exchange_margin_interest_rate():
    """
    Incrementally update the Binance and OKX margin (leveraged-trading)
    interest-rate histories stored as Excel files under
    ``DATA_DIR/margin_interest_rate``.
    """
    end_date = get_yesterday(marker='with_n_dash')  # compute up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2018-01-01', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])

    # --- Binance margin interest rates ---
    file_path = os.path.join(DATA_DIR, f'margin_interest_rate')
    os.makedirs(file_path, exist_ok=True)
    file_name = os.path.join(file_path, f'binance_margin_interest_rate')
    if os.path.exists(f'{file_name}.xlsx'):
        history_binance_margin_interest_rate = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')
        # Restart the fetch 7 rows before the end of the stored history —
        # presumably to refresh recent, possibly incomplete days (the
        # keep='last' dedup below overwrites them). NOTE(review): assumes the
        # file already has at least 7 rows; confirm.
        latest_date = history_binance_margin_interest_rate.index[-7]
        start = time.mktime(datetime.datetime(int(latest_date[:4]), int(latest_date[5:7]), int(latest_date[-2:]), 0, 0, 0, tzinfo=tzutc()).timetuple())
    else:
        # No history on disk: fetch everything since 2018-01-01 (UTC midnight)
        start = time.mktime(datetime.datetime(2018, 1, 1, 0, 0, 0, tzinfo=tzutc()).timetuple())
    # Fetch window closes at 23:59:59 UTC of end_date
    end = time.mktime(datetime.datetime(int(end_date[:4]), int(end_date[5:7]), int(end_date[-2:]), 23, 59, 59, tzinfo=tzutc()).timetuple())
    df_list = []
    for asset in ['USDT', 'BUSD', 'BTC', 'ETH']:
        logger.info(f'开始从binance获取{asset}的借贷费率')
        frame = fetch_binance_margin_interest_rate(asset=asset, start_time=start, end_time=end)  # https://www.binance.com/en/margin/interest-history
        # 'timestamp' is in milliseconds; convert to epoch seconds
        frame['start_time'] = frame.apply(lambda x: int(x['timestamp'] / 1000), axis=1)
        frame['end_date'] = pd.to_datetime(frame['start_time'], unit='s').dt.tz_localize('UTC').dt.tz_convert('+0000')
        frame['end_date'] = frame['end_date'].dt.strftime('%Y-%m-%d')
        frame['datetime'] = pd.to_datetime(frame['start_time'], unit='s').dt.tz_localize('UTC').dt.tz_convert('+0000')
        frame['datetime'] = frame['datetime'].dt.strftime('%Y-%m-%d %H:%M:%S')
        frame = frame[~frame.duplicated(subset='end_date')]  # a day may have multiple rate records; keep the first
        frame.set_index('end_date', inplace=True)
        frame.rename(columns={'margin_interest_rate': f'{asset}_margin_interest_rate'}, inplace=True)
        df_list.append(frame[f'{asset}_margin_interest_rate'])
    # One column per asset, joined with the BTC log price on end_date
    frame = pd.concat(df_list, axis=1)
    all_new_frame = frame.merge(price_log_price, left_index=True, right_index=True)
    if os.path.exists(f'{file_name}.xlsx'):
        # Append to the stored history; keep='last' lets freshly fetched rows
        # replace the overlapping tail rows re-fetched above
        all_new_frame = pd.concat([history_binance_margin_interest_rate, all_new_frame], axis=0)
        all_new_frame = all_new_frame[~all_new_frame.index.duplicated(keep='last')]  # de-duplicate the index
    all_new_frame.to_excel(f'{file_name}.xlsx')

    # --- OKX margin interest rates ---
    okex_margin_interest_rate = fetch_okex_margin_interest_rate()
    file_path = os.path.join(DATA_DIR, f'margin_interest_rate')
    file_name = os.path.join(file_path, f'okx_margin_interest_rate.xlsx')
    if os.path.exists(file_name):
        historical_data = pd.read_excel(file_name)
        # Only rewrite the file when the fetch actually contains newer dates
        if max(historical_data['end_date']) < max(okex_margin_interest_rate['end_date']):
            all_df = pd.concat([historical_data, okex_margin_interest_rate], axis=0, ignore_index=True)
            all_df.to_excel(f'{file_name}', index=False)
        else:
            pass
    else:
        okex_margin_interest_rate.to_excel(f'{file_name}', index=False)


@deco_retry(retry=50, retry_sleep=15)
def get_exchange_perpetual_leverage_margin():
    """
    Refresh the table of Binance perpetual contracts' maximum leverage and
    maintenance-margin ratios, merging the fresh snapshot into the Excel
    history kept under ``DATA_DIR/leverage_margin``.
    """
    output_dir = os.path.join(DATA_DIR, 'leverage_margin')
    os.makedirs(output_dir, exist_ok=True)
    output_xlsx = os.path.join(output_dir, 'binance_leverage_margin') + '.xlsx'

    logger.info(f'开始从binance获取各合约的最大杠杆倍数与维持保证金比例')
    merged = fetch_binance_perpetual_leverage_margin()

    # Merge with the stored history (if any); identical bracket rows are
    # collapsed, keeping the earlier occurrence.
    if os.path.exists(output_xlsx):
        stored = pd.read_excel(output_xlsx, index_col='Unnamed: 0')
        merged = pd.concat([stored, merged], axis=0, ignore_index=True)
        merged.drop_duplicates(subset=['bracketSeq', 'symbol', 'updateTime'], inplace=True)

    merged.to_excel(output_xlsx)


@deco_retry(retry=50, retry_sleep=15)
def get_gbtc_premium_data():
    """
    Analyse the GBTC premium/discount: join the premium series with the BTC
    log price and add the premium's deviation from its rolling mean over
    several windows, saving the result as an Excel file.
    """
    end_date = get_yesterday(marker='with_n_dash')  # up to yesterday's close
    price_log_price = get_prices(ohlc=False, asset='BTC', start_date='2015-05-05', end_date=end_date)
    price_log_price['log_prices'] = np.log10(price_log_price['close'])

    gbtc_premium = get_gbtc_premium()
    gbtc_premium_df = pd.concat([gbtc_premium, price_log_price], axis=1, join='inner')
    # Deviation of the premium from its rolling mean, one column per window
    # (previously four copy-pasted lines)
    for window in (30, 60, 90, 120):
        gbtc_premium_df[f'rolling{window}_premium'] = gbtc_premium_df['gbtc_premium'] - gbtc_premium_df['gbtc_premium'].rolling(window).mean()
    file_path_temp = os.path.join(DATA_DIR, 'etf_analysis')
    # Robustness: create the output dir, consistent with the other writers here
    os.makedirs(file_path_temp, exist_ok=True)
    file_name_temp = os.path.join(file_path_temp, 'coinglass_gbtc_premium')
    gbtc_premium_df.to_excel(f'{file_name_temp}.xlsx')


@deco_retry(retry=100, retry_sleep=15)
def get_all_ohlcvm_data(data_source='coingecko', asset='all', start_date=None, end_date=None, if_print=False, **kwargs):
    """
    Fetch daily OHLCV + market-cap ("ohlcvm") history from one of several data sources.

    :param data_source: 'coingecko' | 'coinmarketcap' | 'cryptodatadownload' | 'exchange'
    :param asset: 'all' to refresh every listed coin, otherwise a single coin id/slug
    :param start_date: '%Y-%m-%d'; defaults to '2013-01-01'
    :param end_date: '%Y-%m-%d'; defaults to yesterday (daily bars are final as of yesterday's close)
    :param if_print: if True, also persist the merged result to an Excel file
    :param kwargs: keyword options
        - initialize: re-fetch everything instead of updating on top of existing data
        - gap_days: consider local data fresh when within this many days of end_date (default 2)
    :return: branch dependent — list of per-coin DataFrames ('coingecko' + 'all'),
             a single merged DataFrame (single-asset branches), or None (the
             'coinmarketcap'/'all', 'cryptodatadownload' and 'exchange' branches
             have no explicit return)
    """

    initialize = kwargs.pop('initialize', False)
    gap_days = kwargs.pop('gap_days', 2)

    if start_date is None:
        start_date = '2013-01-01'
    if end_date is None:
        end_date = get_yesterday(marker='with_n_dash')  # daily bars are final as of yesterday's close

    if data_source == 'coingecko':
        cg = CoinGeckoAPI()
        if asset == 'all':
            # NOTE(review): Windows-style backslash path — this branch assumes a Windows host
            file_path = os.path.join(DATA_DIR, r'all_history_ohlcvm_coingecko\all_history_ohlcvm_coingecko')
            all_filename_path_dict = get_all_filename_path(file_path)
            all_existing_coins_name = [i.replace('.xlsx', '') for i in all_filename_path_dict.keys()]

            # Full list of coins on the market.
            # get_coins_markets() could fetch ordered by descending market cap but may be incomplete:
            # all_crypto_df = cg.get_coins_markets(vs_currency='usd', all_data=True, order='market_cap_desc')
            all_crypto_df = cg.get_coins_list()  # more complete coverage
            all_crypto_df.drop_duplicates(subset=['id'], inplace=True)  # de-duplicate ids
            logger.info(f'目前总共有{all_crypto_df.shape[0]}个币的数据')

            # Refresh each coin's history.
            all_data_list = []
            for coins in all_crypto_df['id']:
                if (coins in all_existing_coins_name):
                    file_name = all_filename_path_dict[f'{coins}.xlsx']
                    history_ohlcvm = pd.read_excel(f'{file_name}')
                    if history_ohlcvm.empty:
                        history_ohlcvm_latest_date = '2013-01-01'
                    else:
                        history_ohlcvm_latest_date = history_ohlcvm.iloc[-1, 0]
                else:
                    file_name = os.path.join(file_path, f'{coins}.xlsx')
                    history_ohlcvm_latest_date = '2013-01-01'

                if (calc_date_diff(history_ohlcvm_latest_date, end_date, fmt_str='%Y-%m-%d') < 10):  # local data already within 10 days of end_date
                    logger.info(f'{coins} 数据已最新无需再更新')
                    # NOTE(review): this branch is only reachable when the coin's file exists
                    # (a missing file implies latest_date='2013-01-01', i.e. diff >= 10),
                    # so history_ohlcvm is always bound here in practice.
                    all_ohlcvm = history_ohlcvm.copy()
                    all_ohlcvm.drop_duplicates(subset=['end_date'], inplace=True)  # drop duplicate rows for the same day
                    all_ohlcvm.set_index('end_date', inplace=True)
                    all_ohlcvm.sort_index(inplace=True)
                    all_ohlcvm.index.name = 'end_date'
                    all_data_list.append(all_ohlcvm)
                    continue
                else:
                    all_ohlcvm = cg.get_coin_market_chart_range_by_id(id=coins, start_date=start_date, end_date=end_date)
                    if all_ohlcvm is None:  # bad id — no data could be fetched
                        continue
                    all_ohlcvm.sort_index(inplace=True)
                    all_ohlcvm.index.name = 'end_date'
                    all_data_list.append(all_ohlcvm)
                    all_ohlcvm.to_excel(f'{file_name}')  # persist the refreshed data to this coin's workbook
                    logger.info(f'{coins} 数据更新成功')
                    time.sleep(5)  # throttle to avoid rate limiting

            return all_data_list

        else:
            all_data = cg.get_coin_market_chart_range_by_id(symbol=asset, start_date=start_date, end_date=end_date)
            try:
                # prefer glassnode as the source of truth for OHLC
                prices_df = get_prices(ohlc=True, asset=f'{asset.upper()}', start_date=start_date, end_date=end_date)
            except Exception as err:  # OHLC fetch failed — fall back to NaN columns
                prices_df = pd.DataFrame(np.NAN, columns=['open', 'high', 'low', 'close'], index=all_data.index)

            all_df = pd.concat([all_data[['market_cap', 'amount', 'volume']], prices_df], axis=1)
            # all_df.dropna(axis=0, inplace=True)
            all_df.sort_index(ascending=True, inplace=True)
            all_df.index.name = 'end_date'

            if if_print:
                file_name = os.path.join(DATA_DIR, f'{asset.upper()}_history_ohlcvm')
                # back up the existing file before overwriting
                backup_path = os.path.join(DATA_DIR, f'all_history_ohlcvm_backup')
                copy_file(f'{file_name}.xlsx', backup_path)
                # save the merged data
                all_df.to_excel(f'{file_name}.xlsx')
            return all_df

    elif data_source == 'coinmarketcap':
        if asset == 'all':
            file_path = os.path.join(DATA_DIR, r'all_history_ohlcvm_coinmarketcap\all_history_ohlcvm_coinmarketcap')
            all_filename_path_dict = get_all_filename_path(file_path)
            all_existing_coins_name = [i.replace('.xlsx', '') for i in all_filename_path_dict.keys()]  # coins already saved locally

            # full coin list previously downloaded from coinmarketcap
            file_path_1 = os.path.join(DATA_DIR, 'all_history_ohlcvm_coinmarketcap')
            file_name_1 = os.path.join(file_path_1, f'all_coin_list_coinmarketcap')
            all_coin_list_df = pd.read_excel(f'{file_name_1}.xlsx', index_col='slug')
            all_coin_list_df = all_coin_list_df[all_coin_list_df['status'] != 'untracked']  # drop untracked coins — no data can be fetched for them
            logger.info(f'目前总共有{all_coin_list_df.shape[0]}个币的数据')

            # split the id list into batches of 10 for the multi-process fetcher
            id_series = all_coin_list_df['id']  # every coin id
            id_series = pd.DataFrame(id_series)
            id_series.reset_index(inplace=True)
            id_series_list_for_test = []
            split_num = 0
            while split_num < len(id_series.index):
                if split_num + 10 > len(id_series.index):
                    end_num = len(id_series.index)
                else:
                    end_num = split_num + 10
                split_id_series = id_series.iloc[split_num:end_num, :].copy()
                split_id_series.set_index('slug', inplace=True)
                split_id_series = split_id_series['id']
                id_series_list_for_test.append(split_id_series)
                split_num = split_num + 10

            # batch-processing harness: fetch_one_coin_data runs once per batch in its own process
            info = {
                'func': fetch_one_coin_data,
                'file_path': file_path,
                'end_date': end_date,
                'initialize': initialize,
                'gap_days': gap_days,
                'all_existing_coins_name': all_existing_coins_name,
                'all_coin_list_df': all_coin_list_df,
                'all_filename_path_dict': all_filename_path_dict,
            }

            hammer.split_a_list_into_batches_and_deal_each_with_a_process(
                the_list=id_series_list_for_test, info=info, ignore_err=True, how_many=15, what_version='v2')  # keep the process count modest, otherwise the API may ban us

        else:
            file_path = os.path.join(DATA_DIR, r'all_history_ohlcvm_coinmarketcap\all_history_ohlcvm_coinmarketcap')
            file_name = os.path.join(file_path, f'{asset}')
            # back up the existing file before overwriting
            backup_path = os.path.join(DATA_DIR, r'all_history_ohlcvm_coinmarketcap\all_history_ohlcvm_coinmarketcap_backup')
            copy_file(f'{file_name}.xlsx', backup_path)

            history_ohlcvm = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')
            if history_ohlcvm.empty:
                history_ohlcvm_latest_date = '2013-01-01'
            else:
                history_ohlcvm_latest_date = history_ohlcvm.index[-1]

            all_ohlcvm = get_asset_ohlcvm_via_http(asset=asset, range='1Y')
            # splice the freshly fetched data onto the stored history
            next_date = date_shifter(history_ohlcvm_latest_date, step='days', how_many=1)
            if all_ohlcvm.index[0] > next_date:
                logger.info(f'读取的最新数据跟历史数据接不上，请检查数据源是否有问题')
                raise ValueError
            asset_df = pd.concat([history_ohlcvm, all_ohlcvm.loc[next_date:, ]])
            asset_df = asset_df[history_ohlcvm.columns]
            asset_df.sort_index(inplace=True)
            asset_df.index.name = 'end_date'

            try:
                # prefer glassnode as the source of truth for OHLC
                file_path_1 = os.path.join(DATA_DIR, 'all_history_ohlcvm_coinmarketcap')
                file_name_1 = os.path.join(file_path_1, f'all_coin_list_coinmarketcap')
                all_coin_list_df = pd.read_excel(f'{file_name_1}.xlsx', index_col='slug')
                symbol = all_coin_list_df.loc[asset, 'symbol']
                prices_df = get_prices(ohlc=True, asset=f'{symbol}', start_date=start_date, end_date=end_date)
            except Exception as err:  # OHLC fetch failed — fall back to NaN columns
                prices_df = pd.DataFrame(np.NAN, columns=['open', 'high', 'low', 'close'], index=asset_df.index)

            all_df = pd.concat([asset_df[['market_cap', 'amount', 'volume']], prices_df], axis=1)
            # all_df.dropna(axis=0, inplace=True)
            all_df.sort_index(ascending=True, inplace=True)
            all_df.index.name = 'end_date'

            if if_print:
                # NOTE(review): `symbol` is only bound if the try-block above succeeded;
                # if the OHLC fetch failed this raises NameError — confirm intended.
                file_name = os.path.join(DATA_DIR, f'{symbol}_history_ohlcvm')
                all_df.to_excel(f'{file_name}.xlsx')
            return all_df

    elif data_source == 'cryptodatadownload':
        # read per-exchange CSV dumps from a fixed local directory (Windows path)
        exhange_filename = os.listdir(f"E:\指标数据\ohlcv")
        exhange_filename.remove('others')

        all_exchange_ohlcv_data_list = []
        for filename in exhange_filename:
            exchange_name = filename.split('_')[0]
            filename_path = os.path.join(f"E:\指标数据\ohlcv", filename)
            exchange_ohlcv_data = pd.read_csv(f'{filename_path}')
            exchange_ohlcv_data.reset_index(inplace=True)
            exchange_ohlcv_data.columns = exchange_ohlcv_data.iloc[0, :]  # first data row holds the column names
            exchange_ohlcv_data.drop(index=0, inplace=True)
            exchange_ohlcv_data['exchange'] = exchange_name
            exchange_ohlcv_data.rename(columns={'Volume USDT': 'Volume USD'}, inplace=True)  # some pairs trade against USDT

            if exchange_name == 'Poloniex':
                # in the raw data, the 'Volume USD' and 'Volume BTC' columns are swapped
                exchange_ohlcv_data.rename(columns={'Volume USD': 'Volume USD1'}, inplace=True)
                exchange_ohlcv_data.rename(columns={'Volume BTC': 'Volume BTC1'}, inplace=True)
                exchange_ohlcv_data.rename(columns={'Volume USD1': 'Volume BTC'}, inplace=True)
                exchange_ohlcv_data.rename(columns={'Volume BTC1': 'Volume USD'}, inplace=True)

            if exchange_name == 'Bitstamp':
                # rows before 2018-02-27 have 'Volume USD' and 'Volume BTC' swapped;
                # the swap is detected via BTC volume > USD volume
                exchange_ohlcv_data[['Volume BTC', 'Volume USD']] = exchange_ohlcv_data[['Volume BTC', 'Volume USD']].astype(float)
                test1 = exchange_ohlcv_data[exchange_ohlcv_data['Volume BTC'] <= exchange_ohlcv_data['Volume USD']]
                test2 = exchange_ohlcv_data[exchange_ohlcv_data['Volume BTC'] > exchange_ohlcv_data['Volume USD']]
                test2.rename(columns={'Volume USD': 'Volume USD1'}, inplace=True)
                test2.rename(columns={'Volume BTC': 'Volume BTC1'}, inplace=True)
                test2.rename(columns={'Volume USD1': 'Volume BTC'}, inplace=True)
                test2.rename(columns={'Volume BTC1': 'Volume USD'}, inplace=True)
                exchange_ohlcv_data = pd.concat([test1, test2])

            exchange_ohlcv_data.columns = exchange_ohlcv_data.columns.map(lambda x: x.lower())  # lowercase all column names
            exchange_ohlcv_data.rename(columns={'volume btc': 'volume'}, inplace=True)  # base-currency volume
            exchange_ohlcv_data[['open', 'high', 'low', 'close', 'volume']] = \
                exchange_ohlcv_data[['open', 'high', 'low', 'close', 'volume']].astype(float)
            if 'volume usd' not in exchange_ohlcv_data.columns:
                exchange_ohlcv_data['volume usd'] = exchange_ohlcv_data['volume'] * exchange_ohlcv_data['close']
            exchange_ohlcv_data.rename(columns={'volume usd': 'amount'}, inplace=True)  # quote (USD) turnover
            exchange_ohlcv_data['amount'] = exchange_ohlcv_data['amount'].astype(float)
            exchange_ohlcv_data.rename(columns={'unix timestamp': 'unix'}, inplace=True)
            exchange_ohlcv_data['date'] = exchange_ohlcv_data['date'].str[:10]
            all_exchange_ohlcv_data_list.append(exchange_ohlcv_data)

        all_exchange_ohlcv_data = pd.concat(all_exchange_ohlcv_data_list, axis=0)
        all_exchange_ohlcv_data.sort_values(by='date', ascending=False, inplace=True)
        all_exchange_ohlcv_data = all_exchange_ohlcv_data[all_exchange_ohlcv_data['date'] <= '2021-12-18']

        # aggregate daily volume/amount across all exchanges
        # NOTE(review): this branch builds all_data but has no return — the result is discarded
        all_data = all_exchange_ohlcv_data.groupby('date')['volume'].sum()
        all_data = pd.DataFrame(all_data)
        all_data['amount'] = all_exchange_ohlcv_data.groupby('date')['amount'].sum()

    elif data_source == 'exchange':
        # full trading-volume history: Bitfinex before 2017-08-18, Binance afterwards,
        # with Bitfinex volume/amount rescaled to match Binance on the overlap day
        start = time.mktime(datetime.datetime(2010, 1, 1, tzinfo=tzutc()).timetuple())
        end = time.mktime(datetime.datetime(2017, 8, 18, tzinfo=tzutc()).timetuple())
        bitfinex_data = fetch_bitfinex_kline("tBTCUSD", start, end, '1D')
        start = time.mktime(datetime.datetime(2017, 8, 17, tzinfo=tzutc()).timetuple())
        today = get_today(marker='with_n_dash')
        end = time.mktime(
            datetime.datetime(int(today[:4]), int(today[5:7]), int(today[-2:]), tzinfo=tzutc()).timetuple())
        binance_data = fetch_binance_kline("BTCUSDT", start, end, '1d')
        volume_coff = binance_data.loc['2017-08-17', 'volume'] / bitfinex_data.loc['2017-08-17', 'volume']
        bitfinex_data.loc[:, 'volume'] = bitfinex_data.loc[:, 'volume'] * volume_coff
        amount_coff = binance_data.loc['2017-08-17', 'amount'] / bitfinex_data.loc['2017-08-17', 'amount']
        bitfinex_data.loc[:, 'amount'] = bitfinex_data.loc[:, 'amount'] * amount_coff
        # NOTE(review): this branch builds all_df but has no return — the result is discarded
        all_df = pd.concat([bitfinex_data.iloc[:-1, :], binance_data], axis=0).dropna(axis=1)


def fetch_one_coin_data(id_series, **kwargs):
    """
    Batch worker: fetch or refresh the coinmarketcap ohlcvm history for every coin
    in id_series and persist each coin to its own Excel workbook.

    :param id_series: pd.Series mapping coin slug (index) -> coinmarketcap id (values)
    :param kwargs:
        - end_date: target end date, '%Y-%m-%d'
        - file_path: directory that holds one '<slug>.xlsx' per coin
        - initialize: if True, skip any coin whose file already exists (a previous
          run may have been interrupted — existing files are kept as-is)
        - gap_days: skip coins already updated to within this many days of end_date
        - all_existing_coins_name: slugs that already have a local workbook
        - all_coin_list_df: coin-list DataFrame (slug index, 'id'/'is_active' columns)
        - all_filename_path_dict: map of '<slug>.xlsx' -> full file path
    :return: None (results are written to disk as a side effect)
    """
    end_date = kwargs.pop('end_date')
    file_path = kwargs.pop('file_path')
    initialize = kwargs.pop('initialize')
    gap_days = kwargs.pop('gap_days')  # upstream default: update to within the latest 2 days
    all_existing_coins_name = kwargs.pop('all_existing_coins_name')
    all_coin_list_df = kwargs.pop('all_coin_list_df')
    all_filename_path_dict = kwargs.pop('all_filename_path_dict')

    for coin_id in id_series:  # renamed from `id` to avoid shadowing the builtin
        logger.info(f'开始获取id={coin_id}的数据')
        coins = id_series[id_series == coin_id].index[0]  # coin slug
        if coins in all_existing_coins_name:  # coin fetched before — load its stored history
            file_name = all_filename_path_dict[f'{coins}.xlsx']
            history_ohlcvm = pd.read_excel(f'{file_name}', index_col='end_date')
            if history_ohlcvm.empty:
                history_ohlcvm_latest_date = '2013-01-01'
            else:
                history_ohlcvm_latest_date = history_ohlcvm.index[-1]
        else:  # never fetched — pull the full history
            file_name = os.path.join(file_path, f'{coins}.xlsx')
            history_ohlcvm_latest_date = '2013-01-01'

        if initialize:
            # on (re)initialization, keep any coin whose file already exists — a
            # previous run may have been interrupted midway
            mask = (coins in all_existing_coins_name)
        else:
            # otherwise skip only if the data is already fresh, or the coin is inactive
            mask1 = (calc_date_diff(history_ohlcvm_latest_date, end_date, fmt_str='%Y-%m-%d') < gap_days)
            # fix: Series[0] relied on the deprecated positional fallback for a
            # non-integer (slug) index — use .iloc[0] for explicit positional access
            mask2 = bool(all_coin_list_df[all_coin_list_df['id'] == coin_id]['is_active'].iloc[0])
            mask = mask1 | (not mask2)

        if mask:  # nothing to update for this coin
            logger.info(f'{coins} 数据已最新,或者数据已经不更新了，无需再更新')
            continue

        all_ohlcvm = get_asset_ohlcvm_via_http(id=coin_id, range='1Y')
        if all_ohlcvm is None:  # bad id — nothing could be fetched
            logger.info(f'{coins} 数据更新失败，获取出来为None')
            continue
        if coins in all_existing_coins_name:  # splice the new data onto the stored history
            next_date = date_shifter(history_ohlcvm_latest_date, step='days', how_many=1)
            if all_ohlcvm.index[0] > next_date:  # gap between history and new data — cannot splice
                logger.info(f'读取的最新数据跟历史数据接不上，请检查数据源是否有问题')
                continue
            if all_ohlcvm.index[0] == history_ohlcvm.index[0]:  # new data fully covers history — overwrite
                pass
            else:
                all_ohlcvm = pd.concat([history_ohlcvm, all_ohlcvm.loc[next_date:, ]])
                all_ohlcvm = all_ohlcvm[history_ohlcvm.columns]
                all_ohlcvm.sort_index(inplace=True)
                all_ohlcvm.index.name = 'end_date'

        all_ohlcvm.to_excel(f'{file_name}')  # persist to this coin's workbook
        logger.info(f'{coins} 数据更新成功')
        time.sleep(3)  # throttle to avoid rate limiting

def df_list_2_excel_sheets_remove_special_sign(df_list, res_dir=None, file_name='output', sheet_name_list=None, **kwargs):
    """
    Write each DataFrame in df_list to its own sheet of a single Excel workbook.
    NOTE: if the target workbook already exists, its previous sheets are replaced
    wholesale — individual sheets cannot be updated in place.

    :param df_list: list of DataFrames
    :param res_dir: output directory (created if missing)
    :param file_name: file name without extension ('.xlsx' is appended automatically)
    :param sheet_name_list: sheet names, one per DataFrame
    :param kwargs: options forwarded to DataFrame.to_excel
        - index: bool, write the DataFrame index as a column; default False
    :return: None
    :raises ValueError: when df_list and sheet_name_list differ in length
    """
    if_index = kwargs.pop('index', False)
    if os.path.exists(res_dir):
        print(f'预计的结果存储目录已存在：{res_dir}')
    else:
        print(f'预计的结果存储目录不存在，即将创建：{res_dir}')
        os.makedirs(res_dir)  # fix: create intermediate directories too (os.mkdir fails on nested paths)
        print(f'已创建目录：{res_dir}')

    if len(df_list) != len(sheet_name_list):
        print(f'传入的df_list长度与sheet_name_list不一致')
        # fix: zip() would silently drop the surplus DataFrames/sheet names — fail loudly instead
        raise ValueError('df_list and sheet_name_list must have the same length')

    target_path = os.path.join(res_dir, f'{file_name}.xlsx')
    print(f'将要把DF写入文件(kwargs: {kwargs})：{res_dir}->{file_name}')
    # fix: context manager replaces writer.save()/writer.close() — ExcelWriter.save
    # was removed in pandas 2.0; the `encoding` argument of to_excel was removed
    # in pandas 1.5 and is dropped here (xlsx is always UTF-8 internally).
    with pd.ExcelWriter(target_path, engine='openpyxl') as writer:
        for df, sheet_name in zip(df_list, sheet_name_list):
            df.to_excel(excel_writer=writer, sheet_name=sheet_name, index=if_index,
                        freeze_panes=(0, 0))
    print('写入成功！')


def get_stablecoin_marketcap_data(data_source='coinmarketcap', update=True, start_date=None, end_date=None, if_print=False):
    """
    Build/refresh the market-cap history of every tracked stablecoin plus the
    aggregate stablecoin market cap.

    :param data_source: 'coinmarketcap' or 'coingecko'
    :param update: True to append recent data onto the stored per-asset sheets;
                   False to recompute the full history (coingecko only)
    :param start_date: '%Y-%m-%d'; defaults to '2013-01-01'
    :param end_date: '%Y-%m-%d'; defaults to yesterday (daily data is final as of yesterday's close)
    :param if_print: if True, write all per-asset sheets plus the aggregate sheet
                     back to 'stablecoin_<data_source>.xlsx'
    :return: list of per-asset DataFrames with the aggregate DataFrame appended last
    """

    if start_date is None:
        start_date = '2013-01-01'
    if end_date is None:
        end_date = get_yesterday(marker='with_n_dash')  # daily data is final as of yesterday's close

    file_path = os.path.join(DATA_DIR, f'all_history_ohlcvm_{data_source}')
    file_name = os.path.join(file_path, f'stablecoin_{data_source}')
    # back up the existing workbook before rewriting it
    backup_path = os.path.join(file_path, f'all_history_ohlcvm_{data_source}_backup')
    copy_file(f'{file_name}.xlsx', backup_path)

    all_recent_data_list = []
    asset_df_list = []
    for asset in true_stalecoin_crypto.keys():
        if update:
            asset_history_data = pd.read_excel(f'{file_name}.xlsx', sheet_name=asset)
            asset_history_data['end_date'] = pd.to_datetime(asset_history_data['end_date'])
            asset_history_data['end_date'] = asset_history_data['end_date'].dt.strftime('%Y-%m-%d')
            asset_history_data.set_index('end_date', inplace=True)

            if data_source == 'coinmarketcap':
                recent_data = get_asset_ohlcvm_via_http(asset=true_stalecoin_crypto[asset], range='1Y')
            elif data_source == 'coingecko':
                start_date = date_shifter(end_date, step='days', how_many=-91)
                recent_data = get_all_ohlcvm_data(data_source='coingecko', asset=asset, start_date=start_date, end_date=None, if_print=False)

            next_date = date_shifter(asset_history_data.index[-1], step='days', how_many=1)
            if recent_data is not None:
                asset_df = pd.concat([asset_history_data.iloc[:, ], recent_data.loc[next_date:, ]])
            else:
                # fetch failed: keep the stored history as-is and reuse it as the
                # "recent" frame so the aggregate concat below still has this asset
                asset_df = asset_history_data.copy()
                recent_data = asset_history_data.copy()

            # fix: removed a duplicated concat that rebuilt asset_df a second time
            # (and could raise when slicing the unsorted fallback history by a
            # missing date label in the fetch-failed path)
            asset_df = asset_df[asset_history_data.columns]
            asset_df.sort_index(inplace=True)
            asset_df.index.name = 'end_date'

        else:  # recompute the full history from scratch
            if data_source == 'coingecko':
                recent_data = get_all_ohlcvm_data(data_source='coingecko', asset=asset, start_date=start_date, end_date=None, if_print=False)
            else:
                raise NotImplementedError
            asset_df = recent_data.copy()

        recent_data.rename(columns={'market_cap': asset}, inplace=True)
        all_recent_data_list.append(recent_data[asset])
        asset_df_list.append(asset_df)
        time.sleep(5)  # throttle to avoid rate limiting

    all_recent_data_df = pd.concat(all_recent_data_list, axis=1)
    all_recent_data_df['stablecoin_market_cap'] = all_recent_data_df.sum(axis=1)
    if update:
        # splice the new aggregate onto the stored aggregate history
        history_data = pd.read_excel(f'{file_name}.xlsx', sheet_name='stablecoin_marketcap')
        history_data['end_date'] = pd.to_datetime(history_data['end_date'])
        history_data['end_date'] = history_data['end_date'].dt.strftime('%Y-%m-%d')
        history_data.set_index('end_date', inplace=True)
        all_df = pd.concat([history_data.iloc[:-1, ], all_recent_data_df.loc[history_data.index[-1]:, ]])
    else:
        all_df = all_recent_data_df.copy()

    all_df.sort_index(inplace=True)
    all_df.index.name = 'end_date'

    asset_df_list.append(all_df)
    if if_print:
        sheet_name_list = list(true_stalecoin_crypto.keys())
        sheet_name_list.append(f'stablecoin_marketcap')
        df_list_2_excel_sheets_remove_special_sign(df_list=asset_df_list, res_dir=file_path, file_name=f'stablecoin_{data_source}', sheet_name_list=sheet_name_list, index=True)

    return asset_df_list


def reformat_okex_history_funding_rate_data():
    """Pre-process raw OKEx funding-rate exports: derive date/datetime/time columns
    from the timestamp, rebuild the unix timestamp, and re-save the workbook.
    Note: the exchange's raw timestamps are in the UTC+8 timezone."""
    out_dir = os.path.join(DATA_DIR, f'funding_rate')
    for pair in ['BTC-USDT', 'BTC-USD']:
        src = os.path.join(out_dir, f'Okex_{pair}_funding_rate')
        raw = pd.read_excel(f'{src}.xlsx')

        parsed = pd.to_datetime(raw['timestamp'])
        raw['timestamp'] = parsed
        raw['date'] = parsed.dt.strftime('%Y-%m-%d')
        raw['datetime'] = parsed.dt.strftime('%Y-%m-%d %H:%M:%S')
        raw['time'] = parsed.dt.strftime('%H:%M:%S')
        # rebuild the unix timestamp from the formatted datetime string
        raw['timestamp'] = raw['datetime'].apply(str_to_timestamp)
        raw.set_index('datetime', inplace=True)

        out_dir = os.path.join(DATA_DIR, f'funding_rate')
        os.makedirs(out_dir, exist_ok=True)
        dst = os.path.join(out_dir, f'Okex_{pair}_funding_rate')
        raw.to_excel(f'{dst}.xlsx')


def reformat_history_funding_rate_data():
    """
    Rebuild the composite funding-rate workbook for BTC-USDT and BTC-USD.

    Merges the per-exchange funding-rate exports (Okex/Binance/Huobi/Bitmex,
    scaled to percent) into one wide frame, splices it with the previously
    assembled composite history at the 2021-08-15 cutover, forward-fills gaps,
    and writes the result back to '<symbol>_composite_funding_rate.xlsx'.
    """
    file_path = os.path.join(DATA_DIR, f'funding_rate')
    for symbol in ['BTC-USDT', 'BTC-USD']:
        file_name = os.path.join(file_path, f'{symbol}_composite_funding_rate')
        all_history_data = pd.read_excel(f'{file_name}.xlsx')
        all_history_data.sort_values(by='timestamp', inplace=True)
        all_history_data.set_index('datetime', drop=True, inplace=True)

        file_name = os.path.join(file_path, f'coinglass_{symbol}_funding_rate')
        coinglass_history_data = pd.read_excel(f'{file_name}.xlsx')
        coinglass_history_data.sort_values(by='timestamp', inplace=True)
        coinglass_history_data.set_index('datetime', drop=True, inplace=True)

        # merge each exchange's funding-rate series into one wide frame
        for i, exchange in enumerate(['Okex', 'Binance', 'Huobi', 'Bitmex']):
            # NOTE: every exchange's raw timestamps are in the UTC+8 timezone
            file_name = os.path.join(file_path, f'{exchange}_{symbol}_funding_rate')
            exchange_history_data = pd.read_excel(f'{file_name}.xlsx')

            exchange_history_data['funding_rate'] = exchange_history_data['funding_rate'] * 100  # to percent
            exchange_history_data.rename(columns={'funding_rate': exchange}, inplace=True)
            exchange_history_data = exchange_history_data[['datetime', 'timestamp', exchange]]
            if i == 0:
                df = exchange_history_data.copy()
            else:
                df = pd.merge(df, exchange_history_data, on=['datetime', 'timestamp'], how='outer')

        df.sort_values(by='timestamp', inplace=True)
        df.set_index('datetime', drop=True, inplace=True)

        # splice: per-exchange data up to 2021-08-15 00:00, composite history from 08:00 onwards
        all_df = pd.concat([df.loc[:'2021-08-15 00:00:00', ], all_history_data.loc['2021-08-15 08:00:00':, ]], axis=0)
        all_df.ffill(inplace=True)

        # derive local-time date/time columns from the unix timestamp
        timezone = 'Asia/Shanghai'
        all_df['date'] = pd.to_datetime(all_df['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
        all_df['date'] = all_df['date'].dt.strftime('%Y-%m-%d')
        all_df['time'] = pd.to_datetime(all_df['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
        all_df['time'] = all_df['time'].dt.strftime('%H:%M:%S')

        all_df.sort_index(inplace=True)
        all_df.index.name = 'datetime'

        # exchange_list = [i for i in all_df.columns if i not in ['timestamp', 'price', 'date', 'time']]
        # all_df['composite_funding_rate'] = all_df[exchange_list].mean(axis=1)

        file_name = os.path.join(file_path, f'{symbol}_composite_funding_rate')
        all_df.to_excel(f'{file_name}.xlsx')

    return


@deco_retry(retry=50, retry_sleep=15)
def get_funding_rate_data(data_source='coinglass', end_date=None):
    """
    Incrementally update perpetual funding-rate history.

    :param data_source: 'coinglass' — refresh the coinglass and composite
                        workbooks for BTC-USDT / BTC-USD;
                        'binance' — refresh funding rates for all binance symbols
    :param end_date: '%Y-%m-%d'; defaults to yesterday
    :return: the last assembled composite DataFrame for 'coinglass';
             None for 'binance'
    """

    if end_date is None:
        end_date = get_yesterday(marker='with_n_dash')  # daily data is final as of yesterday's close

    file_path = os.path.join(DATA_DIR, f'funding_rate')
    if data_source == 'coinglass':
        for symbol in ['BTC-USDT', 'BTC-USD']:
            # load the stored composite history (if any) to find the splice point
            file_name = os.path.join(file_path, f'{symbol}_composite_funding_rate')
            if os.path.exists(f'{file_name}.xlsx'):
                all_history_data = pd.read_excel(f'{file_name}.xlsx')
                all_history_data.sort_values(by='timestamp', inplace=True)
                all_history_data.set_index('datetime', drop=True, inplace=True)
                latest_date = all_history_data.index[-1]
            else:
                all_history_data = pd.DataFrame()
                latest_date = '2010-01-01 00:00:00'

            file_name = os.path.join(file_path, f'coinglass_{symbol}_funding_rate')
            coinglass_history_data = pd.read_excel(f'{file_name}.xlsx')
            coinglass_history_data.sort_values(by='timestamp', inplace=True)
            coinglass_history_data.set_index('datetime', drop=True, inplace=True)

            timezone = 'Asia/Shanghai'
            # map symbol to asset + market type (U = USDT-margined, C = coin-margined)
            if symbol == 'BTC-USDT':
                asset = 'BTC'
                market_type = 'U'
            elif symbol == 'BTC-USD':
                asset = 'BTC'
                market_type = 'C'
            else:
                raise ValueError
            recent_coinglass_df = get_recent_funding_rate(asset=asset, market_type=market_type)

            # convert ms timestamps to seconds and derive local-time date/datetime/time columns
            recent_coinglass_df['timestamp'] = recent_coinglass_df.apply(lambda x: int(x['timestamp'] / 1000), axis=1)
            recent_coinglass_df['date'] = pd.to_datetime(recent_coinglass_df['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
            recent_coinglass_df['date'] = recent_coinglass_df['date'].dt.strftime('%Y-%m-%d')
            recent_coinglass_df['datetime'] = pd.to_datetime(recent_coinglass_df['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
            recent_coinglass_df['datetime'] = recent_coinglass_df['datetime'].dt.strftime('%Y-%m-%d %H:%M:%S')
            recent_coinglass_df['time'] = pd.to_datetime(recent_coinglass_df['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(timezone)
            recent_coinglass_df['time'] = recent_coinglass_df['time'].dt.strftime('%H:%M:%S')
            recent_coinglass_df.set_index('datetime', inplace=True)
            recent_coinglass_df = recent_coinglass_df.iloc[:-1, ]  # last row covers an incomplete period — drop it

            # splice the new coinglass rows onto the stored coinglass history and persist
            all_coinglass_df = pd.concat([coinglass_history_data.iloc[:-1, ], recent_coinglass_df.loc[coinglass_history_data.index[-1]:, ]])
            all_coinglass_df.sort_index(inplace=True)
            all_coinglass_df.index.name = 'datetime'
            file_path = os.path.join(DATA_DIR, f'funding_rate')
            file_name = os.path.join(file_path, f'coinglass_{symbol}_funding_rate')
            all_coinglass_df.to_excel(f'{file_name}.xlsx')

            # exchange_list = [i for i in recent_coinglass_df.columns if i not in ['timestamp', 'price', 'date', 'time']]
            # recent_coinglass_df['composite_funding_rate'] = recent_coinglass_df[exchange_list].mean(axis=1)
            # splice the new rows onto the composite history and persist
            all_df = pd.concat([all_history_data.iloc[:-1, ], recent_coinglass_df.loc[latest_date:, ]])
            all_df.sort_index(inplace=True)
            all_df.index.name = 'datetime'
            file_name = os.path.join(file_path, f'{symbol}_composite_funding_rate')
            all_df.to_excel(f'{file_name}.xlsx')

    elif data_source == 'binance':
        fetch_all_binance_funding_rate(symbol_list='all')
        return None

    # NOTE(review): an unrecognized data_source falls through to here with
    # all_df unbound and raises NameError — confirm whether it should raise
    # ValueError explicitly instead.
    return all_df


def get_okex_futures_kline() -> None:
    """
    Reconstruct continuous OKEx futures kline series and save them to Excel.

    At any moment the market only exposes 4 kline series (this-week, next-week,
    this-quarter, next-quarter).  Each continuous series is stitched together
    from historical contracts -- and not necessarily contracts of the same
    type -- so the historical chain has to be derived from the contract roll
    rules:

    * On a day that is NOT a next-quarter contract creation day: the old
      next-week rolls into the new this-week, the old this-week connects to
      the new next-week; this-quarter and next-quarter stay unchanged.
    * On a next-quarter contract creation day: the old next-quarter rolls into
      the new this-quarter, the old this-quarter rolls into the new next-week,
      the old next-week rolls into the new this-week, and the old this-week
      connects to the new next-quarter.

    Worked example with the 211231 contract (an old next-quarter contract):
    first check whether 211231 itself is a next-quarter creation day.  By the
    rules the creation day falls two weeks before the quarterly settlement, so
    211231 is not one; hence when this contract settles as a this-week
    contract it connects to the then-new next-week contract, 220114.  Checking
    220114: when it settles as this-week, the this-quarter contract at that
    time is 220325, so 220311 is a next-quarter creation day.  Until then the
    stitching rule stays "old this-week connects to new next-week":
    220114 -> 220128 -> 220211 -> 220225 -> 220311.  Because 220311 is a
    next-quarter creation day the rule switches to "old this-week connects to
    new next-quarter", jumping straight to 220930.  So to obtain the kline
    history of the 211231 contract, contract 220930 has to be requested.

    :return: None; one Excel file per contract type (``BTC-USD``/``BTC-USDT``)
        is written under ``DATA_DIR/futures_kline``.
    """

    today = get_today(marker='with_n_dash')  # today's date

    # All calendar dates in scope, formatted %Y-%m-%d
    all_date = pd.date_range(start='2015-01-01', end=today, freq='D')
    all_date_list = [x.strftime("%Y-%m-%d") for x in all_date]
    all_date_df = pd.DataFrame(all_date_list, columns=['end_date'])
    all_date_df.set_index('end_date', inplace=True)

    # All Fridays in the range
    all_friday_list = get_specific_weekday_in_range(start_date='2015-01-01', end_date=today, weekday='Friday')

    # All next-quarter contract creation dates so far; the creation day is the
    # 3rd Friday counting back from the end of each quarter's last month
    # (i.e. two weeks before the quarterly settlement Friday).
    latest_season_date = get_quarter_end(today)
    all_season_contract_create_date = get_specific_weekday_of_each_quarter_last_month(start_date='2015-01-01', end_date=latest_season_date, weekday='Friday', num=3, ascending=False)

    # The most recent next-quarter contract creation date
    latest_season_contract_create_date = all_season_contract_create_date[-1]

    # Work out the settlement dates embedded in the current this-quarter /
    # next-quarter / this-week / next-week contract codes.
    if today < latest_season_contract_create_date:  # today is before this quarter's creation day; e.g. on 2022-03-08 the creation day is 2022-03-11, so this-quarter is 20220325 and next-quarter is 20220624
        # date of the current next-quarter contract
        next_season_end_date = date_shifter(before=latest_season_date, step='quarters', how_many=1)
        next_season_contract_date = get_specific_weekday_in_range(start_date=next_season_end_date[:7] + '-01', end_date=next_season_end_date, weekday='Friday')[-1]
        # date of the current this-quarter contract
        this_season_contract_date = get_specific_weekday_in_range(start_date=latest_season_date[:7] + '-01', end_date=latest_season_date, weekday='Friday')[-1]
        # date of the current next-week contract
        next_week_end_date = date_shifter(before=today, step='weeks', how_many=2)
        next_week_contract_date = get_specific_weekday_in_range(start_date=next_week_end_date[:7] + '-01', end_date=next_week_end_date, weekday='Friday')[-1]
        # date of the current this-week contract
        this_week_end_date = date_shifter(before=today, step='weeks', how_many=1)
        this_week_contract_date = get_specific_weekday_in_range(start_date=this_week_end_date[:7] + '-01', end_date=this_week_end_date, weekday='Friday')[-1]

    else:  # today is on/after this quarter's contract creation day
        # date of the current next-quarter contract
        next_season_end_date = date_shifter(before=latest_season_date, step='quarters', how_many=2)
        next_season_contract_date = get_specific_weekday_in_range(start_date=next_season_end_date[:7] + '-01', end_date=next_season_end_date, weekday='Friday')[-1]
        # date of the current this-quarter contract
        this_season_end_date = date_shifter(before=latest_season_date, step='quarters', how_many=1)
        this_season_contract_date = get_specific_weekday_in_range(start_date=this_season_end_date[:7] + '-01', end_date=this_season_end_date, weekday='Friday')[-1]
        # date of the current next-week contract
        next_week_end_date = date_shifter(before=today, step='weeks', how_many=2)
        next_week_contract_date = get_specific_weekday_in_range(start_date=next_week_end_date[:7] + '-01', end_date=next_week_end_date, weekday='Friday')[-1]
        # date of the current this-week contract
        this_week_end_date = date_shifter(before=today, step='weeks', how_many=1)
        this_week_contract_date = get_specific_weekday_in_range(start_date=today, end_date=this_week_end_date, weekday='Friday')[-1]

    # Last Friday of each quarter (the quarterly settlement days)
    all_season_last_friday = get_specific_weekday_of_each_quarter_last_month(start_date='2015-01-01', end_date=next_season_contract_date, weekday='Friday', num=1, ascending=False)

    # Build one stitching-rule dataframe for each of the four kline series
    # (next_season_contract_date, this_season_contract_date,
    # next_week_contract_date, this_week_contract_date).
    kline_rule_list = []
    for kline_type in ['next_season', 'this_season', 'next_week', 'this_week']:
        if kline_type == 'next_season':
            kline_end_date = next_season_contract_date
        elif kline_type == 'this_season':
            kline_end_date = this_season_contract_date
        elif kline_type == 'next_week':
            kline_end_date = next_week_contract_date
        elif kline_type == 'this_week':
            kline_end_date = this_week_contract_date

        # Walk backwards from the current contract, collecting every historical
        # contract that was stitched into this continuous series.
        kline_date_list = [kline_end_date]
        this_date = kline_end_date
        while this_date > '2015-01-01':
            if this_date in all_season_last_friday:  # this contract is a quarterly contract
                # A quarterly contract was created two quarters earlier, on the
                # 3rd-to-last Friday of that month ([-3]).
                temp_date = date_shifter(before=this_date, step='quarters', how_many=-2)
                end_date_of_this_month = get_the_end_of_this_month(temp_date)
                this_date = get_specific_weekday_in_range(start_date=temp_date[:7] + '-01', end_date=end_date_of_this_month, weekday='Friday')[-3]  # find the contract's start date
            else:
                this_date = date_shifter(before=this_date, step='weeks', how_many=-2)
            kline_date_list.append(this_date)

        # Expand the contract chain to daily frequency: label every calendar
        # day with the next contract settlement at or after it (backfill).
        # NOTE(review): shift(-1) then moves each label one day earlier,
        # presumably so the settlement day itself belongs to the following
        # contract -- confirm the intended alignment.
        kline_date_df = pd.DataFrame([kline_date_list, kline_date_list]).T
        kline_date_df.columns = ['contract_name', 'end_date']
        kline_date_df.set_index('end_date', inplace=True)
        kline_date_df = all_date_df.merge(kline_date_df, left_index=True, right_index=True, how='outer')
        df = kline_date_df.backfill()
        df = df.shift(-1)
        df = df.dropna()

        # Classify what the contract was on each date: next-quarter,
        # this-quarter, next-week, or this-week.
        for date in df.index:
            contract_name = df.loc[date, 'contract_name']
            if contract_name not in all_season_last_friday:
                if calc_date_diff(date, contract_name) <= 7:
                    df.loc[date, 'contract_type'] = '交割当周'
                elif calc_date_diff(date, contract_name) > 7 and calc_date_diff(date, contract_name) <= 14:
                    df.loc[date, 'contract_type'] = '交割次周'
                else:
                    raise ValueError

            elif contract_name in all_season_last_friday:
                # For quarterly contracts, count how many creation days lie
                # between `date` and the settlement to tell this-quarter from
                # next-quarter.
                end_date_of_that_month = get_the_end_of_this_month(contract_name)
                temp_date_list = get_specific_weekday_of_each_quarter_last_month(start_date=date, end_date=end_date_of_that_month, weekday='Friday', num=3, ascending=False)
                temp_date_list = [i for i in temp_date_list if date <= i <= contract_name]
                if calc_date_diff(date, contract_name) <= 7:
                    df.loc[date, 'contract_type'] = '交割当周'
                elif calc_date_diff(date, contract_name) > 7 and calc_date_diff(date, contract_name) <= 14:
                    df.loc[date, 'contract_type'] = '交割次周'
                elif (len(temp_date_list) == 3) or (len(temp_date_list) == 2 and date not in all_season_contract_create_date):
                    df.loc[date, 'contract_type'] = '交割次季'
                elif (len(temp_date_list) == 2 and date in all_season_contract_create_date) or (len(temp_date_list) == 1 and date not in all_season_contract_create_date):
                    df.loc[date, 'contract_type'] = '交割当季'
                else:
                    raise ValueError

        kline_rule_list.append(df)

    # Millisecond timestamps for the OKX candlestick API
    start = str_to_timestamp(input_str='2018-01-01', tz_str='+0800') * 1000
    end = str_to_timestamp(input_str=today, tz_str='+0800') * 1000

    for contract_type in ['BTC-USD', 'BTC-USDT']:
        all_kline_df_list = []
        for idx, contract_date in enumerate([next_season_contract_date, this_season_contract_date, next_week_contract_date, this_week_contract_date]):
            # Contract id format: e.g. 'BTC-USD-220930'
            contract_id = contract_type + '-' + contract_date.replace('-', '')[2:]
            result = marketAPI.get_history_candlesticks(instId=contract_id, after=end, before=start, bar='1D', time_zone='+0000')
            kline_rule = kline_rule_list[idx]
            kline_df = result.merge(kline_rule, left_index=True, right_index=True, how='left')
            kline_df['contract_date'] = kline_df['contract_name']
            kline_df['contract_name'] = kline_df['contract_name'].apply(lambda x: contract_type + '-' + x.replace('-', '')[2:])
            all_kline_df_list.append(kline_df)

        all_kline_df = pd.concat(all_kline_df_list, axis=0)
        all_kline_df.sort_index(inplace=True)
        file_path = os.path.join(DATA_DIR, 'futures_kline')
        file_name = os.path.join(file_path, f'okex_{contract_type}_futures_ohlcv')
        all_kline_df.to_excel(f'{file_name}.xlsx')

    return


def get_btc_dominance(data_source='qkl123', end_date=None):
    """
    Fetch the BTC market-cap dominance ratio and persist it to disk.

    :param data_source: one of ``'qkl123'`` (a manually downloaded CSV),
        ``'coingecko'`` (live REST snapshot appended to ``BTCDominance.csv``),
        or ``'coinmarketcap'`` (HTTP API).
    :param end_date: last date to compute for; defaults to yesterday's close.
    :return: the dominance data for ``'qkl123'`` / ``'coinmarketcap'``
        (also written to ``DATA_DIR/trading_data/btc_cap_dominance.csv``);
        ``None`` for ``'coingecko'``.
    :raises NotImplementedError: for an unrecognised ``data_source``.
    """

    if end_date is None:
        end_date = get_yesterday(marker='with_n_dash')  # compute up to yesterday's close

    file_path = os.path.join(DATA_DIR, f'trading_data')
    if data_source == 'qkl123':
        file_name = os.path.join(DATA_DIR, f'crypto dominance')  # data downloaded manually from https://www.qkl123.com/data/market-ratio
        crypto_dominance = pd.read_csv(f'{file_name}.csv')
        crypto_dominance = crypto_dominance.shift(1, axis=1)  # the source file's column headers are off by one column
        crypto_dominance.drop(columns=['时间'], inplace=True)
        crypto_dominance.rename(columns={'Unnamed: 11': 'Others', 'BTC价格（$）': 'BTC_prices'}, inplace=True)
        crypto_dominance.index = crypto_dominance.index.str[:10]
        crypto_dominance.index.name = 'end_date'
        # Fix: the duplicate early write of btc_cap_dominance.csv was removed;
        # the single write below covers this branch.

    elif data_source == 'coingecko':
        # Live snapshot from coingecko's /coins/markets endpoint.
        import csv
        import requests
        response = requests.get('https://api.coingecko.com/api/v3/coins/markets?vs_currency=usd&order=market_cap_desc&per_page=250&page=1&sparkline=false')
        response = response.json()
        # initialises accumulators
        BTCCap = 0
        altCap = 0
        today = get_today(marker='with_n_dash')  # TODO: this is a live snapshot, not an end-of-day close value -- slightly inconsistent with the other sources
        # Sum every coin's market cap; remember bitcoin's separately.
        for x in response:
            if x['id'] == "bitcoin":
                BTCCap = x['market_cap']
                altCap = altCap + x['market_cap']
            else:
                altCap = altCap + x['market_cap']

        # Append today's dominance (percent, 2 decimal places) to
        # BTCDominance.csv, creating the file if it does not exist.
        with open("BTCDominance.csv", mode='a+', newline="") as datawriter:
            datawriter = csv.writer(datawriter, delimiter=",")
            datawriter.writerow([today, "{:.2f}".format((BTCCap / altCap) * 100)])
        # Fix: the original fell through to the final to_csv() with
        # `crypto_dominance` undefined and raised NameError; this branch has
        # nothing further to persist, so return early.
        return None

    elif data_source == 'coinmarketcap':
        data = get_total_marketcap_amount_via_http(exclude_btc=False)
        crypto_dominance = data['btcDominance']
    else:
        raise NotImplementedError
    file_name = os.path.join(file_path, f'btc_cap_dominance')
    crypto_dominance.to_csv(f'{file_name}.csv')
    return crypto_dominance


@deco_retry(retry=50, retry_sleep=15)
def get_exchange_trading_volume():
    """
    Collect exchange trading-volume datasets and persist them as Excel files
    under ``DATA_DIR/trading_data``:

    1. total centralized-exchange (CEX) spot volume from CoinMarketCap;
    2. Binance BTC-vs-stablecoin spot volume from the Binance kline API;
    3. per-exchange spot volume for major CEXes from CoinGecko;
    4. per-exchange derivatives volume for major CEXes from CoinGecko;
    5. per-exchange spot volume for major DEXes from CoinGecko.

    Each dataset is merged with its on-disk history when the history file
    exists; on a first run the full window is fetched and written as-is.

    :return: None
    """

    # Total CEX spot volume from CoinMarketCap.
    data = get_total_marketcap_amount_via_http(exclude_btc=False)
    file_path = os.path.join(DATA_DIR, r'trading_data')
    file_name = os.path.join(file_path, f'cex_total_spot_amount_coinmarketcap')
    data.to_excel(f'{file_name}.xlsx')

    # Binance spot volume for BTC against stablecoins.
    end_date = get_today(marker='with_n_dash')
    file_name = os.path.join(file_path, f'cex_total_spot_amount_biance_BTCUSD')
    if os.path.exists(f'{file_name}.xlsx'):
        history_binance_amount = pd.read_excel(f'{file_name}.xlsx', index_col='date')
        start_date = history_binance_amount.index[-1]
        if calc_date_diff(start_date, end_date) < 3:
            # Always refetch at least a 3-day window so recently-partial days get refreshed.
            start_date = date_shifter(before=start_date, step='days', how_many=-3)
    else:
        start_date = '2017-01-01'  # first run: bootstrap the full history
    start = str_to_timestamp(start_date, tz_str='+0800')
    end = str_to_timestamp(end_date, tz_str='+0800')
    binance_BTCUSDT = fetch_binance_kline("BTCUSDT", start, end, '1d')
    logger.info('从binance获取BTCUSDT交易额成功')
    binance_BTCUSDC = fetch_binance_kline("BTCUSDC", start, end, '1d')
    logger.info('从binance获取BTCUSDC交易额成功')
    if binance_BTCUSDC is not None:
        # Align USDC volume onto the USDT index; missing days count as 0.
        binance_BTCUSDC = pd.Series(binance_BTCUSDC['amount'], index=binance_BTCUSDT.index).fillna(0)
        amount = binance_BTCUSDT['amount'] + binance_BTCUSDC
    else:
        amount = binance_BTCUSDT['amount']

    amount_df = pd.DataFrame(amount)
    if os.path.exists(f'{file_name}.xlsx'):
        previous_date = date_shifter(amount.index[0], step='days', how_many=-1)
        # NOTE(review): unlike the sections below, today's (still incomplete)
        # row is kept here (`.iloc[:, ]`, not `.iloc[:-1, ]`) -- confirm intentional.
        amount_df = pd.concat([history_binance_amount.loc[:previous_date, ], amount_df.iloc[:, ]])
    amount_df.to_excel(f'{file_name}.xlsx')

    ### Spot volume history per major exchange from coingecko ###
    cg = CoinGeckoAPI()
    spot_exchange_id_list = ['binance', 'gdax', 'okex', 'huobi', 'gate', 'kucoin', 'crypto_com', 'bitfinex', 'bybit_spot']
    exchange_volume_df_list = []
    file_name = os.path.join(file_path, f'cex_total_spot_amount_each_exchange_coingecko')
    # A 30-day window is enough when history exists; otherwise bootstrap a year.
    if os.path.exists(f'{file_name}.xlsx'):
        days = 30
    else:
        days = 365
    for exchange_id in spot_exchange_id_list:
        logger.info(f'开始获取{exchange_id}现货交易额数据')
        exchange_volume = cg.get_exchanges_volume_chart_by_id(id=exchange_id, days=days)['amount']
        logger.info(f'{exchange_id}现货交易额数据获取成功')
        # Map coingecko ids to the display names used in the output columns.
        if exchange_id == 'gdax':
            exchange_name = 'coinbase'
        elif exchange_id == 'ftx_spot':
            exchange_name = 'ftx'
        else:
            exchange_name = exchange_id
        exchange_volume = exchange_volume[~exchange_volume.index.duplicated(keep="first")]
        exchange_volume.name = exchange_name
        exchange_volume_df_list.append(exchange_volume)
        time.sleep(10)  # stay under coingecko's rate limit

    spot_exchange_volume_df = pd.concat(exchange_volume_df_list, axis=1)
    if os.path.exists(f'{file_name}.xlsx'):
        hist_spot_exchange_volume_df = pd.read_excel(f'{file_name}.xlsx', index_col='date')
        previous_date = date_shifter(spot_exchange_volume_df.index[0], step='days', how_many=-1)
        spot_exchange_volume_df = pd.concat([hist_spot_exchange_volume_df.loc[:previous_date, ], spot_exchange_volume_df.iloc[:-1, ]])  # drop the last row: today's data is still incomplete
    spot_exchange_volume_df.to_excel(f'{file_name}.xlsx')

    ### Derivatives volume history per major derivatives exchange from coingecko ###
    deri_exchange_id_list = ['binance', 'okex', 'bybit', 'kucoin', 'crypto_com', 'huobi', 'gate', 'bitfinex', 'kraken', 'bitmex']
    deri_exchange_volume_df_list = []
    file_name = os.path.join(file_path, f'cex_total_deri_amount_each_exchange_coingecko')
    if os.path.exists(f'{file_name}.xlsx'):
        days = 30
    else:
        days = 365
    for deri_exchange_id in deri_exchange_id_list:
        logger.info(f'开始获取{deri_exchange_id}衍生品交易额数据')
        deri_exchange_data = cg.get_derivative_exchanges_volume_by_id(id=deri_exchange_id, days=days)
        deri_exchange_volume = deri_exchange_data['amount']
        logger.info(f'{deri_exchange_id}衍生品交易额数据获取成功')
        deri_exchange_volume = deri_exchange_volume[~deri_exchange_volume.index.duplicated(keep="first")]
        deri_exchange_volume[deri_exchange_volume > 100 * deri_exchange_volume.median()] = None  # drop outliers (>100x the median)
        deri_exchange_volume.name = deri_exchange_id
        deri_exchange_volume_df_list.append(deri_exchange_volume)
        time.sleep(10)

    deri_exchange_volume_df = pd.concat(deri_exchange_volume_df_list, axis=1)
    # Fix: the original read the history file unconditionally and raised
    # FileNotFoundError on the very first run (the days=365 branch); guard the
    # merge the same way the spot section does.
    if os.path.exists(f'{file_name}.xlsx'):
        hist_deri_exchange_volume_df = pd.read_excel(f'{file_name}.xlsx', index_col='Unnamed: 0')
        previous_date = date_shifter(deri_exchange_volume_df.index[0], step='days', how_many=-1)
        deri_exchange_volume_df = pd.concat([hist_deri_exchange_volume_df.loc[:previous_date, ], deri_exchange_volume_df.iloc[:-1, ]])  # drop the last row: today's data is still incomplete
    deri_exchange_volume_df.to_excel(f'{file_name}.xlsx')

    time.sleep(5)

    # Spot volume for decentralized exchanges (DEX), by coingecko exchange id.
    dex_exchange_id_list = \
        ['uniswap-bsc', 'uniswap_v2', 'uniswap_v3', 'uniswap_v3_arbitrum', 'uniswap-v3-avalanche', 'uniswap_v3_optimism', 'uniswap_v3_polygon_pos',
         'curve_arbitrum', 'curve_ethereum', 'curve_optimism',
         'apex_pro', 'maverick_protocol', 'ferro-protocol', 'openocean_finance', 'jupiter', 'orca', 'thorwallet', 'thorswap', 'woofi', 'pulsex', 'syncswap',
         'balancer-v1', 'balancer-v2', 'balancer_arbitrum',
         'sushiswap', 'sun_io',
         'pancakeswap_aptos', 'pancakeswap_ethereum', 'pancakeswap_new', 'pancakeswap_stableswap', 'pancakeswap-v3-bsc', 'pancakeswap-v3-ethereum',
         'dodo', 'dodo_arbitrum', 'dodo_bsc', 'dodo_polygon',
         'traderjoe', 'traderjoe-v2-1-arbitrum', 'traderjoe-v2-1-avalanche', 'traderjoe-v2-1-bsc', 'traderjoe-v2-arbitrum', 'traderjoe-v2-avalanche', 'traderjoe-v2-bsc',
         'quickswap', 'quickswap-polygon-zkevm', 'quickswap_v3',
         ]

    dex_exchange_volume_list = []
    for dex_exchange_id in dex_exchange_id_list:
        logger.info(f'开始获取{dex_exchange_id}的交易额数据')
        # Best-effort per exchange: a failure is logged and the id is skipped.
        try:
            exchange_volume_df = cg.get_exchanges_volume_chart_by_id(id=dex_exchange_id, days=30)
        except Exception:  # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate
            logger.info(f'获取{dex_exchange_id}的交易额数据失败')
        else:
            exchange_volume_df = exchange_volume_df[~exchange_volume_df.index.duplicated(keep='first')]
            exchange_volume_series = exchange_volume_df['amount']
            exchange_volume_series.name = dex_exchange_id
            dex_exchange_volume_list.append(exchange_volume_series)
            logger.info(f'获取{dex_exchange_id}的交易额数据成功')
            time.sleep(8)  # stay under coingecko's rate limit

    dex_exchange_volume_df = pd.concat(dex_exchange_volume_list, axis=1)
    dex_exchange_volume_df.sort_index(inplace=True)
    dex_exchange_volume_df['total'] = dex_exchange_volume_df.sum(axis=1)
    file_name = os.path.join(file_path, f'dex_total_spot_amount_coingecko')
    # Fix: same first-run guard as above -- the original read the history
    # unconditionally and raised FileNotFoundError when the file was absent.
    if os.path.exists(f'{file_name}.xlsx'):
        hist_dex_exchange_volume_df = pd.read_excel(f'{file_name}.xlsx', index_col='date')
        previous_date = date_shifter(dex_exchange_volume_df.index[0], step='days', how_many=-1)
        dex_exchange_volume_df = pd.concat([hist_dex_exchange_volume_df.loc[:previous_date, ], dex_exchange_volume_df.iloc[:-1, ]])  # drop the last row: today's data is still incomplete
    dex_exchange_volume_df.to_excel(f'{file_name}.xlsx')

    return


@deco_retry(retry=50, retry_sleep=15)
def get_cex_analysis_data():
    """
    计算中心化交易所板块相关数据: 中心化交易所现货交易额，现货交易额占比； 衍生品交易额，衍生品交易额占比
    # 现货交易额可以每周获取一次
    # 衍生品交易额

    :return:
    """

    #### 统计交易所平台币相关信息 ###
    end_date = get_today(marker='with_n_dash')

    file_name = os.path.join(DATA_DIR, f'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')
    btc_price = ohlcv_data.loc['2017-07-26':, 'close']
    btc_price.name = 'bitcoin_price'
    btc_log_price = np.log10(ohlcv_data.loc['2017-07-26':, 'close'])
    btc_log_price.name = 'bitcoin_log_price'

    # # 更新文件: 交易所代币数据从all_history_ohlcvm_coinmarketcap复制出来，
    # for exchange_token in ['bnb', 'cronos', 'ftx-token', 'gatetoken', 'huobi-token', 'kucoin-token', 'okb', 'unus-sed-leo']:
    #     file_path = os.path.join(DATA_DIR, r'all_history_ohlcvm_coinmarketcap\all_history_ohlcvm_coinmarketcap')
    #     file_name = os.path.join(file_path, f'{exchange_token}')
    #     backup_path = os.path.join(DATA_DIR, f'cex_coins_data')
    #     copy_file(f'{file_name}.xlsx', backup_path)
    # coinbase = yf.download('COIN', start='2021-04-10', end=end_date)
    # coinbase.index = pd.to_datetime(coinbase.index)
    # coinbase.index = coinbase.index.astype(str)
    # coinbase_market_cap1 = coinbase.loc['2021-04-14':'2021-06-30', 'Close'] * 210000000
    # coinbase_market_cap2 = coinbase.loc['2021-07-01':'2021-12-31', 'Close'] * 215000000
    # coinbase_market_cap3 = coinbase.loc['2022-01-03':'2022-03-31', 'Close'] * 220000000
    # coinbase_market_cap4 = coinbase.loc['2022-04-01':end_date, 'Close'] * 225000000
    # coinbase_market_cap = pd.concat([coinbase_market_cap1, coinbase_market_cap2, coinbase_market_cap3, coinbase_market_cap4])
    # coinbase['market_cap'] = coinbase_market_cap
    # file_name = os.path.join(backup_path, f'coinbase')
    # coinbase.to_excel(f'{file_name}.xlsx')
    #
    # file_path_temp = os.path.join(DATA_DIR, f'cex_coins_data')
    # all_filename_path_dict = get_all_filename_path(file_path_temp)
    # all_cex_coins_name = [i.replace('.xlsx', '') for i in all_filename_path_dict.keys()]
    #
    # all_cex_coins_price_df_list = []
    # all_cex_coins_marketcap_df_list = []
    # for coins in all_cex_coins_name:
    #     file_name = all_filename_path_dict[f'{coins}.xlsx']
    #     history_data = pd.read_excel(f'{file_name}')
    #     if 'Date' in history_data.columns:
    #         history_data.rename(columns={'Date': 'end_date', 'Adj Close': 'close'}, inplace=True)
    #         history_data['end_date'] = history_data['end_date'].astype(str)
    #     history_data.set_index('end_date', inplace=True)
    #     history_price_data = history_data['close']
    #     history_price_data.name = coins
    #     all_cex_coins_price_df_list.append(history_price_data)
    #
    #     if 'market_cap' in history_data.columns:
    #         history_marketcap_data = history_data['market_cap']
    #         history_marketcap_data.name = coins
    #         all_cex_coins_marketcap_df_list.append(history_marketcap_data)
    #
    # all_cex_coins_price_df = pd.concat(all_cex_coins_price_df_list, axis=1)
    # all_cex_coins_price_df.sort_index(inplace=True)
    # all_cex_coins_price_df.ffill(inplace=True)
    # rename_dict = {'bnb': 'binance', 'cronos': 'crypto_com', 'ftx-token': 'ftx', 'gatetoken': 'gate', 'huobi-token': 'huobi', 'kucoin-token': 'kucoin', 'okb': 'okex', 'unus-sed-leo': 'bitfinex'}
    # all_cex_coins_price_df.rename(columns=rename_dict, inplace=True)
    # all_daily_ret_df = all_cex_coins_price_df.pct_change()  # 各个币每日收益率
    # all_cex_coins_price_df_temp = all_cex_coins_price_df.copy()
    # all_cex_coins_price_df_temp.index = pd.to_datetime(all_cex_coins_price_df_temp.index)
    # all_weekly_ret_df = all_cex_coins_price_df_temp.resample('W').last().pct_change()
    # all_monthly_ret_df = all_cex_coins_price_df_temp.resample('M').last().pct_change()
    # file_path = os.path.join(DATA_DIR, f'cex_coins_data\statistics_data')
    # file_name = os.path.join(file_path, f'all_cex_coins_daily_ret')
    # all_daily_ret_df.to_excel(f'{file_name}.xlsx')
    # file_name = os.path.join(file_path, f'all_cex_coins_weekly_ret')
    # all_weekly_ret_df.to_excel(f'{file_name}.xlsx')
    # file_name = os.path.join(file_path, f'all_cex_coins_monthly_ret')
    # all_monthly_ret_df.to_excel(f'{file_name}.xlsx')
    #
    # # 各币回购前后的收益率情况, 回购对币价有没有冲击
    # repurchase_date_dict = {'binance': ['2017-10-18',
    #                                     '2018-01-15', '2018-04-15', '2018-07-18', '2018-10-17', '2019-01-16', '2019-04-16', '2019-07-12', '2019-10-17',
    #                                     '2020-01-18', '2020-04-18', '2020-07-18', '2020-10-17', '2021-01-19', '2021-04-16', '2021-07-19', '2021-10-18',
    #                                     '2022-01-18', '2022-04-19', '2022-07-13'],
    #                         'huobi': ['2019-04-15', '2019-07-15', '2019-10-15', '2020-01-15', '2020-04-15', '2020-07-15', '2020-10-15', '2021-01-15',
    #                                   '2021-04-15', '2021-07-15', '2021-10-15', '2022-01-15', '2022-04-15', '2022-07-15'],
    #                         'okex': ['2019-05-31', '2019-09-03', '2019-12-06', '2020-03-02', '2020-06-02', '2020-09-02', '2020-12-02', '2021-03-02',
    #                                  '2021-06-02', '2021-09-06', '2021-12-02', '2022-03-02', '2022-06-04']
    #                         }
    # repurchase_data_list = []
    # for exchange_token in repurchase_date_dict.keys():
    #     for date in repurchase_date_dict[exchange_token]:
    #         data_list = [exchange_token, date]
    #         for period in [0, 3, 7, 14]:
    #             if period == 0:
    #                 cum_ret = all_daily_ret_df.loc[date, exchange_token]
    #                 data_list.extend([cum_ret])
    #             else:
    #                 start_date1 = date_shifter(date, step='days', how_many=-period)
    #                 start_date2 = date_shifter(date, step='days', how_many=-1)
    #                 end_date1 = date_shifter(date, step='days', how_many=1)
    #                 end_date2 = date_shifter(date, step='days', how_many=period)
    #                 cum_ret1 = all_daily_ret_df.loc[start_date1: start_date2, exchange_token]
    #                 cum_ret1 = ((cum_ret1 + 1).cumprod() - 1)[-1]
    #                 cum_ret2 = all_daily_ret_df.loc[end_date1: end_date2, exchange_token]
    #                 cum_ret2 = ((cum_ret2 + 1).cumprod() - 1)[-1]
    #                 data_list.extend([cum_ret1, cum_ret2])
    #         repurchase_data_list.append(data_list)
    # repurchase_data = pd.DataFrame(repurchase_data_list, columns=['交易所名称', '回购日期', '当天收益', '前3天收益', '后3天收益', '前7天收益', '后7天收益', '前14天收益', '后14天收益'])
    # file_path = os.path.join(DATA_DIR, f'cex_coins_data\statistics_data')
    # file_name = os.path.join(file_path, f'all_cex_repurchase_effect')
    # repurchase_data.to_excel(f'{file_name}.xlsx')
    #
    # # 测试是否有上市刚开始的新手光环，每个币刚上市时的累计收益率，以及其他币对应的收益率
    # all_exchange_cum_ret_list = []
    # for period in [7, 14, 30, 90]:
    #     for exchange_token_ico in all_daily_ret_df.columns:
    #         exchange_token_ico_date = all_daily_ret_df[exchange_token_ico].dropna()
    #         exchange_token_ico_date = exchange_token_ico_date.index[0]
    #         period_end_date = date_shifter(exchange_token_ico_date, step='days', how_many=period)
    #         exchange_cum_ret = (all_daily_ret_df.loc[exchange_token_ico_date:period_end_date, :] + 1).cumprod() - 1
    #         exchange_cum_ret = exchange_cum_ret.iloc[-1, :]
    #         exchange_cum_ret.name = exchange_token_ico + str(period) + 'days'
    #         all_exchange_cum_ret_list.append(exchange_cum_ret)
    # all_exchange_cum_ret_df = pd.concat(all_exchange_cum_ret_list, axis=1)
    # file_name = os.path.join(file_path, f'all_cex_coins_cum_ret')
    # all_exchange_cum_ret_df.to_excel(f'{file_name}.xlsx')

    # all_cex_coins_log_price_df = pd.concat([np.log10(all_cex_coins_price_df), btc_price, btc_log_price], axis=1)
    # file_name = os.path.join(file_path, f'all_cex_coins_log_prices')
    # all_cex_coins_log_price_df.to_excel(f'{file_name}.xlsx')
    #
    # all_cex_coins_marketcap_df = pd.concat(all_cex_coins_marketcap_df_list, axis=1)
    # all_cex_coins_marketcap_df.rename(columns=rename_dict, inplace=True)
    # all_cex_coins_marketcap_df.ffill(inplace=True)
    # all_marketcap = all_cex_coins_marketcap_df.sum(axis=1)
    # marketcap_ratio_list = []
    # for columns in all_cex_coins_marketcap_df.columns:
    #     res = all_cex_coins_marketcap_df[columns] / all_marketcap
    #     res.name = columns
    #     marketcap_ratio_list.append(res)
    # all_cex_coins_marketcap_ratio_df = pd.concat(marketcap_ratio_list, axis=1)
    #
    # all_daily_ret_df['all_composite_ret'] = (all_daily_ret_df * all_cex_coins_marketcap_ratio_df).sum(axis=1)
    # all_daily_ret_df['all_ret'] = all_daily_ret_df.mean(axis=1)
    # all_daily_ret_df['cex_composite_nav'] = (all_daily_ret_df['all_composite_ret'] + 1).cumprod()
    # all_daily_ret_df['cex_nav'] = (all_daily_ret_df['all_ret'] + 1).cumprod()
    # all_daily_ret_df['cex_cap_weighted_log_nav'] = np.log10(all_daily_ret_df['cex_composite_nav'])
    # all_daily_ret_df['cex_equal_weighted_log_nav'] = np.log10(all_daily_ret_df['cex_nav'])
    # all_daily_ret_df = pd.concat([all_daily_ret_df, btc_price, btc_log_price], axis=1)
    #
    # file_name = os.path.join(file_path, f'all_cex_coins_ret')
    # all_daily_ret_df.to_excel(f'{file_name}.xlsx')

    # 各交易所的现货交易额汇总
    file_path_temp = os.path.join(DATA_DIR, f'trading_data')
    file_name = os.path.join(file_path_temp, f'cex_total_spot_amount_each_exchange_coingecko')
    exchange_volume_df = pd.read_excel(f'{file_name}.xlsx', index_col='date')
    # 各交易所的衍生品交易额汇总
    file_name = os.path.join(file_path_temp, f'cex_total_deri_amount_each_exchange_coingecko')
    deri_exchange_volume_df = pd.read_excel(f'{file_name}.xlsx', index_col='Unnamed: 0')

    # 各交易所现货,衍生品交易产生的手续费收入汇总
    exchange_spot_fee_df = exchange_volume_df * [0.001, 0.005, 0.0002, 0.0008, 0.002, 0.002, 0.001, 0.004, 0.001, 0.001]
    exchange_deri_fee_df = deri_exchange_volume_df * [0.00018, 0.0002, 0.0002, 0.0002, 0.0002, 0.0005, 0.0002, 0.00015, 0.0002, 0.0002, 0.0002]
    # 手续费收入按照季度汇总
    exchange_spot_fee_df.index = pd.to_datetime(exchange_spot_fee_df.index)
    exchange_season_spot_fee_df = exchange_spot_fee_df.resample('Q').sum()
    exchange_deri_fee_df.index = pd.to_datetime(exchange_deri_fee_df.index)
    exchange_season_deri_fee_df = exchange_deri_fee_df.resample('Q').sum()
    exchange_season_deri_fee_df['okex'] = 0  # okex 回购中不包含 衍生品交易带来的收入
    exchange_season_deri_fee_df['coinbase'] = 0  # coinbase 没有衍生品业务
    exchange_season_fee_df = exchange_season_spot_fee_df + exchange_season_deri_fee_df
    file_path = os.path.join(DATA_DIR, f'cex_coins_data\statistics_data')
    file_name = os.path.join(file_path, f'all_cex_season_fee')
    exchange_season_fee_df.to_excel(f'{file_name}.xlsx')

    # # 各交易所市值除以现货交易额
    # all_cex_coins_ps_ratio_df = all_cex_coins_marketcap_df / exchange_volume_df
    # all_cex_coins_ps_ratio_df.dropna(how='all', axis=0, inplace=True)
    # all_cex_coins_ps_ratio_df = all_cex_coins_ps_ratio_df.rolling(90).mean()
    # new_columns = [i + '_ps' for i in all_cex_coins_ps_ratio_df.columns]
    # all_cex_coins_ps_ratio_df.columns = new_columns
    # # 各交易所市值除以现货交易手续费收入
    # all_cex_coins_pe_ratio_df = all_cex_coins_marketcap_df / exchange_spot_fee_df
    # all_cex_coins_pe_ratio_df.dropna(how='all', axis=0, inplace=True)
    # all_cex_coins_pe_ratio_df = all_cex_coins_pe_ratio_df.rolling(90).mean()
    # all_cex_coins_pe_ratio_df = all_cex_coins_pe_ratio_df / 90
    # new_columns = [i + '_pe' for i in all_cex_coins_pe_ratio_df.columns]
    # all_cex_coins_pe_ratio_df.columns = new_columns

    # 各交易所现货交易额市场占比
    volume_ratio_df_list = []
    for column in exchange_volume_df.columns:
        volume_ratio = exchange_volume_df[column] / exchange_volume_df.sum(axis=1)
        volume_ratio = volume_ratio.rolling(7).mean()
        volume_ratio.name = column + '_ratio'
        volume_ratio_df_list.append(volume_ratio)
    total_volume = exchange_volume_df.sum(axis=1)
    total_volume.name = 'all_cex_spot_volume'
    volume_ratio_df_list.append(total_volume)
    exchange_volume_ratio_df = pd.concat(volume_ratio_df_list, axis=1)

    # 各交易所衍生品交易额市场占比
    deri_volume_ratio_df_list = []
    for column in deri_exchange_volume_df.columns:
        volume_ratio = deri_exchange_volume_df[column] / deri_exchange_volume_df.sum(axis=1)
        volume_ratio = volume_ratio.rolling(7).mean()
        volume_ratio.name = column + '_ratio'
        deri_volume_ratio_df_list.append(volume_ratio)
    total_volume = deri_exchange_volume_df.sum(axis=1)
    total_volume.name = 'all_cex_deri_volume'
    deri_volume_ratio_df_list.append(total_volume)
    deri_exchange_volume_ratio_df = pd.concat(deri_volume_ratio_df_list, axis=1)

    # 各交易所现货交易带来的利润市场占比
    spot_fee_ratio_df_list = []
    for column in exchange_spot_fee_df.columns:
        spot_fee_ratio = exchange_spot_fee_df[column] / exchange_spot_fee_df.sum(axis=1)
        spot_fee_ratio = spot_fee_ratio.rolling(7).mean()
        spot_fee_ratio.name = column + '_ratio'
        spot_fee_ratio_df_list.append(spot_fee_ratio)
    total_fee = exchange_spot_fee_df.sum(axis=1)
    total_fee.name = 'all_cex_spot_fee'
    spot_fee_ratio_df_list.append(total_fee)
    exchange_spot_fee_ratio_df = pd.concat(spot_fee_ratio_df_list, axis=1)

    # 各交易所衍生品交易带来的利润市场占比
    deri_fee_ratio_df_list = []
    for column in exchange_deri_fee_df.columns:
        deri_fee_ratio = exchange_deri_fee_df[column] / exchange_deri_fee_df.sum(axis=1)
        deri_fee_ratio = deri_fee_ratio.rolling(7).mean()
        deri_fee_ratio.name = column + '_ratio'
        deri_fee_ratio_df_list.append(deri_fee_ratio)
    total_fee = exchange_deri_fee_df.sum(axis=1)
    total_fee.name = 'all_cex_deri_fee'
    deri_fee_ratio_df_list.append(total_fee)
    exchange_deri_fee_ratio_df = pd.concat(deri_fee_ratio_df_list, axis=1)

    # all_exchange_volume_df = pd.concat([exchange_volume_df, exchange_volume_ratio_df, all_cex_coins_ps_ratio_df], axis=1)
    all_exchange_volume_df = pd.concat([exchange_volume_df, exchange_volume_ratio_df], axis=1)
    all_exchange_volume_df = pd.concat([all_exchange_volume_df, btc_log_price.reindex(all_exchange_volume_df.index)], axis=1)
    all_exchange_volume_df['binance_spot_volume_ma7'] = all_exchange_volume_df['binance'].rolling(7).mean()
    file_name = os.path.join(file_path, f'all_cex_spot_volume')
    all_exchange_volume_df.to_excel(f'{file_name}.xlsx')

    # all_exchange_spot_fee_df = pd.concat([exchange_spot_fee_df, exchange_spot_fee_ratio_df, all_cex_coins_pe_ratio_df], axis=1)
    all_exchange_spot_fee_df = pd.concat([exchange_spot_fee_df, exchange_spot_fee_ratio_df], axis=1)
    all_exchange_spot_fee_df = pd.concat([all_exchange_spot_fee_df, btc_log_price.reindex(all_exchange_spot_fee_df.index)], axis=1)
    file_name = os.path.join(file_path, f'all_cex_spot_fee')
    all_exchange_spot_fee_df.to_excel(f'{file_name}.xlsx')

    all_deri_exchange_volume_df = pd.concat([deri_exchange_volume_df, deri_exchange_volume_ratio_df], axis=1)
    all_deri_exchange_volume_df = pd.concat([all_deri_exchange_volume_df, btc_log_price.reindex(all_deri_exchange_volume_df.index)], axis=1)
    all_deri_exchange_volume_df['binance_deri_volume_ma7'] = all_deri_exchange_volume_df['binance'].rolling(7).mean()
    file_name = os.path.join(file_path, f'all_cex_deri_volume')
    all_deri_exchange_volume_df.to_excel(f'{file_name}.xlsx')

    all_exchange_deri_fee_df = pd.concat([exchange_deri_fee_df, exchange_deri_fee_ratio_df], axis=1)
    all_exchange_deri_fee_df = pd.concat([all_exchange_deri_fee_df, btc_log_price.reindex(all_exchange_deri_fee_df.index)], axis=1)
    file_name = os.path.join(file_path, f'all_cex_deri_fee')
    all_exchange_deri_fee_df.to_excel(f'{file_name}.xlsx')

    return


@deco_retry(retry=50, retry_sleep=15)
def crypto_fiat_trade_analysis():
    """
    Analyse BTC / ETH spot market pairs: split traded volume between
    crypto-vs-fiat and crypto-vs-stablecoin pairs.

    The data is scraped from CoinMarketCap and only the current day's
    snapshot is available, so this must run daily to accumulate history.

    :return: DataFrame with one row per (asset, sort_type) for today.
    """
    today = get_today(marker='with_n_dash')

    # Symbol groups are loop-invariant, so build them once.
    # BTC is quoted as XBT on some venues.
    crypto_symbols = {'bitcoin': ['BTC', 'XBT'], 'ethereum': ['ETH']}
    FIAT_name = ['USD', 'EUR', 'KRW', 'JPY']
    STABLECOIN = ['USDT', 'USDC', 'BUSD', 'DAI']

    res_list = []
    for asset in ['bitcoin', 'ethereum']:
        for sort_type in ['cmc_rank', 'volume']:
            data = get_marketpair_amount_via_http(asset=asset, sort=sort_type)
            # Drop volume flagged as excluded: some exchanges inflate (wash-trade) it.
            data = data[data['volumeExcluded'] == 0]

            try:
                CRYPTO_name = crypto_symbols[asset]
            except KeyError:
                # BUG FIX: the original had a bare `NotImplementedError`
                # expression (never raised); raise it properly.
                raise NotImplementedError(f'unsupported asset: {asset}')

            fiat_df = data[data['baseSymbol'].isin(CRYPTO_name) & data['quoteSymbol'].isin(FIAT_name)]
            stablecoin_df = data[data['baseSymbol'].isin(CRYPTO_name) & data['quoteSymbol'].isin(STABLECOIN)]
            # Compute the sums once instead of repeating them per field.
            fiat_volume = fiat_df['volumeUsd'].sum()
            stablecoin_volume = stablecoin_df['volumeUsd'].sum()
            fiat_trade_ratio = fiat_volume / (fiat_volume + stablecoin_volume)
            fiatwithstablecoin_trade_ratio = (fiat_volume + stablecoin_volume) / data['volumeUsd'].sum()

            res = {
                'end_date': today,
                'asset': asset,
                'sort_type': sort_type,
                'total_volume': data['volumeUsd'].sum(),
                'fiatwithstablecoin_volume': fiat_volume + stablecoin_volume,
                'fiat_volume': fiat_volume,
                'stablecoin_volume': stablecoin_volume,
                'fiat_ratio': fiat_trade_ratio,
                'fiatwithstablecoin_ratio': fiatwithstablecoin_trade_ratio}
            res_list.append(res)

    crypto_fiat_trade_df = pd.DataFrame(res_list)

    file_path = os.path.join(DATA_DIR, r'trading_data')
    file_name = os.path.join(file_path, f'crypto_fiat_trade_analysis.xlsx')
    if os.path.exists(file_name):
        hist_crypto_fiat_trade_df = pd.read_excel(file_name, index_col='Unnamed: 0')
        # Append only when today's rows are not already recorded.
        if today not in list(hist_crypto_fiat_trade_df['end_date']):
            crypto_fiat_trade_df = pd.concat([hist_crypto_fiat_trade_df, crypto_fiat_trade_df], ignore_index=True)
            crypto_fiat_trade_df.to_excel(f'{file_name}')
    else:
        crypto_fiat_trade_df.to_excel(f'{file_name}')

    return crypto_fiat_trade_df


@deco_retry(retry=50, retry_sleep=15)
def usdt_premium_data_update():
    """
    Fetch and persist USDT OTC premium data against CNY.

    The data is scraped from web sources; the 5-minute series is only kept
    briefly on the source sites, so this must run daily.

    :return: None
    """
    file_path = os.path.join(DATA_DIR, f'usdt_premium')
    os.makedirs(file_path, exist_ok=True)

    today = get_today(marker='with_n_dash')

    # Refresh the full offshore-CNY (USDCNH) FX history.
    # NOTE: timestamps from the source are Beijing time.
    file_name = os.path.join(file_path, f'USDCNH_exchange_rate.csv')
    if os.path.exists(file_name):
        historical_usdcnh = pd.read_csv(file_name)
        historical_usdcnh = historical_usdcnh[~historical_usdcnh['date'].isna()]
        if max(historical_usdcnh['date']) < today:
            usdcnh = get_currency_historical_price(symbol='USDCNH')
            all_usdcnh = pd.concat([historical_usdcnh, usdcnh], axis=0)
            # BUG FIX: the original discarded the drop_duplicates() result
            # (no assignment, no inplace), so duplicate dates accumulated.
            all_usdcnh = all_usdcnh.drop_duplicates(subset=['date'], keep='last')
            all_usdcnh.to_csv(file_name, index=False)
    else:
        all_usdcnh = get_currency_historical_price(symbol='USDCNH')
        all_usdcnh.to_csv(file_name, index=False)

    usdcnh_latest_price = get_currency_latest_price(symbol='USDCNH')

    # USDT premium from OKX; note this is the USDT-USD quote.
    from filelock import FileLock
    for frequency in ['5m', '1h', '1d']:
        # 1. Pull the latest data from OKX.
        #    (`frequnecy` typo matches the fetch function's signature.)
        okx_usdt_premium = fetch_okex_usdtusd_premium(frequnecy=frequency)
        # 2. Target Excel file and its companion lock file.
        file_name = os.path.join(file_path, f'okx_usdtusd_premium_{frequency}.xlsx')
        lock_file = file_name + ".lock"  # same path, different suffix
        # 3. Guard against concurrent writers with a file lock.
        with FileLock(lock_file):
            # Merge with the existing Excel file if there is one.
            if os.path.exists(file_name):
                historical_data = pd.read_excel(file_name)
                if max(historical_data['datetime']) < max(okx_usdt_premium['datetime']):
                    all_df = pd.concat([historical_data, okx_usdt_premium], axis=0, ignore_index=True)
                    all_df.drop_duplicates(subset=['datetime'], keep='first', inplace=True)
                    all_df.to_excel(f'{file_name}', index=False)
            else:
                okx_usdt_premium.to_excel(file_name, index=False)

    # USDT/CNY ticker + orderbook quotes from OKX; fetched once per hour.
    ticker_file_name = os.path.join(file_path, f'okx_usdtcny_premium_ticker.xlsx')
    orderbook_file_name = os.path.join(file_path, f'okx_usdtcny_premium_orderbook.csv')

    if os.path.exists(ticker_file_name) and os.path.exists(orderbook_file_name):
        ticker_historical_data = pd.read_excel(ticker_file_name)
        orderbook_historical_data = pd.read_csv(orderbook_file_name)

        df_ticker, df_orderbook = fetch_okex_usdtcny_premium()
        df_ticker['usdcnh'] = usdcnh_latest_price
        # Premium of the OTC USDT/CNY quote over the official USDCNH rate.
        df_ticker['premium'] = (df_ticker['usdtcny'] - usdcnh_latest_price) / usdcnh_latest_price
        df_orderbook['usdcnh'] = usdcnh_latest_price

        if max(ticker_historical_data['datetime']) < max(df_ticker['datetime']):
            all_df = pd.concat([ticker_historical_data, df_ticker], axis=0, ignore_index=True)
            all_df.drop_duplicates(subset=['datetime'], keep='first', inplace=True)
            all_df.to_excel(f'{ticker_file_name}', index=False)

        if max(orderbook_historical_data['datetime']) < max(df_orderbook['datetime']):
            all_df = pd.concat([orderbook_historical_data, df_orderbook], axis=0, ignore_index=True)
            all_df.drop_duplicates(subset=['datetime', 'id'], keep='first', inplace=True)
            all_df.to_csv(f'{orderbook_file_name}', index=False)

    else:
        df_ticker, df_orderbook = fetch_okex_usdtcny_premium()
        df_ticker['usdcnh'] = usdcnh_latest_price
        df_ticker['premium'] = (df_ticker['usdtcny'] - usdcnh_latest_price) / usdcnh_latest_price
        df_orderbook['usdcnh'] = usdcnh_latest_price
        df_ticker.to_excel(f'{ticker_file_name}', index=False)
        # BUG FIX: the orderbook file is read back above with pd.read_csv,
        # but was written with to_excel; write it as csv so the next run
        # can actually parse it.
        df_orderbook.to_csv(f'{orderbook_file_name}', index=False)

    # USDT/CNY premium scraped from btc126; updated once per day.
    file_name = os.path.join(file_path, f'btc126_usdtcny_premium.xlsx')
    if os.path.exists(file_name):
        historical_data = pd.read_excel(file_name)
        if max(historical_data['atime']) < get_yesterday(marker='with_n_dash'):
            btc126_usdt_premium = get_btc126_usdt_premium()
            all_df = pd.concat([historical_data, btc126_usdt_premium], axis=0, ignore_index=True)
            all_df.drop_duplicates(subset=['datetime'], keep='first', inplace=True)
            all_df.to_excel(f'{file_name}', index=False)
    else:
        btc126_usdt_premium = get_btc126_usdt_premium()
        btc126_usdt_premium.to_excel(f'{file_name}', index=False)

    # Daily BTC ohlcv, used to annotate the premium series with log price.
    file_name = os.path.join(DATA_DIR, f'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')
    prices_df = ohlcv_data[['close']].copy()  # copy() avoids chained-assignment warning below
    prices_df['log_price'] = np.log10(prices_df['close'])

    file_name = os.path.join(file_path, f'btc126_usdtcny_premium.xlsx')
    btc126_usdt_premium = pd.read_excel(file_name)
    all_df = btc126_usdt_premium.merge(prices_df, left_on='atime', right_index=True)
    file_name = os.path.join(file_path, f'btc126_usdtcny_premium_log_prices.xlsx')
    all_df.to_excel(file_name, index=False)

    # Hourly BTC prices, forward-filled onto the OKX ticker timestamps.
    prices_df = get_prices(ohlc=False, asset='BTC', start_date='2022-09-20', end_date=None, interval='1h')
    prices_df = prices_df[['close']].copy()
    prices_df['log_price'] = np.log10(prices_df['close'])

    ticker_file_name = os.path.join(file_path, f'okx_usdtcny_premium_ticker.xlsx')
    okx_usdtcny_premium_ticker = pd.read_excel(ticker_file_name)
    okx_usdtcny_premium_ticker.set_index('datetime', inplace=True)
    # Union both timelines, ffill price gaps, then keep only ticker timestamps.
    all_datetime_list = list(prices_df.index) + list(okx_usdtcny_premium_ticker.index)
    prices_df = prices_df.reindex(all_datetime_list)
    prices_df.sort_index(inplace=True)
    prices_df.ffill(inplace=True)
    prices_df = prices_df.reindex(okx_usdtcny_premium_ticker.index)
    all_df = okx_usdtcny_premium_ticker.merge(prices_df, left_index=True, right_index=True)
    file_name = os.path.join(file_path, f'okx_usdtcny_premium_ticker_log_prices.xlsx')
    all_df.to_excel(file_name)


def get_futures_estimated_leverage_ratio():
    """
    Estimate the futures leverage ratio inside exchanges, computed as
    (BTC open interest + ETH open interest) /
    (on-exchange BTC + ETH + stablecoin balances), all in USD terms.

    Writes trading_data/futures_estimated_leverage_ratio.xlsx.

    :return: None
    """
    end_date = get_today(marker='with_n_dash')  # series runs through yesterday's close

    # Aggregate BTC / ETH open interest across venues. The slice keeps all
    # columns except the first and the last three — presumably timestamp and
    # summary columns of the chart payload; TODO confirm against the source.
    btc_oi = get_exchange_open_interest_chart(asset='BTC', interval=0)
    btc_oi = btc_oi[list(btc_oi.columns)[1:-3]].sum(axis=1)
    eth_oi = get_exchange_open_interest_chart(asset='ETH', interval=0)
    eth_oi = eth_oi[list(eth_oi.columns)[1:-3]].sum(axis=1)

    # On-exchange balances of BTC, ETH and the major stablecoins.
    btc_supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset='BTC', currency='USD', start_date='2020-02-28', end_date=end_date)['Exchange Balance (Total)']
    eth_supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset='ETH', currency='USD', start_date='2020-02-28', end_date=end_date)['Exchange Balance (Total)']

    usdt_supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset='USDT', start_date='2020-02-28', end_date=end_date)['Exchange Balance (Total)']
    usdc_supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset='USDC', start_date='2020-02-28', end_date=end_date)['Exchange Balance (Total)']
    busd_supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset='BUSD', start_date='2020-02-28', end_date=end_date)['Exchange Balance (Total)']
    stablecoin_in_exchange = pd.concat([usdt_supply_in_exchange, usdc_supply_in_exchange, busd_supply_in_exchange], axis=1).sum(axis=1)

    estimated_leverage_ratio = (btc_oi + eth_oi) / (btc_supply_in_exchange + eth_supply_in_exchange + stablecoin_in_exchange)
    estimated_leverage_ratio.name = 'estimated_leverage_ratio'

    # Daily BTC ohlcv for the log-price overlay.
    file_name = os.path.join(DATA_DIR, f'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')
    prices_df = ohlcv_data[['close']].copy()  # copy() avoids chained-assignment warning
    prices_df['log_price'] = np.log10(prices_df['close'])

    all_df = prices_df.merge(estimated_leverage_ratio, left_index=True, right_index=True)
    file_path = os.path.join(DATA_DIR, f'trading_data')
    file_name = os.path.join(file_path, f'futures_estimated_leverage_ratio.xlsx')
    all_df.to_excel(file_name)

    return


def get_futures_term_structure():
    """
    Build the OKX BTC futures term structure: the premium of each dated
    contract over spot, plus the spread between the next-quarter and
    current-quarter delivery contracts.

    Writes futures_kline/okex_{contract_type}_futures_term_structure.xlsx
    for each of 'BTC-USD' and 'BTC-USDT'.

    :return: None
    """
    # Daily BTC ohlcv for the spot leg.
    file_name = os.path.join(DATA_DIR, f'BTC_history_ohlcvm')
    ohlcv_data = pd.read_excel(f'{file_name}.xlsx')
    prices_df = ohlcv_data[['end_date', 'close']].copy()  # copy() avoids chained-assignment warning
    prices_df['log_price'] = np.log10(prices_df['close'])

    for contract_type in ['BTC-USD', 'BTC-USDT']:
        # Dated (delivery) futures kline.
        file_path = os.path.join(DATA_DIR, f'futures_kline')
        file_name = os.path.join(file_path, f'okex_{contract_type}_futures_ohlcv.xlsx')
        df = pd.read_excel(file_name)

        # After the merge: 'close_x' = contract close, 'close_y' = spot close.
        all_df = df.merge(prices_df, on='end_date', how='left')
        # Days remaining until contract delivery.
        all_df['delta_days'] = (pd.to_datetime(all_df['contract_date']) - pd.to_datetime(all_df['datetime'])).dt.days

        # Premium of the contract price over spot.
        all_df['spread'] = (all_df['close_x'] - all_df['close_y']) / all_df['close_y']
        all_df['annual_spread'] = all_df['spread'] * 365 / all_df['delta_days']
        all_df['spread_slope'] = all_df['spread'] / all_df['delta_days']

        # Premium of the next-quarter over the current-quarter contract.
        # '交割当季' = current-quarter delivery, '交割次季' = next-quarter delivery.
        for date_ in all_df['end_date'].unique():
            test_df = all_df[all_df['end_date'] == date_]
            if '交割当季' in list(test_df['contract_type']) and '交割次季' in list(test_df['contract_type']):
                this_season_deltadays = test_df.loc[test_df['contract_type'] == '交割当季', 'delta_days'].values[0]
                next_season_deltadays = test_df.loc[test_df['contract_type'] == '交割次季', 'delta_days'].values[0]
                delta_days = next_season_deltadays - this_season_deltadays
                this_season_close = test_df.loc[test_df['contract_type'] == '交割当季', 'close_x'].values[0]
                next_season_close = test_df.loc[test_df['contract_type'] == '交割次季', 'close_x'].values[0]
                spot_close = test_df.loc[test_df['contract_type'] == '交割次季', 'close_y'].values[0]

                # All derived columns land on the next-quarter row of this
                # date; compute the row mask once instead of six times.
                next_season_mask = (all_df['end_date'] == date_) & (all_df['contract_type'] == '交割次季')

                # Current-quarter contract premium over spot.
                this_season_spread = test_df.loc[test_df['contract_type'] == '交割当季', 'spread'].values[0]  # not annualised
                all_df.loc[next_season_mask, 'firstseason_spot_spread'] = this_season_spread
                all_df.loc[next_season_mask, 'annual_firstseason_spot_spread'] = this_season_spread * 365 / this_season_deltadays  # annualised
                all_df.loc[next_season_mask, 'firstseason_spot_spread_slope'] = this_season_spread / this_season_deltadays  # per-day slope

                # Next-quarter premium over current-quarter, scaled by spot.
                spread = (next_season_close - this_season_close) / spot_close
                all_df.loc[next_season_mask, 'twoseason_spread'] = spread
                all_df.loc[next_season_mask, 'annual_twoseason_spread'] = spread * 365 / delta_days
                all_df.loc[next_season_mask, 'twoseason_spread_slope'] = spread / delta_days

        file_name = os.path.join(file_path, f'okex_{contract_type}_futures_term_structure.xlsx')
        all_df.to_excel(file_name)


def daily_ret_characters_analysis():
    """
    Compute daily-return character features for BTC — rolling volatility,
    amplitude-threshold ratios, up-day / new-high counts, technical
    indicators and price-with-volume surge flags — and write them to
    trading_data/daily_ret_characters_analysis.xlsx.

    :return: None
    """
    # Daily BTC ohlcvm history.
    file_name = os.path.join(DATA_DIR, f'BTC_history_ohlcvm')
    prices_df = pd.read_excel(f'{file_name}.xlsx', index_col='end_date')

    prices_df['ret'] = prices_df['close'].pct_change()
    prices_df['pre_close'] = prices_df['close'].shift(1)
    prices_df['log_price'] = np.log10(prices_df['close'])
    prices_df['30days_cumret'] = prices_df['close'].pct_change(30)
    # Daily amplitude: (high - low) relative to previous close; sign_amplitude
    # carries the direction of the day's return.
    prices_df['amplitude'] = (prices_df['high'] - prices_df['low']) / prices_df['pre_close']
    prices_df['amplitude_ma30'] = prices_df['amplitude'].rolling(30).mean()
    prices_df['sign_amplitude'] = prices_df['amplitude'] * np.sign(prices_df['ret'])

    # Historical volatility: rolling std of returns plus MA / bias / oscillator.
    # (Other windows [10, 15, 20, 40, 60] were explored; 30 is kept.)
    for period in [30]:
        prices_df[f'std_{period}days'] = prices_df['ret'].rolling(period).std()
        for ma_period in [30, 60, 100, 120, 150]:
            prices_df[f'std_{period}days_ma{ma_period}'] = prices_df[f'std_{period}days'].rolling(ma_period).mean()
            prices_df[f'std_{period}days_bias{ma_period}'] = prices_df[f'std_{period}days'] - prices_df[f'std_{period}days_ma{ma_period}']
            prices_df[f'std_{period}days_osc{ma_period}'] = prices_df[f'std_{period}days_bias{ma_period}'] / prices_df[f'std_{period}days_ma{ma_period}']

    # Share of days whose amplitude exceeds a threshold, plus MA / bias / osc.
    for period in [30]:
        for threshold in [0.05]:
            prices_df[f'amplitude_{period}days_>{threshold}_numratio'] = (prices_df['amplitude'] >= threshold).rolling(period).sum() / period
            for ma_period in [30, 60, 100, 120, 150]:
                prices_df[f'amplitude_{period}days_>{threshold}_numratio_ma{ma_period}'] = prices_df[f'amplitude_{period}days_>{threshold}_numratio'].rolling(ma_period).mean()
                prices_df[f'amplitude_{period}days_>{threshold}_numratio_bias{ma_period}'] = prices_df[f'amplitude_{period}days_>{threshold}_numratio'] - prices_df[f'amplitude_{period}days_>{threshold}_numratio_ma{ma_period}']
                prices_df[f'amplitude_{period}days_>{threshold}_numratio_osc{ma_period}'] = prices_df[f'amplitude_{period}days_>{threshold}_numratio_bias{ma_period}'] / prices_df[f'amplitude_{period}days_>{threshold}_numratio_ma{ma_period}']

    # Share of days in the past `period` days whose return exceeds a threshold.
    for period in [30]:
        for threshold in [0.05]:
            prices_df[f'ret_{period}days_>{threshold}_numratio'] = (prices_df['ret'] >= threshold).rolling(period).sum() / period

    # Share of up days within the past `period` days.
    for period in [60]:
        prices_df[f'ret_{period}days_>=0_numratio'] = (prices_df['ret'] >= 0).rolling(period).sum() / period

    # Share of days setting a new `newhigh_period`-day closing high.
    for newhigh_period in [15]:
        for period in [30]:
            prices_df[f'new{newhigh_period}high_{period}days_numratio'] = ((prices_df['close'] - prices_df['close'].rolling(newhigh_period).max()) == 0).rolling(period).sum() / period

    # Technical indicators computed by helper functions elsewhere in this file.
    prices_df[f'PPO_revised5120'] = indicator_PPO_revised(prices_df, short_period=5, long_period=120)
    prices_df[f'PPO_revised160'] = indicator_PPO_revised(prices_df, short_period=1, long_period=60)
    prices_df[f'BOP120'] = indicator_BOP(prices_df, period=120)
    prices_df[f'RSI120'] = indicator_RSI(prices_df, period=120)
    prices_df[f'ROC180'] = indicator_ROC(prices_df, period=180)
    prices_df[f'BOLL120'] = indicator_BOLL(prices_df, period=120)
    prices_df[f'KC120'] = indicator_KC(prices_df, period=120)
    prices_df[f'CV90'] = indicator_CV(prices_df, period=90)

    # Flag windows where price rose together with a volume/amount surge; both
    # are measured as the rise from the rolling (period+1)-day minimum.
    for period in [14]:
        prices_df[f'{period}days_ret'] = (prices_df['close'] - prices_df['close'].rolling(period + 1).min()) / prices_df['close'].rolling(period + 1).min()
        prices_df[f'{period}days_volume_chg'] = (prices_df['volume'] - prices_df['volume'].rolling(period + 1).min()) / prices_df['volume'].rolling(period + 1).min()
        prices_df[f'{period}days_amount_chg'] = (prices_df['amount'] - prices_df['amount'].rolling(period + 1).min()) / prices_df['amount'].rolling(period + 1).min()

        for ret_threshold in [0.3, 0.5]:
            for amount_threshold in [0.5, 0.8, 1]:
                if amount_threshold >= ret_threshold:
                    flag_col = f'{period}days_ret_{ret_threshold}_amount_{amount_threshold}'
                    prices_df[flag_col] = 0
                    # BUG FIX: the original issued this identical assignment twice.
                    prices_df.loc[(prices_df[f'{period}days_ret'] >= ret_threshold) & (prices_df[f'{period}days_amount_chg'] >= amount_threshold), flag_col] = 1

    file_path = os.path.join(DATA_DIR, f'trading_data')
    file_name = os.path.join(file_path, f'daily_ret_characters_analysis')
    prices_df = prices_df.loc['2015-01-01':, :]
    prices_df.to_excel(f'{file_name}.xlsx')


if __name__ == '__main__':
    file_path = os.path.join(DATA_DIR, 'trading_data')
    os.makedirs(file_path, exist_ok=True)

    def _run_task(func_name, func, *task_args, start_msg, end_msg, **func_kwargs):
        """Run one pipeline step with uniform logging, DB bookkeeping and email alerting.

        Logs *start_msg*, marks the task as started (status 0) via ``task_to_db``,
        calls ``func(**func_kwargs)``, then on success marks it finished (status 1)
        and logs *end_msg*. On any exception the full traceback is logged, an alert
        email is sent, and the exception is re-raised so the script aborts.

        :param func_name: task name recorded in the DB and in the alert email
        :param func: the callable to execute
        :param task_args: extra positional args forwarded to ``task_to_db``
                          (e.g. the asset name)
        :param start_msg: message logged before the task runs (keyword-only)
        :param end_msg: message logged after the task succeeds (keyword-only)
        :param func_kwargs: keyword arguments forwarded to ``func``
        """
        script_name = os.path.basename(__file__)
        logger.info(start_msg)
        # Status 0 = task started. The original code omitted the explicit 0 for two
        # of the five tasks; it is now passed consistently for every task.
        task_to_db(script_name, func_name, 0, *task_args)
        try:
            func(**func_kwargs)
        except Exception:
            msg = traceback.format_exc()
            logger.info(msg)
            send_error_to_email(script_name=script_name, func_name=func_name, message=msg)
            raise  # bare raise keeps the original traceback intact
        task_to_db(script_name, func_name, 1, *task_args)  # status 1 = task done
        logger.info(end_msg)

    # Parse OKX futures klines
    # get_okex_futures_kline()
    # get_futures_term_structure()

    # # get_btc_dominance(data_source='qkl123', end_date=None)
    # get_btc_dominance(data_source='coinmarketcap', end_date=None)

    # Fetch all_ohlcvm data for every asset
    # get_all_ohlcvm_data(data_source='coinmarketcap', asset='all', start_date=None, end_date=None, if_print=True, initialize=False)

    # Fetch BTC ohlcvm data
    _run_task(
        'get_all_ohlcvm_data', get_all_ohlcvm_data, "bitcoin",
        start_msg="开始从coinmarketcap得到bitcoin的all_ohlcvm_data",
        end_msg="成功从coinmarketcap得到bitcoin的all_ohlcvm_data",
        data_source='coinmarketcap', asset='bitcoin', start_date=None, end_date=None, if_print=True,
    )

    # Fetch ETH ohlcvm data
    _run_task(
        'get_all_ohlcvm_data', get_all_ohlcvm_data, "ethereum",
        start_msg="开始从coinmarketcap得到ethereum的all_ohlcvm_data",
        end_msg="成功从coinmarketcap得到ethereum的all_ohlcvm_data",
        data_source='coinmarketcap', asset='ethereum', start_date=None, end_date=None, if_print=True,
    )

    # Fetch stablecoin market-cap data
    _run_task(
        'get_stablecoin_marketcap_data', get_stablecoin_marketcap_data,
        start_msg="开始从coinmarketcap得到stablecoin_marketcap_data",
        end_msg="结束从coinmarketcap得到stablecoin_marketcap_data",
        data_source='coinmarketcap', update=True, start_date=None, end_date=None, if_print=True,
    )

    # Compute some custom price/volume indicators
    logger.info("计算一些自定义的价量指标")
    _run_task(
        'get_trading_data', get_trading_data, 'BTC',
        start_msg="开始get_trading_data",
        end_msg="结束get_trading_data",
        asset='BTC', start_date='2010-01-01', end_date=None,
    )

    # Daily price-change characteristics analysis
    logger.info("涨跌幅特征分析")
    _run_task(
        'daily_ret_characters_analysis', daily_ret_characters_analysis,
        start_msg="开始daily_ret_characters_analysis",
        end_msg="成功daily_ret_characters_analysis",
    )
    logger.info("涨跌幅特征分析完毕")
    # get_cex_spot_amount_dominance()