#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Datetime: 2021/11/09 20:09
# @Author  : CHEN Wang
# @Site    :
# @File    : onchain_data_update.py
# @Software: PyCharm

"""
脚本说明: 计算与链上数据相关衍生指标
"""

import os
import numpy as np
import pandas as pd
from functools import reduce
from quant_researcher.quant.project_tool.localize import DATA_DIR
from quant_researcher.quant.project_tool.time_tool import get_today
from quant_researcher.quant.datasource_fetch.crypto_api.glassnode import get_indicators, get_prices
from quant_researcher.quant.factors.factor_preprocess.preprocess import ts_diff
from quant_researcher.quant.project_tool.time_tool import date_shifter
from logger import logger
import os
import traceback
from task_monitor import task_to_db, send_error_to_email


def get_onchain_data(asset, start_date, end_date):
    """
    Compute derived on-chain indicators from Glassnode data and persist each
    one as a CSV file under ``DATA_DIR/onchain_data``.

    :param str asset: asset code, e.g. 'BTC'
    :param str start_date: inclusive start date, e.g. '2014-01-01'
    :param str end_date: inclusive end date, e.g. '2021-11-09'; when None it
        defaults to today's date so the data runs through yesterday's close
    :return: None -- all results are written to CSV as a side effect
    """

    if end_date is None:
        end_date = get_today(marker='with_n_dash')  # data through yesterday's close

    # Output directory for every derived-indicator CSV (same location used by
    # get_exchange_balance). The original code referenced `file_path` without
    # ever defining it, which raised NameError on first use.
    file_path = os.path.join(DATA_DIR, 'onchain_data')

    # Illiquid Supply Shock = Illiquid Coins / (Liquid + Highly Liquid Coins)
    illiquid_supply = get_indicators(indic_name='Illiquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Illiquid Supply']
    liquid_supply = get_indicators(indic_name='Liquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Liquid Supply']
    highly_liquid_supply = get_indicators(indic_name='Highly Liquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Highly Liquid Supply']
    illiquid_supply_shock = illiquid_supply / (liquid_supply + highly_liquid_supply)
    illiquid_supply_shock.name = 'Illiquid Supply Shock'
    file_name = os.path.join(file_path, 'Illiquid Supply Shock')
    illiquid_supply_shock.to_csv(f'{file_name}.csv')

    # Long-Term Holder Supply Shock = Long-Term Holder Supply / Short-Term Holder Supply
    # Supply Delta = STH / sma(STH, 720) - LTH / sma(LTH, 720)
    lth_supply = get_indicators(indic_name='Long-Term Holder Supply', asset=asset, start_date='2013-01-01', end_date=end_date)['Long-Term Holder Supply']
    sth_supply = get_indicators(indic_name='Short-Term Holder Supply', asset=asset, start_date='2013-01-01', end_date=end_date)['Short-Term Holder Supply']
    lth_supply_shock = lth_supply / sth_supply
    lth_supply_shock = lth_supply_shock.loc['2015-01-01':]  # drop pre-2015 warm-up data
    lth_supply_shock.name = 'Long-Term Holder Supply Shock'
    file_name = os.path.join(file_path, 'Long-Term Holder Supply Shock')
    lth_supply_shock.to_csv(f'{file_name}.csv')
    lth_supply_ma720 = lth_supply.rolling(window=720, min_periods=720).mean()
    sth_supply_ma720 = sth_supply.rolling(window=720, min_periods=720).mean()
    supply_delta = (sth_supply / sth_supply_ma720) - (lth_supply / lth_supply_ma720)
    supply_delta = supply_delta.loc['2015-01-01':]
    supply_delta.name = 'Supply Delta'
    file_name = os.path.join(file_path, 'Supply Delta')
    supply_delta.to_csv(f'{file_name}.csv')

    # On-chain Cost Basis = STH-MVRV / LTH-MVRV
    lth_mvrv = get_indicators(indic_name='LTH-MVRV', asset=asset, start_date=start_date, end_date=end_date)['LTH-MVRV']
    sth_mvrv = get_indicators(indic_name='STH-MVRV', asset=asset, start_date=start_date, end_date=end_date)['STH-MVRV']
    cost_basis_ratio = sth_mvrv / lth_mvrv
    cost_basis_ratio.name = 'On-chain Cost Basis'
    file_name = os.path.join(file_path, 'On-chain Cost Basis')
    cost_basis_ratio.to_csv(f'{file_name}.csv')

    # Supply per Whale (100 - 10k BTC) =
    # (supply held by 100-1k addresses + supply held by 1k-10k addresses)
    # / (addresses with balance >= 100 - addresses with balance >= 10k)
    m1 = get_indicators(indic_name='Supply Held by Addresses with Balance 100 - 1k', asset=asset, start_date=start_date, end_date=end_date)['Supply Held by Addresses with Balance 100 - 1k']
    m2 = get_indicators(indic_name='Supply Held by Addresses with Balance 1k - 10k', asset=asset, start_date=start_date, end_date=end_date)['Supply Held by Addresses with Balance 1k - 10k']
    m3 = get_indicators(indic_name='Addresses with Balance ≥ 100', asset=asset, start_date=start_date, end_date=end_date)['Addresses with Balance ≥ 100']
    m4 = get_indicators(indic_name='Addresses with Balance ≥ 10k', asset=asset, start_date=start_date, end_date=end_date)['Addresses with Balance ≥ 10k']
    supply_per_whale = (m1 + m2) / (m3 - m4)
    supply_per_whale.name = 'Supply per Whale (100 - 10k BTC)'
    file_name = os.path.join(file_path, 'Supply per Whale (100 - 10k BTC)')
    supply_per_whale.to_csv(f'{file_name}.csv')

    # Top Cap Model = cummean(Market Cap since 2009-01-01) * 35
    market_cap = get_indicators(indic_name='Market Cap', asset=asset, start_date='2009-01-01', end_date=end_date)['Market Cap']
    cum_market_cap = market_cap.cumsum()
    days = cum_market_cap.copy()
    days[:] = 1
    cum_days = days.cumsum()  # running count of observed days
    top_cap_model = (cum_market_cap / cum_days) * 35
    top_cap_model = top_cap_model.loc['2015-01-01':]
    top_cap_model.name = 'Top Cap Model'
    file_name = os.path.join(file_path, 'Top Cap Model')
    top_cap_model.to_csv(f'{file_name}.csv')

    # RVT Ratio = sma(Realized Cap / Transfer Volume (USD), 28); the
    # entity-adjusted variant uses Entity-Adjusted Volume as the denominator.
    realized_cap = get_indicators(indic_name='Realized Cap', asset=asset, start_date='2014-11-30', end_date=end_date)['Realized Cap']
    transfer_volume_total = get_indicators(indic_name='Transfer Volume (Total)', asset=asset, start_date='2014-11-30', end_date=end_date, currency='USD')['Transfer Volume (Total)']
    entity_adj_volume_total = get_indicators(indic_name='Entity-Adjusted Volume (Total)', asset=asset, start_date='2014-11-30', end_date=end_date, currency='USD')['Entity-Adjusted Volume (Total)']
    RVT_Ratio = realized_cap / transfer_volume_total
    RVT_Ratio = RVT_Ratio.rolling(window=28, min_periods=28).mean()
    RVT_Ratio = RVT_Ratio.loc['2015-01-01':]
    RVT_Ratio.name = 'RVT Ratio'
    file_name = os.path.join(file_path, 'RVT Ratio')
    RVT_Ratio.to_csv(f'{file_name}.csv')
    entity_adj_RVT_Ratio = realized_cap / entity_adj_volume_total
    entity_adj_RVT_Ratio = entity_adj_RVT_Ratio.rolling(window=28, min_periods=28).mean()
    entity_adj_RVT_Ratio = entity_adj_RVT_Ratio.loc['2015-01-01':]
    entity_adj_RVT_Ratio.name = 'RVT Ratio (Entity-Adjusted)'
    file_name = os.path.join(file_path, 'RVT Ratio (Entity-Adjusted)')
    entity_adj_RVT_Ratio.to_csv(f'{file_name}.csv')

    # Realized Price-to-Liveliness Ratio (RPLR): compares the spending /
    # HODLing behavior of long-term investors (Liveliness) with the 'fair
    # value' of the asset (Realized Price).
    realized_price = get_indicators(indic_name='Realized Price', asset=asset, start_date=start_date, end_date=end_date)['Realized Price']
    liveliness = get_indicators(indic_name='Liveliness', asset=asset, start_date=start_date, end_date=end_date)['Liveliness']
    realized_price_liveliness_ratio = realized_price / liveliness
    realized_price_liveliness_ratio.name = 'Realized Price-to-Liveliness Ratio'
    file_name = os.path.join(file_path, 'Realized Price-to-Liveliness Ratio')
    realized_price_liveliness_ratio.to_csv(f'{file_name}.csv')

    # 28-day / 140-day Market Realised Gradient:
    # diff(price, n) * median(zscore(diff(price, n) - diff(realized, n))
    #                         / (diff(price, n) - diff(realized, n)), 3)
    end_date1 = date_shifter(end_date, step='days', how_many=-1)
    prices = get_prices(ohlc=False, asset=asset, start_date='2010-01-01', end_date=end_date1)['close']
    realized_price = get_indicators(indic_name='Realized Price', asset=asset, start_date='2010-01-01', end_date=end_date)['Realized Price']
    prices_diff28 = ts_diff(prices, period=28).loc['2010-08-14':]
    realized_price_diff28 = ts_diff(realized_price, period=28).loc['2010-08-14':]
    delta_diff28 = prices_diff28 - realized_price_diff28
    delta_diff28_cummean = delta_diff28.expanding().mean()
    delta_diff28_cumstd = delta_diff28.expanding().std()
    delta_gradient_day28 = (delta_diff28 - delta_diff28_cummean) / delta_diff28_cumstd  # expanding z-score
    # NOTE(review): market_gradient_day28 matches the formula in the comment
    # above, yet the series that gets persisted below is delta_gradient_day28.
    # Confirm which one is the intended output before relying on the CSV.
    market_gradient_day28 = prices_diff28 * (delta_gradient_day28 / delta_diff28).rolling(window=3, min_periods=3).median()
    delta_gradient_day28 = delta_gradient_day28.loc['2015-01-01':]
    delta_gradient_day28.name = '28-day Market Realised Gradient'
    file_name = os.path.join(file_path, '28-day Market Realised Gradient')
    delta_gradient_day28.to_csv(f'{file_name}.csv')

    prices_diff140 = ts_diff(prices, period=140).loc['2010-12-04':]
    realized_price_diff140 = ts_diff(realized_price, period=140).loc['2010-12-04':]
    delta_diff140 = prices_diff140 - realized_price_diff140
    delta_diff140_cummean = delta_diff140.expanding().mean()
    delta_diff140_cumstd = delta_diff140.expanding().std()
    delta_gradient_day140 = (delta_diff140 - delta_diff140_cummean) / delta_diff140_cumstd
    # NOTE(review): same persisted-variable question as the 28-day case above.
    market_gradient_day140 = prices_diff140 * (delta_gradient_day140 / delta_diff140).rolling(window=3, min_periods=3).median()
    delta_gradient_day140 = delta_gradient_day140.loc['2015-01-01':]
    delta_gradient_day140.name = '140-day Market Realised Gradient'
    file_name = os.path.join(file_path, '140-day Market Realised Gradient')
    delta_gradient_day140.to_csv(f'{file_name}.csv')

    # Value Days Destroyed Multiple =
    # (MA30(CDD * Price) / MA365(CDD * Price)) * (Supply / 21e6)
    end_date1 = date_shifter(end_date, step='days', how_many=-1)
    prices = get_prices(ohlc=False, asset=asset, start_date='2010-01-01', end_date=end_date1)['close']
    cdd = get_indicators(indic_name='Coin Days Destroyed (CDD)', asset=asset, start_date='2010-01-01', end_date=end_date)['Coin Days Destroyed (CDD)']
    c_supply = get_indicators(indic_name='Circulating Supply', asset=asset, start_date='2010-01-01', end_date=end_date)['Circulating Supply']
    value_day_destroyed = prices * cdd
    value_day_destroyed_ma30 = value_day_destroyed.rolling(window=30, min_periods=30).mean()
    value_day_destroyed_ma365 = value_day_destroyed.rolling(window=365, min_periods=365).mean()
    value_day_destroyed_multiple = (value_day_destroyed_ma30 / value_day_destroyed_ma365) * (c_supply / 21000000)
    value_day_destroyed_multiple = value_day_destroyed_multiple.loc['2015-01-01':]
    value_day_destroyed_multiple.name = 'Value Days Destroyed Multiple'
    file_name = os.path.join(file_path, 'Value Days Destroyed Multiple')
    value_day_destroyed_multiple.to_csv(f'{file_name}.csv')

    # Difficulty Ribbon MA9-MA200 ratio
    difficulty_ribbon = get_indicators(indic_name='Difficulty Ribbon', asset=asset, start_date='2015-01-01', end_date=end_date)
    difficulty_ribbon['Difficulty Ribbon MA9-MA200 ratio'] = difficulty_ribbon['ma9'] / difficulty_ribbon['ma200']
    file_name = os.path.join(file_path, 'Difficulty Ribbon MA9-MA200 ratio')
    difficulty_ribbon['Difficulty Ribbon MA9-MA200 ratio'].to_csv(f'{file_name}.csv')

    # CDD decomposition: spent volume by coin-age band, weighted by the
    # approximate mid-point age of each band to rebuild a DIY CDD.
    # The URL values are informational only; get_indicators is called with the
    # dict keys. NOTE(review): asset is hard-coded to 'BTC' in this section,
    # so it ignores the `asset` argument -- confirm that is intended.
    metrics = {'Spent Volume less 1h': 'https://api.glassnode.com/v1/metrics/indicators/svl_1h',
               'Spent Volume 1h-24h': 'https://api.glassnode.com/v1/metrics/indicators/svl_1h_24h',
               'Spent Volume 1d-1w': 'https://api.glassnode.com/v1/metrics/indicators/svl_1d_1w',
               'Spent Volume 1w-1m': 'https://api.glassnode.com/v1/metrics/indicators/svl_1w_1m',
               'Spent Volume 1m-3m': 'https://api.glassnode.com/v1/metrics/indicators/svl_1m_3m',
               'Spent Volume 3m-6m': 'https://api.glassnode.com/v1/metrics/indicators/svl_3m_6m',
               'Spent Volume 6m-12m': 'https://api.glassnode.com/v1/metrics/indicators/svl_6m_12m',
               'Spent Volume 1y-2y': 'https://api.glassnode.com/v1/metrics/indicators/svl_1y_2y',
               'Spent Volume 2y-3y': 'https://api.glassnode.com/v1/metrics/indicators/svl_2y_3y',
               'Spent Volume 3y-5y': 'https://api.glassnode.com/v1/metrics/indicators/svl_3y_5y',
               'Spent Volume 5y-7y': 'https://api.glassnode.com/v1/metrics/indicators/svl_5y_7y',
               'Spent Volume 7y-10y': 'https://api.glassnode.com/v1/metrics/indicators/svl_7y_10y',
               'Spent Volume more 10y': 'https://api.glassnode.com/v1/metrics/indicators/svl_more_10y', }
    alldf_list = []
    for indic_name in metrics.keys():
        df = get_indicators(indic_name=indic_name, asset='BTC', start_date='2010-01-01', end_date=end_date)
        alldf_list.append(df)
    alldf = pd.concat(alldf_list, axis=1)
    file_name = os.path.join(file_path, 'Spent Volume Data')
    alldf.to_csv(f'{file_name}.csv')

    # Weights ~= mid-point holding time of each age band, expressed in days.
    alldf_weighted = alldf * [0.5 / 24, 12 / 24, 3.5, 19, 60, 3.5 * 30, 9 * 30, 1.5 * 365, 2.5 * 365, 4 * 365, 6 * 365, 8.5 * 365, 11 * 365]
    alldf_weighted['DIY-CDD'] = alldf_weighted.sum(axis=1)

    cdd_df = get_indicators(indic_name='Coin Days Destroyed (CDD)', asset='BTC', start_date='2010-01-01', end_date=end_date)
    alldf_weighted = pd.concat([alldf_weighted, cdd_df], axis=1)
    for indic_name in metrics.keys():
        # Each band's share of the DIY CDD, smoothed over 90 days.
        alldf_weighted[indic_name] = alldf_weighted[indic_name] / alldf_weighted['DIY-CDD']
        alldf_weighted[indic_name] = alldf_weighted[indic_name].rolling(90).mean()
    file_name = os.path.join(file_path, 'DIY CDD_Decompose')
    alldf_weighted.to_csv(f'{file_name}.csv')

    # Binary CDD variants: flag days where Supply-Adjusted CDD exceeds its own
    # moving average, then smooth the 0/1 flags over several windows.
    # NOTE(review): this frame is computed but never persisted or returned --
    # confirm whether a to_csv call is missing here.
    supply_cdd_df = get_indicators(indic_name='Supply-Adjusted CDD', asset=asset, start_date='2011-01-01', end_date=end_date)
    for ma in (30, 90, 120, 155):
        supply_cdd_df[f'supplycdd_ma{ma}'] = supply_cdd_df['Supply-Adjusted CDD'].rolling(ma, min_periods=ma).mean()
        supply_cdd_df[f'binarycdd_ma{ma}'] = np.where(supply_cdd_df['Supply-Adjusted CDD'] >= supply_cdd_df[f'supplycdd_ma{ma}'], 1, 0)
    for ma in (30, 90, 120, 155):
        for smooth in (7, 30, 90):
            supply_cdd_df[f'binarycdd_ma{ma}_ma{smooth}'] = supply_cdd_df[f'binarycdd_ma{ma}'].rolling(smooth).mean()

    # Stock-to-Flow model check: SF ratio = total supply / trailing-365d issuance.
    total_supply = get_indicators(indic_name='Circulating Supply', asset=asset, start_date='2009-01-03', end_date=end_date)
    total_supply.rename(columns={'Circulating Supply': 'total_supply'}, inplace=True)
    total_supply['daily_mined'] = total_supply['total_supply'].diff()
    # 'roling_yearly_mined' [sic]: column name kept as-is so downstream
    # consumers of the CSV keep working.
    total_supply['roling_yearly_mined'] = total_supply['daily_mined'].rolling(365).sum()
    total_supply['sf_ratio'] = total_supply['total_supply'] / total_supply['roling_yearly_mined']
    # ['close'] restored here for consistency with every other get_prices call;
    # without it `prices` is a DataFrame and the mul/concat steps further down
    # (exchange-balance USD conversion, btc_df column renaming) misalign.
    prices = get_prices(ohlc=False, asset=asset, start_date='2010-01-01', end_date=end_date)['close']
    all_df = pd.concat([total_supply, prices], axis=1)
    all_df['total_marketcap'] = all_df['total_supply'] * all_df['close']
    all_df['log_total_marketcap'] = np.log10(all_df['total_marketcap'])
    all_df['log_sf_ratio'] = np.log10(all_df['sf_ratio'])
    # Previously this write reused the stale file_name from the CDD section and
    # silently overwrote 'DIY CDD_Decompose.csv'.
    file_name = os.path.join(file_path, 'SF Model')
    all_df.to_csv(f'{file_name}.csv')

    # Stablecoin distribution (USDT / USDC / BUSD) and its relation to price:
    # total supply, supply on exchanges, supply locked in / out of contracts.
    stablecoin_parts = {}
    for stable in ['USDT', 'USDC', 'BUSD']:
        supply = get_indicators(indic_name='Circulating Supply', asset=stable, start_date='2009-01-03', end_date=end_date)['Circulating Supply']
        supply_in_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset=stable, start_date='2009-01-03', end_date=end_date)['Exchange Balance (Total)']
        percent_in_contract = get_indicators(indic_name='Supply in Smart Contracts', asset=stable, start_date='2009-01-03', end_date=end_date)['Supply in Smart Contracts']
        stablecoin_parts[stable] = {
            'supply': supply,
            'in_exchange': supply_in_exchange,
            'in_contract': supply.multiply(percent_in_contract),
            'not_in_contract': supply.multiply(1 - percent_in_contract),
        }

    all_stablecoin = pd.concat([p['supply'] for p in stablecoin_parts.values()], axis=1).sum(axis=1)
    stablecoin_in_exchange = pd.concat([p['in_exchange'] for p in stablecoin_parts.values()], axis=1).sum(axis=1)
    stablecoin_in_contract = pd.concat([p['in_contract'] for p in stablecoin_parts.values()], axis=1).sum(axis=1)
    stablecoin_not_in_contract = pd.concat([p['not_in_contract'] for p in stablecoin_parts.values()], axis=1).sum(axis=1)

    per_coin_dfs = []
    for stable, parts in stablecoin_parts.items():
        coin_df = pd.concat([parts['supply'], parts['in_exchange'], parts['in_contract'], parts['not_in_contract']], axis=1)
        prefix = stable.lower()
        coin_df.columns = [f'{prefix}_supply', f'{prefix}_supply_in_exchange', f'{prefix}_supply_in_contract', f'{prefix}_supply_not_in_contract']
        per_coin_dfs.append(coin_df)
    stablecoin_df = pd.concat([all_stablecoin, stablecoin_in_exchange, stablecoin_in_contract, stablecoin_not_in_contract], axis=1)
    stablecoin_df.columns = ['all_stablecoin', 'stablecoin_in_exchange', 'stablecoin_in_contract', 'stablecoin_not_in_contract']

    all_df = pd.concat(per_coin_dfs + [stablecoin_df], axis=1)
    btc_df = pd.concat([market_cap, prices, np.log10(prices)], axis=1)
    btc_df.columns = ['btc_marketcap', 'btc_price', 'btc_log_price']
    stablecoin_analysis_df = pd.concat([all_df, btc_df], axis=1)
    stablecoin_analysis_df.sort_index(inplace=True)
    file_name = os.path.join(file_path, 'DIY Stablecoin Analysis')
    stablecoin_analysis_df.to_csv(f'{file_name}.csv')

    # Per-exchange Stablecoin Supply Ratio: BTC exchange balance (in USD)
    # divided by the stablecoin balance on the same exchange.
    btc_exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset='BTC', start_date='2009-01-03', end_date=end_date)
    btc_exchange_balance = btc_exchange_balance.mul(prices, axis=0)  # convert balances to USD
    usdt_exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset='USDT', start_date='2009-01-03', end_date=end_date)
    usdc_exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset='USDC', start_date='2009-01-03', end_date=end_date)
    busd_exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset='BUSD', start_date='2009-01-03', end_date=end_date)
    # Align USDC/BUSD onto USDT's index before summing (assumes USDT has the
    # fullest history -- TODO confirm).
    stablecoin_exchange_balance = usdt_exchange_balance.fillna(0) + usdc_exchange_balance.reindex(usdt_exchange_balance.index).fillna(0) + busd_exchange_balance.reindex(usdt_exchange_balance.index).fillna(0)
    exchange_SSR_df = btc_exchange_balance / stablecoin_exchange_balance
    exchange_SSR_df.dropna(how='all', axis=1, inplace=True)
    exchange_SSR_df.columns = [i + '_ssr' for i in exchange_SSR_df.columns]
    # 200-day rolling z-score of each exchange's SSR.
    exchange_SSR_ratio_df = (exchange_SSR_df - exchange_SSR_df.rolling(200).mean()) / exchange_SSR_df.rolling(200).std()
    exchange_SSR_ratio_df.columns = [i + '_ratio' for i in exchange_SSR_ratio_df.columns]
    exchange_SSR_info_df = pd.concat([exchange_SSR_df, exchange_SSR_ratio_df, btc_df], axis=1)
    file_name = os.path.join(file_path, 'DIY Exchange SSR')
    exchange_SSR_info_df.to_csv(f'{file_name}.csv')

    # Aggregate SSR variants: every combination of BTC numerator (market cap
    # or total exchange balance) and stablecoin denominator (total supply,
    # exchange supply, or non-contract supply).
    SSR_marketcap_marketcap = market_cap / stablecoin_df['all_stablecoin']
    SSR_marketcap_exchange = market_cap / stablecoin_df['stablecoin_in_exchange']
    SSR_marketcap_noncontrtact = market_cap / stablecoin_df['stablecoin_not_in_contract']
    SSR_exchange_marketcap = btc_exchange_balance.sum(axis=1) / stablecoin_df['all_stablecoin']
    SSR_exchange_exchange = btc_exchange_balance.sum(axis=1) / stablecoin_df['stablecoin_in_exchange']
    SSR_exchange_noncontrtact = btc_exchange_balance.sum(axis=1) / stablecoin_df['stablecoin_not_in_contract']
    SSR_df = pd.concat([SSR_marketcap_marketcap, SSR_marketcap_exchange, SSR_marketcap_noncontrtact, SSR_exchange_marketcap, SSR_exchange_exchange, SSR_exchange_noncontrtact], axis=1)
    SSR_df.columns = ['SSR_marketcap_marketcap', 'SSR_marketcap_exchange', 'SSR_marketcap_noncontrtact', 'SSR_exchange_marketcap', 'SSR_exchange_exchange', 'SSR_exchange_noncontrtact']
    SSR_ratio_df = (SSR_df - SSR_df.rolling(200).mean()) / SSR_df.rolling(200).std()
    SSR_ratio_df.columns = [i.replace('SSR', 'SSR_ratio') for i in SSR_ratio_df.columns]
    SSR_info_df = pd.concat([SSR_df, SSR_ratio_df, btc_df], axis=1)
    file_name = os.path.join(file_path, 'DIY SSR')
    SSR_info_df.to_csv(f'{file_name}.csv')


def get_exchange_balance():
    """
    Build cleaned and aggregated exchange-balance datasets.

    Steps:
      1. Clean the raw Nansen exchange-balance Excel export.
      2. Aggregate by (date, exchange, datasource, token) and pivot the USD
         value per token, keeping a fixed basket of major coins.
      3. Join 30-day rolling CEX spot/derivative volume and fee statistics.
      4. Fetch Glassnode per-exchange balances for the same coin basket and
         stack them under the Nansen data.

    All inputs are read from and all outputs written to Excel files under
    ``DATA_DIR``; returns None.
    """
    file_path = os.path.join(DATA_DIR, 'onchain_data')
    file_name = os.path.join(file_path, 'exchange_balance_nansen')
    exchange_balance_df = pd.read_excel(f'{file_name}.xlsx')
    # Drop incomplete rows and repeated header rows (rows whose Token cell
    # literally contains the string 'Token').
    cleaned_exchange_balance_df = exchange_balance_df.dropna(how='any', axis=0)
    cleaned_exchange_balance_df = cleaned_exchange_balance_df[~(cleaned_exchange_balance_df['Token'] == 'Token')]
    cleaned_exchange_balance_df['date'] = cleaned_exchange_balance_df['date'].dt.strftime("%Y-%m-%d")
    file_name = os.path.join(file_path, 'exchange_balance_nansen_cleaned')
    cleaned_exchange_balance_df.to_excel(f'{file_name}.xlsx', index=False)

    earliest_date = min(cleaned_exchange_balance_df['date'])
    latest_date = max(cleaned_exchange_balance_df['date'])
    all_date_list = list(cleaned_exchange_balance_df['date'].unique())
    all_exchange_list = list(cleaned_exchange_balance_df['exchange'].unique())

    # Aggregate by date / exchange / datasource / token.
    df1 = cleaned_exchange_balance_df.groupby(['date', 'exchange', 'datasource', 'Token'])['Price'].first()
    df2 = cleaned_exchange_balance_df.groupby(['date', 'exchange', 'datasource', 'Token'])['Balance'].sum()
    df3 = cleaned_exchange_balance_df.groupby(['date', 'exchange', 'datasource', 'Token'])['Value'].sum()
    all_df = pd.concat([df1, df2, df3], axis=1)
    value_df = all_df['Value'].unstack()
    # Fixed basket: the union of each exchange's top holdings by USD value as
    # of 2022-11-14 (originally derived by sorting value_df per exchange).
    coins_to_monitor = ['BUSD', 'USDT', 'BTC', 'ETH', 'BNB', 'USDC', 'LEO', 'EURT', 'MATIC', 'SOL', 'SHIB', 'CRO', 'LINK', 'HT', 'HBTC', 'TRX', 'KCS']
    # .copy() so the column assignments below do not chained-assign into the
    # value_df slice.
    value_df1 = value_df[coins_to_monitor].copy()
    value_df1['Others'] = value_df.loc[:, ~ value_df.columns.isin(coins_to_monitor)].sum(axis=1)
    value_df1['Total'] = value_df1.sum(axis=1)
    value_df1.sort_values(by=[('2022-11-14', 'binance', 'nansen')], axis=1, ascending=False, inplace=True)
    value_df1.reset_index(inplace=True)
    file_name = os.path.join(file_path, 'exchange_balance_value_nansen_grouped')
    value_df1.to_excel(f'{file_name}.xlsx')

    # Join the latest 30-day rolling volume / fee statistics per exchange.
    # os.path.join replaces the former hard-coded Windows backslash path
    # ('cex_coins_data\statistics_data'), which was not portable.
    file_path_temp = os.path.join(DATA_DIR, 'cex_coins_data', 'statistics_data')
    for data_type in ['spot_volume', 'spot_fee', 'deri_volume', 'deri_fee']:
        file_name_temp = os.path.join(file_path_temp, f'all_cex_{data_type}')
        all_cex_df = pd.read_excel(f'{file_name_temp}.xlsx', index_col='Unnamed: 0')
        all_cex_df.rename(columns={'okex': 'okx', 'crypto_com': 'crypto.com'}, inplace=True)
        all_cex_df = all_cex_df.rolling(30).sum()
        all_cex_df = all_cex_df.iloc[-1:, all_cex_df.columns.isin(all_exchange_list)]  # keep only the latest window
        all_cex_df = all_cex_df.stack().reset_index()
        all_cex_df.columns = ['end_date', 'exchange', f'{data_type}_ma30']
        value_df1 = value_df1.merge(all_cex_df[['exchange', f'{data_type}_ma30']], on='exchange', how='left')
    value_df1['total_fee_ma30'] = value_df1['spot_fee_ma30'] + value_df1['deri_fee_ma30']
    file_name = os.path.join(file_path, 'exchange_balance_value_nansen_with_volume_fee')
    value_df1.to_excel(f'{file_name}.xlsx')

    balance_df = all_df['Balance'].unstack()
    balance_df_nansen = balance_df[coins_to_monitor]
    # Fetch per-exchange balances from Glassnode for the same coin basket.
    exchange_balance_glassnode_list = []
    for coin in coins_to_monitor:
        exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset=coin, start_date=earliest_date, end_date=latest_date)
        if exchange_balance is not None:  # None when Glassnode has no data for this coin
            exchange_balance = exchange_balance[exchange_balance.index.isin(all_date_list)]
            # Double stack reshapes the (date x exchange) frame into a long
            # frame whose MultiIndex matches balance_df_nansen's index levels,
            # with 'glassnode' filling the datasource level.
            exchange_balance = pd.DataFrame(exchange_balance.stack())
            exchange_balance.rename(columns={0: 'glassnode'}, inplace=True)
            exchange_balance_glassnode = pd.DataFrame(exchange_balance.stack())
            exchange_balance_glassnode.index.names = balance_df_nansen.index.names
            exchange_balance_glassnode.rename(columns={0: coin}, inplace=True)
            exchange_balance_glassnode_list.append(exchange_balance_glassnode)

    exchange_balance_glassnode = pd.concat(exchange_balance_glassnode_list, axis=1)
    all_exchange_balance = pd.concat([balance_df_nansen, exchange_balance_glassnode], axis=0)
    all_exchange_balance.reset_index(inplace=True)
    file_name = os.path.join(file_path, 'exchange_balance_nansen_glassnode')
    all_exchange_balance.to_excel(f'{file_name}.xlsx')


def _fetch_indicator_series(indic_name, asset, start_date, end_date):
    """Fetch one glassnode indicator and return it as a single Series.

    Every indicator used by get_valuation_percentile exposes a column named
    exactly like the indicator itself, so that column is selected directly.

    NOTE(review): get_indicators appears to return None on failure (the
    exchange-balance code above guards for it); that case is not handled
    here and would raise a TypeError — confirm whether a hard failure is
    the intended behavior for this batch job.
    """
    return get_indicators(indic_name=indic_name, asset=asset,
                          start_date=start_date, end_date=end_date)[indic_name]


def get_valuation_percentile(asset, start_date, end_date):
    """Compute historical percentile ranks of on-chain valuation metrics.

    Builds a panel of realized-P&L (SOPR), unrealized-P&L (MVRV),
    profit-share and momentum (OSC120) indicators, plus their 7/14/30-day
    moving averages, then for each analysis start date exports to
    DATA_DIR/onchain_data:
      * expanding-window percentile ranks,
      * whole-history percentile ranks,
      * a describe() table at fixed quantiles,
    each as an .xlsx file.

    :param str asset: asset code, e.g. 'BTC'
    :param str start_date: 'YYYY-MM-DD'
    :param str end_date: 'YYYY-MM-DD', or None to default to today
    :return: None (side effect: writes Excel files)
    """
    from quant_researcher.quant.factors.factor_preprocess.preprocess import ts_percentile_rank_score
    if end_date is None:
        end_date = get_today(marker='with_n_dash')  # compute up to yesterday's close

    # Realized profit/loss indicators
    entity_sopr = _fetch_indicator_series('Entity-Adjusted SOPR', asset, start_date, end_date)
    asopr = _fetch_indicator_series('aSOPR', asset, start_date, end_date)

    # Unrealized profit/loss indicators
    entity_mvrv = _fetch_indicator_series('Entity-Adjusted MVRV', asset, start_date, end_date)
    sth_mvrv = _fetch_indicator_series('STH-MVRV', asset, start_date, end_date)

    # Unrealized profit as a share of market cap
    relative_unrealized_profit = _fetch_indicator_series('Relative Unrealized Profit', asset, start_date, end_date)
    entity_relative_unrealized_profit = _fetch_indicator_series('Entity-Adjusted Unrealized Profit', asset, start_date, end_date)

    # Share of supply currently in profit
    percent_supply_in_profit = _fetch_indicator_series('Percent Supply in Profit', asset, start_date, end_date)
    supply_in_profit = _fetch_indicator_series('Supply in Profit', asset, start_date, end_date)
    supply_last_active_7y = _fetch_indicator_series('Supply Last Active 7y-10y', asset, start_date, end_date)
    supply_last_active_10y = _fetch_indicator_series('Supply Last Active more 10y', asset, start_date, end_date)
    adjusted_supply = _fetch_indicator_series('Adjusted Supply', asset, start_date, end_date)
    # Exclude coins dormant for more than 7 years from the profit share
    adjusted_percent_supply_in_profit = (supply_in_profit - supply_last_active_7y - supply_last_active_10y) / adjusted_supply
    adjusted_percent_supply_in_profit.name = 'Adjusted Percent Supply in Profit'

    # OSC120: relative deviation of close price from its 120-day moving average
    prices = get_prices(ohlc=False, asset=asset, start_date=start_date, end_date=end_date)['close']
    prices_ma120 = prices.rolling(120).mean()
    prices_osc120 = (prices - prices_ma120) / prices_ma120
    prices_osc120.name = 'OSC120'

    # log10 price, kept only as a reference column in the exported files
    log_price = np.log10(prices)
    log_price.name = 'log_price'

    # Assemble each raw series plus its 7/14/30-day moving averages
    base_series = [entity_sopr, asopr, entity_mvrv, sth_mvrv,
                   relative_unrealized_profit, entity_relative_unrealized_profit,
                   percent_supply_in_profit, adjusted_percent_supply_in_profit,
                   prices_osc120]
    all_data_list = []
    for data_series in base_series:
        all_data_list.append(data_series)
        for ma_period in [7, 14, 30]:
            temp_data_series = data_series.rolling(ma_period).mean()
            temp_data_series.name = data_series.name + f' ma{ma_period}'
            all_data_list.append(temp_data_series)

    all_df = pd.concat(all_data_list, axis=1)

    file_path = os.path.join(DATA_DIR, 'onchain_data')
    for analysis_start_date in ['2015-01-01', '2017-01-01']:
        analysis_df = all_df[analysis_start_date:]
        temp_log_price = log_price[analysis_start_date:]

        # Percentile rank within an expanding historical window
        expanding_percentile_df = analysis_df.apply(ts_percentile_rank_score, way='expanding', scale=100, min_periods=100)
        expanding_percentile_df = pd.concat([temp_log_price, expanding_percentile_df], axis=1)
        file_name = os.path.join(file_path, f'valuation_expanding_percentile_analysis_start_{analysis_start_date}')
        expanding_percentile_df.to_excel(f'{file_name}.xlsx')

        # Percentile rank over the whole history
        whole_percentile_df = analysis_df.apply(ts_percentile_rank_score, way='whole', scale=100, min_periods=100)
        whole_percentile_df = pd.concat([temp_log_price, whole_percentile_df], axis=1)
        file_name = os.path.join(file_path, f'valuation_whole_percentile_analysis_start_{analysis_start_date}')
        whole_percentile_df.to_excel(f'{file_name}.xlsx')

        # Summary statistics at fixed quantiles
        describe_percentile_df = analysis_df.describe(percentiles=[0.01, 0.05, 0.1, 0.2, 0.4, 0.6, 0.8, 0.9, 0.95, 0.99])
        file_name = os.path.join(file_path, f'valuation_describe_percentile_analysis_start_{analysis_start_date}')
        describe_percentile_df.to_excel(f'{file_name}.xlsx')


if __name__ == '__main__':
    # Ensure the output directory for all on-chain exports exists.
    file_path = os.path.join(DATA_DIR, 'onchain_data')
    os.makedirs(file_path, exist_ok=True)

    # get_exchange_balance()
    # get_valuation_percentile(asset='BTC', start_date='2014-01-01', end_date=None)

    # Script name is used in both task bookkeeping and error alerts.
    script_name = os.path.basename(__file__)
    logger.info("开始得到BTC链上数据get_onchain_data")
    task_to_db(script_name, 'get_onchain_data')
    try:
        get_onchain_data(asset='BTC', start_date='2015-01-01', end_date=None)
    except Exception:
        # NOTE(review): the traceback is logged at INFO level; consider
        # logger.error/logger.exception so alerting picks it up — confirm
        # with the monitoring setup before changing.
        msg = traceback.format_exc()
        logger.info(msg)
        send_error_to_email(script_name=script_name, func_name="get_onchain_data", message=msg)
        # Bare raise re-raises the active exception with its original traceback.
        raise
    task_to_db(script_name, 'get_onchain_data', 1)
    logger.info("成功得到BTC链上数据get_onchain_data")