import numpy as np
import os
import sys
import warnings

import pandas as pd

sys.path.append('..')
warnings.filterwarnings('ignore')

from tools.setting import DATA_DIR
from tools.time_tool import get_today, date_shifter
from tools.rank import ts_percentile_rank_score
from API.glassnode import get_indicators, get_prices

# Output directory layout: <DATA_DIR>/analysis/glassnode holds every CSV
# written by the analyses in this module; both levels are created eagerly at
# import time so the functions below can write without further checks.
ANALYSIS_DATA_DIR = os.path.join(DATA_DIR, 'analysis')
os.makedirs(ANALYSIS_DATA_DIR, exist_ok=True)
GLASSNODE_ANALYSIS_DATA_DIR = os.path.join(ANALYSIS_DATA_DIR, 'glassnode')
os.makedirs(GLASSNODE_ANALYSIS_DATA_DIR, exist_ok=True)


def _save_csv(data, name):
    """Persist *data* (Series or DataFrame) as '<name>.csv' under GLASSNODE_ANALYSIS_DATA_DIR."""
    data.to_csv(os.path.join(GLASSNODE_ANALYSIS_DATA_DIR, f'{name}.csv'))


def btc_onchain_data_analysis():
    """Compute derived BTC on-chain indicators from glassnode data and dump each to CSV.

    Each section fetches raw glassnode metrics, derives one composite indicator
    (supply shocks, RVT ratios, CDD variants, stock-to-flow inputs, stablecoin
    supply ratios, ...) and writes the result into GLASSNODE_ANALYSIS_DATA_DIR.
    Returns nothing; the only effect is the CSV files left on disk.
    """
    asset = 'BTC'
    end_date = get_today(marker='with_n_dash')
    start_date = '2013-01-01'

    # --- Illiquid Supply Shock = Illiquid Coins / (Liquid + Highly Liquid Coins)
    illiquid_supply = get_indicators(indic_name='Illiquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Illiquid Supply']
    liquid_supply = get_indicators(indic_name='Liquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Liquid Supply']
    highly_liquid_supply = get_indicators(indic_name='Highly Liquid Supply', asset=asset, start_date=start_date, end_date=end_date)['Highly Liquid Supply']
    illiquid_supply_shock = illiquid_supply / (liquid_supply + highly_liquid_supply)
    illiquid_supply_shock.name = 'Illiquid Supply Shock'
    _save_csv(illiquid_supply_shock, 'Illiquid Supply Shock')

    # --- Long-Term Holder Supply Shock = LTH Supply / STH Supply
    # --- Supply Delta = STH/sma(STH, 720) - LTH/sma(LTH, 720)
    lth_supply = get_indicators(indic_name='Long-Term Holder Supply', asset=asset, start_date=start_date, end_date=end_date)['Long-Term Holder Supply']
    sth_supply = get_indicators(indic_name='Short-Term Holder Supply', asset=asset, start_date=start_date, end_date=end_date)['Short-Term Holder Supply']
    lth_supply_shock = (lth_supply / sth_supply).loc['2015-01-01':]
    lth_supply_shock.name = 'Long-Term Holder Supply Shock'
    _save_csv(lth_supply_shock, 'Long-Term Holder Supply Shock')
    lth_supply_ma720 = lth_supply.rolling(window=720, min_periods=720).mean()
    sth_supply_ma720 = sth_supply.rolling(window=720, min_periods=720).mean()
    supply_delta = ((sth_supply / sth_supply_ma720) - (lth_supply / lth_supply_ma720)).loc['2015-01-01':]
    supply_delta.name = 'Supply Delta'
    _save_csv(supply_delta, 'Supply Delta')

    # --- On-chain Cost Basis: short- vs long-term holder MVRV ratio
    lth_mvrv = get_indicators(indic_name='LTH-MVRV', asset=asset, start_date=start_date, end_date=end_date)['LTH-MVRV']
    sth_mvrv = get_indicators(indic_name='STH-MVRV', asset=asset, start_date=start_date, end_date=end_date)['STH-MVRV']
    cost_basis_ratio = sth_mvrv / lth_mvrv
    cost_basis_ratio.name = 'On-chain Cost Basis'
    _save_csv(cost_basis_ratio, 'On-chain Cost Basis')

    # --- Supply per Whale (100 - 10k BTC):
    # (supply held by 100-1k and 1k-10k addresses) / (#addresses >= 100 - #addresses >= 10k)
    m1 = get_indicators(indic_name='Supply Held by Addresses with Balance 100 - 1k', asset=asset, start_date=start_date, end_date=end_date)['Supply Held by Addresses with Balance 100 - 1k']
    m2 = get_indicators(indic_name='Supply Held by Addresses with Balance 1k - 10k', asset=asset, start_date=start_date, end_date=end_date)['Supply Held by Addresses with Balance 1k - 10k']
    m3 = get_indicators(indic_name='Addresses with Balance ≥ 100', asset=asset, start_date=start_date, end_date=end_date)['Addresses with Balance ≥ 100']
    m4 = get_indicators(indic_name='Addresses with Balance ≥ 10k', asset=asset, start_date=start_date, end_date=end_date)['Addresses with Balance ≥ 10k']
    supply_per_whale = (m1 + m2) / (m3 - m4)
    supply_per_whale.name = 'Supply per Whale (100 - 10k BTC)'
    _save_csv(supply_per_whale, 'Supply per Whale (100 - 10k BTC)')

    # --- Top Cap Model = (cumulative mean of Market Cap since 2009-01-01) * 35
    # expanding().mean() is the cumsum(cap)/cumsum(1) form written idiomatically.
    market_cap = get_indicators(indic_name='Market Cap', asset=asset, start_date='2009-01-01', end_date=end_date)['Market Cap']
    top_cap_model = (market_cap.expanding().mean() * 35).loc['2015-01-01':]
    top_cap_model.name = 'Top Cap Model'
    _save_csv(top_cap_model, 'Top Cap Model')

    # --- RVT Ratio: 28d SMA of Realized Cap / on-chain volume (USD); the
    # entity-adjusted variant divides by entity-adjusted volume instead.
    realized_cap = get_indicators(indic_name='Realized Cap', asset=asset, start_date='2014-11-30', end_date=end_date)['Realized Cap']
    transfer_volume_total = get_indicators(indic_name='Transfer Volume (Total)', asset=asset, start_date='2014-11-30', end_date=end_date, currency='USD')['Transfer Volume (Total)']
    entity_adj_volume_total = get_indicators(indic_name='Entity-Adjusted Volume (Total)', asset=asset, start_date='2014-11-30', end_date=end_date, currency='USD')['Entity-Adjusted Volume (Total)']
    for volume, label in ((transfer_volume_total, 'RVT Ratio'),
                          (entity_adj_volume_total, 'RVT Ratio (Entity-Adjusted)')):
        rvt = (realized_cap / volume).rolling(window=28, min_periods=28).mean().loc['2015-01-01':]
        rvt.name = label
        _save_csv(rvt, label)

    # --- Realized Price-to-Liveliness Ratio (RPLR): Realized Price deflated by Liveliness
    realized_price = get_indicators(indic_name='Realized Price', asset=asset, start_date=start_date, end_date=end_date)['Realized Price']
    liveliness = get_indicators(indic_name='Liveliness', asset=asset, start_date=start_date, end_date=end_date)['Liveliness']
    rplr = realized_price / liveliness
    rplr.name = 'Realized Price-to-Liveliness Ratio'
    _save_csv(rplr, 'Realized Price-to-Liveliness Ratio')

    # --- 28-day / 140-day Market Realised Gradient: expanding z-score of the
    # difference between N-day price change and N-day realized-price change.
    end_date1 = date_shifter(end_date, step='days', how_many=-1)
    close = get_prices(ohlc=False, asset=asset, start_date='2010-01-01', end_date=end_date1)['close']
    realized_price = get_indicators(indic_name='Realized Price', asset=asset, start_date='2010-01-01', end_date=end_date)['Realized Price']
    for window, first_valid, label in ((28, '2010-08-14', '28-day Market Realised Gradient'),
                                       (140, '2010-12-04', '140-day Market Realised Gradient')):
        delta = close.diff(window).loc[first_valid:] - realized_price.diff(window).loc[first_valid:]
        gradient = ((delta - delta.expanding().mean()) / delta.expanding().std()).loc['2015-01-01':]
        gradient.name = label
        _save_csv(gradient, label)

    # --- Value Days Destroyed Multiple = (MA30(CDD*price) / MA365(CDD*price)) * (Supply / 21e6)
    # Reuses `close` fetched above; the original re-fetched the identical series here.
    cdd = get_indicators(indic_name='Coin Days Destroyed (CDD)', asset=asset, start_date='2010-01-01', end_date=end_date)['Coin Days Destroyed (CDD)']
    c_supply = get_indicators(indic_name='Circulating Supply', asset=asset, start_date='2010-01-01', end_date=end_date)['Circulating Supply']
    vdd = close * cdd
    vdd_multiple = ((vdd.rolling(window=30, min_periods=30).mean()
                     / vdd.rolling(window=365, min_periods=365).mean())
                    * (c_supply / 21000000)).loc['2015-01-01':]
    vdd_multiple.name = 'Value Days Destroyed Multiple'
    _save_csv(vdd_multiple, 'Value Days Destroyed Multiple')

    # --- Difficulty Ribbon MA9/MA200 ratio
    difficulty_ribbon = get_indicators(indic_name='Difficulty Ribbon', asset=asset, start_date='2015-01-01', end_date=end_date)
    difficulty_ribbon['Difficulty Ribbon MA9-MA200 ratio'] = difficulty_ribbon['ma9'] / difficulty_ribbon['ma200']
    _save_csv(difficulty_ribbon['Difficulty Ribbon MA9-MA200 ratio'], 'Difficulty Ribbon MA9-MA200 ratio')

    # --- CDD decomposition by spent-volume age band. Only the dict keys are
    # used as indicator names below; the URLs document the glassnode endpoints.
    metrics = {'Spent Volume less 1h': 'https://api.glassnode.com/v1/metrics/indicators/svl_1h',
               'Spent Volume 1h-24h': 'https://api.glassnode.com/v1/metrics/indicators/svl_1h_24h',
               'Spent Volume 1d-1w': 'https://api.glassnode.com/v1/metrics/indicators/svl_1d_1w',
               'Spent Volume 1w-1m': 'https://api.glassnode.com/v1/metrics/indicators/svl_1w_1m',
               'Spent Volume 1m-3m': 'https://api.glassnode.com/v1/metrics/indicators/svl_1m_3m',
               'Spent Volume 3m-6m': 'https://api.glassnode.com/v1/metrics/indicators/svl_3m_6m',
               'Spent Volume 6m-12m': 'https://api.glassnode.com/v1/metrics/indicators/svl_6m_12m',
               'Spent Volume 1y-2y': 'https://api.glassnode.com/v1/metrics/indicators/svl_1y_2y',
               'Spent Volume 2y-3y': 'https://api.glassnode.com/v1/metrics/indicators/svl_2y_3y',
               'Spent Volume 3y-5y': 'https://api.glassnode.com/v1/metrics/indicators/svl_3y_5y',
               'Spent Volume 5y-7y': 'https://api.glassnode.com/v1/metrics/indicators/svl_5y_7y',
               'Spent Volume 7y-10y': 'https://api.glassnode.com/v1/metrics/indicators/svl_7y_10y',
               'Spent Volume more 10y': 'https://api.glassnode.com/v1/metrics/indicators/svl_more_10y', }
    alldf = pd.concat([get_indicators(indic_name=i, asset='BTC', start_date='2010-01-01', end_date=end_date)
                       for i in metrics.keys()], axis=1)
    _save_csv(alldf, 'Spent Volume Data')

    # Weight each band by its approximate mid-point age in days to rebuild CDD.
    alldf_weighted = alldf * [0.5 / 24, 12 / 24, 3.5, 19, 60, 3.5 * 30, 9 * 30, 1.5 * 365, 2.5 * 365, 4 * 365, 6 * 365, 8.5 * 365, 11 * 365]
    alldf_weighted['DIY-CDD'] = alldf_weighted.sum(axis=1)

    cdd_df = get_indicators(indic_name='Coin Days Destroyed (CDD)', asset='BTC', start_date='2010-01-01', end_date=end_date)
    alldf_weighted = pd.concat([alldf_weighted, cdd_df], axis=1)
    for i in metrics.keys():
        # Express each band as a 90d-smoothed share of the reconstructed CDD.
        alldf_weighted[i] = (alldf_weighted[i] / alldf_weighted['DIY-CDD']).rolling(90).mean()
    _save_csv(alldf_weighted, 'DIY CDD_Decompose')

    # --- Binary CDD variants: flag days where Supply-Adjusted CDD exceeds its
    # own moving average, then smooth the 0/1 flags over 7/30/90 days.
    # Column order matches the original hand-written version exactly.
    supply_cdd_df = get_indicators(indic_name='Supply-Adjusted CDD', asset=asset, start_date='2011-01-01', end_date=end_date)
    ma_windows = (30, 90, 120, 155)
    for w in ma_windows:
        supply_cdd_df[f'supplycdd_ma{w}'] = supply_cdd_df['Supply-Adjusted CDD'].rolling(w, min_periods=w).mean()
    for w in ma_windows:
        supply_cdd_df[f'binarycdd_ma{w}'] = np.where(supply_cdd_df['Supply-Adjusted CDD'] >= supply_cdd_df[f'supplycdd_ma{w}'], 1, 0)
    for w in ma_windows:
        for smooth in (7, 30, 90):
            supply_cdd_df[f'binarycdd_ma{w}_ma{smooth}'] = supply_cdd_df[f'binarycdd_ma{w}'].rolling(smooth).mean()
    _save_csv(supply_cdd_df, 'binary CDD')

    # --- Stock-to-Flow model inputs (SF ratio vs log market cap).
    total_supply = get_indicators(indic_name='Circulating Supply', asset=asset, start_date='2009-01-03', end_date=end_date)
    total_supply.rename(columns={'Circulating Supply': 'total_supply'}, inplace=True)
    total_supply['daily_mined'] = total_supply['total_supply'].diff()
    # 'roling_yearly_mined' (sic) kept for downstream column-name compatibility.
    total_supply['roling_yearly_mined'] = total_supply['daily_mined'].rolling(365).sum()
    total_supply['sf_ratio'] = total_supply['total_supply'] / total_supply['roling_yearly_mined']
    price_df = get_prices(ohlc=False, asset=asset, start_date='2010-01-01', end_date=end_date)
    sf_df = pd.concat([total_supply, price_df], axis=1)
    sf_df['total_marketcap'] = sf_df['total_supply'] * sf_df['close']
    sf_df['log_total_marketcap'] = np.log10(sf_df['total_marketcap'])
    sf_df['log_sf_ratio'] = np.log10(sf_df['sf_ratio'])
    # BUG FIX: the original reused the previous section's stale file name and
    # overwrote 'binary CDD.csv'; the SF data now gets its own output file.
    _save_csv(sf_df, 'DIY SF Model')

    # --- Stablecoin supply distribution (USDT / USDC / BUSD) vs the BTC market.
    def _fetch_stablecoin(coin):
        # Returns (supply, supply_on_exchanges, supply_in_contracts, supply_outside_contracts).
        supply = get_indicators(indic_name='Circulating Supply', asset=coin, start_date='2009-01-03', end_date=end_date)['Circulating Supply']
        on_exchange = get_indicators(indic_name='Exchange Balance (Total)', asset=coin, start_date='2009-01-03', end_date=end_date)['Exchange Balance (Total)']
        pct_in_contract = get_indicators(indic_name='Supply in Smart Contracts', asset=coin, start_date='2009-01-03', end_date=end_date)['Supply in Smart Contracts']
        return supply, on_exchange, supply.multiply(pct_in_contract), supply.multiply(1 - pct_in_contract)

    per_coin = {coin: _fetch_stablecoin(coin) for coin in ('USDT', 'USDC', 'BUSD')}

    coin_frames = []
    for coin, parts in per_coin.items():
        frame = pd.concat(parts, axis=1)
        lc = coin.lower()
        frame.columns = [f'{lc}_supply', f'{lc}_supply_in_exchange', f'{lc}_supply_in_contract', f'{lc}_supply_not_in_contract']
        coin_frames.append(frame)

    # Component-wise totals across the three stablecoins.
    stablecoin_df = pd.concat([pd.concat([per_coin[c][idx] for c in per_coin], axis=1).sum(axis=1)
                               for idx in range(4)], axis=1)
    stablecoin_df.columns = ['all_stablecoin', 'stablecoin_in_exchange', 'stablecoin_in_contract', 'stablecoin_not_in_contract']

    # assumes get_prices(ohlc=False) yields a single 'close' column so these
    # three labels line up — TODO confirm against the API implementation.
    btc_df = pd.concat([market_cap, price_df, np.log10(price_df)], axis=1)
    btc_df.columns = ['btc_marketcap', 'btc_price', 'btc_log_price']
    stablecoin_analysis_df = pd.concat(coin_frames + [stablecoin_df, btc_df], axis=1)
    stablecoin_analysis_df.sort_index(inplace=True)
    _save_csv(stablecoin_analysis_df, 'DIY Stablecoin Analysis')

    # --- Per-exchange SSR: BTC exchange balance (USD) / stablecoin exchange balance.
    btc_exchange_balance = get_indicators(indic_name='Exchange Balance (Stacked)', asset='BTC', start_date='2009-01-03', end_date=end_date)
    # Convert BTC balances to USD.
    # NOTE(review): price_df is a DataFrame here and DataFrame.mul aligns on
    # columns as well as the index — verify the column layouts are compatible.
    btc_exchange_balance = btc_exchange_balance.mul(price_df, axis=0)
    stacked = {coin: get_indicators(indic_name='Exchange Balance (Stacked)', asset=coin, start_date='2009-01-03', end_date=end_date)
               for coin in ('USDT', 'USDC', 'BUSD')}
    base_index = stacked['USDT'].index
    stablecoin_exchange_balance = (stacked['USDT'].fillna(0)
                                   + stacked['USDC'].reindex(base_index).fillna(0)
                                   + stacked['BUSD'].reindex(base_index).fillna(0))
    exchange_SSR_df = btc_exchange_balance / stablecoin_exchange_balance
    exchange_SSR_df.dropna(how='all', axis=1, inplace=True)
    exchange_SSR_df.columns = [col + '_ssr' for col in exchange_SSR_df.columns]
    # 200-day z-score of each exchange's SSR.
    exchange_SSR_ratio_df = (exchange_SSR_df - exchange_SSR_df.rolling(200).mean()) / exchange_SSR_df.rolling(200).std()
    exchange_SSR_ratio_df.columns = [col + '_ratio' for col in exchange_SSR_ratio_df.columns]
    _save_csv(pd.concat([exchange_SSR_df, exchange_SSR_ratio_df, btc_df], axis=1), 'DIY Exchange SSR')

    # --- Aggregate SSR variants: BTC value (market cap or total exchange
    # balance in USD) over stablecoin supply (total / on exchanges / outside contracts).
    btc_on_exchange_usd = btc_exchange_balance.sum(axis=1)
    SSR_df = pd.concat([market_cap / stablecoin_df['all_stablecoin'],
                        market_cap / stablecoin_df['stablecoin_in_exchange'],
                        market_cap / stablecoin_df['stablecoin_not_in_contract'],
                        btc_on_exchange_usd / stablecoin_df['all_stablecoin'],
                        btc_on_exchange_usd / stablecoin_df['stablecoin_in_exchange'],
                        btc_on_exchange_usd / stablecoin_df['stablecoin_not_in_contract']], axis=1)
    # 'noncontrtact' (sic) kept for downstream column-name compatibility.
    SSR_df.columns = ['SSR_marketcap_marketcap', 'SSR_marketcap_exchange', 'SSR_marketcap_noncontrtact',
                      'SSR_exchange_marketcap', 'SSR_exchange_exchange', 'SSR_exchange_noncontrtact']
    SSR_ratio_df = (SSR_df - SSR_df.rolling(200).mean()) / SSR_df.rolling(200).std()
    SSR_ratio_df.columns = [col.replace('SSR', 'SSR_ratio') for col in SSR_ratio_df.columns]
    _save_csv(pd.concat([SSR_df, SSR_ratio_df, btc_df], axis=1), 'DIY SSR')


def btc_valuation_percentile_analysis():
    """Build BTC valuation indicators, their 7/14/30d MAs, and percentile views.

    For two analysis windows (starting 2015-01-01 and 2017-01-01) this writes
    three CSVs each into GLASSNODE_ANALYSIS_DATA_DIR: expanding historical
    percentiles, whole-history percentiles, and a describe() quantile table,
    all alongside the log10 close price for reference.
    """
    asset = 'BTC'
    end_date = get_today(marker='with_n_dash')
    start_date = '2013-01-01'

    def _fetch(name):
        # Pull one named glassnode indicator series over the analysis window.
        return get_indicators(indic_name=name, asset=asset, start_date=start_date, end_date=end_date)[name]

    # Realized profit/loss metrics.
    entity_sopr = _fetch('Entity-Adjusted SOPR')
    asopr = _fetch('aSOPR')

    # Unrealized profit/loss metrics.
    entity_mvrv = _fetch('Entity-Adjusted MVRV')
    sth_mvrv = _fetch('STH-MVRV')

    # Unrealized profit as a share of market cap.
    relative_unrealized_profit = _fetch('Relative Unrealized Profit')
    entity_relative_unrealized_profit = _fetch('Entity-Adjusted Unrealized Profit')

    # Share of supply in profit, plus a variant that strips dormant (>7y) coins.
    percent_supply_in_profit = _fetch('Percent Supply in Profit')
    supply_in_profit = _fetch('Supply in Profit')
    dormant_7y = _fetch('Supply Last Active 7y-10y')
    dormant_10y = _fetch('Supply Last Active more 10y')
    adjusted_supply = _fetch('Adjusted Supply')
    adjusted_pct_in_profit = (supply_in_profit - dormant_7y - dormant_10y) / adjusted_supply
    adjusted_pct_in_profit.name = 'Adjusted Percent Supply in Profit'

    # 120-day price oscillator: deviation of close from its own 120d MA.
    close = get_prices(ohlc=False, asset=asset, start_date=start_date, end_date=end_date)['close']
    ma120 = close.rolling(120).mean()
    osc120 = (close - ma120) / ma120
    osc120.name = 'OSC120'

    # log10 price, carried through every output for visual reference.
    log_price = np.log10(close)
    log_price.name = 'log_price'

    # Each base series plus its 7/14/30-day moving averages becomes a column.
    base_series = [entity_sopr, asopr, entity_mvrv, sth_mvrv, relative_unrealized_profit,
                   entity_relative_unrealized_profit, percent_supply_in_profit,
                   adjusted_pct_in_profit, osc120]
    columns = []
    for series in base_series:
        columns.append(series)
        columns.extend(series.rolling(w).mean().rename(f'{series.name} ma{w}') for w in (7, 14, 30))
    combined = pd.concat(columns, axis=1)

    for cutoff in ['2015-01-01', '2017-01-01']:
        window_df = combined[cutoff:]
        window_log_price = log_price[cutoff:]

        # Expanding (point-in-time) historical percentile of every column.
        expanding_df = window_df.apply(ts_percentile_rank_score, way='expanding', scale=100, min_periods=100)
        pd.concat([window_log_price, expanding_df], axis=1).to_csv(
            os.path.join(GLASSNODE_ANALYSIS_DATA_DIR, f'valuation_expanding_percentile_analysis_start_{cutoff}.csv'))

        # Percentile against the full window's history.
        whole_df = window_df.apply(ts_percentile_rank_score, way='whole', scale=100, min_periods=100)
        pd.concat([window_log_price, whole_df], axis=1).to_csv(
            os.path.join(GLASSNODE_ANALYSIS_DATA_DIR, f'valuation_whole_percentile_analysis_start_{cutoff}.csv'))

        # Summary quantile table for quick threshold lookups.
        window_df.describe(percentiles=[0.01, 0.05, 0.1, 0.2, 0.4, 0.6, 0.8, 0.9, 0.95, 0.99]).to_csv(
            os.path.join(GLASSNODE_ANALYSIS_DATA_DIR, f'valuation_describe_percentile_analysis_start_{cutoff}.csv'))


if __name__ == '__main__':
    # NOTE: only the percentile analysis runs here; btc_onchain_data_analysis()
    # must be invoked separately to refresh the derived-indicator CSVs.
    btc_valuation_percentile_analysis()
    