"""
This script contains methods for calculating factor indicators.
"""
import pandas as pd
import numpy as np
from typing import Union
from functools import reduce

from lwpackage.lwstats import row_count_at_percentage

EPSILON = 1e-6


class FactorIndicator:
    """
    Calculate factor indicators: IC/ICIR, (net) returns, turnover, sharpe,
    win rate and maximum drawdown for a cross-sectional factor.
    """
    def __init__(self,
                 data: pd.DataFrame,
                 cost: Union[float, None] = 0.00025,
                 fc_freq: str = '1d',
                 performance: bool = False,
                 quantile: float = 0.2,
                 method: str = 'longshort',
                 ir: bool = False,
                 longshort_instrument_number: int = 5):
        """
        Calculating factor indicators including sharpe, ic, turnover, etc.

        :param data: dataframe with a ('time', 'instrument_id') MultiIndex and
            'signal' / 'ret' columns.
        :param cost: one-way trading cost. If None, no net values are
            calculated. Note: a cost of 0 is a valid (free) cost and still
            produces net columns.
        :param fc_freq: the frequency of the factor, 1m, 5m or 1d.
        :param performance: whether to calculate the full performance summary
            immediately.
        :param quantile: the quantile for constructing the long-short
            portfolio. Default 0.2: on every time cross-section we sort
            instruments by factor value; the top 20% form the long position and
            the bottom 20% the short position of a capital-equalized portfolio.
        :param method: the method for calculating sharpe, can be 'longshort'
            or 'long_only' or 'zscore', default is 'longshort'.
        :param ir: if True, also calculate icir.
        :param longshort_instrument_number: number of instruments with the
            largest / smallest signal whose per-rank returns are tracked.
        """
        self.data = data
        self.cost = cost
        self.fc_freq = fc_freq
        self.quantile = quantile
        self.method = method
        self.ir = ir
        self.longshort_instrument_number = longshort_instrument_number

        self._basic_format_check()

        assert self.method in ['longshort', 'long_only', 'zscore'], \
            'Only supports longshort or long-only or zscore method for calculating sharpe, etc.'

        # cost=None disables net values entirely; cost=0 is a legitimate
        # (free) trading cost and still yields net columns.
        if self.cost is not None:
            self.ret_cols = ['gross_ret', 'net_ret',
                             'gross_ret_ts', 'net_ret_ts']
        else:
            self.ret_cols = ['gross_ret', 'gross_ret_ts']

        # Deal with the nan values of signal *before* any computation; this is
        # important for the accuracy of every calculation below.
        self.data.dropna(axis=0, how='any', inplace=True)

        # Indicator placeholders. They are initialised *before* get_ret()
        # runs; a previous revision assigned them afterwards and thereby
        # clobbered everything get_ret() had just computed.
        self.ic = None
        self.icir = None
        self.ret_daily = None
        self.ret_daily_ts = None  # time series daily return, for the calculation of time series sharpe, etc
        self.ret_summary = None
        self.longshort_ret = None
        self.turnover = None
        self.daily_average_turnover = None
        self.sharpe = None
        self.annualized_return = None
        self.annualized_volatility = None
        self.win_rate = None
        self.dd = None
        self.max_dd = None

        # time range of the backtesting period.
        self.time_range = None

        # performance summary
        self.performance = None
        self.performance_all = None

        self.is_ret_calculated = False
        self.get_ret()

        if performance:
            self.get_performance()

    def _basic_format_check(self):
        """Validate the required columns and the ('time', 'instrument_id') MultiIndex."""
        for col in ['signal', 'ret']:
            assert col in self.data.columns, f'df does not contain column {col}.'
        assert 'time' == self.data.index.names[0], f'{self.data.index.names[0]}'
        assert 'instrument_id' == self.data.index.names[1], f'{self.data.index.names[1]}'

    def get_ic(self):
        """
        Get IC (pearson) and rank IC (spearman) per year plus an 'all' row;
        optionally ICIR (mean / std of the per-bar ICs) when ``self.ir`` is set.
        """
        num_ic = self.data.groupby('time').corr('pearson').loc[(slice(None), 'signal'), ['ret']].droplevel(1)
        rank_ic = self.data.groupby('time').corr('spearman').loc[(slice(None), 'signal'), ['ret']].droplevel(1)

        ic_all = pd.concat([num_ic, rank_ic], axis=1)
        ic_all.columns = ['IC', 'Rank IC']
        ic_year = ic_all.copy()
        ic_year.reset_index(inplace=True)
        ic_year['year'] = pd.to_datetime(ic_year['time']).dt.year
        ic_year = ic_year.groupby('year')[['IC', 'Rank IC']].mean()

        ic_all = ic_all.mean().to_frame('all').T
        self.ic = pd.concat([ic_year, ic_all])

        if self.ir:
            ic_ir = pd.concat([num_ic, rank_ic], axis=1)
            ic_ir.columns = ['icir', 'rank_icir']
            ic_ir_year = ic_ir.copy()
            ic_ir_year.reset_index(inplace=True)
            ic_ir_year['year'] = pd.to_datetime(ic_ir_year['time']).dt.year
            # ICIR = mean(IC) / std(IC), per year and over the whole period
            ic_ir_year = ic_ir_year.groupby('year')[['icir', 'rank_icir']].apply(lambda x: x.mean() / x.std())

            ic_ir_all = (ic_ir.mean() / ic_ir.std()).to_frame('all').T
            self.icir = pd.concat([ic_ir_year, ic_ir_all])

    def _quantile_mean_ret(self, largest: bool) -> pd.Series:
        """Per-time mean 'ret' of the top (largest=True) / bottom quantile by signal."""
        picker = pd.DataFrame.nlargest if largest else pd.DataFrame.nsmallest
        return self.data.groupby('time').apply(
            lambda x: picker(x, row_count_at_percentage(x, self.quantile), columns='signal')['ret'].mean())

    def _ranked_leg_ret(self, time_index, largest: bool) -> pd.DataFrame:
        """Per-rank returns of the n largest / smallest-signal instruments on each bar."""
        n = self.longshort_instrument_number
        picker = pd.DataFrame.nlargest if largest else pd.DataFrame.nsmallest
        leg = self.data.groupby('time').apply(
            lambda x: picker(x, n, columns='signal')).droplevel([1, 2])
        prefix = 'ret_long' if largest else 'ret_short'
        out = pd.DataFrame(index=time_index)
        for i in range(n):
            mapper = leg.groupby('time').nth(i)['ret']
            col = f'{prefix}_{n - i}'
            out[col] = out.index.map(mapper)
            # bars with fewer than n instruments leave gaps; backfill them
            out[col] = out[col].bfill()
        return out

    def get_ret(self):
        """
        Get daily return. For 1d freq data, calculate directly; for 1m and 5m
        freq data, daily ret is aggregated by simple interest.
        """
        # attention: if long only, the risk-free rate will have no effect on the calculation of sharpe.
        if self.method == 'longshort':
            ret_long = self._quantile_mean_ret(largest=True)
            ret_short = self._quantile_mean_ret(largest=False)
            ret_total = ((ret_long - ret_short) / 2).to_frame('gross_ret')

        elif self.method == 'long_only':
            ret_total = self._quantile_mean_ret(largest=True).to_frame('gross_ret')

        elif self.method == 'zscore':
            # zscore weights must (approximately) sum to zero on every
            # cross-section; abs() also catches large *negative* sums, which
            # the plain max() check silently let through.
            assert self.data['signal'].groupby('time').sum().abs().max() < EPSILON
            ret_total = self.data.groupby('time').apply(lambda x: (x.signal * x.ret).sum()).to_frame('gross_ret')
            # Too high or too low value for bar ret is not reasonable, and will
            # cause overflow of the calculation of sharpe, so winsorize at
            # mean +/- 3 std.
            # todo: calculation result is wield here.
            threshold_upper = ret_total['gross_ret'].mean() + 3 * ret_total['gross_ret'].std()
            threshold_lower = ret_total['gross_ret'].mean() - 3 * ret_total['gross_ret'].std()
            ret_total['gross_ret'] = ret_total['gross_ret'].clip(threshold_lower, threshold_upper)

        else:
            raise NotImplementedError(f'Do not support method {self.method}.')

        if self.cost is not None:
            assert self.quantile
            self.get_turnover()
            if self.method == 'longshort':
                col = 'longshort'
            elif self.method == 'long_only':
                col = 'long'
            else:
                col = 'turnover'
            ret_total['net_ret'] = ret_total['gross_ret'] - self.turnover[col] * self.cost

        # for 1m and 5m freq data, we calculate the daily ret by simple
        # interest. This is because the calculation of compound interest within
        # a day tends to enlarge the volatility.
        ret_total.dropna(axis=0, how='any', inplace=True)
        if self.fc_freq != '1d':
            # the problem here is, the ret summed by date can be very large,
            # like 0.4, causing the average daily ret also very large, then the
            # annualized return will be 0.4 * 252 = 105, also very large.
            ret_total.reset_index(inplace=True)
            ret_total['date'] = pd.to_datetime(ret_total['time']).dt.date
            # sum means simple interest. Only aggregate the columns that exist
            # at this point: the *_ts columns are attached below, and selecting
            # them here used to raise a KeyError.
            agg_cols = [c for c in self.ret_cols if c in ret_total.columns]
            ret_total = ret_total.groupby('date')[agg_cols].sum()
            ret_total = ret_total.reset_index().rename(columns={'date': 'time'}).set_index('time')

        # time series gross ret and net ret
        # time series way of calculating ret and turnover is simpler
        gross_ret_ts = (self.data['signal'] * self.data['ret']).groupby('time').mean()
        ret_total['gross_ret_ts'] = ret_total.index.map(gross_ret_ts)
        if self.cost is not None:
            # NOTE(review): this diffs 'ret' rather than 'signal'; a turnover
            # proxy would normally diff the *positions* (signal) — confirm
            # the intent with the author before changing it.
            turnover_ts = self.data.groupby('instrument_id')['ret'].diff().abs() * self.cost
            turnover_ts = turnover_ts.groupby('time').mean()
            ret_total['turnover_ts'] = ret_total.index.map(turnover_ts)
            ret_total['net_ret_ts'] = ret_total['gross_ret_ts'] - ret_total['turnover_ts']
            ret_total.drop(columns=['turnover_ts'], inplace=True)

        # longshort ret, i.e. the ret for n-largest and n-smallest signal values.
        df_long_ret = self._ranked_leg_ret(ret_total.index, largest=True)
        df_short_ret = self._ranked_leg_ret(ret_total.index, largest=False)

        # reverse the long columns so they read ret_long_1 .. ret_long_n
        self.longshort_ret = pd.concat([df_short_ret, df_long_ret.iloc[:, ::-1]], axis=1)
        self.ret_daily = ret_total
        self.is_ret_calculated = True

    def _quantile_target_weights(self, largest: bool, sign: int) -> pd.DataFrame:
        """Equal signed weights for the top/bottom quantile on every cross-section."""
        picker = pd.DataFrame.nlargest if largest else pd.DataFrame.nsmallest
        target = self.data.groupby('time', as_index=False).apply(
            lambda x: picker(x, row_count_at_percentage(x, self.quantile), columns='signal')).droplevel(0)
        # equal weight within each cross-section; sign = +1 long leg, -1 short leg
        target['weight'] = sign / target.index.get_level_values('time').map(
            target.groupby('time').size())
        return target

    def get_turnover(self):
        """
        Get turnover per bar and daily average turnover (per year plus 'all').
        """
        if self.method == 'long_only':
            target_long = self._quantile_target_weights(largest=True, sign=1)
            assert (target_long.groupby('time')['weight'].sum() - 1).abs().sum() < EPSILON

            target_long = target_long['weight'].unstack().fillna(0)
            turnover_long = target_long.diff().abs().sum(axis=1)

            turnover = pd.DataFrame(data={'long': turnover_long})
            daily_average_turnover = self._get_daily_turnover(turnover, ['long'])
            daily_average_turnover.columns = ['Long Daily Turnover']
        elif self.method == 'longshort':
            # target long: weights sum to +1 on every cross-section
            target_long = self._quantile_target_weights(largest=True, sign=1)
            assert (target_long.groupby('time')['weight'].sum() - 1).abs().sum() < EPSILON

            # target short: weights sum to -1 on every cross-section
            target_short = self._quantile_target_weights(largest=False, sign=-1)
            assert (target_short.groupby('time')['weight'].sum() + 1).abs().sum() < EPSILON

            # target longshort: instruments in both legs cancel out entirely
            # (drop_duplicates keep=False removes both occurrences)
            target_long_short = (
                pd.concat([target_long['weight'], target_short['weight']])
                .reset_index()
                .drop_duplicates(subset=['time', 'instrument_id'], keep=False)
                .set_index(['time', 'instrument_id'])
                .unstack(level=-1)
                .fillna(0)
            )
            # net exposure of the combined book must be ~0 on every bar
            assert all(target_long_short.sum(axis=1).abs() < EPSILON)

            target_long = target_long['weight'].unstack().fillna(0)
            turnover_long = target_long.diff().abs().sum(axis=1)

            target_short = target_short['weight'].unstack().fillna(0)
            turnover_short = target_short.diff().abs().sum(axis=1)

            # turnover of every trade day; / 2 because the combined book has
            # twice the gross exposure of one leg.
            turnover_long_short = target_long_short.diff().abs().sum(axis=1) / 2

            turnover = pd.DataFrame(data={'long': turnover_long,
                                          'short': turnover_short,
                                          'longshort': turnover_long_short})

            daily_average_turnover = self._get_daily_turnover(turnover, ['long', 'short', 'longshort'])
            daily_average_turnover.columns = ['Long Daily Turnover', 'Short Daily Turnover',
                                              'Longshort Daily Turnover']
        elif self.method == 'zscore':
            target = self.data['signal'].unstack(level=-1).diff().abs().sum(axis=1)
            turnover = pd.DataFrame(data={'turnover': target})
            daily_average_turnover = self._get_daily_turnover(turnover, ['turnover'])
            daily_average_turnover.columns = ['Daily Turnover']
        else:
            raise NotImplementedError(f'Do not support method {self.method}.')

        # turnover is the turnover of each bar with corresponding freq
        self.turnover = turnover
        # daily average turnover is the daily average turnover of every year and the total daily average turnover
        self.daily_average_turnover = daily_average_turnover

    @staticmethod
    def _get_daily_turnover(turnover, cols):
        """
        Aggregate per-bar turnover to per-day sums, then average per year and
        over the whole period (the 'all' row).
        """
        daily_average_turnover = turnover.copy()
        daily_average_turnover.reset_index(inplace=True)
        daily_average_turnover['date'] = pd.to_datetime(daily_average_turnover['time']).dt.date
        daily_average_turnover = daily_average_turnover.groupby('date')[cols].sum()
        daily_average_turnover.reset_index(inplace=True)
        daily_average_turnover['year'] = pd.to_datetime(daily_average_turnover['date']).dt.year
        daily_average_turnover_year = daily_average_turnover.groupby('year')[cols].mean()
        daily_average_turnover_all = daily_average_turnover[cols].mean().to_frame('all').T
        return pd.concat([daily_average_turnover_year, daily_average_turnover_all])

    def get_sharpe(self):
        """
        Calculate the day-frequency compounded sharpe, using daily ret and daily volatility.
        for detailed formula, referring to https://zhuanlan.zhihu.com/p/611233233
        """
        if not self.is_ret_calculated:
            self.get_ret()
        # `is None` checks are required: DataFrame truthiness raises
        # ValueError, so `not self.annualized_return` crashed once calculated.
        if self.annualized_return is None:
            self.get_annualized_return()
        if self.annualized_volatility is None:
            self.get_annualized_volatility()

        ret = self.annualized_return.copy()
        volatility = self.annualized_volatility.copy()
        # align column labels so the division matches element-wise
        ret.columns = volatility.columns

        sharpe_ratio = ret / volatility
        if self.cost is not None:
            sharpe_ratio.columns = ['Gross Sharpe', 'Net Sharpe',
                                    'Gross TS-Sharpe', 'Net TS-Sharpe']
        else:
            sharpe_ratio.columns = ['Gross Sharpe', 'Gross TS-Sharpe']

        self.sharpe = sharpe_ratio

    def get_annualized_volatility(self):
        """
        Get annualized volatility for every year in data and total annualized volatility.
        """
        if not self.is_ret_calculated:
            self.get_ret()
        volatility = self.ret_daily.astype(float).reset_index()
        volatility['year'] = pd.to_datetime(volatility['time']).dt.year
        # annualize the daily std with sqrt(252)
        volatility_year = volatility.groupby('year')[self.ret_cols].std() * np.sqrt(252)
        volatility_all = (volatility_year.mean()).to_frame('all').T

        volatility = pd.concat([volatility_year, volatility_all])
        if self.cost is not None:
            volatility.columns = ['Gross Annualized Volatility', 'Net Annualized Volatility',
                                  'Gross TS-Annualized Volatility', 'Net TS-Annualized Volatility']
        else:
            volatility.columns = ['Gross Annualized Volatility', 'Gross TS-Annualized Volatility']

        self.annualized_volatility = volatility

    def get_annualized_return(self):
        """
        Get annualized return for every year in data and total annualized return.
        """
        if not self.is_ret_calculated:
            self.get_ret()

        ls_ret = self.ret_daily.astype(float)
        ls_ret.reset_index(inplace=True)
        ls_ret['year'] = pd.to_datetime(ls_ret['time']).dt.year
        # simple-interest annualization: mean daily ret * 252 trading days
        ls_ret_year = ls_ret.groupby('year')[self.ret_cols].mean() * 252
        ls_ret_all = ls_ret_year.mean().to_frame('all').T

        ls_ret = pd.concat([ls_ret_year, ls_ret_all])

        if self.cost is not None:
            ls_ret.columns = ['Gross Annualized Return', 'Net Annualized Return',
                              'Gross TS-Annualized Return', 'Net TS-Annualized Return']
        else:
            ls_ret.columns = ['Gross Annualized Return', 'Gross TS-Annualized Return']

        self.annualized_return = ls_ret

    def get_win_rate(self):
        """
        Get win rate, which is the ratio of profitable trades.
        """
        if not self.is_ret_calculated:
            self.get_ret()

        win_rate = self.ret_daily.reset_index()
        win_rate['year'] = pd.to_datetime(win_rate['time']).dt.year
        win_rate_year, win_rate_all = [], []
        for col in self.ret_cols:
            win_rate_year.append(win_rate.groupby('year')[col].apply(lambda x: len(x[x > 0]) / len(x)))
            win_rate_col = pd.Series(len(win_rate[col][win_rate[col] > 0]) / len(win_rate[col]))
            win_rate_col.index = [col]
            win_rate_all.append(win_rate_col.to_frame('all').T)
        win_rate_year = pd.concat(win_rate_year, axis=1)
        win_rate_all = pd.concat(win_rate_all, axis=1)
        win_rate = pd.concat([win_rate_year, win_rate_all])
        if self.cost is not None:
            win_rate.columns = ['Gross Win Rate', 'Net Win rate',
                                'Gross TS-Win Rate', 'Net TS-Win rate']
        else:
            win_rate.columns = ['Gross Win Rate', 'Gross TS-Win Rate']

        self.win_rate = win_rate

    def get_drawdown(self):
        """
        Get drawdown by simple rate.
        """
        if not self.is_ret_calculated:
            self.get_ret()

        # NAV by simple interest (cumulative sum, not cumulative product)
        ret_daily_nav = 1 + self.ret_daily.cumsum(axis=0)

        # maximum_drawdown = max((nav - max(nav)) / max(nav))
        # if lost all money, just stop. This algorithm does not consider adding
        # extra capital. So the maximum drawdown is 1.
        dd = 1 - ret_daily_nav / ret_daily_nav.cummax()

        max_dd_year = self.ret_daily.reset_index()
        max_dd_year['year'] = pd.to_datetime(max_dd_year['time']).dt.year
        max_dd_year = max_dd_year.groupby('year')[self.ret_cols].apply(self._get_max_dd)
        max_dd_all = dd.max().to_frame('all').T
        max_dd = pd.concat([max_dd_year, max_dd_all])
        if self.cost is not None:
            max_dd.columns = ['Max Gross Drawdown', 'Max Net Drawdown',
                              'TS-Max Gross Drawdown', 'TS-Max Net Drawdown']
        else:
            max_dd.columns = ['Max Gross Drawdown', 'TS-Max Gross Drawdown']

        self.dd = dd
        self.max_dd = max_dd

    @staticmethod
    def _get_max_dd(ret):
        """Maximum drawdown of a simple-interest NAV built from `ret`."""
        ret_nav = 1 + ret.cumsum(axis=0)
        dd = 1 - ret_nav / ret_nav.cummax()
        return dd.max()

    def get_performance(self):
        """
        Calculate every indicator and merge them into one per-year summary
        table plus a single-row 'all' summary.
        """
        self.get_ic()
        if not self.is_ret_calculated:
            self.get_ret()
        if self.turnover is None:
            self.get_turnover()
        self.get_sharpe()
        self.get_win_rate()
        self.get_drawdown()

        datetime_index = self.data.index.get_level_values('time')
        start_year = datetime_index.min().year
        end_year = datetime_index.max().year
        # end_year + 1: range() excludes its stop value, and the last year of
        # the backtest must appear in the summary too (previous off-by-one).
        df_time = pd.DataFrame(data={'year': list(range(start_year, end_year + 1))})
        df_time.loc[len(df_time)] = 'all'
        df_list = [self.ic, self.sharpe, self.annualized_return, self.win_rate, self.annualized_volatility,
                   self.daily_average_turnover, self.max_dd]
        for i, df in enumerate(df_list):
            assert df is not None, f'Indicator in df_list with index {i} is not calculated.'
            df.index.name = 'year'
        df_list = [df_time] + df_list
        self.performance = reduce(lambda x, y: pd.merge(x, y, on='year', how='left', validate='1:1'), df_list)
        self.performance_all = self.performance.loc[self.performance['year'] == 'all']

