import sys
import os
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.append(rootPath)

import numpy as np
import pandas as pd
from matplotlib import pyplot as plt


class FTEvaCore:
    """Single-factor performance evaluation.

    Expects ``factor_exposure`` with columns ``[date, code, <factor_1..n>, r_e]``
    where the trailing ``r_e`` column is the forward return used as the target.
    For every factor column it renders a 4-panel chart (group means, alpha
    decay, long-short and long-only cumulative returns) saved under
    ``_chartpath``.
    """

    _GROUPS_N: int = 10          # number of quantile groups
    _ALPHA_DECAY_LAG: int = 40   # maximum lag (periods) for the IC decay study
    _chartpath = 'charts'        # output directory for saved figures

    def __init__(self, factor_exposure: pd.DataFrame):
        # init
        self.xy = factor_exposure
        # factor columns sit between [date, code] and the trailing return column
        self.ft_list = self.xy.columns[2:-1].tolist()
        # run
        self.eva_by_ft()

    def eva_by_ft(self):
        """Evaluate every factor in ``self.ft_list`` and plot the results."""
        for ft in self.ft_list:
            # Real copy: the original used copy(deep=False) + inplace fillna,
            # which could write the filled zeros back into self.xy.
            xy = self.xy[['date', 'code', ft, 'r_e']].copy()
            xy = xy.fillna(0)
            xy.columns = ['date', 'code', 'x', 'y']

            grps = self.group_N(xy)  # quantile grouping
            ad, hlv = self.get_halflife_value(xy, FTEvaCore._ALPHA_DECAY_LAG)  # alpha decay
            # Orient the factor so a larger exposure implies a larger return.
            direction = int(np.sign(ad.loc[0, 'ic']))
            xy['x'] = xy['x'] * direction
            nav_longshort = self.long_short(xy)
            nav_longonly = self.long_only(xy)
            self.plot_by_ft(ft, grps, ad, nav_longshort, nav_longonly)

    def plot_by_ft(self, ft_name: str, grps, ad, nav_longshort, nav_longonly):
        """Render and save the 4-panel evaluation chart for one factor.

        Panels: (0) group mean return vs. mean exposure with fitted line,
        (1) IC alpha decay, (2) long-short cumulative return,
        (3) long-only cumulative return.
        """
        fig, ax = plt.subplots(4, figsize=(10, 24), layout='constrained')
        # group means
        ax[0].scatter(np.array(grps["mean_x"]), np.array(grps["mean_y"]))
        ax[0].plot(np.array(grps["mean_x"]), np.array(grps["yhat"]), 'r')
        ax[0].set_xlabel('Mean: factor exposure')
        ax[0].set_ylabel('Mean: returns')
        ax[0].set_ylim(-0.01, 0.01)
        ax[0].set_title('Average Return of Group ' + str(FTEvaCore._GROUPS_N))
        # alpha decay
        ax[1].stem(ad["lag"], ad["ic"])
        ax[1].set_xlabel("Lag")
        ax[1].set_ylabel("Information Coefficient")
        ax[1].set_title(' Alpha Decay of Lag ' + str(FTEvaCore._ALPHA_DECAY_LAG))
        # long-short
        ax[2].plot(np.cumsum(nav_longshort["return"]))
        ax[2].axhline(y=0.0, linestyle=':', c='lightgrey')
        ax[2].set_xlabel("Date")
        ax[2].set_ylabel("Cumulative Return")
        ax[2].set_title(' Long-Short Return of Group ' + str(FTEvaCore._GROUPS_N))
        # long-only
        ax[3].plot(np.cumsum(nav_longonly["return"]))
        ax[3].axhline(y=0.0, linestyle=':', c='lightgrey')
        ax[3].set_xlabel("Date")
        ax[3].set_ylabel("Cumulative Return")
        ax[3].set_title(' Long-Only Return of Group 1')

        # wrap up: savefig does not create missing directories, so ensure
        # the chart directory exists before writing.
        fig.suptitle(f"Performance Evaluation: {ft_name.upper()}")
        os.makedirs(FTEvaCore._chartpath, exist_ok=True)
        filename = f"{ft_name}.png"
        fig.savefig(os.path.join(FTEvaCore._chartpath, filename))
        plt.show(block=True)
        plt.close(fig)  # release the figure so repeated factors don't accumulate

    # quantile grouping
    def group_N(self, xy: pd.DataFrame):
        """Split observations into ``_GROUPS_N`` quantile buckets of x.

        Returns a frame with per-bucket mean/std of x and y plus ``yhat``,
        the fitted reference line plotted in panel 0.
        """
        bins = np.percentile(xy['x'], np.linspace(0, 100, FTEvaCore._GROUPS_N + 1))
        # include_lowest: without it pd.cut silently drops the minimum x
        # (it falls outside the first right-closed bin).
        # duplicates='drop': degenerate factors (many repeated values, e.g.
        # after fillna(0)) would otherwise raise on non-monotonic bin edges.
        cuts = pd.cut(xy['x'], bins=bins, include_lowest=True, duplicates='drop')
        # observed=False kept explicit to preserve the current categorical
        # groupby behavior across pandas versions.
        grouped = xy.groupby(cuts, as_index=False, observed=False)[['x', 'y']]
        mean = grouped.mean()
        std = grouped.std()
        mean.columns = ['mean_x', 'mean_y']
        # NOTE(review): slope here is E[x*y] / std(x); an OLS slope would be
        # cov(x, y) / var(x). Kept as-is — presumably intentional, TODO confirm.
        mean['yhat'] = mean['mean_x'] * np.mean(mean['mean_x'] * mean['mean_y']) / np.std(mean['mean_x'])
        std.columns = ['std_x', 'std_y']
        mean = mean.join(std)
        return mean

    @staticmethod
    def _bucket_size(n_rows: int) -> int:
        """Rows per quantile bucket for a cross-section of ``n_rows`` assets.

        Raises ValueError (instead of the original assert, which is stripped
        under ``python -O``) when the cross-section is too small.
        """
        num = int(np.round(n_rows / FTEvaCore._GROUPS_N, 0))
        if num <= 0:
            raise ValueError('cross-section too small for the configured group count')
        return num

    # long-short
    def long_short(self, xy: pd.DataFrame):
        """Per-date top-bucket minus bottom-bucket mean return.

        ``xy`` must hold columns [date, code, x, y] with x already oriented
        so that larger x should mean larger y.
        """
        xy_sorted = xy.copy().sort_values(by=['date', 'x']).reset_index(drop=True)  # sort by factor, ascending

        def inner(x: pd.DataFrame) -> float:
            n = FTEvaCore._bucket_size(x.shape[0])
            grp_head = np.mean(x['y'].iloc[:n])    # bottom bucket (named column, not positional iloc[:, 3])
            grp_tail = np.mean(x['y'].iloc[-n:])   # top bucket
            return grp_tail - grp_head  # (grp_tail - grp_head) / 2

        pnl = xy_sorted.groupby('date', as_index=True).apply(inner).to_frame(name='return')
        return pnl

    # long-only
    def long_only(self, xy: pd.DataFrame):
        """Per-date mean return of the top factor bucket only."""
        xy_sorted = xy.copy().sort_values(by=['date', 'x']).reset_index(drop=True)  # sort by factor, ascending

        def inner(x: pd.DataFrame) -> float:
            n = FTEvaCore._bucket_size(x.shape[0])
            return np.mean(x['y'].iloc[-n:])  # top bucket

        pnl = xy_sorted.groupby('date', as_index=True).apply(inner).to_frame(name='return')
        return pnl

    def get_IC_series(self, xy: pd.DataFrame, groupby_key: str = 'date', method='spearman') -> pd.DataFrame:
        """Per-period information coefficient, indexed by ``groupby_key``.

        Assumes the float columns of ``xy`` are (x, y) in that order; the IC
        is their pairwise ``method`` correlation within each period.
        """
        icall = xy.groupby(groupby_key).apply(
            lambda k: k.select_dtypes('float').corr(method=method).iloc[0, 1]
        ).reset_index()
        icall.rename(columns={0: 'ic'}, inplace=True)
        return icall.set_index(groupby_key)

    @staticmethod
    def calc_halfIC(series: pd.Series):
        """First lag at which |IC| drops below half of the lag-0 |IC|.

        Returns the integer position when such a lag exists, otherwise the
        string ``"> <len>"`` to flag that the IC never decayed below half.
        (The original annotation ``-> (np.array, str)`` was invalid: the
        function returns a single int-or-str value.)
        """
        target = abs(series.iloc[0] / 2)  # .iloc: positional access, series[0] is deprecated
        below = np.where(series.abs() < target)[0]
        if below.size > 0:
            return below[0]
        return f"> {len(series)}"

    def get_halflife_value(self, xy: pd.DataFrame, lag: int = 20):
        """Mean IC for lags 0..lag-1 plus the IC half-life.

        Output row ``lag == i`` pairs exposures with returns ``i + 1`` periods
        ahead (the date counter is advanced before the first merge).

        Returns (DataFrame[lag, ic], half-life from ``calc_halfIC``).
        """
        dates = np.unique(xy["date"])
        date_map = pd.DataFrame({"date": dates, "num_date": np.arange(len(dates))})
        my_xy = pd.merge(xy, date_map, how="left", on=["date"])
        y = my_xy.loc[:, ['num_date', 'code', 'y']].copy()
        x = my_xy.loc[:, ['num_date', 'code', 'x']].copy()
        ics = []
        for i in range(lag):
            x["num_date"] += 1  # shift exposures forward one more period
            this_xy = pd.merge(x, y, how="inner", on=["num_date", "code"])
            mean_ic = self.get_IC_series(this_xy, groupby_key='num_date').mean()
            ics.append([i, mean_ic['ic']])
        df = pd.DataFrame(ics, columns=["lag", "ic"])
        if df.empty:
            # lag == 0: nothing measured; the original crashed inside
            # calc_halfIC on the empty series. Report an unbounded half-life.
            return df, "> 0"
        halflife_value = FTEvaCore.calc_halfIC(df['ic'])
        return df, halflife_value
