#!/usr/bin/python
# -*-coding:utf-8-*-

'''Factor crowding computation.'''

import os
import statsmodels.api as sm
import pandas as pd
import numpy as np

# import sys
# import pickle
# import copy
# from time import time
# import datetime
# from datetime import timedelta
# from scipy import stats
# import re
# from tqdm import tqdm

try:
    try:
        from zg_factor_analysis_module.base.FactorAnalysisLibDerivative import FactorAnalysisLibDerivative
        from zg_factor_analysis_module.factor_test import Factor_Analysis
    except:
        from .base.FactorAnalysisLibDerivative import FactorAnalysisLibDerivative
        from .factor_test import Factor_Analysis
except:
    from base.FactorAnalysisLibDerivative import FactorAnalysisLibDerivative
    from factor_test import Factor_Analysis

# NOTE: relative paths — not available in every deployment; verify before use
zbc_basic_data_dir= './zg_factors_lib/zbc/basic_data'
zbc_check_data_dir= './zg_factors_lib/zbc/check_data'

class Factor_Crowding(Factor_Analysis):
    """Factor crowding metrics.

    Combines four standardized signals into an integrated crowding score
    for a given factor:

    * valuation spread (bp/sp log spread, ep raw spread) between the top
      and bottom factor quantiles,
    * factor reversal (rolling cumulative long-short return),
    * pairwise return correlation inside the extreme quantiles,
    * long/short volatility ratio.

    Each signal is z-scored over time and the score is their mean.
    """

    def __init__(self, start_date, end_date, quantile=10, rebalance=21, pool='000905', dir_name='public'):
        """Set up calendars and the rebalance schedule.

        Parameters
        ----------
        start_date, end_date : str or datetime-like
            Requested analysis window; it is subsequently clamped to the
            rebalance dates found in the reference position file.
        quantile : int
            Number of factor groups (the code assumes 10: groups 'q1'/'q10').
        rebalance : int
            Rebalance period in trading days (informational).
        pool : str
            Benchmark/stock-pool code.
        dir_name : str
            Sub-directory name used by the parent class.
        """
        super(Factor_Crowding, self).__init__(start_date, end_date)
        self.start_date = start_date
        self.end_date = end_date
        self.pool = pool
        self.dir_name = dir_name
        self.quantile = quantile
        self.rebalance = rebalance
        # Full trading calendar, de-duplicated on trade_date.
        self.trade_date = self.read_basic_data_table(filename='processed_trade_date_data')\
            .drop_duplicates(subset=['trade_date']).reset_index(drop=True)
        self.trading_days = self.trade_date.loc[
            (self.trade_date['trade_date'] >= pd.to_datetime(self.start_date)) &
            (self.trade_date['trade_date'] <= pd.to_datetime(self.end_date)),
            'trade_date'].tolist()
        # Borrow one factor's position file to recover the rebalance
        # schedule, then clamp the analysis window to it.
        self.position_data = self.get_factor_position_data('tech_turnover_cv_1m')
        self.rebalance_date = self.position_data.index.get_level_values('date').unique().tolist()
        self.start_date = self.rebalance_date[0]
        self.end_date = self.rebalance_date[-1]
        self.trade_days = self.get_trade_date(self.start_date, self.end_date)
        self.trade_date_map = self.get_trade_date_map()

    def initialize_all(self):
        """Load valuation factors, daily quotes and the FF3 factor table."""
        self.bp = self.get_total_factor_data('valuation_bp')
        self.ep = self.get_total_factor_data('valuation_ep_ttm')
        self.sp = self.get_total_factor_data('valuation_sp_ttm')
        self.daily_trade_data = self.get_trade_data()
        # Daily return per stock from the back-adjusted close.
        self.daily_trade_data['quote_rate'] = \
            self.daily_trade_data.groupby('code')['close_back'].pct_change(1)
        self.quote_rate = pd.pivot_table(self.daily_trade_data.reset_index(),
                                         index='date', columns='code', values='quote_rate')
        self.FF3_data = pd.read_hdf(os.path.join(zbc_basic_data_dir, 'FF3_reg_factor_data.h5'))
        self.FF3_data.set_index('date', inplace=True)
        print('initialize finish !')

    def get_factor_return_data(self, factor_name):
        """Read the quantile-return time series for *factor_name* from CSV.

        The file-name pattern encodes the test setup (equal weight, 000905
        pool, 21-day rebalance) and must match the producing pipeline.
        """
        df = pd.read_csv(os.path.join('/db/zg_factors_lib/wsc/factor_analysis_result/factor_quantile_return_result',
                                      factor_name+' factor groupby None equal weight 000905 pool 21 quantile return.csv'),
                         index_col=0)
        return df.dropna()

    def get_factor_position_data(self, factor_name):
        """Read the per-quantile holdings for *factor_name*, indexed by (date, quantile)."""
        df = pd.read_csv(os.path.join('/db/zg_factors_lib/wsc/factor_analysis_result/factor_quantile_return_result',
                                      factor_name + ' factor groupby None equal weight 000905 pool 21 daily_position.csv'))
        df.set_index(['date', 'quantile'], inplace=True)
        return df

    def get_group_stocks(self, position_data, date):
        """Return (bottom_stocks, top_stocks) held on the rebalance date governing *date*.

        NOTE: group labels 'q1'/'q10' assume quantile == 10.
        """
        rebalance_day = self.trade_date_map[date]
        day_positions = position_data.xs(rebalance_day)
        bottom_stocks = day_positions.loc['q1']['stock_code'].tolist()
        top_stocks = day_positions.loc['q10']['stock_code'].tolist()
        return bottom_stocks, top_stocks

    def get_trade_date_map(self):
        """Map every trading day to the rebalance date whose holdings cover it.

        A day in [rebalance[i], rebalance[i+1]) maps to rebalance[i]; a day
        equal to the last rebalance date maps to itself.  Days before the
        first rebalance date are dropped.
        """
        rebalance_raw = list(self.position_data.index.get_level_values('date').unique())
        trade_date_map = pd.Series(index=self.trading_days, dtype=object)
        for day in trade_date_map.index:
            key = day.strftime('%Y-%m-%d')
            # BUG FIX: the original referenced the inner loop variable `i`
            # after the loop ended (NameError when < 2 rebalance dates); the
            # last-date case is now handled explicitly.
            for i in range(len(rebalance_raw) - 1):
                if rebalance_raw[i] <= key < rebalance_raw[i + 1]:
                    trade_date_map[day] = rebalance_raw[i]
                    break
            if rebalance_raw and key == rebalance_raw[-1]:
                trade_date_map[day] = rebalance_raw[-1]
        return trade_date_map.dropna()

    def cal_daily_log_spread(self, df, position_data, factor_direction=1):
        """Log ratio of top-group to bottom-group median for one day's slice.

        *df* is a (date, stock_code)-indexed single-column frame for one
        date (as produced by groupby('date').apply).  Returns NaN when the
        day cannot be evaluated (missing date/stocks).
        """
        data = df.copy()
        try:
            date = data.index[0][0]
            data.index = data.index.droplevel('date')
            bs, ts = self.get_group_stocks(position_data, date)
            # .ix was removed in pandas 1.0: label selection via .loc,
            # first (only) value column via .iloc[0].
            spread = np.log(data.loc[ts].median().iloc[0] / data.loc[bs].median().iloc[0])
            return spread if factor_direction == 1 else -1 * spread
        except Exception as e:
            print(e)
            return np.nan

    def cal_daily_minus_spread(self, df, position_data, factor_direction=1):
        """Raw difference of top-group minus bottom-group median for one day's slice.

        Same contract as cal_daily_log_spread but without the log transform.
        """
        data = df.copy()
        try:
            date = data.index[0][0]
            data.index = data.index.droplevel('date')
            bs, ts = self.get_group_stocks(position_data, date)
            spread = data.loc[ts].median().iloc[0] - data.loc[bs].median().iloc[0]
            return spread if factor_direction == 1 else -1 * spread
        except Exception as e:
            print(e)
            return np.nan

    def cal_log_spread(self, median_data, factor_direction=1):
        """Log valuation spread between group 1 and group `quantile` per date.

        *median_data* is a Series indexed by (trade_date, group).
        """
        df = median_data.copy().reset_index('group')
        # Division aligns the two per-date Series on trade_date.
        spread = np.log(df.loc[df['group'] == 1, median_data.name] /
                        df.loc[df['group'] == self.quantile, median_data.name])
        return spread if factor_direction == 1 else -1 * spread

    def cal_minus_spread(self, median_data, factor_direction=1):
        """Raw valuation spread between group 1 and group `quantile` per date."""
        df = median_data.copy().reset_index('group')
        spread = (df.loc[df['group'] == 1, median_data.name] -
                  df.loc[df['group'] == self.quantile, median_data.name])
        # BUG FIX: the original computed `-1*a - b` (missing parentheses);
        # the reversed direction must negate the whole difference, matching
        # cal_log_spread and cal_daily_minus_spread.
        return spread if factor_direction == 1 else -1 * spread

    def rolling_zscore(self, data, window=1000000):
        """Rolling z-score; the huge default window makes it an expanding z-score.

        min_periods=1 keeps early observations (first value's std is NaN).
        """
        rolling_mean = data.rolling(window, min_periods=1).mean()
        rolling_std = data.rolling(window, min_periods=1).std()
        return (data - rolling_mean) / rolling_std

    def cal_factor_valuation_spread_new(self, factor_direction=1):
        """Position-file based valuation spread (bp/sp log, ep raw), z-scored and averaged.

        Requires self.factor_position_data (set by the integrated-score
        driver) and the valuation tables loaded by initialize_all.
        """
        factor_position_data = self.factor_position_data.copy()
        temp = pd.DataFrame(index=pd.to_datetime(self.trade_days),
                            columns=['bp_spread', 'sp_spread', 'ep_spread'])
        bp_spread = self.bp.groupby('date').apply(
            lambda x: self.cal_daily_log_spread(x, factor_position_data, factor_direction))
        sp_spread = self.sp.groupby('date').apply(
            lambda x: self.cal_daily_log_spread(x, factor_position_data, factor_direction))
        # BUG FIX: ep_spread was computed from self.sp (copy-paste); use self.ep.
        ep_spread = self.ep.groupby('date').apply(
            lambda x: self.cal_daily_minus_spread(x, factor_position_data, factor_direction))
        temp.loc[bp_spread.index, 'bp_spread'] = bp_spread
        temp.loc[sp_spread.index, 'sp_spread'] = sp_spread
        temp.loc[ep_spread.index, 'ep_spread'] = ep_spread
        valuation_spread = temp.apply(lambda x: self.rolling_zscore(x)).dropna()
        valuation_spread['valuation_spread'] = valuation_spread.mean(axis=1)
        return valuation_spread

    def cal_valuation_spread_all(self, factor_name):
        """Self-contained valuation spread: regroup the factor itself into quantiles.

        Unlike cal_factor_valuation_spread_new this does not depend on a
        pre-computed position file; it ranks the factor per date with qcut.
        """
        factor = self.factor_data = self.read_factor_table(factor_name)
        factor.rename(columns={factor_name: 'factor'}, inplace=True)
        total_data = pd.concat([factor, self.bp, self.ep, self.sp], axis=1).dropna()
        total_data = total_data.reset_index()
        total_data['trade_date'] = total_data['date'].copy()
        total_data.set_index(['date', 'stock_code'], inplace=True)
        # rank(method='first') breaks ties so qcut bins are equal-sized.
        group = total_data.groupby('trade_date').apply(
            lambda x: pd.qcut(x['factor'].rank(method='first'), self.quantile, labels=False) + 1)
        group.index = group.index.droplevel('trade_date')
        total_data['group'] = group
        bp_median = total_data.groupby(['trade_date', 'group'])['valuation_bp'].median()
        bp_spread = self.cal_log_spread(bp_median)
        sp_median = total_data.groupby(['trade_date', 'group'])['valuation_sp_ttm'].median()
        sp_spread = self.cal_log_spread(sp_median)
        ep_median = total_data.groupby(['trade_date', 'group'])['valuation_ep_ttm'].median()
        ep_spread = self.cal_minus_spread(ep_median)
        valuation_spread = pd.concat([bp_spread, sp_spread, ep_spread], axis=1)
        result = valuation_spread.apply(lambda x: self.rolling_zscore(x)).dropna()
        result['valuation_spread'] = result.mean(axis=1)
        return result

    def cal_factor_reversal(self, cal_window=63, rolling_window=252, factor_direction=1):
        """Rolling cumulative long-short return, z-scored over *rolling_window*."""
        if factor_direction == 1:
            factor_ret = self.factor_ret_data['top_to_bottom'].copy()
        else:
            factor_ret = -1 * self.factor_ret_data['top_to_bottom'].copy()
        factor_reversal = factor_ret.rolling(cal_window).sum()
        return self.rolling_zscore(factor_reversal, rolling_window).dropna()

    def cal_factor_volatility(self, cal_window=63, method='long', factor_direction=1):
        """Z-scored volatility ratio of the factor legs.

        method: 'long' (long leg / benchmark), 'long_short' (long / short)
        or anything else (short leg / benchmark).
        """
        factor_return = self.factor_ret_data.copy()
        if factor_direction == 1:
            factor_return['long_vol'] = factor_return['q10'].rolling(cal_window).std()
            factor_return['short_vol'] = factor_return['q1'].rolling(cal_window).std()
        else:
            factor_return['long_vol'] = factor_return['q1'].rolling(cal_window).std()
            factor_return['short_vol'] = factor_return['q10'].rolling(cal_window).std()
        factor_return['bench_vol'] = factor_return['bench'].rolling(cal_window).std()
        # BUG FIX: `is 'long_short'` compared string identity, which is
        # implementation-dependent; use equality.
        if method == 'long_short':
            factor_return['factor_vol'] = factor_return['long_vol'] / factor_return['short_vol']
        elif method == 'long':
            factor_return['factor_vol'] = factor_return['long_vol'] / factor_return['bench_vol']
        else:
            factor_return['factor_vol'] = factor_return['short_vol'] / factor_return['bench_vol']
        factor_return['factor_vol_std'] = self.rolling_zscore(factor_return['factor_vol'])
        return factor_return['factor_vol_std'].dropna()

    def cal_resid_ret(self, df):
        """Residual returns after regressing on the FF3 factors (no intercept)."""
        regressors = self.FF3_data.loc[pd.to_datetime(df.index),
                                       ['cir_cap_weight_MKT', 'SMB_cir_cap_weight',
                                        'HML_cir_cap_weight']].fillna(0)
        return sm.OLS(df.fillna(0), regressors).fit().resid

    def cal_pairwise_c(self, df, residual=False):
        """Mean correlation of each stock vs the equal-weight mean of the others."""
        corr_temp = pd.Series(index=df.columns, dtype=float)
        if residual:
            df = self.cal_resid_ret(df)
        for stock in df.columns:
            stock_ret = df[stock].fillna(0)
            rest_ret = df.drop(stock, axis=1).mean(axis=1)
            corr_temp[stock] = np.corrcoef(stock_ret, rest_ret)[0][1]
        return corr_temp.mean()

    def cal_pairwise_correlation(self, cal_window=63, method='long', factor_direction=1, residual=True):
        """Rolling pairwise correlation inside the extreme quantiles, z-scored.

        method 'long' uses the long leg only; otherwise long + short.
        """
        factor_position_data = self.factor_position_data.copy()
        pairwise_correlation = pd.DataFrame(index=self.trade_days[cal_window:],
                                            columns=['long', 'short'])
        for date in self.trade_days[cal_window:]:
            pos = self.trade_days.index(date)
            cal_date = self.trade_days[pos - cal_window + 1:pos + 1]
            bs, ts = self.get_group_stocks(factor_position_data, date)
            bs_ret_temp = pd.DataFrame(index=cal_date)
            bs_ret_temp[bs] = self.quote_rate[bs].loc[pd.to_datetime(cal_date)]
            ts_ret_temp = pd.DataFrame(index=cal_date)
            ts_ret_temp[ts] = self.quote_rate[ts].loc[pd.to_datetime(cal_date)]
            # .loc avoids the chained-assignment the original relied on.
            if factor_direction == 1:
                pairwise_correlation.loc[date, 'short'] = self.cal_pairwise_c(bs_ret_temp, residual)
                pairwise_correlation.loc[date, 'long'] = self.cal_pairwise_c(ts_ret_temp, residual)
            else:
                pairwise_correlation.loc[date, 'long'] = self.cal_pairwise_c(bs_ret_temp, residual)
                pairwise_correlation.loc[date, 'short'] = self.cal_pairwise_c(ts_ret_temp, residual)
        # BUG FIX: string comparison with `is` replaced by `==`.
        if method == 'long':
            return self.rolling_zscore(pairwise_correlation['long']).dropna()
        return self.rolling_zscore(pairwise_correlation['long'] + pairwise_correlation['short'])

    def cal_factor_crowding_integrated_score(self, factor_name,
                                             factor_direction=-1,
                                             vol_window=63,
                                             corr_window=63,
                                             reversal_window=252,
                                             cal_vol_method='long_short',
                                             cal_correlation_method='long_short',
                                             residual=True):
        """Compute the four crowding signals for *factor_name*, dump each to Excel
        and return their equal-weight mean as `crowding_integrated_score`."""
        self.factor_ret_data = self.get_factor_return_data(factor_name)
        self.factor_position_data = self.get_factor_position_data(factor_name)
        crowding_data = pd.DataFrame(index=self.trade_days,
                                     columns=['valuation_spread',
                                              'factor_reversal',
                                              'pairwise_correlation',
                                              'factor_volatility'])
        valuation_spread = self.cal_factor_valuation_spread_new(factor_direction=factor_direction)
        valuation_spread.to_excel(os.path.join(zbc_check_data_dir, factor_name + '_valuation_spread.xlsx'))
        factor_reversal = self.cal_factor_reversal(reversal_window, reversal_window, factor_direction=factor_direction)
        factor_reversal.to_excel(os.path.join(zbc_check_data_dir, factor_name + '_factor_reversal.xlsx'))
        factor_volatility = self.cal_factor_volatility(vol_window, cal_vol_method, factor_direction=factor_direction)
        factor_volatility.to_excel(os.path.join(zbc_check_data_dir, factor_name + '_factor_volatility.xlsx'))
        pairwise_correlation = self.cal_pairwise_correlation(corr_window, cal_correlation_method,
                                                             factor_direction, residual)
        pairwise_correlation.to_excel(os.path.join(zbc_check_data_dir, factor_name + '_pairwise_correlation.xlsx'))
        # Assign raw values: the RHS has a DatetimeIndex while crowding_data
        # rows are date strings, so label alignment would produce all-NaN.
        crowding_data.loc[valuation_spread.index.strftime('%Y-%m-%d'), 'valuation_spread'] = \
            valuation_spread['valuation_spread'].values
        # .reindex preserves the removed .ix semantics (missing labels -> NaN).
        crowding_data['factor_reversal'] = factor_reversal.reindex(crowding_data.index)
        crowding_data['factor_volatility'] = factor_volatility.reindex(crowding_data.index)
        crowding_data['pairwise_correlation'] = pairwise_correlation.reindex(crowding_data.index)
        crowding_data = crowding_data.dropna()
        crowding_data['crowding_integrated_score'] = crowding_data.mean(axis=1)
        crowding_data.to_excel(os.path.join(zbc_check_data_dir, factor_name + '_factor_crowding_integrated_score.xlsx'))
        del self.factor_position_data
        return crowding_data

