# -*- coding: UTF-8 -*-
##########################################
## 文件名称：
## 功能说明：
## 
## 
## 创建人： 
## 创建时间：
## 修改：
##
##
##########################################

import math
import os
import pickle
import pandas as pd
import numpy as np
import datetime

import matplotlib.pyplot as plt

import warnings
from pandas.tseries.offsets import Day, MonthEnd
import statsmodels.api as sm

from WindPy import w
w.start()

warnings.filterwarnings('ignore')
# token='fddbd858e99a7a26ee9cc0db1083baf53baa1a979e225426ba5bfdad5c9e2e83'
# uqer.Client(token=token)
# source_data_dir = r'E:\项目文档\智能投顾\杰磊工作交接\fof\data_fetcher\\'
source_data_dir = r"C:\Users\\fengshimeng3\Documents\财富管理-智能投顾\Data\\"

# weekly_data.to_pickle(r'D:\work\fof\存档\weekly_data.pkl')
# daily_data.to_pickle(r'D:\work\fof\存档\daily_data.pkl')
# file = open(r'D:\work\fof\存档\trade_dates_weekly.pkl', 'wb')
# pickle.dump(trade_dates_weekly, file)
# file.close()
# file = open(r'D:\work\fof\存档\trade_dates_daily.pkl', 'wb')
# pickle.dump(trade_dates_daily, file)
# file.close()


# daily_data = pd.read_pickle(r'D:\work\fof\存档\daily_data.pkl')
# daily_data.index = pd.to_datetime(daily_data.index)
# weekly_data = pd.read_pickle(r'D:\work\fof\存档\weekly_data.pkl')
# weekly_data.index = pd.to_datetime(weekly_data.index)
# file = open(r'D:\work\fof\存档\trade_dates_weekly.pkl', 'rb')
# trade_dates_weekly = pd.to_datetime(pickle.load(file))
# file.close()
# file = open(r'D:\work\fof\存档\trade_dates_daily.pkl', 'rb')
# trade_dates_daily = pd.to_datetime(pickle.load(file))
# file.close()
# factor_data = pd.DataFrame([])
# 基钦周期计算代码(孙骥文版本)
# def func1(raw_data, start_time="2009-01-01", end_time="2018-03-17"):
#     # raw_data = w.wsd("000001.SH", "close", "1993-01-01", end_time, "Period=W")
#     # raw_data = pd.DataFrame(raw_data.Data[0], index=pd.to_datetime(raw_data.Times), columns=['上证综指'])

#     raw_data['上证综指ln'] = np.log(raw_data['上证综指'])
#     raw_data['上证综指ln_shift'] = raw_data['上证综指ln'].shift(50)
#     raw_data['上证综指同比序列'] = raw_data['上证综指ln'] - raw_data['上证综指ln_shift']
#     raw_data = raw_data.dropna()
#     raw_data.index = pd.to_datetime(raw_data.index)
#     ind1 = raw_data.index[raw_data.index >= pd.to_datetime(start_time)]
#     ind1 = ind1[ind1 <= pd.to_datetime(end_time)]

#     factor = pd.DataFrame()

#     for i in range(len(ind1)):
#         ind2 = raw_data.index[raw_data.index <= ind1[i]][-800:]
#         data = pd.DataFrame()
#         data['上证指数'] = raw_data.loc[ind2.tolist(), '上证综指同比序列']

#         N = len(data)
#         n0 = int(N / 2)
#         f0 = 50 / N
#         f_limit1 = 0.25  # 4年（48个月）
#         f_limit2 = 0.4  # 2.5年（30个月）
#         n_limit1 = int(f_limit1 / f0)
#         n_limit2 = int(f_limit2 / f0)

#         temp_fft = np.fft.fftshift(np.fft.fft(data['上证指数']))
#         temp_fre = abs(temp_fft)
#         n = np.argwhere(
#             temp_fre[(n0 + n_limit1):(n0 + n_limit2 + 1)] == temp_fre[(n0 + n_limit1):(n0 + n_limit2 + 1)].max())
#         n1 = n0 + n_limit1 + n[0][0]
#         n2 = n0 * 2 - n1

#         fre_new = np.array([0] * N).astype('complex')
#         fre_new[n1] = temp_fft[n1]
#         fre_new[n2] = temp_fft[n2]
#         temp_result = np.fft.ifft(np.fft.ifftshift(fre_new))
#         result = pd.DataFrame(temp_result.real, index=data.index, columns=['周期因子'])
#         factor.loc[ind1[i], 'value'] = result.iloc[-1, 0]

#     return factor

# # 基钦周期计算代码(更新版本)
# def func_cycle(raw_data,end_time="2018-11-02"):
#     # raw_data = w.wsd("000001.SH", "close", "1993-01-01", end_time, "Period=W")
#     # raw_data = pd.DataFrame(raw_data.Data[0], index=pd.to_datetime(raw_data.Times), columns=['上证综指'])
#     lag_fft = 52
#     cycle = 900  # 900周数据作为样本内频率训练集
#     floor = 4.5*lag_fft
#     roof = 2.5*lag_fft
#     length = 1200
#     raw_data['上证综指ln'] = np.log(raw_data['上证综指'])
#     raw_data['上证综指ln_shift'] = raw_data['上证综指ln'].shift(lag_fft)
#     raw_data['上证综指同比序列'] = raw_data['上证综指ln'] - raw_data['上证综指ln_shift']

#     ret_ttm12 = raw_data['上证综指同比序列'].dropna().values
#     fft_origin = np.fft.fft(ret_ttm12[:length])  # [:length]
#     fft_re = np.real(fft_origin)
#     #plot(fft_re, type="l")
#     fft_abs = abs(np.fft.fft(ret_ttm12[:length]))  # [:length]
#     #plot(fft_abs, type="l")
#     # fft_abs_shift <- ifftshift(fft_abs)
#     # plot(fft_abs_shift, type = "l")
#     fft_cycle = fft_abs[round(len(ret_ttm12) / floor)-1:round(len(ret_ttm12) / roof)].argmax() + round(
#         len(ret_ttm12) / floor) -1 # 找到最高振幅的对应周期数，横坐标为对应的周期数-1 因为python下标从0开始
#     cycle_week = len(ret_ttm12) / (fft_cycle+1)

#     kitchin_Cycle=np.zeros_like(ret_ttm12)
#     hold = np.zeros(cycle,dtype=complex)
#     #hold = np.zeros(len(ret_ttm12), dtype=complex)
#     hold[fft_cycle] = fft_origin[fft_cycle]
#     hold[cycle - fft_cycle + 2] = fft_origin[cycle - fft_cycle + 2]
#     kitchin_Cycle[:cycle] = np.real(np.fft.ifft(hold))
#     # import matplotlib.pyplot as plt
#     # plt.plot(kitchin_Cycle)
#     # plt.show()
#     cycle_week = [cycle_week]
#     for i in range(len(ret_ttm12) - cycle):
        
#         fft_origin_temp = np.fft.fft(ret_ttm12[(i+1): (i+1+cycle)])
#         fft_abs_temp = abs(fft_origin_temp)
#         fft_cycle_temp = fft_abs_temp[round(cycle / floor)-1:round(cycle / roof)].argmax() + round(cycle / floor) - 1
#         cycle_week_temp = cycle / (fft_cycle_temp )
#         cycle_week.append(cycle_week_temp)
#         hold_temp =np.zeros(cycle, dtype=complex)
#         hold_temp[fft_cycle_temp] = fft_origin_temp[fft_cycle_temp]
#         hold_temp[cycle - fft_cycle_temp + 2] = fft_origin[cycle - fft_cycle_temp + 2]
#         kitchin_Cycle_temp = np.real(np.fft.ifft(hold_temp))
#         kitchin_Cycle[i + cycle] = kitchin_Cycle_temp[cycle-1]
        

#     kitchin_Cycle = pd.Series(kitchin_Cycle)
#     kitchin_Cycle.index = raw_data['上证综指同比序列'].dropna().index
#     # plt.plot(kitchin_Cycle)
#     # plt.show()
#     return kitchin_Cycle

# 计算基钦周期，采用bp滤波做法
def func_cycle_bpMethod(var):
    """Estimate the Kitchin cycle of a weekly price series with a band-pass filter.

    var: pandas.Series/DataFrame of weekly index levels.
    Returns the cyclical component of the 52-week (year-over-year) simple
    return, extracted by the Christiano-Fitzgerald filter keeping periods
    between 130 and 234 weeks (~2.5 to 4.5 years).
    """
    # Year-over-year simple return: current level vs. the level 52 weeks earlier.
    yoy_return = (var / var.shift(52) - 1).dropna()
    cycles, _trend = sm.tsa.filters.cffilter(yoy_return, low=130, high=234)
    return cycles

def hurstexp(x, d=50):
    """Corrected empirical Hurst exponent via R/S analysis.

    Port of R's ``hurstexp`` (pracma), "corrected empirical Hurst exponent"
    (Anis-Lloyd/Peters correction), chosen to match the R examples.

    x: pandas.Series or np.array — the time series.
    d: minimum box (sub-interval) size; floored and clamped to >= 2.
    Returns the estimated Hurst exponent (float).

    Note: the original also computed a simple log(R/S)/log(n) estimate
    (``rssimple``) whose result was never used; that dead code is removed.
    """

    def rscalc(z, n):
        # Mean rescaled range R/S over the len(z)//n sub-intervals of length n.
        m = len(z) // n
        y = z[:m * n].reshape(m, n)
        means = y.mean(axis=1)
        stds = y.std(axis=1, ddof=1)
        cum = (y - means[:, None]).cumsum(axis=1)  # demeaned cumulative sums
        ranges = cum.max(axis=1) - cum.min(axis=1)  # range of each sub-interval
        return np.mean(ranges / stds)

    def divisors(n, n0=2):
        # All divisors of n between n0 and n/2 inclusive (candidate box sizes).
        cand = np.arange(n0, n // 2 + 1)
        return cand[n % cand == 0]

    x = np.asarray(x, dtype=float)
    d = max(2, int(np.floor(d)))
    N = len(x)
    # An even length is required; pad with the mean of the last two points.
    if N % 2 != 0:
        x = np.append(x, (x[-2] + x[-1]) / 2)
        N += 1

    # Pick the truncation length OptN in [floor(0.99*N), N] that has the most
    # admissible divisors >= d, then trim x to that length.
    N0 = min(int(np.floor(0.99 * N)), N - 1)
    OptN, box_sizes = N0, divisors(N0, d)
    for cand in range(N0 + 1, N + 1):
        dv = divisors(cand, d)
        if len(dv) > len(box_sizes):
            OptN, box_sizes = cand, dv
    x = x[:OptN]

    # Empirical R/S per box size.
    RSe = np.array([rscalc(x, n) for n in box_sizes])

    # Expected R/S for an i.i.d. series (Anis-Lloyd), per box size.
    ERS = np.empty(len(box_sizes))
    for i, n in enumerate(box_sizes):
        K = np.arange(n - 1, 0, -1) / np.arange(1, n)
        ratio = (n - 0.5) / n * np.sum(np.sqrt(K))
        if n > 340:
            # Stirling-type approximation avoids gamma overflow for large n.
            ERS[i] = ratio / np.sqrt(0.5 * np.pi * n)
        else:
            ERS[i] = (math.gamma(0.5 * (n - 1)) * ratio) / (math.gamma(0.5 * n) * np.sqrt(np.pi))

    # Corrected exponent: slope of log10(R/S - E[R/S] + sqrt(pi*n/2)) vs log10(n).
    ERSal = np.sqrt(0.5 * np.pi * box_sizes)
    Hal, _intercept = np.polyfit(np.log10(box_sizes), np.log10(RSe - ERS + ERSal), 1)
    return Hal


class DataEngine():
    def __init__(self,start_date,end_date,data_dir=r'D:\work\data\\',classify='Z_HS300'):
        """Load (or download) the daily/weekly source data for [start_date, end_date].

        start_date / end_date: date strings; used both in the Wind queries and
            in the names of the local pickle cache files.
        data_dir: legacy local data directory (the pickle cache actually lives
            in the module-level ``source_data_dir``).
        classify: factor group label — not used in this constructor.
        """
        self.start_date = start_date
        self.end_date = end_date
        self.data_dir = data_dir
        self.trade_date = pd.DataFrame([])
        self.daily_data = pd.DataFrame([])
        self.weekly_data = pd.DataFrame([])
        self.factor_data = pd.DataFrame([])  # final factor table, unified to weekly frequency
        self.factor_data_bond = pd.DataFrame([])  # factor values for the CSI Aggregate Bond index
        # Use the local pickle cache when present; otherwise download from Wind.
        if os.path.exists(source_data_dir + 'daily_data_{}_{}.pkl'.format(self.start_date,self.end_date)):
            self.daily_data = pd.read_pickle(source_data_dir + 'daily_data_{}_{}.pkl'.format(self.start_date,self.end_date))
        else:
            self.get_daily_data()
        if os.path.exists(source_data_dir + 'weekly_data_{}_{}.pkl'.format(self.start_date,self.end_date)):
            self.weekly_data = pd.read_pickle(source_data_dir + 'weekly_data_{}_{}.pkl'.format(self.start_date,self.end_date))
        else:
            self.get_weekly_data()
        if os.path.exists(source_data_dir + 'trade_date_{}.pkl'.format(self.end_date)):
            self.trade_date = pd.read_pickle(source_data_dir + 'trade_date_{}.pkl'.format(self.end_date))
        else:
            # NOTE(review): a missing trade-date cache triggers a full
            # get_daily_data() download (which also writes the trade-date
            # pickle) even if daily_data was already loaded above — confirm
            # the duplicate download is intended.
            self.get_daily_data()

    def get_daily_data(self):
        """Download daily source data from Wind (wsd + edb) and cache as pickles.

        Populates self.daily_data (CSI300 fields + close prices of several
        assets + EDB macro series) and self.trade_date, writing both to pickle
        files under ``source_data_dir`` keyed by the date range.
        """

        error,wsd_data_hs300 = w.wsd("000300.SH", "close,dividendyield2,pe_ttm,free_float_shares,volume,AMT,tech_upnum,tech_downnum", self.start_date, self.end_date, "PriceAdj=F",usedf=True)# missing values left as NaN
        # The CSI300 close column doubles as the daily trading calendar.
        trade_dates_daily = wsd_data_hs300.iloc[:, 0]
        trade_dates_daily.index = pd.DatetimeIndex(trade_dates_daily.index)
        self.trade_date = trade_dates_daily
        trade_dates_daily.to_pickle(source_data_dir + 'trade_date_{}.pkl'.format(self.end_date))
        error, wsd_data_close_fill = w.wsd("IF.CFE,SHIBOR3M.IR,USDCNY.EX,891800.MI,990100.MI,AUFI.WI,B00.IPE,H11001.CSI,CCFI.WI,\
                                 USDX.FX,000001.SH", "close", self.start_date, self.end_date, "Fill=Previous;PriceAdj=F",usedf=True)
        #test = w.wsd("USDCNY.EX", "close", start_date, end_date, "Days=Alldays",usedf=True)

        error, edb_data = w.edb("M0060433,M0061614,M0062054,M0329503,M0329507,M0329498,M0043410,M0043413,M5207875,G8324465,G8324478,\
                                 M0020199,M0020276,M0075987,M0061606,M0061610,M5462036,M5528822,M5515072,M0010049,\
                                 M0009870,M5200002,M5207849,M0085865,M0000185,M0007501,M0044695,M0009969,M0009940,\
                                 M1001786,S0059744,S0059749,M0329500,M0013030,M5558005,M0010290, M0001428,S0059741\
                                 M0001707, M0043411, M0061518, M0010096, M0001385, M0000612, M0000545, M0009978, M0000273,\
                                 M0017142, S0059741, M0001693, S0029657, M0000613, M0001227, M0000610, M0000604, M0044700,\
                                 M5207655, M0000554, M0009973, M0009976, M0009974, M0009975, M0010131, M0001381, M0001383,\
                                 S0059776, M0041746, M0041747, M0041753, M0041739, M0001386, M0041742, M0041741, M0041740,\
                                 M0020192, M0020255, M0024559, M0010339, M0010359, M1004263, S0059771, M0041664, G0000886",self.start_date, self.end_date, usedf=True)# missing filled with previous value

        # smaller query kept for debugging:
        #error, edb_data = w.edb("M0060433", self.start_date, self.end_date, usedf=True) # 缺失填前值

        # # Uqer up/down-count data (disabled)
        # uqer_data = uqer.DataAPI.MktIdxFactorDateRangeGet(secID=u"",ticker=u"000300",beginDate=self.start_date.replace('-',''),endDate=self.end_date.replace('-',''),
        #                                                   field=u"tradeDate,UpPct,DownPct",pandas="1")
        # uqer_data.set_index('tradeDate',inplace=True)
        # uqer_data.index = [pd.to_datetime(x).date() for x in uqer_data.index]
        #error, test = w.edb("M0060433,M0061614,M0062054",start_date, end_date, usedf=True)#缺失填空值
        #error, test = w.edb("M0329503", start_date, end_date, usedf=True)  # 缺失填空值
        #edb_data_nofill = edb_data_nofill.fillna(0)
        daily_data = pd.concat([wsd_data_hs300,wsd_data_close_fill,edb_data],axis=1)
        daily_data.index = pd.to_datetime(daily_data.index)
        self.daily_data = daily_data
        daily_data.to_pickle(source_data_dir + 'daily_data_{}_{}.pkl'.format(self.start_date,self.end_date))


    def get_weekly_data(self):
        """Download weekly pct-change of the 28 SW level-1 industry indexes
        from Wind, store in self.weekly_data and cache as a pickle."""
        error, wsd_data_pctchg = w.wsd("801010.SI,801020.SI,801030.SI,801040.SI,801050.SI,801080.SI,801110.SI,801120.SI,\
                                  801130.SI,801140.SI,801150.SI,801160.SI,801170.SI,801180.SI,801200.SI,\
                                  801210.SI,801230.SI,801710.SI,801720.SI,801730.SI,801740.SI,801750.SI,\
                                  801760.SI,801770.SI,801780.SI,801790.SI,801880.SI,801890.SI", "pct_chg", self.start_date,
                                       self.end_date, "Period=W", usedf=True)
        weekly_data = wsd_data_pctchg  # 53 weeks in total (per original note)
        weekly_data.index = pd.to_datetime(weekly_data.index)
        #trade_dates_weekly = weekly_data.index
        self.weekly_data = weekly_data
        weekly_data.to_pickle(source_data_dir + 'weekly_data_{}_{}.pkl'.format(self.start_date, self.end_date))
        # weekly_data = pd.merge(daily_data, edb_data, how='outer')
        # weekly_data['date'] = pd.to_datetime(weekly_data['date'])
        # weekly_data = weekly_data.sort_values(by='date', axis=0, ascending=True)
        # weekly_data.index = weekly_data['date']
        # weekly_data = weekly_data.fillna(method='pad')
        #
        # trade_dates['date'] = pd.to_datetime(trade_dates['date'])
        # trade_dates_use = trade_dates[(trade_dates['date'] >= start_date) & (trade_dates['date'] <= end_date)]
        # trade_dates_use.index = trade_dates_use['date']
        # trade_dates_use['if'] = 0
        # trade_dates_use['if'].iloc[1:] = list(
        #     map(lambda x, y: 1 if x >= y else 0, trade_dates_use['week_day'].iloc[:-1],
        #         trade_dates_use['week_day'].iloc[1:]))
        # weekly_data = pd.merge(weekly_data, trade_dates_use, how='outer')
        # weekly_data = weekly_data.sort_values(by='date', axis=0, ascending=True)
        # weekly_data.index = weekly_data['date']
        # weekly_data = weekly_data.fillna(method='pad')
        # weekly_data = weekly_data[weekly_data['if'] == 1]

    """计算因子部分   将日期统一为每周的周五 部分周六周日更新的数据其实计算在内"""
    def get_Z_HS300(self):
        self.factor_data['Z_HS300'] = self.daily_data['CLOSE'].resample('W-FRI').last().pct_change()

    def get_CAPITALIN(self):
        keep = ['M0062054']
        data = self.daily_data[keep]
        CAPITALIN = data['M0062054'].resample('W-SUN').sum().resample('W-FRI',label='left').last()
        self.factor_data['ZJ_CAPITALIN'] = CAPITALIN


    def get_HKCapitalin(self):
        keep = ['M0329503','M0329498','M0329507','M0329500']
        data = self.daily_data[keep]
        data =data.fillna(method='ffill').fillna(0)
        data1 = data.diff()#每天的净流入流出
        data1['net_in_hu'] = data1['M0329498'] - data1['M0329503']#沪市每日净流入
        data1['net_in_shen'] = data1['M0329500'] - data1['M0329507']#深市每日净流入
        data1['net_in'] = data1['net_in_hu'] + data1['net_in_shen']#加总
        data1['net_in_m5'] = data1['net_in'].rolling(window=5,min_periods=1).mean()#净流入5日滚动平均
        HKCapitalin = data1['net_in_m5'].resample('W-FRI').last()
        self.factor_data['ZJ_HKCapitalin'] = HKCapitalin

    def get_NEWFUND(self):
        keep = ['M0060433']
        data = self.daily_data[keep]
        data =data.fillna(0)
        NEWFUND = data.cumsum().resample('W-FRI').last().diff()
        self.factor_data['ZJ_NEWFUND'] = NEWFUND

    def get_SAVE_MKTVL(self):
        keep=[  'M0043410',
                'M0043413',
                'M5207875',
                'G8324465',
                'G8324478'
                ]
        data = self.daily_data[keep]
        """此时时间戳为每月末   数据在次月的18日更新 开始神奇的时间转化！"""
        #x=data[['M0043410','M0043413']].to_period()#变为PeriodIndex 方便添加月份 废弃了 直接使用DatetimeIndex吧
        #x = data[['M0043410', 'M0043413']].dropna().shift(freq=DateOffset(months=1,days=18))#不完美 不一定是下个月末再位移18天
        x = data[['M0043410', 'M0043413']].resample('M').last().fillna(0) #startdate如果有缺失值填0
        x.index = x.index + MonthEnd(1) + Day(18)#时间戳位移
        y = x.asof(data.index) #找到按源index索引 最近一个不是NaN的值
        data[['M0043410', 'M0043413']] = y
        data =data.fillna(method='ffill').fillna(0)
        data['save_all'] = data[['M0043410','M0043413','M5207875']].sum(axis=1)
        data['market_value'] = data[['G8324465','G8324478']].sum(axis=1)
        data['SAVE_MKTVL'] = data['save_all']/data['market_value']
        #
        data.sort_index(inplace=True)
        MKTVL = data['SAVE_MKTVL'].resample('W-FRI').last().diff().fillna(0)
        self.factor_data['ZJ_SAVE_MKTVL'] = MKTVL; ## 近3个数值有变动。
        a = 0
        
    def get_Leverage_AMT(self):
        keep = ['M0020199',
                'M0020276',
                'M0075987'
        ]
        data = self.daily_data[keep]
        data = data.sort_index()
        data =data.fillna(method='ffill').fillna(0)
        data['amount'] = data[['M0020199','M0020276']].sum(axis=1)
        data['borrow'] = data['M0075987']
        data['borrow_m5'] = data['borrow'].rolling(window=5,min_periods=1).mean()
        data['amount_m5'] = data['amount'].rolling(window=5, min_periods=1).mean()
        data['pct'] = data['borrow_m5']/data['amount_m5']
        data.to_csv("d://get_Leverage_AMT.csv")
        Leverage_AMT = data['pct'].resample('W-FRI').last().diff()
        # Leverage_AMT.to_csv("d://get_Leverage_AMT2.csv")

        self.factor_data['QX_Leverage_AMT'] = Leverage_AMT

    def get_Basis(self):
        keep = ['IF.CFE','CLOSE']
        data = self.daily_data[keep]
        data['CLOSE'] =data['CLOSE'].fillna(method='ffill')
        data['Basis'] = data['CLOSE']/data['IF.CFE'] - 1
        Basis = data['Basis'].resample('W-FRI').last().fillna(0)
        # 【2020-01-20 王新博
        # 根据对比文件，增加了取相反数的处理】
        # self.factor_data['Basis'] = Basis
        self.factor_data['QX_Basis'] = - Basis  

    def get_HS300AMT(self):
        keep = ['AMT']
        data = self.daily_data[keep]
        HS300AMT = data['AMT'].resample('W-FRI').mean().fillna(method='ffill').map(np.log).diff()
        self.factor_data['QX_HS300AMT'] = HS300AMT

    def get_Leverage(self):
        keep = ['M0061606',
                'M0061610',
                'G8324465',
                'G8324478'
                ]
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        data['融资余额'] = data['M0061606'] + data['M0061610']
        data['两市总市值'] = (data['G8324465'] + data['G8324478'])*10000#统一单位
        temp = pd.DataFrame()
        temp['融资余额'] = data['融资余额'].resample('W-FRI').last()
        temp['两市总市值_周平均'] =  data['两市总市值'].resample('W-FRI').mean()#周平均市值
        temp['比例'] = temp['融资余额']/temp['两市总市值_周平均']
        # temp.to_csv("d://get_Leverage.csv")

        Leverage = temp['比例'].diff()
        self.factor_data['QX_Leverage'] = Leverage

    def get_NEWINVSTOR(self):
        keep = ['M5558005']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').dropna()
        """先前停止更新的沪深合计：新增股票账户数 数据 可能需要填充"""
        if self.start_date<= '2015-04-30':
            try:
                error, account_add1 = w.edb("M0013030", '2005/7/8', '2015-04-10', usedf=True)  # 缺失填空值
                account_add1 = account_add1/10000
                account_add1.index = pd.to_datetime(account_add1.index)
                account_add1.columns=['M5558005']
                #辰轩根据回归求出的三个点数据
                account_add2 = pd.DataFrame({'M5558005':[116.71,152.13,103.03]},index= pd.to_datetime(['2015-04-17','2015-04-24','2015-04-30']))
                #拼接
                account_add = account_add1.append(account_add2)
            except:
                print('读取wind新增股票账户数数据失败 代码M0013030')
            #根据M0013030拼接数据  之后数据用M5558005
            data = account_add.append(data)
        """本来是下周二公布上周五的数据 把数据放在本周周五 需要shift"""
        NEWINVSTOR = data['M5558005'].resample('W-FRI').last().shift().map(np.log).diff().fillna(method='ffill').fillna(0)
        self.factor_data['QX_NEWINVSTOR'] = NEWINVSTOR

    def get_DVDM10YTRY(self):
        keep = ['DIVIDENDYIELD2', 'S0059749']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        data['DVDM10YTRY'] = data['DIVIDENDYIELD2'] - data['S0059749']
        DVDM10YTRY  = data['DVDM10YTRY'].resample('W-FRI').last().diff()
        self.factor_data['GZ_DVDM10YTRY'] = DVDM10YTRY

    def get_PELevel(self):
        keep = ['PE_TTM']
        data = self.daily_data[keep]
        data = data.sort_index()
        data = data.fillna(method='ffill').fillna(0)
        data['PELevel'] = 0
        for i in range(len(data)):
            data['PELevel'].iloc[i] = (data['PELevel'].iloc[:i + 1].rank(ascending=True, method='max').iloc[
                                              i] - 1) / (i + 1)
        PELevel = data['PELevel'].resample('W-FRI').last().diff()
        self.factor_data['GZ_PELevel'] = PELevel


    def get_SHIBOR3M(self):
        keep = ['SHIBOR3M.IR']
        data = self.daily_data[keep]
        data = data.dropna()
        data['SHIBOR3M'] = data['SHIBOR3M.IR'].rolling(window=5,min_periods=1, center=False).mean()
        SHIBOR3M = data['SHIBOR3M'].resample('W-FRI').last().fillna(method='ffill').diff()
        self.factor_data['HG_SHIBOR3M'] = SHIBOR3M

    def get_TEDSPRD(self):
        """TED-style spread: 3M SHIBOR minus the S0059741 yield series.

        NOTE(review): unlike every other factor, this assigns the *daily*
        spread series to the weekly factor table without resampling or
        differencing. If factor_data already holds a weekly index, pandas
        alignment silently keeps only the matching (Friday) rows — confirm
        this is intended.
        """
        keep = ['SHIBOR3M.IR','S0059741']
        data = self.daily_data[keep]
        data = data.sort_index()
        data = data.fillna(method='ffill').fillna(0)
        data['TEDSPRD'] = data['SHIBOR3M.IR'] - data['S0059741']
        # data.to_csv("d://get_TEDSPRD.csv")
        self.factor_data['HG_TEDSPRD'] = data['TEDSPRD']
        
    def get_AUFI(self):
        keep = ['AUFI.WI']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        self.factor_data['HG_AUFI'] = data['AUFI.WI'].resample('W-FRI').last().map(np.log).diff()

    def get_ICEOIL(self):
        keep = ['B00.IPE']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        #将周日的数据放到周五的时间戳
        self.factor_data['HW_ICEOIL'] = data['B00.IPE'].resample('W-SUN').last().resample('W-FRI',label='left').last().map(np.log).diff()

    def get_CBOND(self):
        keep = ['H11001.CSI']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        self.factor_data['HG_CBOND'] = data['H11001.CSI'].resample('W-FRI').last().map(np.log).diff()


    def get_COMMEDITY(self):#
        keep = ['CCFI.WI']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        self.factor_data['HG_COMMEDITY'] = data['CCFI.WI'].resample('W-FRI').last().map(np.log).diff()

    def get_USDCNY(self):
        keep = ['USDCNY.EX']
        USDCNY = self.daily_data[keep]
        USDCNY = USDCNY.dropna()#删除非交易日
        USDCNY['USDCNY'] = USDCNY['USDCNY.EX'].rolling(window=5, min_periods=1, center=False).mean()
        self.factor_data['HW_USDCNY'] = USDCNY['USDCNY.EX'].resample('W-FRI').last().fillna(method='ffill').map(np.log).diff()

    def get_OMO(self):
        """Weekly open-market-operation liquidity measure (HG_OMO).

        M0061614 is treated as a flow (summed within each week); the other
        three series are treated as cumulative stocks (weekly level
        differenced into a flow). Sunday-stamped weekly bins are relabelled
        onto the preceding Friday, and all four flows are summed.
        Column semantics are assumed from the Wind EDB codes — TODO confirm.
        """
        keep = ['M0061614',
                'M5462036',
                'M5528822',
                'M5515072'
                ]
        data = self.daily_data[keep]
        data['M0061614'] = data['M0061614'].fillna(0)
        data = data.fillna(method='ffill')
        OMO_origin = data.resample('W-SUN').last().resample('W-FRI',label='left').last()
        #
        OMO = OMO_origin.diff()  # cumulative stock -> weekly flow
        OMO['M0061614'] = data['M0061614'].resample('W-SUN').sum().resample('W-FRI',label='left').last()  # already a flow: sum within the week
        OMO_all = OMO.sum(axis=1).fillna(0)
        self.factor_data['HG_OMO'] = OMO_all

    def get_HOTMONEY(self):
        """Hot-money proxy: FX-reserve change minus the investment balance
        minus the bank FX settlement balance, monthly, differenced, then
        mapped onto Friday-ending weeks.

        NOTE(review): resample('M', label='left') stamps each month's value on
        the previous month end (2020-01-20 adjustment) and +25 days then
        approximates the publication date of M5207849 — confirm the timing.
        """
        keep = ['M0010049',
                'M0009870',
                'M5200002',
                'M5207849'
                ]
        data = self.daily_data[keep]
        #data = data.fillna(method='ffill')
        # [2020-01-20 Wang Xinbo: timestamps moved to the previous month per
        # the comparison file]
        # temp = data.resample('M').last().fillna(0)
        temp = data.resample('M', label='left').last().fillna(0)
        '''M5207849公布最晚 每月25日公布数据 因子月更新 '''
        temp['外储变动'] = temp['M0010049'].diff()  # change in FX reserves
        temp['投资差额'] = temp['M0009870'] - temp['M5200002']  # investment balance
        temp['银行结售汇差额'] = temp['M5207849']  # bank FX settlement balance
        temp['HOTMONEY'] = temp['外储变动'] - temp['投资差额'] - temp['银行结售汇差额']
        temp['HOTMONEY_diff'] = temp['HOTMONEY'].diff()
        temp.index = temp.index + Day(25)
        #self.factor_data['HOTMONEY'] = temp['HOTMONEY_diff'].resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
        self.factor_data['HW_HOTMONEY'] = temp['HOTMONEY_diff'].resample('W-THU').last().fillna(0).resample('W-FRI',label='left').last()

    def get_TRUST(self):
        keep = ['M0085865'
                ]
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        print(data.tail(20))
        temp = data.resample('M').last()
        print("Monthly")
        print(temp.tail(20))

        # TRUST = temp['M0085865'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
        # #TRUST收益率数据可能更新不及时 最新一期可能会是0 我们需要将0替换成最新的可获得数据
        # TRUST = TRUST.replace(0,np.nan).fillna(method='ffill')
        # 【2020-01-20 王新博
        # 根据对比文件，将时间戳调整为下周】
        # TRUST = temp['M0085865'].diff().resample('W-THU').last().fillna(0).resample('W-FRI', label='left').last()
        TRUST = temp['M0085865'].diff().resample('W-THU').last().fillna(0).resample('W-FRI').last()
        # temp.to_csv("d://Trust.csv")
        self.factor_data['HG_TRUST'] = TRUST

    def get_XCHANGE(self):
        """Month-over-month log change of FX reserves (M0010049), mapped onto Fridays."""
        keep = ['M0010049']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill').fillna(0)
        # [2020-01-20 Wang Xinbo: timestamps moved back one month per the
        # comparison file]
        # temp = data.resample('M').last()
        temp = data.resample('M', label='left').last()
        """每月7日公布上月数据 节假日顺延 如果周末+周中公布经商议还是放在上周的周五"""
        # (Data for a month are published around the 7th of the next month,
        # postponed over holidays; by convention the value is placed on the
        # previous week's Friday.)
        temp.index = temp.index + Day(7)
        #XCHANGE = temp['M0010049'].map(np.log).diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
        XCHANGE = temp['M0010049'].map(np.log).diff().resample('W-THU').last().fillna(0).resample('W-FRI', label='left').last()
        self.factor_data['HW_XChange'] = XCHANGE
        
    def get_ASS_LBT(self):
        keep = ['M0007501', 'M0044695']
        data = self.daily_data[keep]
        """每月月末更新上一个月数据 需要月频shift"""
        data = data.resample('M').last().shift()
        data['ASS_LBT'] = data['M0044695'] / data['M0007501']
        # ASS_LBT = data['ASS_LBT'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
        # ASS_LBT = ASS_LBT.replace(0,np.nan).fillna(method='ffill')
        ASS_LBT = data['ASS_LBT'].diff().resample('W-THU').last().resample('W-FRI', label='left').last().fillna(0)
        self.factor_data['HG_ASS_LBT'] = ASS_LBT

    def get_SAVE_LOAN(self):
        keep = ['M0009969',
                'M0009940'
                ]
        data = self.daily_data[keep]
        #data = data.fillna(method='ffill')
        data = data.resample('M').last()
        """10到15日公布上月数据新闻 这里取13日"""
        data.index = data.index + Day(13)
        data['SAVE_LOAN'] = data['M0009969'] / data['M0009940']
        # SAVE_LOAN = data['SAVE_LOAN'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
        # SAVE_LOAN = SAVE_LOAN.replace(0,np.nan).fillna(method='ffill')
        SAVE_LOAN= data['SAVE_LOAN'].diff().resample('W-THU').last().resample('W-FRI',label='left').last().fillna(0)
        self.factor_data['HG_SAVE_LOAN'] = SAVE_LOAN

    def get_IRS1Y(self):
        keep = ['M1001786', 'S0059744']
        data = self.daily_data[keep]
        IRS1Y = data.fillna(method='ffill')
        IRS1Y['IRS1Y'] = IRS1Y['M1001786'] - IRS1Y['S0059744']
        self.factor_data['HW_IRS1Y'] = IRS1Y['IRS1Y'].resample('W-SUN').last().resample('W-FRI',label='left').last().diff()

    def get_USDCNY_USDX_Spread(self):
        keep = ['M0000185','USDX.FX']
        data = self.daily_data[keep]
        data = data.fillna(method='ffill')
        ret = data.resample('W-SUN').last().resample('W-FRI',label='left').last().applymap(np.log).diff()
        ret['USDCNY_USDX_Spread'] = ret['M0000185'] - ret['USDX.FX']
        self.factor_data['HW_USDCNYMUSDX'] = ret['USDCNY_USDX_Spread']

    def get_EPS_VL(self):
        keep = ['PE_TTM','AMT']
        data = self.daily_data[keep]
        data = data.dropna()#删除非交易日
        data['EPS_TTM'] = 1/data['PE_TTM']
        data['EPS_VL'] = data['EPS_TTM']/data['AMT']
        self.factor_data['QX_EPS_VL'] = data['EPS_VL'].resample('W-FRI').last().fillna(method='ffill')

    def get_upmdown(self):
        keep = ['TECH_UPNUM', 'TECH_DOWNNUM']
        data = self.daily_data[keep]
        data = np.round(data.dropna()).shift(1)#删除非交易日
        # 【uqer为不带%的百分比数据，故分母除以100
        # wind为股指成分涨跌家数，除以沪深300股票数量300
        # 2020-01-20 wangxinbo revised】
        # temp = data.rolling(window=5,min_periods=1).mean() / 100
        # data.to_csv("d://get_upmdown2.csv")
        temp = data.rolling(window=5,min_periods=1).mean() / 300
        temp['upmdown'] = temp['TECH_UPNUM'] - temp['TECH_DOWNNUM']
        print(temp)
        # temp.to_csv("d://get_upmdown.csv")
        temp2 = temp['upmdown'].resample('W-FRI').last().fillna(method='ffill')
        self.factor_data['QX_upmdown'] = temp2
        print(temp2)
        a = 0

    def get_Turnover(self):
        keep = ['FREE_FLOAT_SHARES','VOLUME']
        data = self.daily_data[keep]
        data = data.dropna()#删除非交易日
        tmp = data.rolling(window=20,min_periods=1).mean()
        tmp['turnover'] = tmp['VOLUME']/tmp['FREE_FLOAT_SHARES']
        tmp['turnover_level'] = 0
        for i in range(len(data)):
            tmp['turnover_level'].iloc[i] = (tmp['turnover'].iloc[:i + 1].rank(ascending=True, method='max').iloc[
                                              i] - 1) / (i + 1)
        self.factor_data['QX_Turnover'] = tmp['turnover_level'].resample('W-FRI').last().fillna(method='ffill')

    def get_MSCI_EM(self):
        keep = ['891800.MI']
        data = self.daily_data[keep]
        MSCI_EM = data.fillna(method='ffill').fillna(0)
        self.factor_data['HW_MSCI_EM'] = MSCI_EM['891800.MI'].resample('W-SUN').last().resample('W-FRI',label='left').last().map(np.log).diff()

    def get_MSCI_DM(self):
        keep = ['990100.MI']
        data = self.daily_data[keep]
        MSCI_DM = data.fillna(method='ffill').fillna(0)
        self.factor_data['HW_MSCI_DM'] = MSCI_DM['990100.MI'].resample('W-SUN').last().resample('W-FRI',label='left').last().map(np.log).diff()

    def get_Industries_Rotation(self):
        """Cross-sectional standard deviation across 28 SW industry indices, weekly."""
        industry_codes = ["801010.SI", "801020.SI", "801030.SI", "801040.SI", "801050.SI", "801080.SI", "801110.SI", "801120.SI",
                          "801130.SI", "801140.SI", "801150.SI", "801160.SI", "801170.SI", "801180.SI", "801200.SI", "801210.SI",
                          "801230.SI", "801710.SI", "801720.SI", "801730.SI", "801740.SI", "801750.SI", "801760.SI", "801770.SI",
                          "801780.SI", "801790.SI", "801880.SI", "801890.SI"]
        panel = self.weekly_data[industry_codes]
        # Dispersion across industries within each week.
        dispersion = panel.std(axis=1)
        self.factor_data['QX_Industries_Rotation'] = dispersion.resample('W-FRI').last().fillna(0)

    """该因子单独获取了数据，因用到历史所有指数的数据 """
    def get_cycle(self):
        if os.path.exists(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date)):
            raw_data = pd.read_pickle(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date))
        else:
            error,raw_data = w.wsd("000001.SH", "close", "1993-01-01", self.end_date, "Period=W",usedf=True)
            raw_data.columns=['上证综指']
            if len(raw_data) == 0:
                raise Exception('因子cycle获取数据失败！请检查')
            raw_data.index = pd.to_datetime(raw_data.index)
            raw_data.to_pickle(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date))
        res = func_cycle_bpMethod(raw_data)
        self.factor_data['HG_cycle'] = res.resample('W-FRI').last().fillna(method='ffill')


    """该因子单独获取了数据，因用到历史所有指数的数据"""
    def get_hurst(self):
        if os.path.exists(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date)):
            raw_data = pd.read_pickle(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date))
        else:
            error,raw_data = w.wsd("000300.SH", "close", "2002-01-01", self.end_date, "Period=W",usedf=True)
            if len(raw_data)==0:
                raise Exception('因子hurst获取数据失败！请检查')
            raw_data.index = pd.to_datetime(raw_data.index)
            raw_data.to_pickle(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date))
        lgIndexHurst = raw_data['CLOSE'].map(np.log).diff().dropna()
        gap = 51
        dd = 2
        lgHP = lgIndexHurst.rolling(window=gap).apply(hurstexp,kwargs={'d':dd})
        # 【2020-01-20 王新博
        # 根据对比文件，发现是原来函数的-0.5没有做运算，修改了函数（已解决）】
        # self.factor_data['hurst'] = lgHP.resample('W-FRI').last().fillna(0)-0.5
        factor = lgHP.resample('W-FRI').last().fillna(0)-0.5
        self.factor_data['QX_Hurst'] = factor
        
    def get_Rd(self):
        """Weekly change of indicator M0010290 (HG_Rd), labelled on the prior Friday."""
        filled = self.daily_data[['M0010290']].fillna(method='ffill').dropna()
        delta = filled['M0010290'].diff()
        self.factor_data['HG_Rd'] = delta.resample('W-FRI', label='left').last()

    def get_Consumption(self):
        """Monthly consumption change (M0001428) pushed onto a weekly W-FRI grid.

        Jan/Feb figures are released together, so the missing month-end is
        forward-filled; each month's value is published around the 14th-15th
        of the following month, hence the 14-day timestamp shift.
        """
        monthly = self.daily_data[['M0001428']].resample('M').last().fillna(method='ffill')
        monthly.index = monthly.index + Day(14)
        weekly = monthly['M0001428'].diff().resample('W-THU').last().fillna(0)
        self.factor_data['Consumption'] = weekly.resample('W-FRI', label='left').last()
        
# ---------------------------------------------------------------------------
# 2020-01-14 王新博整理
        
    def get_H11001(self):
        """Monthly log-return of the CSI aggregate bond index (H11001.CSI)."""
        monthly = (self.daily_data[['H11001.CSI']]
                   .fillna(method='ffill').fillna(0)
                   .resample('M').last())
        log_ret = monthly.apply(np.log).diff()
        self.factor_data_bond['H11001'] = log_ret['H11001.CSI']
        
    def get_OMO_bond(self):
        """Month-end level of the open-market-operation series M0061614."""
        omo = self.daily_data[['M0061614']].fillna(0)
        self.factor_data_bond['OMO'] = omo.resample('M').last()
    
    def get_SLFMLFPSL(self):
        """Rate guidance: monthly flow of the combined SLF+MLF+PSL balances.

        The three inputs are outstanding stocks; per the 2019Q2 factor spec
        they are summed first and then differenced into a flow.
        """
        stock_codes = ['M5462036', 'M5528822', 'M5515072']
        monthly = self.daily_data[stock_codes].fillna(method='ffill').resample('M').last()
        flow = monthly.sum(axis=1).diff().fillna(0)
        self.factor_data_bond['SLFMLFPSL'] = flow
    
    def get_GOVT_leverage(self):
        """Estimated change rate of government leverage (M0001707 / M0043411)."""
        monthly = self.daily_data[['M0001707', 'M0043411']].fillna(method='ffill').resample('M').last()
        ratio = monthly['M0001707'] / monthly['M0043411']
        self.factor_data_bond['GOVT_leverage'] = ratio.apply(np.log).diff().fillna(0)
        
    def get_DIFLN_FXreserve(self):
        """Monthly log-change of FX reserves (M0010049)."""
        monthly = self.daily_data[['M0010049']].fillna(method='ffill').resample('M').last()
        self.factor_data_bond['DIFLN_FXreserve'] = monthly.apply(np.log).diff().fillna(0)
        
    def get_DIF_HOTMONEY(self):
        """Change of estimated hot money.

        Hot money = FX-reserve change - (M0009870 - M5200002) - M5207849,
        monthly; the factor is its month-over-month difference.
        """
        codes = ['M0010049', 'M0009870', 'M5200002', 'M5207849']
        monthly = self.daily_data[codes].resample('M').last().fillna(0)
        reserve_chg = monthly['M0010049'].diff()
        invest_balance = monthly['M0009870'] - monthly['M5200002']
        settlement = monthly['M5207849']
        hot_money = reserve_chg - invest_balance - settlement
        self.factor_data_bond['DIF_HOTMONEY'] = hot_money.diff().fillna(0)
    
    def get_Rd_bond(self):
        """Monthly change of indicator M0061518."""
        monthly = (self.daily_data[['M0061518']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['Rd'] = monthly['M0061518'].diff()
    
    def get_DIF_Rdd(self):
        """Monthly change of indicator M0010096."""
        monthly = (self.daily_data[['M0010096']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIF_Rdd'] = monthly['M0010096'].diff()
    
    def get_lqdtysurplus(self):
        """Excess liquidity: M2 growth minus CPI minus industrial value-added."""
        codes = ['M0001385', 'M0000612', 'M0000545']  # M2, CPI, industrial VA
        monthly = self.daily_data[codes].resample('M').last().fillna(method='ffill')
        surplus = monthly['M0001385'] - monthly['M0000612'] - monthly['M0000545']
        self.factor_data_bond['lqdtysurplus'] = surplus.fillna(0)
        
    def get_banksurplus(self):
        """Bank surplus proxy: 1 - loan share - real-economy growth proxy."""
        monthly = (self.daily_data[['M0009969', 'M0009978', 'M0000545', 'M0000273']]
                   .resample('M').last().fillna(method='ffill'))
        loan_share = monthly['M0009969'] / (monthly['M0009969'] + monthly['M0009978'])
        real_growth = (monthly['M0000545'] + monthly['M0000273']) / 100
        self.factor_data_bond['banksurplus'] = (1 - loan_share - real_growth).fillna(0)
        
    def get_save_loan(self):
        """Month-over-month change of the loan share loans/(loans+deposits)."""
        monthly = (self.daily_data[['M0009969', 'M0009978']]
                   .resample('M').last().fillna(method='ffill'))
        loan_share = monthly['M0009969'] / (monthly['M0009969'] + monthly['M0009978'])
        self.factor_data_bond['save_loan'] = loan_share.diff().fillna(0)
        
    def get_TEDSPRD_bond(self):
        """TED-style spread: monthly mean 3M SHIBOR minus 3M treasury yield."""
        rates = self.daily_data[['M0017142', 'S0059741']].fillna(method='ffill').fillna(0)
        shibor_3m = rates['M0017142'].resample('M').mean()
        t_bill_3m = rates['S0059741'].resample('M').mean()
        self.factor_data_bond['TEDSPRD'] = shibor_3m - t_bill_3m
    
    def get_DIFLN_invstableCPT(self):
        """Log-change of investable capital (M0009978 + M0001693), monthly."""
        monthly = (self.daily_data[['M0009978', 'M0001693']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        total = monthly['M0009978'] + monthly['M0001693']
        self.factor_data_bond['DIFLN_invstableCPT'] = total.apply(np.log).diff()
        
    def get_DIF_indAV(self):
        """Monthly change of industrial value-added indicator M0000545."""
        monthly = (self.daily_data[['M0000545']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIF_indAV'] = monthly.diff()
    
    def get_DIF_realestate(self):
        """Monthly change of real-estate indicator S0029657."""
        monthly = (self.daily_data[['S0029657']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIF_realestate'] = monthly.diff()
        
    def get_accfixinvs(self):
        """Monthly change of cumulative fixed-asset investment (M0000273)."""
        monthly = (self.daily_data[['M0000273']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['accfixinvs'] = monthly.diff()
        
    def get_CAPITALINCPImPPI(self):
        """Spread between M0000613 and M0001227 (CPI minus PPI proxy), monthly."""
        monthly = (self.daily_data[['M0000613', 'M0001227']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['CPImPPI'] = monthly['M0000613'] - monthly['M0001227']
        
    def get_DIFLN_USDCHN(self):
        """Monthly log-change of the USD/CNY series M0000185."""
        monthly = (self.daily_data[['M0000185']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIFLN_USDCHN'] = monthly.apply(np.log).diff()
        
    def get_EXIM(self):
        """Export/import ratio M0000610 / M0000604, monthly."""
        monthly = (self.daily_data[['M0000610', 'M0000604']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['EXIM'] = monthly['M0000610'] / monthly['M0000604']
    
    def get_DIF_Liability(self):
        """Monthly change of liability indicator M0044700."""
        monthly = (self.daily_data[['M0044700']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIF_Liability'] = monthly.diff()
        
    def get_DIF_Profmargin(self):
        """Monthly change of profit-margin indicator M5207655."""
        monthly = (self.daily_data[['M5207655']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIF_Profmargin'] = monthly.diff()
        
    def get_DIF_ATT(self):
        """Change of (MoM revenue growth %) divided by M0007501, monthly."""
        monthly = (self.daily_data[['M0000554', 'M0007501']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        # Month-over-month growth (%) of main operating revenue M0000554.
        growth_pct = (monthly['M0000554'] / monthly['M0000554'].shift(1) - 1) * 100
        self.factor_data_bond['DIF_ATT'] = (growth_pct / monthly['M0007501']).diff()
    
    def get_newloan(self):
        """Monthly change of new loans (M0009973)."""
        monthly = (self.daily_data[['M0009973']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['newloan'] = monthly.diff()
    
    def get_newloan_resid_prop(self):
        """Share of resident loans in new loans: M0009976 / M0009973, monthly."""
        monthly = (self.daily_data[['M0009976', 'M0009973']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['newloan_resid_prop'] = monthly['M0009976'] / monthly['M0009973']
        
    def get_newloan_shortprop(self):
        """Share of short-term loans in new loans: M0009974 / M0009973, monthly."""
        monthly = (self.daily_data[['M0009974', 'M0009973']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['newloan_shortprop'] = monthly['M0009974'] / monthly['M0009973']
        
    def get_newloan_longprop(self):
        """Share of medium/long-term loans in new loans, lagged one month.

        [2020-01-20 Wang Xinbo: timestamps pushed one month forward to match
        the comparison file.]
        """
        monthly = (self.daily_data[['M0009975', 'M0009973']]
                   .resample('M').last()
                   .shift(1)
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['newloan_longprop'] = monthly['M0009975'] / monthly['M0009973']
        
    def get_DIFLN_multiplier(self):
        """Monthly log-change of the money multiplier series M0010131."""
        monthly = (self.daily_data[['M0010131']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['DIFLN_multiplier'] = monthly.apply(np.log).diff()
    
    # NOTE(review): the original author was unsure this alignment is correct.
    def get_M0(self):
        """Month-end level of currency in circulation M0 (M0001381)."""
        self.factor_data_bond['M0'] = (self.daily_data[['M0001381']]
                                       .resample('M').last()
                                       .fillna(method='ffill').fillna(0))
    
    def get_M2mM1(self):
        """M2 minus M1 (M0001385 - M0001383), monthly."""
        monthly = (self.daily_data[['M0001385', 'M0001383']]
                   .resample('M').last()
                   .fillna(method='ffill').fillna(0))
        self.factor_data_bond['M2mM1'] = monthly['M0001385'] - monthly['M0001383']
        
    def get_timespread(self):
        """Term spread: monthly mean 10Y treasury yield minus 1Y treasury yield."""
        yields = self.daily_data[['S0059749', 'S0059744']].fillna(method='ffill').fillna(0)
        govt_10y = yields['S0059749'].resample('M').mean()
        govt_1y = yields['S0059744'].resample('M').mean()
        self.factor_data_bond['timespread'] = govt_10y - govt_1y
    
    def get_creditspd10Y(self):
        """10Y credit spread: S0059776 minus S0059749, monthly means.

        Values before 2006-03 are reset to 0 (kept from the original logic).
        """
        yields = self.daily_data[['S0059776', 'S0059749']].fillna(method='ffill').fillna(0)
        corp_10y = yields['S0059776'].resample('M').mean()
        govt_10y = yields['S0059749'].resample('M').mean()
        spread = corp_10y - govt_10y
        spread.loc[spread.index < '2006-03-01'] = 0
        self.factor_data_bond['creditspd10Y'] = spread
    
    def get_Bondleverage(self):
        """Bond leverage: M0041746 / (M0041746 - M0041753), monthly.

        Before 2015 the M0041746 series is spliced from M0041747 * 1.05.
        NOTE(review): the original inline comment said "before 2005" while
        the code compares against '2015' -- the code's 2015 is kept here;
        confirm the intended cut-off.
        """
        keep = ['M0041746', 'M0041747', 'M0041753']
        data = self.daily_data[keep]
        data = data.resample('M').last().fillna(method='ffill').fillna(0)
        # BUGFIX: the original used chained indexing
        # data['M0041746'][mask] = ..., which may write to a temporary copy
        # (and raises under pandas copy-on-write); .loc splices reliably.
        early = data.index < '2015'
        data.loc[early, 'M0041746'] = data.loc[early, 'M0041747'] * 1.05
        factor = data['M0041746'] / (data['M0041746'] - data['M0041753'])
        self.factor_data_bond['Bondleverage'] = factor
    
    def get_MMleverage(self):
        """Money-market leverage proxy, monthly.

        Computed as (monthly mean of daily M0041739 + M0010096/100 * M0009940
        - M0001386) / (M0010096/100 * M0009940 - M0001386), where the last
        three series are taken at month end.  NOTE(review): the economic
        meaning of each Wind code is not visible here -- verify the formula
        against the factor specification before changing it.
        """
        keep = ['M0041739', 'M0010096', 'M0009940', 'M0001386']
        data = self.daily_data[keep].fillna(method='ffill').fillna(0)
        # One series is averaged over the month; the others are month-end levels.
        daily = data['M0041739'].resample('M').mean()
        monthly = data[['M0010096', 'M0009940', 'M0001386']].resample('M').last()
        factor = (daily + monthly['M0010096']/100 * monthly['M0009940'] \
                  - monthly['M0001386']) / (monthly['M0010096']/100 \
                  * monthly['M0009940'] - monthly['M0001386'])
        self.factor_data_bond['MMleverage'] = factor
	
    def get_Bond_Stock(self):
        """Ratio of monthly mean bond volume to monthly mean stock volume."""
        codes = ['M0041742', 'M0041741', 'M0041740', 'M0041739',
                 'M0020192', 'M0020255']
        monthly = self.daily_data[codes].fillna(method='ffill').fillna(0).resample('M').mean()
        bond_total = monthly[['M0041742', 'M0041741', 'M0041740', 'M0041739']].sum(axis=1)
        stock_total = monthly[['M0020192', 'M0020255']].sum(axis=1)
        self.factor_data_bond['Bond_Stock'] = bond_total / stock_total
    
    def get_Bond_Future(self):
        """Ratio of monthly mean bond volume to month-end futures volume (M0024559)."""
        data = self.daily_data[['M0041742', 'M0041741', 'M0041740', 'M0041739',
                                'M0024559']].fillna(method='ffill').fillna(0)
        bond_total = data[['M0041742', 'M0041741', 'M0041740', 'M0041739']].resample('M').mean().sum(axis=1)
        future_total = data['M0024559'].resample('M').last()
        self.factor_data_bond['Bond_Future'] = bond_total / future_total
        
    def get_DIFLN_MKT_STKBD(self):
        """Log-change of bond custody value over total stock market value.

        Before 2015 the M0041746 series is spliced from M0041747 * 1.05
        (same splice as get_Bondleverage); stock market value is the sum of
        M0010339 and M0010359.
        """
        keep = ['M0041746', 'M0041747', 'M0010339', 'M0010359']
        data = self.daily_data[keep]
        data = data.resample('M').last().fillna(method='ffill').fillna(0)
        # BUGFIX: the original chained assignment data['M0041746'][mask] = ...
        # may silently write to a temporary copy (and raises under pandas
        # copy-on-write); .loc performs the splice reliably.
        early = data.index < '2015'
        data.loc[early, 'M0041746'] = data.loc[early, 'M0041747'] * 1.05
        stock_mv = data[['M0010339', 'M0010359']].sum(axis=1)
        factor = (data['M0041746'] / stock_mv).apply(np.log).diff()
        self.factor_data_bond['DIFLN_MKT_STKBD'] = factor
    
    def get_DIF_GKMargin(self):
        """Change of the margin of M1004263 over the 1Y treasury yield S0059744."""
        monthly = (self.daily_data[['M1004263', 'S0059744']]
                   .fillna(method='ffill').fillna(0)
                   .resample('M').mean())
        margin = monthly['M1004263'] / monthly['S0059744'] - 1
        self.factor_data_bond['DIF_GKMargin'] = margin.diff()
        
    def get_DIF_3AMargin(self):
        """Change of the margin of S0059771 over the 1Y treasury yield S0059744."""
        monthly = (self.daily_data[['S0059771', 'S0059744']]
                   .fillna(method='ffill').fillna(0)
                   .resample('M').mean())
        margin = monthly['S0059771'] / monthly['S0059744'] - 1
        self.factor_data_bond['DIF_3AMargin'] = margin.diff()
    
    def get_DIF_7repo(self):
        """Monthly change of the 7-day repo series M0041664."""
        monthly = (self.daily_data[['M0041664']]
                   .fillna(method='ffill').fillna(0)
                   .resample('M').last())
        self.factor_data_bond['DIF_7repo'] = monthly.diff()
	
    def get_7repoSD(self):
        """Month-end 22-day rolling standard deviation of M0041664."""
        series = self.daily_data[['M0041664']].fillna(method='ffill').fillna(0)
        rolling_sd = series.rolling(window=22, min_periods=1).std()
        self.factor_data_bond['7repoSD'] = rolling_sd.resample('M').last()
	
    def get_DIF_CHN_US_IRS1Y(self):
        """Change of the China-minus-US 1Y yield gap (S0059744 - G0000886)."""
        monthly = (self.daily_data[['S0059744', 'G0000886']]
                   .fillna(method='ffill').fillna(0)
                   .resample('M').mean())
        gap = monthly['S0059744'] - monthly['G0000886']
        self.factor_data_bond['DIF_CHN_US_IRS1Y'] = gap.diff()
    
    def get_cycle_bond(self):
        """Monthly Kitchin-cycle factor for the bond table.

        Copied from the HS300 get_cycle: same Wind fetch of the SSE Composite
        weekly close (cached per end_date as a pickle) and the same
        func_cycle_bpMethod computation, but resampled to month end.
        """
        # Copied from the HS300 cycle computation.
        if os.path.exists(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date)):
            # Cached raw data for this end_date -- shared with get_cycle.
            raw_data = pd.read_pickle(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date))
        else:
            error,raw_data = w.wsd("000001.SH", "close", "1993-01-01", self.end_date, "Period=W",usedf=True)
            raw_data.columns=['上证综指']
            if len(raw_data) == 0:
                raise Exception('因子cycle获取数据失败！请检查')
            raw_data.index = pd.to_datetime(raw_data.index)
            raw_data.to_pickle(source_data_dir + 'cycle_data_{}.pkl'.format(self.end_date))
        # res = func_cycle(raw_data,end_time=self.end_date)
        res = func_cycle_bpMethod(raw_data)
        self.factor_data_bond['cycle'] = res.resample('M').last().fillna(method='ffill')
        
    def get_hurst_bond(self):
        """Monthly Hurst-exponent factor for the bond table.

        Copied from the HS300 get_hurst (same cached Wind fetch and the same
        51-week rolling hurstexp), but resampled to month end and -- unlike
        the weekly version -- NOT centred by subtracting 0.5.
        """
        # Copied from the HS300 hurst computation.
        if os.path.exists(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date)):
            raw_data = pd.read_pickle(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date))
        else:
            error,raw_data = w.wsd("000300.SH", "close", "2002-01-01", self.end_date, "Period=W",usedf=True)
            if len(raw_data)==0:
                raise Exception('因子hurst获取数据失败！请检查')
            raw_data.index = pd.to_datetime(raw_data.index)
            raw_data.to_pickle(source_data_dir + 'hurst_data_{}.pkl'.format(self.end_date))
        # Weekly log returns of the index close.
        lgIndexHurst = raw_data['CLOSE'].map(np.log).diff().dropna()
        gap = 51  # rolling window length in weeks
        dd = 2    # subdivision parameter passed through to hurstexp
        lgHP = lgIndexHurst.rolling(window=gap).apply(hurstexp,kwargs={'d':dd})
        # [2020-01-20 Wang Xinbo: per the comparison file, the trailing -0.5
        # was removed for the bond version.]
        # self.factor_data_bond['hurst'] = lgHP.resample('M').last().fillna(0)-0.5
        self.factor_data_bond['hurst'] = lgHP.resample('M').last().fillna(0)
    
        
    def update_factor_data(self):
        """Build all weekly equity factors, then drop rows outside trading weeks.

        Returns
        -------
        pd.DataFrame
            The filtered weekly factor table (also kept on self.factor_data).
        """
        # Factors disabled over time are kept here, commented out, for history:
        # self.get_Z_HS300()
        # self.get_CAPITALIN()
        # self.get_HKCapitalin()
        # self.get_NEWFUND()
        # self.get_SAVE_MKTVL()
        # self.get_Leverage_AMT()
        # self.get_Basis()
        # self.get_HS300AMT()
        # self.get_Leverage()
        # # self.get_NEWINVSTOR()  disabled 2020-02-21
        # self.get_DVDM10YTRY()
        self.get_PELevel()
        self.get_SHIBOR3M()
        self.get_TEDSPRD()
        self.get_AUFI()
        self.get_ICEOIL()
        self.get_CBOND()
        self.get_COMMEDITY()
        self.get_USDCNY()
        self.get_OMO()
        self.get_HOTMONEY()
        self.get_TRUST()
        self.get_XCHANGE()
        self.get_ASS_LBT()
        self.get_SAVE_LOAN()
        # self.get_IRS1Y()
        self.get_USDCNY_USDX_Spread()
        self.get_EPS_VL()
        self.get_upmdown()
        self.get_Turnover()
        self.get_MSCI_EM()
        self.get_MSCI_DM()
        self.get_Industries_Rotation()
        self.get_cycle()
        self.get_hurst()
        # self.get_Rd()
        # self.get_Consumption()  disabled 2020-02-21

        # Keep only rows whose year*100 + week appears among the trading dates.
        check = set(pd.DatetimeIndex(self.trade_date.index).year * 100 + pd.DatetimeIndex(self.trade_date.index).week)
        check.add(201501)  # this week was wrongly removed upstream; re-add it

        # BUGFIX: the old loop did `for i in range(567): ... drop(inplace) ...;
        # i = i - 1`.  Rebinding the loop variable has no effect in Python, so
        # after each inplace drop the following row was skipped, and only the
        # first 567 rows were ever examined.  A boolean mask filters every row
        # correctly in one pass.
        keep_rows = [ts.year * 100 + ts.week in check for ts in self.factor_data.index]
        self.factor_data = self.factor_data[keep_rows]

        return self.factor_data

    def update_factor_data_bond(self):
        """Run every monthly bond-factor builder (in order) and return the table."""
        builder_names = (
            'get_H11001', 'get_OMO_bond', 'get_SLFMLFPSL', 'get_GOVT_leverage',
            'get_DIFLN_FXreserve', 'get_DIF_HOTMONEY', 'get_Rd_bond',
            'get_DIF_Rdd', 'get_lqdtysurplus', 'get_banksurplus',
            'get_save_loan', 'get_TEDSPRD_bond', 'get_DIFLN_invstableCPT',
            'get_DIF_indAV', 'get_DIF_realestate', 'get_accfixinvs',
            'get_CAPITALINCPImPPI', 'get_DIFLN_USDCHN', 'get_EXIM',
            'get_DIF_Liability', 'get_DIF_Profmargin', 'get_DIF_ATT',
            'get_newloan', 'get_newloan_resid_prop', 'get_newloan_shortprop',
            'get_newloan_longprop', 'get_DIFLN_multiplier', 'get_M0',
            'get_M2mM1', 'get_timespread', 'get_creditspd10Y',
            'get_Bondleverage', 'get_MMleverage', 'get_Bond_Stock',
            'get_Bond_Future', 'get_DIFLN_MKT_STKBD', 'get_DIF_GKMargin',
            'get_DIF_3AMargin', 'get_DIF_7repo', 'get_7repoSD',
            'get_DIF_CHN_US_IRS1Y', 'get_cycle_bond', 'get_hurst_bond',
        )
        for builder in builder_names:
            getattr(self, builder)()
        return self.factor_data_bond


import dataframeUtils as dataframeUtils
##获取沪深300因子
def get_hs300_sd():
    """Export the manually-maintained HS300 factor table to a dated CSV.

    Reads factor_data_hs300_sd.csv via dataframeUtils, pivots it, keeps a
    fixed column order, and writes FCST_HS300_sd_<today>.csv to
    source_data_dir.
    """
    today = time.strftime("%Y-%m-%d", time.localtime())
    file_name = "factor_data_hs300_sd.csv"
    dataDF = dataframeUtils.getDataframe(file_name)

    tempDF = dataframeUtils.df_pivot(dataDF, pivotJson=None)
    cols = ["ZJ_ISPO","GZ_PEG","GZ_Erating","HW_IRS1Y","GZ_EROE","QX_FUNDPROP","GZ_EPB","QX_Investor_Confi","QX_Invstor_Confi_Global","QX_Invstor_Confi_GZ","HG_Rd"]
    tempDF['GZ_Erating'] = ''
    # BUGFIX: .ix was removed in pandas 1.0; reindex keeps .ix's lenient
    # label-based behaviour (missing columns become NaN instead of raising).
    tempDF = tempDF.reindex(columns=cols)

    # Save the result next to the other generated factor files.
    source_file = source_data_dir + r'FCST_HS300_sd_' + today + '.csv'
    tempDF.to_csv(source_file)
    print("source_file:", source_file)

# HS300 automated factor export
def get_hs300_zd(start_date,end_date):
    """Compute the automated HS300 weekly factors for [start_date, end_date]
    and write them to FCST_HS300_zd_<today>.csv under source_data_dir.
    """
    today = time.strftime("%Y-%m-%d", time.localtime())

    de = DataEngine(start_date, end_date)
    # Bond factors can be produced the same way when needed:
    # factor_data_bond = de.update_factor_data_bond()
    # factor_data_bond.to_csv(r'D:\work\fof\存档\factor_data_bond.csv')
    factor_data = de.update_factor_data()

    source_file = source_data_dir + r'FCST_HS300_zd_' + today + '.csv'
    factor_data.to_csv(source_file)
    print("source_file:", source_file)

import time

# Script entry point: regenerate the HS300 weekly factor CSV for a fixed
# date window.  NOTE(review): `import time` sits after the functions that use
# it -- legal, since the import runs before either function is called.
if __name__ == "__main__":
    start_date = "2020-1-1"
    end_date = "2020-08-09"
    
    get_hs300_zd(start_date,end_date)
    
#   get_hs300_sd();