import datetime
import Core.Gadget as Gadget
import Core.JDMySQLDB as JDMySQLDB
from pandas.tseries.offsets import Day, MonthEnd
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from WindPy import w
# --- This module shows how to build and store system-level (macro) factors ---

# Read factors from the database



def WSD_Quote(symbols, fields, datetime1, datetime2, param=""):
    """Fetch a daily time series for one indicator via Wind's WSD API.

    Args:
        symbols:   a Wind code or a list of Wind codes.
        fields:    the single indicator name to request (e.g. 'AMT').
        datetime1: start of the requested period.
        datetime2: end of the requested period; falls back to datetime1 when None.
        param:     extra parameter string passed straight through to w.wsd.

    Returns:
        DataFrame indexed by trade date (DatetimeIndex). For a single symbol
        the column is named after the field; for several symbols the columns
        are the symbols themselves (the old code crashed on a column-count
        mismatch in that case).
    """
    fields = [fields]  # w.wsd expects a list of indicators
    if not isinstance(symbols, list):
        symbols = [symbols]
    asset_number = len(symbols)
    # A missing end date means "just that one day".
    if datetime2 is None:
        datetime2 = datetime1
    data = w.wsd(symbols, fields, datetime1, datetime2, param)
    # Wind returns one series per symbol; transpose to one row per date.
    data_raw = [[data.Data[j][i] for j in range(asset_number)]
                for i in range(len(data.Times))]
    # BUGFIX: with several symbols the frame has asset_number columns, so a
    # single field name cannot label them all — label by symbol instead.
    columns = fields if asset_number == 1 else symbols
    df = pd.DataFrame(data=data_raw, index=data.Times, columns=columns)
    df.index = pd.to_datetime(df.index)
    return df

def LoadFactor(database,
               databaseName,  # database name
               tableName,     # table name
               factorName,    # factor name
               datetime1=None, datetime2=None):  # optional [start, end] window
    """Read one factor's time series from the database.

    Returns a DataFrame with a datetime 'date' column and the value column
    renamed to factorName.
    """
    # Renamed from `filter` to avoid shadowing the builtin.
    query = {"Name": factorName}
    # Constrain the date range only when both endpoints are supplied
    # (`is not None` instead of the old `!= None`).
    if datetime1 is not None and datetime2 is not None:
        query["Date"] = {">=": datetime1, "<=": datetime2}
    data = database.Find(databaseName, tableName, query, sort=[("Date", 1)])
    df = Gadget.DocumentsToDataFrame(data)
    df = df[["date", 'value']]
    df['date'] = pd.to_datetime(df['date'])
    df.rename(columns={"value": factorName}, inplace=True)
    return df




# 利用Wind EDB 函数下载宏观数据
# Download macro data through the Wind EDB API.
def EDB(name,  # column name used in the returned dataframe, e.g. CPI_YoY, GDP_YoY
        code,  # the Wind code, e.g. 'M0000612'
        datetime1, datetime2,  # requested period
        dateAsIndex=True):  # if True, the date (Report_Date) is the index
    """Fetch one EDB series and return it as a single-column DataFrame.

    When dateAsIndex is False, the dates move into a 'Report_Date' column and
    the frame keeps a plain RangeIndex.
    """
    if datetime1 is None:
        datetime1 = datetime.datetime(2000, 1, 1)  # default history start
    mydata = w.edb(code, datetime1, datetime2, 'Fill=Previous')
    mydata_df = pd.DataFrame(data=mydata.Data[0], columns=[name], index=mydata.Times)
    # BUGFIX: convert the date index BEFORE any reset_index. The old code
    # converted afterwards, which turned the RangeIndex produced by
    # reset_index into bogus 1970-epoch timestamps and left Report_Date
    # holding raw Wind date objects.
    mydata_df.index = pd.to_datetime(mydata_df.index)
    # Keep Date as an ordinary column instead of the index if requested.
    if not dateAsIndex:
        mydata_df.reset_index(inplace=True)
        mydata_df.rename(columns={"index": "Report_Date"}, inplace=True)
    return mydata_df


# 补充发布日
# lag_release_month: 相对于Date中的月份延后n个月
# release_day：发布日
# 比如 5月CPI的发布日是6月10日，两个参数分别是1和10
# Attach the publication date of each observation.
# lag_release_month: the release lags the Report_Date month by n months.
# release_day: day of month on which the figure is published.
# E.g. May CPI is published on June 10th -> lag_release_month=1, release_day=10.
def Fill_ReleaseDate(df, lag_release_month=1, release_day=1):
    """Add a 'Release_Date' column derived from the 'Report_Date' column.

    BUGFIX: the old December branch hard-coded "next January", silently
    ignoring lag_release_month, and any other month+lag > 12 crashed in
    datetime.datetime(). Proper modular month arithmetic handles every
    month/lag combination.
    """
    def _release_date(row, datetime_field="Report_Date"):
        report_date = row[datetime_field]
        # Total months since year 0, shifted by the publication lag.
        total_months = report_date.year * 12 + (report_date.month - 1) + lag_release_month
        year, month0 = divmod(total_months, 12)
        return datetime.datetime(year, month0 + 1, release_day)
    df["Release_Date"] = df.apply(lambda row: _release_date(row), axis=1)
    # print(df)


def Sava_Systmetic_Factor_To_Database(database, df, savedName, fieldName=""):
    """Persist a factor series into the `a_sys_factor` table."""
    Sava_Systmetic_To_Database(database, df, savedName, fieldName,
                               tableName="a_sys_factor")


def Sava_Systmetic_Raw_To_Database(database, df, savedName, fieldName=""):
    """Persist a raw macro series into the `a_sys_factor_raw` table."""
    Sava_Systmetic_To_Database(database, df, savedName, fieldName,
                               tableName="a_sys_factor_raw")


# 将Dataframe存入数据表
# Store a DataFrame into a database table, one document per row.
def Sava_Systmetic_To_Database(database,
                               df,  # DataFrame to store; must carry Report_Date and Release_Date columns
                               savedName,  # name to store under ("Name" field in the table)
                               fieldName="",   # which DataFrame column to store; defaults to savedName
                               tableName=""):  # target table, typically a_sys_factor_raw or a_sys_factor
    """Upsert every row of df into Factor.<tableName>.

    Missing values (NaN/NaT) are stored as None; everything else as float.
    """
    if fieldName == "":
        fieldName = savedName
    newDocuments = []
    for index, row in df.iterrows():
        value = row[fieldName]
        # BUGFIX: the old code compared str(value) against 'NaN', but
        # str(float('nan')) is 'nan', so NaN values slipped through and were
        # stored as float('nan'); pd.isna covers NaN, NaT and None.
        value = None if pd.isna(value) else float(value)
        reportDate = row["Report_Date"]
        releaseDate = row["Release_Date"]
        newDocuments.append({
            "Name": savedName,
            "Value": value,
            "Report_Date": reportDate,
            "Release_Date": releaseDate,
            "Date": releaseDate,
            "DateTime": releaseDate,
            "Key2": savedName + "_" + Gadget.ToDateString(reportDate),
        })
    database.Upsert_Many("Factor", tableName, [], newDocuments)
    #print("Saved Systematic Data", savedName, "#", len(newDocuments))






def Download_PriceLevel_fnotday(database, code, lag_release_month, release_day, datetime1, datetime2):
    """Download a non-daily macro series from Wind EDB, attach its release
    dates and store the raw series in the database."""
    # Pull the raw series (Report_Date kept as an ordinary column).
    raw = EDB(code, code, datetime1, datetime2, dateAsIndex=False)
    # Derive the publication date of each observation.
    Fill_ReleaseDate(raw, lag_release_month, release_day)
    # Persist the raw data.
    Sava_Systmetic_Raw_To_Database(database, raw, savedName=code, fieldName=code)


def Download_PriceLevel_fday(database, code, datetime1, datetime2):
    """Download a daily macro series from Wind EDB and store it as raw data.

    Daily series are published same-day, so Release_Date equals Report_Date.
    """
    raw = EDB(code, code, datetime1, datetime2, dateAsIndex=False)
    raw['Release_Date'] = raw['Report_Date']
    Sava_Systmetic_Raw_To_Database(database, raw, savedName=code, fieldName=code)




def Query_Data(database, code, datetime1, datetime2):
    """Fetch one raw systematic series from factor.a_sys_factor_raw.

    Roughly equivalent to:
        SELECT report_date, value FROM factor.a_sys_factor_raw
        WHERE name = code AND report_date BETWEEN datetime1 AND datetime2
        ORDER BY date
    Returns a single-column DataFrame (column = code) indexed by report_date.
    """
    conditions = [
        ["name", code],
        ["report_date", ">=", datetime1],
        ["report_date", "<=", datetime2],
    ]
    ordering = [("date", 1)]  # list form allows multi-column sorts; -1 = descending
    docs = database.Find(databaseName="factor",
                         tableName="a_sys_factor_raw",
                         filter=conditions,
                         sort=ordering)
    frame = Gadget.DocumentsToDataFrame(docs)[['report_date', 'value']]
    frame.set_index('report_date', inplace=True)
    frame.columns = [code]
    frame.index = pd.to_datetime(frame.index)
    return frame
    #for item in documents:
    #    print(item)



#############################################################################################################
# 开始计算因子
# Calc_CAPITALIN
def get_CAPITALIN(database, datetime1, datetime2):
    """Weekly capital-inflow factor built from series M0062054."""
    raw = Query_Data(database, 'M0062054', datetime1, datetime2)
    # Weekly sum, then move the stamp back to the previous Friday.
    weekly = raw['M0062054'].resample('W-FRI').sum().resample('W-FRI', label='left').last()
    factor = pd.DataFrame(weekly)
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=10)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='CAPITALIN', fieldName='M0062054')


def get_HKCapitalin(database, datetime1, datetime2):
    """Weekly net-inflow factor from the HK stock-connect links
    (5-day rolling mean of the total daily net inflow, Friday snapshot)."""
    sh_south = Query_Data(database, 'M0329503', datetime1, datetime2)  # SH connect, daily southbound flow
    sh_north = Query_Data(database, 'M0329498', datetime1, datetime2)  # SH connect, daily northbound flow
    sz_south = Query_Data(database, 'M0329507', datetime1, datetime2)
    sz_north = Query_Data(database, 'M0329500', datetime1, datetime2)
    merged = pd.concat([sh_south, sh_north, sz_south, sz_north], axis=1)
    merged = merged.fillna(method='ffill').fillna(0)
    flows = merged.diff()  # cumulative series -> daily net in/out flows
    flows['net_in_hu'] = flows['M0329498'] - flows['M0329503']    # Shanghai daily net inflow
    flows['net_in_shen'] = flows['M0329500'] - flows['M0329507']  # Shenzhen daily net inflow
    flows['net_in'] = flows['net_in_hu'] + flows['net_in_shen']   # total
    flows['net_in_m5'] = flows['net_in'].rolling(window=5, min_periods=1).mean()  # 5-day rolling mean
    factor = pd.DataFrame(flows['net_in_m5'].resample('W-FRI').last())
    factor.columns = ['HKCapitalin']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=10)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='HKCapitalin', fieldName='HKCapitalin')


# 迁移
def get_SAVE_MKTVL(database,datetime1,datetime2):

    df1 = Query_Data(database, 'M0043410', datetime1, datetime2)
    df2 = Query_Data(database, 'M0043413', datetime1, datetime2)
    df3 = Query_Data(database, 'M5207875', datetime1, datetime2)
    df4 = Query_Data(database, 'G8324465', datetime1, datetime2)
    df5 = Query_Data(database, 'G8324478', datetime1, datetime2)
    data = pd.concat([df1, df2, df3, df4,df5], axis=1)
    """此时时间戳为每月末   数据在次月的18日更新 开始神奇的时间转化！"""
    x = data[['M0043410', 'M0043413']].resample('M').last().fillna(0) #startdate如果有缺失值填0
    x.index = x.index + MonthEnd(1) + Day(18) #时间戳位移
    y = x.asof(data.index) #找到按源index索引 最近一个不是NaN的值
    data[['M0043410', 'M0043413']] = y
    data = data.fillna(method='ffill').fillna(0)
    data['save_all'] = data[['M0043410','M0043413','M5207875']].sum(axis=1)
    data['market_value'] = data[['G8324465','G8324478']].sum(axis=1)
    data['SAVE_MKTVL'] = data['save_all']/data['market_value']
    MKTVL = data['SAVE_MKTVL'].resample('W-FRI').last().diff().fillna(0)
    ## 近3个数值有变动。
    MKTVL = pd.DataFrame( MKTVL)
    MKTVL.columns = ['MKTVL']
    MKTVL['Report_Date'] =  MKTVL.index
    Fill_ReleaseDate( MKTVL, lag_release_month=1, release_day=1)
    #print( MKTVL)
    Sava_Systmetic_Factor_To_Database(database, MKTVL, savedName='MKTVL', fieldName='MKTVL')


def get_Leverage_AMT(database, datetime1, datetime2):
    """Weekly change of the share of margin buying within total turnover
    (both sides smoothed with a 5-day rolling mean)."""
    sh_amount = Query_Data(database, 'M0020199', datetime1, datetime2)   # Shanghai turnover amount
    sz_amount = Query_Data(database, 'M0020276', datetime1, datetime2)   # Shenzhen turnover amount
    margin_buy = Query_Data(database, 'M0075987', datetime1, datetime2)  # margin purchase amount
    merged = pd.concat([sh_amount, sz_amount, margin_buy], axis=1)
    merged = merged.fillna(method='ffill').fillna(0)
    merged['amount'] = merged[['M0020199', 'M0020276']].sum(axis=1)
    merged['borrow'] = merged['M0075987']
    merged['borrow_m5'] = merged['borrow'].rolling(window=5, min_periods=1).mean()
    merged['amount_m5'] = merged['amount'].rolling(window=5, min_periods=1).mean()
    merged['pct'] = merged['borrow_m5'] / merged['amount_m5']
    factor = pd.DataFrame(merged['pct'].resample('W-FRI').last().diff())
    factor.columns = ['Leverage_AMT']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='Leverage_AMT', fieldName='Leverage_AMT')


def get_HS300AMT(database, datetime1, datetime2):
    """Weekly log-difference of the CSI 300 average turnover amount."""
    quotes = WSD_Quote('000300.SH', 'AMT', datetime1, datetime2, param="")
    weekly = quotes['AMT'].resample('W-FRI').mean().fillna(method='ffill')
    factor = pd.DataFrame(weekly.map(np.log).diff())
    factor.columns = ['HS300AMT']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='HS300AMT', fieldName='HS300AMT')



def get_Leverage(database, datetime1, datetime2):
    """Weekly change of total margin balance over the two exchanges'
    combined market value."""
    margin_sh = Query_Data(database, 'M0061606', datetime1, datetime2)
    margin_sz = Query_Data(database, 'M0061610', datetime1, datetime2)
    cap_sh = Query_Data(database, 'G8324465', datetime1, datetime2)
    cap_sz = Query_Data(database, 'G8324478', datetime1, datetime2)
    merged = pd.concat([margin_sh, margin_sz, cap_sh, cap_sz], axis=1)
    merged = merged.fillna(method='ffill').fillna(0)
    merged['融资余额'] = merged['M0061606'] + merged['M0061610']
    merged['两市总市值'] = (merged['G8324465'] + merged['G8324478'])*10000  # unify units
    weekly = pd.DataFrame()
    weekly['融资余额'] = merged['融资余额'].resample('W-FRI').last()
    weekly['两市总市值_周平均'] = merged['两市总市值'].resample('W-FRI').mean()  # weekly average market value
    weekly['比例'] = weekly['融资余额'] / weekly['两市总市值_周平均']
    factor = pd.DataFrame(weekly['比例'].diff())
    factor.columns = ['Leverage']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='Leverage', fieldName='Leverage')



def get_SHIBOR3M(datetime1,datetime2):
     """Weekly change of the 5-day rolling mean of 3M SHIBOR.

     NOTE(review): unlike the other factor builders this function takes no
     `database` parameter — it relies on a module-level `database` created in
     the __main__ block; confirm before importing it elsewhere.
     """
     data = EDB('SHIBOR3M.IR', 'SHIBOR3M.IR', datetime1, datetime2, dateAsIndex=True)
     data = data.dropna()
     # 5-day rolling mean smooths the daily fixings.
     data['SHIBOR3M'] = data['SHIBOR3M.IR'].rolling(window=5, min_periods=1, center=False).mean()
     # Friday snapshot of the smoothed level, then week-over-week change.
     SHIBOR3M = data['SHIBOR3M'].resample('W-FRI').last().fillna(method='ffill').diff()
     SHIBOR3M = pd.DataFrame(SHIBOR3M)
     SHIBOR3M.columns = ['SHIBOR3M']
     SHIBOR3M['Report_Date'] = SHIBOR3M.index
     Fill_ReleaseDate(SHIBOR3M, lag_release_month=1, release_day=1)
     print(SHIBOR3M)
     Sava_Systmetic_Factor_To_Database(database, SHIBOR3M, savedName='SHIBOR3M', fieldName='SHIBOR3M')



def get_TEDSPRD(datetime1,datetime2):
     """TED-style spread: 3M SHIBOR minus series S0059741 (presumably a
     short-term government yield — TODO confirm the code).

     NOTE(review): there is no weekly resample here, so the stored series
     keeps the daily EDB frequency — the caller's comment flags exactly this
     ("daily, not Friday-stamped"). Also relies on the module-level
     `database` rather than a parameter.
     """
     data1 = EDB('SHIBOR3M.IR', 'SHIBOR3M.IR', datetime1, datetime2, dateAsIndex=True)
     data2 = EDB('S0059741','S0059741', datetime1, datetime2, dateAsIndex=True)
     data = pd.concat([data1,data2],axis=1)
     data = data.fillna(method='ffill').fillna(0)
     data['TEDSPRD'] = data['SHIBOR3M.IR'] - data['S0059741']
     data['Report_Date'] = data.index
     Fill_ReleaseDate(data, lag_release_month=1, release_day=1)
     print(data)
     Sava_Systmetic_Factor_To_Database(database, data, savedName='TEDSPRD', fieldName='TEDSPRD')



def get_AUFI(datetime1, datetime2):
    """Weekly log-return of the AUFI.WI index.

    Relies on the module-level `database` (no parameter, like its neighbours).
    """
    raw = EDB('AUFI.WI', 'AUFI.WI', datetime1, datetime2, dateAsIndex=True)
    raw = raw.fillna(method='ffill').fillna(0)
    weekly = raw['AUFI.WI'].resample('W-FRI').last().map(np.log).diff()
    factor = pd.DataFrame(weekly)
    # log(0) produces +/-inf; store those as missing instead.
    factor.replace([np.inf, -np.inf], np.nan, inplace=True)
    factor.columns = ['AUFI']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='AUFI', fieldName='AUFI')



def get_ICEOIL(datetime1, datetime2):
    """Weekly log-return of ICE Brent (B00.IPE).

    Relies on the module-level `database` (no parameter, like its neighbours).
    """
    raw = EDB('B00.IPE', 'B00.IPE', datetime1, datetime2, dateAsIndex=True)
    raw = raw.fillna(method='ffill').fillna(0)
    # Move Sunday-stamped observations onto the preceding Friday's timestamp.
    weekly = raw['B00.IPE'].resample('W-SUN').last().resample('W-FRI', label='left').last()
    factor = pd.DataFrame(weekly.map(np.log).diff())
    factor.columns = ['ICEOIL']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='ICEOIL', fieldName='ICEOIL')


def get_CBOND(datetime1, datetime2):
    """Weekly log-return of the CSI aggregate bond index (H11001.CSI).

    Relies on the module-level `database` (no parameter, like its neighbours).
    """
    raw = EDB('H11001.CSI', 'H11001.CSI', datetime1, datetime2, dateAsIndex=True)
    raw = raw.fillna(method='ffill').fillna(0)
    weekly = raw['H11001.CSI'].resample('W-FRI').last().map(np.log).diff()
    # log(0) produces +/-inf; store those as missing instead.
    weekly.replace([np.inf, -np.inf], np.nan, inplace=True)
    factor = pd.DataFrame(weekly)
    factor.columns = ['CBOND']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='CBOND', fieldName='CBOND')


def get_COMMEDITY(datetime1, datetime2):
    """Weekly log-return of the CCFI.WI index.

    Relies on the module-level `database` (no parameter, like its neighbours).
    """
    raw = EDB('CCFI.WI', 'CCFI.WI', datetime1, datetime2, dateAsIndex=True)
    raw = raw.fillna(method='ffill').fillna(0)
    factor = pd.DataFrame(raw['CCFI.WI'].resample('W-FRI').last().map(np.log).diff())
    factor.columns = ['COMMEDITY']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='COMMEDITY', fieldName='COMMEDITY')


def get_USDCNY(datetime1,datetime2):
    """Weekly log-return of the USDCNY exchange rate.

    NOTE(review): a 5-day rolling mean is computed into USDCNY['USDCNY'] but
    the next line resamples the RAW column 'USDCNY.EX' instead — the smoothed
    series is never used; confirm which series is intended.
    Also relies on the module-level `database` rather than a parameter.
    """
    USDCNY = EDB('USDCNY.EX', 'USDCNY.EX', datetime1, datetime2, dateAsIndex=True)
    USDCNY = USDCNY.dropna()  # drop non-trading days
    USDCNY['USDCNY'] = USDCNY['USDCNY.EX'].rolling(window=5, min_periods=1, center=False).mean()
    HW_USDCNY = USDCNY['USDCNY.EX'].resample('W-FRI').last().fillna(method='ffill').map(np.log).diff()
    HW_USDCNY = pd.DataFrame(HW_USDCNY)
    HW_USDCNY.columns = ['USDCNY']
    HW_USDCNY['Report_Date'] = HW_USDCNY.index
    Fill_ReleaseDate(HW_USDCNY, lag_release_month=1, release_day=1)
    print(HW_USDCNY)
    Sava_Systmetic_Factor_To_Database(database,HW_USDCNY, savedName='USDCNY', fieldName='USDCNY')

######################################################################

def get_TRUST(database,datetime1,datetime2):
    """Weekly factor from the month-over-month change of series M0085865.

    The commented variants below document the evolution of the timestamp
    handling; the live line resamples W-THU then W-FRI WITHOUT label='left',
    which stamps each value on the FOLLOWING week's Friday.
    """
    data = Query_Data(database, 'M0085865', datetime1, datetime2)
    data = data.fillna(method='ffill').fillna(0)
    temp = data.resample('M').last()
    # TRUST = temp['M0085865'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
    # # The TRUST series may update late, so the latest value could be 0; it used
    # # to be replaced with the most recent available data:
    # TRUST = TRUST.replace(0,np.nan).fillna(method='ffill')
    # [2020-01-20 Wang Xinbo:
    # per the comparison file, the timestamp was moved to the following week]
    # TRUST = temp['M0085865'].diff().resample('W-THU').last().fillna(0).resample('W-FRI', label='left').last()
    TRUST = temp['M0085865'].diff().resample('W-THU').last().fillna(0).resample('W-FRI').last()
    TRUST = pd.DataFrame(TRUST)
    TRUST.columns = ['TRUST']
    TRUST['Report_Date'] = TRUST.index
    Fill_ReleaseDate(TRUST, lag_release_month=1, release_day=1)
    print(TRUST)
    Sava_Systmetic_Factor_To_Database(database, TRUST, savedName='TRUST', fieldName='TRUST')


def get_XCHANGE(database,datetime1,datetime2):
    """Weekly factor from the monthly log-difference of series M0010049,
    re-stamped to its publication day (the 7th of the following month)."""
    data = Query_Data(database, 'M0010049', datetime1, datetime2)
    data = data.fillna(method='ffill').fillna(0)
    # [2020-01-20 Wang Xinbo:
    # per the comparison file, the timestamp was shifted one month earlier]
    # temp = data.resample('M').last()
    temp = data.resample('M', label='left').last()
    """每月7日公布上月数据 节假日顺延 如果周末+周中公布经商议还是放在上周的周五"""
    # Last month's data is published on the 7th of each month (deferred over
    # holidays); when published on a weekend/weekday it is, by agreement,
    # placed on the previous week's Friday.
    temp.index = temp.index + Day(7)
    #XCHANGE = temp['M0010049'].map(np.log).diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
    XCHANGE = temp['M0010049'].map(np.log).diff().resample('W-THU').last().fillna(0).resample('W-FRI', label='left').last()
    XCHANGE = pd.DataFrame(XCHANGE)
    XCHANGE.columns = ['XCHANGE']
    XCHANGE['Report_Date'] = XCHANGE.index
    Fill_ReleaseDate(XCHANGE, lag_release_month=1, release_day=1)
    print(XCHANGE)
    Sava_Systmetic_Factor_To_Database(database, XCHANGE, savedName='XCHANGE', fieldName='XCHANGE')


def get_ASS_LBT(database,datetime1,datetime2):
    """Weekly factor from the monthly change of the ratio M0044695 / M0007501."""
    df1 = Query_Data(database, 'M0007501', datetime1, datetime2)
    df2 = Query_Data(database, 'M0044695', datetime1, datetime2)
    data = pd.concat([df1, df2], axis=1)
    """每月月末更新上一个月数据 需要月频shift"""
    # The previous month's data is updated at month-end, hence the monthly
    # shift() to align each value with its report month.
    data = data.resample('M').last().shift()
    data['ASS_LBT'] = data['M0044695'] / data['M0007501']
    # ASS_LBT = data['ASS_LBT'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
    # ASS_LBT = ASS_LBT.replace(0,np.nan).fillna(method='ffill')
    ASS_LBT = data['ASS_LBT'].diff().resample('W-THU').last().resample('W-FRI', label='left').last().fillna(0)
    ASS_LBT = pd.DataFrame(ASS_LBT)
    ASS_LBT.columns = ['ASS_LBT']
    ASS_LBT['Report_Date'] = ASS_LBT.index
    Fill_ReleaseDate(ASS_LBT, lag_release_month=1, release_day=1)
    print(ASS_LBT)
    Sava_Systmetic_Factor_To_Database(database, ASS_LBT, savedName='ASS_LBT', fieldName='ASS_LBT')


def get_SAVE_LOAN(database,datetime1,datetime2):
    """Weekly factor from the monthly change of the ratio M0009969 / M0009940,
    re-stamped to an assumed publication day (the 13th of the next month)."""
    df1 = Query_Data(database, 'M0009969', datetime1, datetime2)
    df2 = Query_Data(database, 'M0009940', datetime1, datetime2)
    data = pd.concat([df1, df2], axis=1)
    #data = data.fillna(method='ffill')
    data = data.resample('M').last()
    """10到15日公布上月数据新闻 这里取13日"""
    # Last month's figures are reported between the 10th and the 15th; the
    # 13th is used as the representative publication day.
    data.index = data.index + Day(13)
    data['SAVE_LOAN'] = data['M0009969'] / data['M0009940']
    # SAVE_LOAN = data['SAVE_LOAN'].diff().resample('W-THU',fill_method='ffill').resample('W-FRI',label='left').last()
    # SAVE_LOAN = SAVE_LOAN.replace(0,np.nan).fillna(method='ffill')
    SAVE_LOAN = data['SAVE_LOAN'].diff().resample('W-THU').last().resample('W-FRI',label='left').last().fillna(0)
    SAVE_LOAN = pd.DataFrame(SAVE_LOAN)
    SAVE_LOAN.columns = ['SAVE_LOAN']
    SAVE_LOAN['Report_Date'] = SAVE_LOAN.index
    Fill_ReleaseDate(SAVE_LOAN, lag_release_month=1, release_day=1)
    print(SAVE_LOAN)
    Sava_Systmetic_Factor_To_Database(database, SAVE_LOAN, savedName='SAVE_LOAN', fieldName='SAVE_LOAN')


def get_IRS1Y(database, datetime1, datetime2):
    """Weekly change of the spread between series M1001786 and S0059744."""
    leg_a = Query_Data(database, 'M1001786', datetime1, datetime2)
    leg_b = Query_Data(database, 'S0059744', datetime1, datetime2)
    merged = pd.concat([leg_a, leg_b], axis=1).fillna(method='ffill')
    merged['IRS1Y'] = merged['M1001786'] - merged['S0059744']
    # Sunday-stamped weekly value moved back to the preceding Friday, then diffed.
    weekly = merged['IRS1Y'].resample('W-SUN').last().resample('W-FRI', label='left').last().diff()
    factor = pd.DataFrame(weekly)
    factor.columns = ['IRS1Y']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='IRS1Y', fieldName='IRS1Y')


def get_USDCNY_USDX_Spread(database, datetime1, datetime2):
    """Weekly spread between the USDCNY log-return (M0000185) and the USD
    index log-return (USDX.FX)."""
    usdcny = EDB('M0000185', 'M0000185', datetime1, datetime2, dateAsIndex=True)
    usdx = EDB('USDX.FX', 'USDX.FX', datetime1, datetime2, dateAsIndex=True)
    merged = pd.concat([usdcny, usdx], axis=1)
    merged = merged.fillna(method='ffill')
    # Sunday stamp shifted back to the preceding Friday, then log-returns.
    ret = merged.resample('W-SUN').last().resample('W-FRI', label='left').last().applymap(np.log).diff()
    ret['USDCNY_USDX_Spread'] = ret['M0000185'] - ret['USDX.FX']
    ret['Report_Date'] = ret.index
    Fill_ReleaseDate(ret, lag_release_month=1, release_day=1)
    print(ret)
    Sava_Systmetic_Factor_To_Database(database, ret, savedName='USDCNY_USDX_Spread', fieldName='USDCNY_USDX_Spread')



#######################################################


def get_MSCI_EM(database, datetime1, datetime2):
    """Weekly log-return of the MSCI Emerging Markets index (891800.MI)."""
    raw = EDB('891800.MI', '891800.MI', datetime1, datetime2, dateAsIndex=True)
    filled = raw.fillna(method='ffill').fillna(0)
    # Sunday stamp shifted back to the preceding Friday before taking log-returns.
    weekly = filled['891800.MI'].resample('W-SUN').last().resample('W-FRI', label='left').last()
    factor = pd.DataFrame(weekly.map(np.log).diff())
    factor.columns = ['MSCI_EM']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='MSCI_EM', fieldName='MSCI_EM')


def get_MSCI_DM(database, datetime1, datetime2):
    """Weekly log-return of the MSCI Developed Markets index (990100.MI)."""
    raw = EDB('990100.MI', '990100.MI', datetime1, datetime2, dateAsIndex=True)
    filled = raw.fillna(method='ffill').fillna(0)
    # Sunday stamp shifted back to the preceding Friday before taking log-returns.
    weekly = filled['990100.MI'].resample('W-SUN').last().resample('W-FRI', label='left').last()
    factor = pd.DataFrame(weekly.map(np.log).diff())
    factor.columns = ['MSCI_DM']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='MSCI_DM', fieldName='MSCI_DM')


def get_Turnover(database, datetime1, datetime2):
    """Weekly percentile level of the CSI 300 20-day average turnover ratio.

    turnover = 20-day mean volume / 20-day mean free float; turnover_level is
    the expanding rank percentile of each day's turnover within its own
    history (0 for the historical minimum, approaching 1 for the maximum).
    """
    shares = WSD_Quote('000300.SH', 'free_float_shares', datetime1, datetime2, param="")
    volume = WSD_Quote('000300.SH', 'volume', datetime1, datetime2, param="")
    data = pd.concat([shares, volume], axis=1)
    data = data.dropna()  # drop non-trading days
    tmp = data.rolling(window=20, min_periods=1).mean()
    tmp['turnover'] = tmp['volume'] / tmp['free_float_shares']
    # BUGFIX: the original assigned through chained indexing
    # (tmp['turnover_level'].iloc[i] = ...), which writes to a temporary and
    # is unreliable/deprecated in modern pandas; assign via .loc instead.
    tmp['turnover_level'] = 0.0
    for i in range(len(tmp)):
        history = tmp['turnover'].iloc[:i + 1]
        rank_max = history.rank(ascending=True, method='max').iloc[i]
        tmp.loc[tmp.index[i], 'turnover_level'] = (rank_max - 1) / (i + 1)
    factor = pd.DataFrame(tmp['turnover_level'].resample('W-FRI').last().fillna(method='ffill'))
    factor.columns = ['Turnover']
    factor['Report_Date'] = factor.index
    Fill_ReleaseDate(factor, lag_release_month=1, release_day=1)
    print(factor)
    Sava_Systmetic_Factor_To_Database(database, factor, savedName='Turnover', fieldName='Turnover')



def get_Industries_Rotation(database, datetime1, datetime2):
    """Weekly cross-sectional dispersion (std) of SW industry index returns.

    NOTE: the database save is intentionally disabled — the call site marks
    this factor's output as "data not right".
    """
    keep = ["801010.SI", "801020.SI", "801030.SI", "801040.SI", "801050.SI", "801080.SI", "801110.SI", "801120.SI",
                 "801130.SI", "801140.SI", "801150.SI", "801160.SI", "801170.SI", "801180.SI", "801200.SI", "801210.SI",
                 "801230.SI", "801710.SI", "801720.SI", "801730.SI", "801740.SI", "801750.SI", "801760.SI", "801770.SI",
                 "801780.SI", "801790.SI", "801880.SI", "801890.SI"]
    # PERF: collect the per-industry frames and concat once; the original
    # concatenated inside the loop, which is quadratic in the number of codes.
    frames = []
    for code in keep:
        datai = WSD_Quote(code, 'pct_chg', datetime1, datetime2, param="")
        datai.columns = [code]
        frames.append(datai)
    Industries_Rotation = pd.concat(frames, axis=1)
    # Cross-sectional standard deviation over the industries for each day.
    result = Industries_Rotation.std(axis=1)
    QX_Industries_Rotation = pd.DataFrame(result.resample('W-FRI').last().fillna(0))
    QX_Industries_Rotation.columns = ['Industries_Rotation']
    QX_Industries_Rotation['Report_Date'] = QX_Industries_Rotation.index
    Fill_ReleaseDate(QX_Industries_Rotation, lag_release_month=1, release_day=1)
    print(QX_Industries_Rotation)
    # Sava_Systmetic_Factor_To_Database(database, QX_Industries_Rotation, savedName='Industries_Rotation', fieldName='Industries_Rotation')



###########################################################

def get_Consumption(database,datetime1,datetime2):
    """Weekly factor from the monthly change of series M0001428.

    NOTE(review): the output column name ' Consumption' carries a leading
    space (so would the savedName if the save were re-enabled) — looks like a
    typo; confirm against the consumers of this table before fixing.
    """
    data = Query_Data(database, 'M0001428', datetime1, datetime2)
    # Jan and Feb data are published together, so the January month-end value
    # is missing and forward-filled. The previous month's figure is released
    # on the 14th-15th of each month; the 14th is used here.
    data = data.resample('M').last().fillna(method='ffill')
    data.index = data.index + Day(14)
    Consumption  = data['M0001428'].diff().resample('W-THU').last().fillna(0).resample('W-FRI',label='left').last()
    Consumption = pd.DataFrame( Consumption)
    Consumption.columns = [' Consumption']
    Consumption['Report_Date'] =  Consumption.index
    Fill_ReleaseDate( Consumption, lag_release_month=1, release_day=10)
    print( Consumption)
    # Sava_Systmetic_Factor_To_Database(database,  Consumption, savedName=' Consumption', fieldName=' Consumption')


if __name__ == '__main__':

    # SECURITY(review): database credentials are hard-coded in source; they
    # should be moved to a config file or environment variables.
    config =  \
    {
      "factor": { "Username": "m_Factor", "Password":"mOaruP"}
    }

    # Connect to the database.
    database = JDMySQLDB.JDMySQLDB("172.25.4.218", "3306", config=config)


    # Build and store factors from Wind data.
    # NOTE(review): datetime2 is frozen at 2020-05-01 — presumably a leftover
    # from a historical backfill; confirm before a production rerun.
    w.start()
    datetime1 = datetime.datetime(2000, 1, 1)
    datetime2 = datetime.datetime(2020, 5, 1)
    print('finish！')


    # Import the raw macro series; their codes live in df_index_raw.xlsx.
    #pathfilename = os.getcwd() + "\..\Config\df_index_raw.xlsx"  # 确保config文件位于正确的位置
    #df_index = pd.read_excel(pathfilename)


    # Store the daily and monthly raw macro data into the database.
    # for i in df_index[df_index['fre']!='日'].index:
    #    Download_PriceLevel_fnotday(database,df_index['index'][i],int(df_index['lag_month'][i]),int(df_index['lag_day'][i]),datetime1,datetime2)
    #
    # for i in df_index[df_index['fre']=='日'].index:
    #     Download_PriceLevel_fday(database,df_index['index'][i],datetime1,datetime2)
    # print('finished！')



    get_CAPITALIN(database,datetime1,datetime2)
    get_HKCapitalin(database,datetime1,datetime2)
    get_SAVE_MKTVL(database,datetime1,datetime2)
    get_Leverage_AMT(database, datetime1, datetime2)
    get_HS300AMT(database, datetime1, datetime2)
    get_Leverage(database, datetime1, datetime2)
    get_SHIBOR3M(datetime1, datetime2)
    get_TEDSPRD(datetime1, datetime2)    # output is daily, not Friday-stamped
    get_AUFI(datetime1, datetime2)
    get_ICEOIL(datetime1, datetime2)
    get_CBOND(datetime1, datetime2)
    get_COMMEDITY(datetime1, datetime2)
    get_USDCNY(datetime1, datetime2)
    get_TRUST(database, datetime1, datetime2)   # data looks wrong
    get_XCHANGE(database, datetime1, datetime2)
    get_ASS_LBT(database, datetime1, datetime2)
    get_SAVE_LOAN(database, datetime1, datetime2)
    get_IRS1Y(database, datetime1, datetime2)
    get_USDCNY_USDX_Spread(database, datetime1, datetime2)
    get_MSCI_EM(database, datetime1, datetime2)
    get_MSCI_DM(database, datetime1, datetime2)
    get_Turnover(database, datetime1, datetime2)
    get_Industries_Rotation(database, datetime1, datetime2)  # data looks wrong
    get_Consumption(database, datetime1, datetime2)
    print('finished!')
