from SystematicFactors.General import *
import os
import Core.Gadget as Gadget
from HoldingAnalysis.shareholder_change_plan import load_share_holding_change_plan_info


# Compute IPO issuance factors (daily and weekly aggregates).
def Calc_IPO(database, datetime1, datetime2):
    """Aggregate IPO issue amounts between datetime1 and datetime2 (inclusive)
    and save them as systematic factors: daily series plus weekly sums and
    weekly first differences.

    Parameters
    ----------
    database : project database wrapper exposing Find(db, table, filter)
    datetime1, datetime2 : datetime.datetime
        Inclusive query window on the IPO issue date.
    """
    # Avoid shadowing the builtin `filter`.
    query_filter = {"Issue_Date": {">=": datetime1, "<=": datetime2}}
    documents = database.Find("financial_data", "stock_ipo", query_filter)
    df_IPO = Gadget.DocumentsToDataFrame(documents, keep=["issue_amount_fore", "issue_amount", "issue_date"])
    # Sum all IPOs sharing the same issue date.
    dfGroups = df_IPO.groupby("issue_date").sum()
    dfGroups.rename(columns={"issue_amount_fore": "IPO_Amt_Fore", "issue_amount": "IPO_Amt"}, inplace=True)
    dfGroups["Date"] = dfGroups.index
    print(dfGroups)
    # First observed issue date, normalized to midnight, becomes the calendar start.
    real_datetime1 = dfGroups.index[0]
    real_datetime1 = datetime.datetime(real_datetime1.year, real_datetime1.month, real_datetime1.day)
    #
    dfGroups.index = pd.to_datetime(dfGroups.index)
    dfGroups["date"] = dfGroups.index
    # Align daily aggregates onto a full calendar-day grid (missing days -> NaN).
    df_calender = Gadget.Generate_Calender_Days_DataFrame(real_datetime1, datetime2, date_field_name="date",
                                                          date_as_index=False)
    df = pd.merge(df_calender, dfGroups, how="left", on="date")
    df["Release_Date"] = df["date"]
    df.set_index("date", inplace=True)
    print(df)
    # Weekly sums; release date assumed two days before the week-end label.
    df_weekly = df.resample("w").agg({"IPO_Amt": "sum", "IPO_Amt_Fore": "sum"})
    df_weekly["Report_Date"] = df_weekly.index
    df_weekly["Release_Date"] = df_weekly["Report_Date"].apply(lambda x: x - datetime.timedelta(days=2))
    df_weekly["IPO_Amt_Dif"] = df_weekly["IPO_Amt"].diff(1)
    df_weekly["IPO_Amt_Fore_Dif"] = df_weekly["IPO_Amt_Fore"].diff(1)
    # NOTE(review): Release_Date may contain NaT here — confirm downstream handling.
    print(df_weekly)
    # Daily factors.
    Save_Systematic_Factor_To_Database(database, dfGroups, save_name="IPO_Amt_Fore")
    Save_Systematic_Factor_To_Database(database, dfGroups, save_name="IPO_Amt")
    # Weekly factors and their first differences.
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="IPO_Amt_Fore_Weekly", field_name="IPO_Amt_Fore")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="IPO_Amt_Fore_Weekly_Dif",
                                       field_name="IPO_Amt_Fore_Dif")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="IPO_Amt_Weekly", field_name="IPO_Amt")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="IPO_Amt_Weekly_Dif", field_name="IPO_Amt_Dif")


# Compute SEO (secondary/additional offering) issuance factors.
def Calc_SEO(database, datetime1, datetime2):
    """Aggregate SEO issue amounts between datetime1 and datetime2 (inclusive)
    and save them as systematic factors: daily series plus weekly sums and
    weekly first differences. Mirrors Calc_IPO; exits early without saving
    when the window holds no SEO records.
    """
    # Avoid shadowing the builtin `filter`.
    query_filter = {"Issue_Date": {">=": datetime1, "<=": datetime2}}
    documents = database.Find("financial_data", "stock_SEO", query_filter)

    if len(documents) == 0:
        print("No SEO data to Save")
        return

    df_SEO = Gadget.DocumentsToDataFrame(documents, keep=["issue_amount_fore", "issue_amount", "issue_date"])
    print(df_SEO)
    # Sum all offerings sharing the same issue date.
    dfGroups = df_SEO.groupby("issue_date").sum()
    dfGroups.rename(columns={"issue_amount_fore": "SEO_Amt_Fore", "issue_amount": "SEO_Amt"}, inplace=True)
    # Scale forecast amount — presumably CNY -> 100M CNY; note SEO_Amt is NOT
    # rescaled the same way (TODO confirm source units).
    dfGroups["SEO_Amt_Fore"] = dfGroups["SEO_Amt_Fore"] / 100000000
    print(dfGroups)
    # First observed issue date, normalized to midnight, becomes the calendar start.
    real_datetime1 = dfGroups.index[0]
    real_datetime1 = datetime.datetime(real_datetime1.year, real_datetime1.month, real_datetime1.day)
    #
    dfGroups.index = pd.to_datetime(dfGroups.index)
    dfGroups["date"] = dfGroups.index
    # Align daily aggregates onto a full calendar-day grid (missing days -> NaN).
    df_calender = Gadget.Generate_Calender_Days_DataFrame(real_datetime1, datetime2, date_field_name="date",
                                                          date_as_index=False)
    df = pd.merge(df_calender, dfGroups, how="left", on="date")
    df["Release_Date"] = df["date"]
    df.set_index("date", inplace=True)
    print(df)
    # Weekly sums; release date assumed two days before the week-end label.
    df_weekly = df.resample("w").agg({"SEO_Amt": "sum", "SEO_Amt_Fore": "sum"})
    df_weekly["Report_Date"] = df_weekly.index
    df_weekly["Release_Date"] = df_weekly["Report_Date"].apply(lambda x: x - datetime.timedelta(days=2))
    df_weekly["SEO_Amt_Dif"] = df_weekly["SEO_Amt"].diff(1)
    df_weekly["SEO_Amt_Fore_Dif"] = df_weekly["SEO_Amt_Fore"].diff(1)
    # NOTE(review): Release_Date may contain NaT here — confirm downstream handling.
    print(df_weekly)
    # Daily factors.
    Save_Systematic_Factor_To_Database(database, dfGroups, save_name="SEO_Amt_Fore")
    Save_Systematic_Factor_To_Database(database, dfGroups, save_name="SEO_Amt")
    # Weekly factors and their first differences.
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="SEO_Amt_Fore_Weekly", field_name="SEO_Amt_Fore")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="SEO_Amt_Fore_Weekly_Dif",
                                       field_name="SEO_Amt_Fore_Dif")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="SEO_Amt_Weekly", field_name="SEO_Amt")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="SEO_Amt_Weekly_Dif", field_name="SEO_Amt_Dif")


def Test_SEO_Issue(database):
    """Ad-hoc diagnostic: aggregate SEO records by listing month and dump the
    monthly count / issue amount to a local CSV for manual inspection."""
    #
    datetime1 = datetime.datetime(2000,1,1)
    datetime2 = datetime.datetime(2020,7,1)
    #
    # Wind computes SEO on a listing_date basis, but listing_date (API field
    # placement_list_date) contains many "0001-01-01" placeholders.
    # A manual workaround is used for now; when the database is rebuilt, the
    # following SQL can quickly locate the bad rows:
    # # SELECT * FROM financial_data.stock_seo where listing_date = "00010101" order by issue_date desc

    filter = []
    date_field = "listing_date"
    filter.append([date_field, ">=", datetime1])
    filter.append([date_field, "<=", datetime2])
    #
    documents = database.Find("financial_data", "stock_SEO", filter)
    df = Gadget.DocumentsToDataFrame(documents)
    print(df)
    # Per-month row count ("key2" presumably a non-null document key column,
    # used only for counting — verify) and per-month summed issue amount.
    Gadget.AddMonthColumn(df, date_field=date_field)
    df_by_month = df[["key2","Month"]].groupby("Month").count()
    df_by_month2 = df[["issue_amount", "Month"]].groupby("Month").sum()
    df_by_month2["count"] = df_by_month["key2"]
    #
    print(df_by_month2)
    #
    df_by_month2.to_csv("d://seo_data_by_month.csv")


def Test_CB_Issue(database):
    """Ad-hoc diagnostic: aggregate convertible-bond issuance by listing month
    and dump counts / amounts to a local CSV."""

    # Instrument universe; rows whose datetime2 lies beyond 2050 are presumably
    # the still-listed stocks (delisted ones carry an earlier end date) — verify.
    documents = database.Find("instruments", "stock")
    df_instruments = Gadget.DocumentsToDataFrame(documents, keep=["symbol", "datetime1", "datetime2"])
    count = len(df_instruments)  # total instruments (kept for manual inspection)
    df_instruments = df_instruments[df_instruments["datetime2"] > datetime.datetime(2050,1,1)]
    count2 = len(df_instruments)  # still-listed instruments

    datetime1 = datetime.datetime(2000,1,1)
    datetime2 = datetime.datetime(2020,7,1)

    # interest_start_date is equivalent to issue_date for convertible bonds.
    date_field = "listing_date"

    filter = []
    filter.append([date_field, ">=", datetime1])
    filter.append([date_field, "<=", datetime2])
    #
    documents = database.Find("financial_data", "stock_convertible_bond", filter)
    df = Gadget.DocumentsToDataFrame(documents, keep=["issue_amount_fore", "issue_amount", "listing_date"])

    # df = pd.merge(df, df_instruments, on="symbol", how="inner")

    # Per-month count and summed amounts.
    Gadget.AddMonthColumn(df, date_field=date_field)
    df_by_month = df.groupby("Month").count()
    print(df_by_month)
    df_by_month2 = df.groupby("Month").sum()
    print(df_by_month2)

    df_by_month2.to_csv("d://cb_data_by_month.csv")


def Test_IPO_Issue(database):
    """Ad-hoc diagnostic: aggregate IPO issuance by month for still-listed
    stocks and dump counts / amounts to a local CSV."""
    # Instrument universe; rows whose datetime2 lies beyond 2050 are presumably
    # the still-listed stocks — verify against the instruments schema.
    documents = database.Find("instruments", "stock")
    df_instruments = Gadget.DocumentsToDataFrame(documents, keep=["symbol", "datetime1", "datetime2"])
    count = len(df_instruments)  # total instruments (kept for manual inspection)
    df_instruments = df_instruments[df_instruments["datetime2"] > datetime.datetime(2050,1,1)]
    count2 = len(df_instruments)  # still-listed instruments

    #
    datetime1 = datetime.datetime(2000, 1, 1)
    datetime2 = datetime.datetime(2020, 6, 30)
    #
    filter = []
    date_field = "ipo_date"
    filter.append([date_field, ">=", datetime1])
    filter.append([date_field, "<=", datetime2])
    #
    documents = database.Find("Stock", "IPO", filter)
    df = Gadget.DocumentsToDataFrame(documents)
    count_ipo = len(df)
    # Keep only IPOs whose symbol is still in the listed universe.
    df = pd.merge(df, df_instruments, on="symbol", how="inner")
    count_valid = len(df)
    print(df[["symbol", "name", "ipo_date", "datetime1", "datetime2"]])

    Gadget.AddMonthColumn(df, date_field=date_field)

    # Per-month IPO count and summed issue amount.
    df_by_month = df[["symbol","Month"]].groupby("Month").count()
    df_by_month2 = df[["issue_amount","Month"]].groupby("Month").sum()
    df_by_month2["count"] = df_by_month["symbol"]
    print(df_by_month2)
    # Grand totals as a sanity check.
    total_value = df_by_month2["issue_amount"].sum()
    total_count = df_by_month2["count"].sum()
    print(total_value, total_count)

    df_by_month2.to_csv("d://ipo_data_by_month.csv")


# Compute all equity-financing factors (IPO + secondary offerings + more).
# Date-field conventions per instrument (original notes):
# IPO: issue_date = offering day, ipo_date = listing day
# SEO: issue_date, listing_date
# EBCB (convertible/exchangeable bonds): interest_start_date acts as the
#   issue day; listing_date is the listing day
# Rights issue: record_date acts as the issue day (actual payment falls
#   between the record date and the ex-rights date); listing_date = listing day
# Preferred stock: no suitable issue_date reference; only listing_date
def Calc_Equity_Issue(database, datetime1, datetime2):
    """Aggregate all equity-financing channels onto one calendar-day grid,
    total them, and save weekly/monthly systematic factors
    (Equity_Issue_Weekly, Equity_Issue_Weekly_Dif, Equity_Issue_Monthly)."""
    # Calendar-day skeleton every source is aligned on.
    df = Gadget.Generate_Calender_Days_DataFrame(datetime1, datetime2)

    # ---IPO aggregated by day---
    # data_field = "ipo_date"
    data_field = "issue_date"
    filter = []
    filter.append([data_field, ">=", datetime1])
    filter.append([data_field, "<=", datetime2])
    documents = database.Find("financial_data", "stock_IPO", filter)
    df_IPO = Gadget.DocumentsToDataFrame(documents)
    df_IPO[data_field] = pd.to_datetime(df_IPO[data_field])
    df_IPO.rename(columns={"issue_amount_fore":"IPO_Fore", "issue_amount":"IPO"}, inplace=True)
    df_IPO_by_date = df_IPO[[data_field, "IPO", "IPO_Fore"]].groupby(data_field).sum()

    # Merge IPO onto the calendar grid (missing days -> NaN).
    df = pd.merge(df, df_IPO_by_date, how="left", left_on="Date", right_on=data_field)
    # print(df)

    # ---SEO (secondary offering) aggregated by day---
    # data_field = "listing_date"
    data_field = "issue_date"
    filter = []
    filter.append([data_field, ">=", datetime1])
    filter.append([data_field, "<=", datetime2])
    documents = database.Find("financial_data", "stock_SEO", filter)
    if len(documents) == 0:
        print("No SEO data to Save")
        df_SEO = pd.DataFrame(data=[], columns=[data_field,"SEO","SEO_Fore"])
    else:
        df_SEO = Gadget.DocumentsToDataFrame(documents)
        df_SEO[data_field] = pd.to_datetime(df_SEO[data_field])
        df_SEO.rename(columns={"issue_amount_fore": "SEO_Fore", "issue_amount": "SEO"}, inplace=True)
    #
    df_SEO_by_date = df_SEO[[data_field, "SEO", "SEO_Fore"]].groupby(data_field).sum()
    df = pd.merge(df, df_SEO_by_date, how="left", left_on="Date", right_on=data_field)
    # print(df)

    # ---EBCB (convertible bonds) aggregated by day---
    # data_field = "listing_date"
    data_field = "interest_start_date"
    filter = []
    filter.append([data_field, ">=", datetime1])
    filter.append([data_field, "<=", datetime2])
    documents = database.Find("financial_data", "stock_Convertible_Bond", filter)
    if len(documents) == 0:
        print("No EBCB data to Save")
        df_EBCB = pd.DataFrame(data=[], columns=[data_field,"EBCB"])
    else:
        df_EBCB = Gadget.DocumentsToDataFrame(documents)
        df_EBCB[data_field] = pd.to_datetime(df_EBCB[data_field])
        df_EBCB.rename(columns={"issue_amount": "EBCB"}, inplace=True)
    #
    df_EBCB_by_date = df_EBCB[[data_field, "EBCB"]].groupby(data_field).sum()
    df = pd.merge(df, df_EBCB_by_date, how="left", left_on="Date", right_on=data_field)
    # print(df)

    # ---Rights issues aggregated by day---
    # data_field = "listing_date"
    data_field = "record_date"
    filter = []
    filter.append([data_field, ">=", datetime1])
    filter.append([data_field, "<=", datetime2])
    documents = database.Find("financial_data", "stock_Rights_Issue", filter)
    if len(documents) == 0:
        print("No Rights_Issue data to Save")
        df_rights = pd.DataFrame(data=[], columns=[data_field,"Rights"])
    else:
        df_rights = Gadget.DocumentsToDataFrame(documents)
        df_rights[data_field] = pd.to_datetime(df_rights[data_field])
        df_rights.rename(columns={"issue_amount": "Rights"}, inplace=True)
    #
    df_rights_by_date = df_rights[[data_field, "Rights"]].groupby(data_field).sum()
    df = pd.merge(df, df_rights_by_date, how="left", left_on="Date", right_on=data_field)
    # print(df)

    # ---Preferred stock aggregated by day---
    data_field = "listing_date" # no usable issue_date reference for preferred stock
    filter = []
    filter.append([data_field, ">=", datetime1])
    filter.append([data_field, "<=", datetime2])
    documents = database.Find("financial_data", "stock_Preferred_Stock", filter)
    if len(documents) == 0:
        print("No Preferred_Stock data to Save")
        df_ps = pd.DataFrame(data=[], columns=[data_field, "PS"])
    else:
        df_ps = Gadget.DocumentsToDataFrame(documents)
        df_ps[data_field] = pd.to_datetime(df_ps[data_field])
        df_ps.rename(columns={"issue_amount": "PS"}, inplace=True)
    #
    df_ps_by_date = df_ps[[data_field, "PS"]].groupby(data_field).sum()
    df = pd.merge(df, df_ps_by_date, how="left", left_on="Date", right_on=data_field)
    # print(df)

    # Monthly aggregation (NaN filled with 0 first so the row total is defined).
    df.set_index("Date", inplace=True)
    df.fillna(0, inplace=True)
    df["Total"] = df["IPO"] + df["SEO"] + df["EBCB"] + df["Rights"] + df["PS"]
    # Gadget.AddMonthColumn(df, date_field="Date")
    # df_by_month = df.groupby("Month").sum()
    df_by_month = df.resample("M").sum()
    df_by_month["Report_Date"] = df_by_month.index
    df_by_month["Release_Date"] = df_by_month.index
    df_by_month["Dif"] = df_by_month["Total"] - df_by_month["Total"].shift(1)
    print(df_by_month[["Dif","Total"]])
    #

    # Weekly aggregation; the first week's diff is NaN and is dropped.
    df_by_week = df.resample("W").sum()
    df_by_week["Report_Date"] = df_by_week.index
    df_by_week["Release_Date"] = df_by_week["Report_Date"].apply(lambda x: x - datetime.timedelta(days=2))
    df_by_week["Dif"] = df_by_week["Total"] - df_by_week["Total"].shift(1)
    print(df_by_week)
    df_by_week.dropna(subset=["Dif"], inplace=True)
    #
    Save_Systematic_Factor_To_Database(database, df_by_month, save_name="Equity_Issue_Monthly", field_name="Total")
    Save_Systematic_Factor_To_Database(database, df_by_week, save_name="Equity_Issue_Weekly", field_name="Total")
    Save_Systematic_Factor_To_Database(database, df_by_week, save_name="Equity_Issue_Weekly_Dif", field_name="Dif")

    #
    # df.to_csv("d://Equity_Issue.csv")


def Calc_UnRestrict(database, datetime1, datetime2):
    """Daily market value of shares coming out of lockup (unrestriction).

    For each unrestriction date, volumes (unit: 10k shares) are priced at
    that day's close via the Wind snapshot and summed; the resulting daily
    series is stored as the UnRestrict_Amt factor.
    """
    # Avoid shadowing the builtin `filter`.
    query_filter = {"Date": {">=": datetime1, "<=": datetime2}}
    documents = database.Find("financial_data", "stock_UnRestrict", query_filter)
    df_UnRestrict = Gadget.DocumentsToDataFrame(documents,
                                                keep=["date", "symbol", "unrestrict_volume"],
                                                rename={"date":"Date", "symbol":"Symbol", "unrestrict_volume":"UnRestrict_Volume"}
                                                )
    #
    df_UnRestrict["UnRestrict_Volume"] = df_UnRestrict["UnRestrict_Volume"].astype("float")
    data = []
    # Unpack the (date, group) tuples directly instead of indexing dfGroup[0]/[1].
    for date, group in df_UnRestrict.groupby("Date"):
        dfTemp = group.copy()
        print("Process UnRestrict", date)
        # Unique symbols unrestricted on this day.
        symbols = dfTemp[["Symbol"]].drop_duplicates()["Symbol"].tolist()
        # Request close prices for that day.
        df_price = WSD_Quote_Snapshot(symbols, date)
        df_price["Symbol"] = df_price.index
        # print(df_price)

        dfTemp = pd.merge(dfTemp, df_price, how="left", on="Symbol")
        dfTemp["UnRestrict_Amount"] = dfTemp["UnRestrict_Volume"] * dfTemp["Close"] * 10000 # volume unit is 10k shares
        daily_total = dfTemp["UnRestrict_Amount"].sum()  # renamed from `sum` (shadowed builtin)
        data.append([date, daily_total])
    #
    df_amount = pd.DataFrame(data, columns=["Date", "UnRestrict_Amt"])
    df_amount["Date"] = pd.to_datetime(df_amount["Date"])
    df_amount.set_index(df_amount["Date"], drop=True, inplace=True)
    #
    Save_Systematic_Factor_To_Database(database, df_amount, save_name="UnRestrict_Amt")


def Load_Bond_Net_Issue(database, datetime1, datetime2, bond_type):
    """Load daily net issuance for one bond type from the database.

    Returns a DataFrame with columns ["Date", bond_type] ("Date" converted to
    datetime64); zero rows when the window holds no records.
    """
    data_field = "date"
    # Avoid shadowing the builtin `filter`.
    query_filter = []
    query_filter.append(["bond_type", bond_type])
    query_filter.append([data_field, ">=", datetime1])
    query_filter.append([data_field, "<=", datetime2])
    #
    documents = database.Find("financial_data", "Bond_Issue_Reduction", query_filter)
    if len(documents) == 0:
        # BUGFIX: the column must be "Date" (capitalized). The previous
        # lowercase "date" made pd.to_datetime(df["Date"]) below raise
        # KeyError whenever the query returned no documents.
        df = pd.DataFrame(data=[], columns=["Date", bond_type])
    else:
        df = Gadget.DocumentsToDataFrame(documents, keep=["date", "net_issue_amount"])
        df.rename(columns={"net_issue_amount": bond_type, "date": "Date"}, inplace=True)
    #
    df["Date"] = pd.to_datetime(df["Date"])
    return df


def Calc_Bond_Net_Issue_Sovereign(database, datetime1, datetime2):
    """Weekly and monthly net issuance of sovereign-type bonds (treasuries,
    local-government bonds, central-bank bills, policy-bank bonds), saved as
    the Bond_Net_Issue_Sover_Weekly / _Monthly systematic factors."""
    # Calendar-day skeleton every bond type is aligned on.
    df = Gadget.Generate_Calender_Days_DataFrame(datetime1, datetime2)

    bond_types = ["国债", "地方政府债", "央行票据", "政策银行债"]
    for bond_type in bond_types:
        df_tmp = Load_Bond_Net_Issue(database, datetime1, datetime2, bond_type)
        print(bond_type)
        print(df_tmp)
        # Left-merge each type onto the calendar grid (missing days -> NaN).
        df = pd.merge(df, df_tmp, how="left", on="Date")

    # Row-wise total over all bond types (NaN skipped, same as Series.sum).
    df.set_index("Date", inplace=True)
    df['Total'] = df.sum(axis=1)  # vectorized; replaces the per-row apply(lambda)
    print(df.tail(10))

    # Weekly aggregation.
    df_by_week = df.resample("W").sum()
    df_by_week["Report_Date"] = df_by_week.index
    df_by_week["Release_Date"] = df_by_week.index
    print(df_by_week)
    Save_Systematic_Factor_To_Database(database, df_by_week, save_name="Bond_Net_Issue_Sover_Weekly",
                                       field_name="Total")

    # Monthly aggregation.
    df_by_month = df.resample("M").sum()
    df_by_month["Report_Date"] = df_by_month.index
    df_by_month["Release_Date"] = df_by_month.index
    print(df_by_month)
    #
    Save_Systematic_Factor_To_Database(database, df_by_month, save_name="Bond_Net_Issue_Sover_Monthly",
                                       field_name="Total")


def Calc_Bond_Net_Issue_Credit(database, datetime1, datetime2):
    """Weekly and monthly net issuance of credit-type bonds (corporate credit,
    financial bonds, and other instruments), saved as the
    Bond_Net_Issue_Credit_Weekly / _Monthly systematic factors.

    The numeric comments are approximate series scale notes carried over from
    the original author — units unverified (TODO confirm)."""
    # Calendar-day skeleton every bond type is aligned on.
    df = Gadget.Generate_Calender_Days_DataFrame(datetime1, datetime2)

    bond_types = []
    # Corporate credit
    bond_types.append("公司债") # corporate bonds, 7.68
    bond_types.append("企业债") # enterprise bonds, 2.20
    bond_types.append("中期票据") # medium-term notes, 7.06
    bond_types.append("短期融资券") # short-term commercial paper, 2.20

    # Financial bonds
    bond_types.append("商业银行债") # commercial bank bonds, 1.68
    bond_types.append("商业银行次级债券") # commercial bank subordinated bonds, 1.68
    bond_types.append("保险公司债") # insurance company bonds, 0.31
    bond_types.append("证券公司债") # securities firm bonds, 1.36
    bond_types.append("证券公司短期融资券") # securities firm short-term paper, 0.23
    bond_types.append("其它金融机构债") # other financial institution bonds, 0.64

    # Other financing instruments
    bond_types.append("资产支持证券")  # ABS, 3.5
    bond_types.append("政府支持机构债")  # government-backed agency bonds, 1.59 — similar to financial bonds
    bond_types.append("定向工具")  # PPN (private placement notes), 2.1 — similar to credit bonds

    for bond_type in bond_types:
        df_tmp = Load_Bond_Net_Issue(database, datetime1, datetime2, bond_type)
        # print(df_tmp)
        # Left-merge each type onto the calendar grid (missing days -> NaN).
        df = pd.merge(df, df_tmp, how="left", on="Date")

    # Row-wise total over all bond types (NaN skipped, same as Series.sum).
    df.set_index("Date", inplace=True)
    df['Total'] = df.sum(axis=1)  # vectorized; replaces the per-row apply(lambda)
    print(df.tail(10))

    # Weekly aggregation.
    df_by_week = df.resample("W").sum()
    df_by_week["Report_Date"] = df_by_week.index
    df_by_week["Release_Date"] = df_by_week.index
    print(df_by_week)
    Save_Systematic_Factor_To_Database(database, df_by_week, save_name="Bond_Net_Issue_Credit_Weekly",
                                       field_name="Total")

    # Monthly aggregation.
    df_by_month = df.resample("M").sum()
    df_by_month["Report_Date"] = df_by_month.index
    df_by_month["Release_Date"] = df_by_month.index
    print(df_by_month)
    Save_Systematic_Factor_To_Database(database, df_by_month, save_name="Bond_Net_Issue_Credit_Monthly",
                                       field_name="Total")


# Investor confidence index factors (the original header said "consumer
# confidence", but the Wind codes and factor names below are investor-confidence
# series).
def Calc_Investor_Confidence(database, datetime1, datetime2):

    def one_factor(database, datetime1, datetime2, wind_code, factor_name):
        # The confidence index is monthly; computing a diff needs at least two
        # months of data, and release happens at month end — so pull the query
        # window back roughly three months to be safe.
        datetime0 = datetime1 - datetime.timedelta(days=3*31)
        # Request the Wind EDB series.
        df = EDB(wind_code, datetime0, datetime2, factor_name, dateAsIndex=False)
        # Fill in the release date: published one month later, on the last day.
        Fill_ReleaseDate(df, lag_release_month=1, release_day=-1)
        print(df.tail())

        # Store the raw series.
        Save_Systematic_Raw_To_Database(database, df, saved_name=wind_code, field_name=factor_name)

        # Store the level as a factor.
        Save_Systematic_Factor_To_Database(database, df, save_name=factor_name)

        # Log month-over-month change.
        df.fillna(method="ffill", inplace=True)
        df["lnMoM"] = np.log(df[factor_name] / df[factor_name].shift(1))
        # print(df)
        df.dropna(inplace=True) # the first MoM value is NaN

        # Re-frequency: expand to daily first. Naively this could leak
        # look-ahead data; merging on Release_Date ensures a value is only
        # visible from its publication date onward.
        df_calender = Gadget.Generate_Calender_Days_DataFrame(datetime0, datetime2)
        df = pd.merge(df_calender, df, left_on="Date", right_on="Release_Date", how="left")
        # print(df)
        df.fillna(method="ffill", inplace=True) # forward fill between releases
        # print(df)

        # Trim back to the requested window and index by date.
        df = df[(df["Date"] >= datetime1) & (df["Date"] <= datetime2)]
        df["date_t"] = pd.to_datetime(df["Date"])
        df.set_index("date_t", inplace=True)
        # print(df)
        df_weekly = df.resample("W").last()
        df_weekly["key_date"] = df_weekly.index # after re-frequency Report_Date repeats; use the weekly aggregation date as a unique key
        print(df_weekly[["Report_Date", "Release_Date", factor_name, "lnMoM"]].tail(20))

        # Store the weekly log-diff as a factor.
        Save_Systematic_Factor_To_Database(database, df_weekly, save_name=factor_name + "_Monthly_lnDif",
                                           field_name="lnMoM", key_date="key_date")
        a = 0  # no-op; presumably a leftover debugger-breakpoint anchor

    #
    one_factor(database, datetime1, datetime2, "M5452815", "Investor_Confi")
    one_factor(database, datetime1, datetime2, "M5452818", "Investor_Confi_Global") # series discontinued
    one_factor(database, datetime1, datetime2, "M5452819", "Investor_Confi_Valuation")


# Historical note — the legacy factor spliced three data segments:
# before 2015-04-10: M0013030, new stock accounts, weekly, account count (series ends 2015-05-29)
# 2015-04-17 .. 2015-04-30: three regression-estimated points: 116.71, 152.13, 103.03
# after 2015-04-30: M5558005, new investors, weekly, unit 10k persons
# Data was released each Tuesday for the previous week, stamped on that week's Friday.
# The series stopped updating in February 2019.
def Calc_New_Investor_old(database, datetime1, datetime2):
    """Deprecated no-op: weekly new-investor factor (QX_NEWINVSTOR).

    The underlying Wind series stopped updating in 2019-02, so the splicing
    logic (see the notes above) was retired; the dead commented-out
    implementation has been removed. Kept as a placeholder so existing call
    sites keep working. See Calc_New_Investor for the current monthly series.
    """
    pass


# Investor counts and newly added investors (monthly Wind series; the weekly
# publication was discontinued as of 2020-07-21).
def Calc_New_Investor(database, datetime1, datetime2):
    """Fetch the six monthly investor-count series from Wind EDB, attach
    report/release dates, and store each one as a systematic factor."""
    series_specs = [
        ("M6193262", "Investor_Total"),                   # investors at period end
        ("M6193263", "Investor_Individual"),              # natural persons at period end
        ("M6193266", "Investor_Company"),                 # institutions at period end
        ("M6193259", "Investor_Total_New_Monthly"),       # newly added investors
        ("M6193260", "Investor_Individual_New_Monthly"),  # newly added: natural persons
        ("M6193261", "Investor_Company_New_Monthly"),     # newly added: institutions
    ]
    frames = [EDB(code, datetime1, datetime2, field) for code, field in series_specs]
    #
    df_investor = pd.concat(frames, axis=1, sort=True)
    df_investor["Report_Date"] = df_investor.index
    # Released around the 20th of the following month.
    Fill_ReleaseDate(df_investor, lag_release_month=1, release_day=20)
    #
    print(df_investor)
    # One saved factor per series, in the same order as the fetches above.
    for _, field in series_specs:
        Save_Systematic_Factor_To_Database(database, df_investor, field)


def Calc_New_MarginTrade_Investor(database, datetime1, datetime2):
    """Margin-trading client count: store the level plus weekly and monthly
    first differences as systematic factors."""
    df = EDB('M0096662', datetime1, datetime2, 'investor')  # number of margin-trading clients
    df["Release_Date"] = df.index
    df["Report_Date"] = df.index
    df["date_t"] = pd.to_datetime(df["Release_Date"])
    df.set_index("date_t", inplace=True)
    print(df)
    Save_Systematic_Factor_To_Database(database, df, 'MarginTrade_Investor', field_name="investor")

    def _last_and_diff(freq):
        # Resample to the period end, keep the last observation, and diff it.
        resampled = df.resample(freq).last()
        resampled["Report_Date"] = resampled.index
        resampled["diff"] = resampled["investor"].diff()
        print(resampled)
        return resampled

    df_weekly = _last_and_diff("W")
    Save_Systematic_Factor_To_Database(database, df_weekly, 'MarginTrade_Investor_Weekly_Dif', field_name="diff")

    # Drop the final (possibly partial) month before saving.
    df_monthly = _last_and_diff("M")[:-1]
    Save_Systematic_Factor_To_Database(database, df_monthly, 'MarginTrade_Investor_Monthly_Dif', field_name="diff")


def Calc_New_MF_Equity_Shares(database, datetime1, datetime2):
    """Newly launched equity-biased mutual-fund shares (Wind M0060433), saved
    as daily, weekly, and monthly factors.

    Weekly sums use a Friday cutoff (funds can launch on weekends) while the
    matching Sunday-week label is kept as Report_Date.
    """
    df = EDB('M0060433', datetime1, datetime2, 'NewShares', dateAsIndex=True)  # newly launched fund shares, equity-biased
    # df["Release_Date"] = df["Report_Date"]
    #
    # df["date_t"] = pd.to_datetime(df["Report_Date"])
    # df.set_index("date_t", inplace=True)
    df_weekly = df.resample("W-Fri").sum() # funds launch on weekends too; Friday is the weekly cutoff
    # print(df_weekly)

    df_weekly2 = df.resample("W").sum() # Sunday-ending weeks
    # print(df_weekly2)

    # The first week is usually incomplete; drop it from both resamples.
    df_weekly = df_weekly[1:]
    df_weekly2 = df_weekly2[1:]

    # The two resamples can differ in length; the Sunday resample usually has one extra bin.
    if len(df_weekly) != len(df_weekly2):
        df_weekly2 = df_weekly2[1:]

    df_weekly["Report_Date"] = df_weekly2.index # Sunday
    df_weekly["Release_Date"] = df_weekly.index # Friday
    # print(df_weekly.tail())

    # ---Monthly aggregation---
    df_monthly = df.resample("M").sum()
    df_monthly["Report_Date"] = df_monthly.index
    df_monthly["Release_Date"] = df_monthly.index
    df_monthly = df_monthly[1:]  # drop the (usually incomplete) first month
    # print(df_monthly)
    # Daily launches
    Save_Systematic_Factor_To_Database(database, df, save_name="MF_Equity_New_Shares", field_name="NewShares")
    # Weekly launches
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="MF_Equity_New_Shares_Weekly",field_name="NewShares")
    # Monthly launches
    Save_Systematic_Factor_To_Database(database, df_monthly, save_name="MF_Equity_New_Shares_Monthly",field_name="NewShares")


# Mutual funds (equity + hybrid): count, shares, and monthly changes.
# Private (hedge) funds, all types: count, AUM, and monthly changes.
def Calc_Fund_Aum(database, datetime1, datetime2):
    """Fund-industry size factors from Wind EDB monthly series: open-end
    equity/hybrid mutual-fund counts and shares, plus securities-type private
    fund counts and AUM, each stored with its monthly first difference."""
    # ---Mutual fund count---
    df1 = EDB('M5207856', datetime1, datetime2)  # open-end mutual funds: equity
    df2 = EDB('M5207857', datetime1, datetime2)  # open-end mutual funds: hybrid
    df_count = pd.concat([df1, df2], axis=1, sort=True)
    #
    df_count["MF_Equity_Count"] = df_count["M5207856"] + df_count["M5207857"]
    df_count["MF_Equity_Count_Monthly_Dif"] = df_count["MF_Equity_Count"].diff(1)
    print(df_count)
    #
    Save_Systematic_Factor_To_Database(database, df_count, save_name="MF_Equity_Count")
    df_count.dropna(subset=["MF_Equity_Count_Monthly_Dif"], inplace=True)
    Save_Systematic_Factor_To_Database(database, df_count, save_name="MF_Equity_Count_Monthly_Dif")

    # ---Mutual fund shares---
    df3 = EDB('M5207864', datetime1, datetime2)  # open-end mutual funds: equity
    df4 = EDB('M5207865', datetime1, datetime2)  # open-end mutual funds: hybrid
    df_shares = pd.concat([df3, df4], axis=1, sort=True)
    #
    df_shares["MF_Equity_Shares"] = df_shares["M5207864"] + df_shares["M5207865"]
    df_shares["MF_Equity_Shares_Monthly_Dif"] = df_shares["MF_Equity_Shares"].diff(1)
    print(df_shares)  # BUGFIX: previously printed df_count again (copy-paste slip)
    #
    Save_Systematic_Factor_To_Database(database, df_shares, save_name="MF_Equity_Shares")
    df_shares.dropna(subset=["MF_Equity_Shares_Monthly_Dif"], inplace=True)
    Save_Systematic_Factor_To_Database(database, df_shares, save_name="MF_Equity_Shares_Monthly_Dif")

    # ---Private (hedge) funds: count and AUM, securities type---
    df_HF1 = EDB('M5543213', datetime1, datetime2, field_name="HF_Count")  # private securities funds: number managed
    df_HF2 = EDB('M5543215', datetime1, datetime2, field_name="HF_Aum")  # private securities funds: AUM
    df_HF = pd.concat([df_HF1, df_HF2], axis=1, sort=True)

    df_HF["HF_Count_Monthly_Dif"] = df_HF["HF_Count"].diff(1)
    df_HF["HF_Aum_Monthly_Dif"] = df_HF["HF_Aum"].diff(1)

    print(df_HF)
    #
    Save_Systematic_Factor_To_Database(database, df_HF, save_name="HF_Count")
    Save_Systematic_Factor_To_Database(database, df_HF, save_name="HF_Aum")
    df_HF.dropna(subset=["HF_Count_Monthly_Dif"], inplace=True)
    Save_Systematic_Factor_To_Database(database, df_HF, save_name="HF_Count_Monthly_Dif")
    df_HF.dropna(subset=["HF_Aum_Monthly_Dif"], inplace=True)
    Save_Systematic_Factor_To_Database(database, df_HF, save_name="HF_Aum_Monthly_Dif")


def Calc_Big_HedgeFund(database, datetime1, datetime2):
    """Count of large private-fund managers (AUM above 2bn CNY) and its
    monthly change, stored as systematic factors."""
    buckets = [
        ('M5543237', 'Aum20_50'),   # managers with AUM between 2bn and 5bn CNY
        ('M5543238', 'Aum50_100'),  # managers with AUM between 5bn and 10bn CNY
        ('M5543239', 'Aum100'),     # managers with AUM above 10bn CNY
    ]
    frames = [EDB(code, datetime1, datetime2, name) for code, name in buckets]
    df_bigHF = pd.concat(frames, axis=1, sort=True)
    # Total above-2bn managers and the month-over-month change.
    df_bigHF["HF_Aum_Abv20"] = df_bigHF["Aum20_50"] + df_bigHF["Aum50_100"] + df_bigHF["Aum100"]
    df_bigHF["HF_Aum_Abv20_Monthly_Dif"] = df_bigHF["HF_Aum_Abv20"].diff(1)
    #
    Save_Systematic_Factor_To_Database(database, df_bigHF, save_name="HF_Aum_Abv20")
    df_bigHF.dropna(subset=["HF_Aum_Abv20_Monthly_Dif"], inplace=True)
    Save_Systematic_Factor_To_Database(database, df_bigHF, save_name="HF_Aum_Abv20_Monthly_Dif")


def Calc_ShareHoldingChg_Planning(database, datetime1, datetime2, realtime=None):
    """Daily net amount of announced shareholder increase/decrease plans.

    For every announcement day each plan's share-count change (positive upper
    bound, falling back to the positive lower bound) is priced at that day's
    close and signed by direction (+1 increase / -1 decrease); the per-day
    sum is stored as the ShareHoldingChg_Plan market factor on trading days,
    with 0 on days without announcements.

    realtime: when None, prices come from the Wind WSD snapshot; otherwise
    from the realtime quote source.
    """
    def _planned_change(x):
        # Planned share change: prefer a positive upper bound, then a positive
        # lower bound, else 0.
        upper = x["Change_Upper"]
        lower = x["Change_Lower"]
        if not np.isnan(upper) and upper > 0:
            return upper
        elif not np.isnan(lower) and lower > 0:
            return lower
        else:
            return 0

    def _direction_sign(x):
        # +1 for increase plans ("增持"), -1 for decrease plans ("减持"), else 0.
        direction = x["Direction"]
        if direction == "增持":
            return 1
        elif direction == "减持":
            return -1
        else:
            return 0

    #
    sDateTime1 = Gadget.ToDateString(datetime1)
    sDateTime2 = Gadget.ToDateString(datetime2)
    # Avoid shadowing the builtin `filter`.
    query_filter = {"DateTime": {">=": datetime1, "<=": datetime2}}
    documents = database.Find("financial_data", "stock_ShareHolding_Chg_Plan", query_filter)
    df_shareplan = Gadget.DocumentsToDataFrame(documents, drop=["Name"])
    #
    df_shareplan.rename(columns={"change_upper": "Change_Upper",
                                 "change_lower": "Change_Lower",
                                 "direction": "Direction",
                                 "date": "Date", "symbol": "Symbol"}, inplace=True)
    #
    df_shareplan["Change_Upper"] = df_shareplan["Change_Upper"].astype("float")
    df_shareplan["Change_Lower"] = df_shareplan["Change_Lower"].astype("float")

    # ---Convert each plan to a signed share change---
    df_shareplan["Change"] = df_shareplan.apply(_planned_change, axis=1)
    df_shareplan["Sign"] = df_shareplan.apply(_direction_sign, axis=1)

    #
    data = []
    # Unpack the (date, group) tuples directly instead of indexing dfGroup[0]/[1].
    for date, group in df_shareplan.groupby("Date"):
        dfTemp = group.copy()
        #
        print("Process ShareHoldingChg", date, "#", len(dfTemp))

        # Unique symbols announced on this day.
        symbols = dfTemp[["Symbol"]].drop_duplicates()["Symbol"].tolist()
        # Request close prices (historical snapshot or realtime source).
        if realtime is None:
            df_price = WSD_Quote_Snapshot(symbols, date)
        else:
            df_price = RealTime_Quote(database, realtime, symbols, date)
        # print(df_price)

        dfTemp = pd.merge(dfTemp, df_price, how="left", on="Symbol")
        dfTemp["Amount"] = dfTemp["Change"] * dfTemp["Close"] * dfTemp["Sign"]
        daily_total = dfTemp["Amount"].sum()  # renamed from `sum` (shadowed builtin)
        data.append([date, daily_total])
    #
    df_amount = pd.DataFrame(data, columns=["Date", "Amount"])
    df_amount["Date"] = pd.to_datetime(df_amount["Date"])
    # print(df_amount.dtypes)

    # ---Fill announcement-free days with 0 on the trading-day calendar---
    tradedays = w.tdays(sDateTime1, sDateTime2, "")
    df_date = pd.DataFrame(data=tradedays.Data[0], columns=['Date'])
    # print(df_date.dtypes)
    #
    df = pd.merge(df_date, df_amount, how="left", on="Date")
    df.fillna(0, inplace=True)
    # df.set_index('Date', inplace=True)
    print(df)

    # ---Save factor---
    SaveSystematic_MarketFactorToDatabase(database, df, factorName="ShareHoldingChg_Plan", fieldName="Amount")


#
def Calc_ShareHoldingChg_Planning_v2(database, datetime1, datetime2, realtime=None):
    """Aggregate planned shareholder holding-change records per day and save the factor.

    Loads holding-change plan records (both increase and decrease directions) in
    [datetime1, datetime2], sums the estimated money, estimated ratio and entry
    count for each date, and persists the daily estimated money as the
    "ShareHoldingChg_Plan" market factor.

    Parameters
    ----------
    database : project database handle passed through to loader/saver.
    datetime1, datetime2 : datetime.datetime
        Date window for the plan records.
    realtime : unused; kept for signature compatibility with the v1 function.
    """
    #
    df = load_share_holding_change_plan_info(database, datetime1, datetime2, direction=None)
    # Use "sum" strings: passing np.sum to DataFrame.agg is deprecated in
    # modern pandas and is equivalent to the built-in sum aggregation.
    df_date_agg = df.groupby(["date"]).agg({"estimate_money": "sum", "estimate_ratio": "sum", "num_entry": "sum"})
    df_date_agg.reset_index(inplace=True)
    #
    SaveSystematic_MarketFactorToDatabase(database, df_date_agg, factorName="ShareHoldingChg_Plan", fieldName="estimate_money")



# ---Stock Connect (HK <-> Mainland mutual market access)---
def Calc_Stock_Connect(database, datetime1=None, datetime2=None):
    """Compute Stock Connect flow factors and save them to the database.

    Two families of factors are produced:
      1. Northbound-minus-southbound flow gap (daily diff of cumulative
         inflows) plus its 5-day rolling mean, resampled weekly.
      2. Northbound net-buy factors: cumulative/daily net buy, turnover share
         of total A-share turnover, net buy relative to free-float market cap,
         and a weekly net-buy sum with its weekly first difference.

    Parameters
    ----------
    database : project database handle used by Query_Data and the savers.
    datetime1, datetime2 : datetime.datetime or None
        Optional date window forwarded to the data queries.
    """
    #
    df_sh_south = Query_Data(database, 'M0329503', datetime1, datetime2)  # SH southbound (SH-HK Connect, SH leg) cumulative inflow
    df_sh_north = Query_Data(database, 'M0329498', datetime1, datetime2)  # SH northbound (SH Connect) cumulative inflow
    df_sz_south = Query_Data(database, 'M0329507', datetime1, datetime2)  # SZ southbound (SZ-HK Connect, SZ leg) cumulative inflow
    df_sz_north = Query_Data(database, 'M0329500', datetime1, datetime2)  # SZ northbound (SZ Connect) cumulative inflow
    #
    # df_buyin = EDB('M0329526', datetime1, datetime2, 'buyin')  # northbound: daily buy turnover
    # df_sellout = EDB('M0329527', datetime1, datetime2, 'sellout')  # northbound: daily sell turnover
    df_total_amt = EDB('M0329530', datetime1, datetime2, 'total_amt')  # northbound: daily turnover
    df_net_buy = EDB('M0329532', datetime1, datetime2, 'net_buy')  # daily net buy (daily net inflow) = M0329497 + M03229499
    df_net_buy_aggr = EDB('M0329533', datetime1, datetime2, 'net_buy_aggr')  # cumulative net buy

    df_sh_amt = EDB('M0020199', datetime1, datetime2, "amt_sh")  # Shanghai turnover, SSE A-share index
    df_sz_amt = EDB('M0020276', datetime1, datetime2, "amt_sz")  # Shenzhen turnover, SZSE composite index
    # amt_sh_df = EDB('G8324468', datetime1, datetime2, 'amt_sh')  # SSE: A-share total turnover # HKEX-sourced data
    # amt_sz_df = EDB('G8324481', datetime1, datetime2, 'amt_sz')  # SZSE: A-share total turnover # HKEX-sourced data
    df_sh_cap = EDB('G8324465', datetime1, datetime2, 'cap_sh')  # SSE: total free-float market cap
    df_sz_cap = EDB('G8324478', datetime1, datetime2, 'cap_sz')  # SZSE: total free-float market cap

    # ---Northbound minus southbound gap---
    dfs = [df_sh_south, df_sh_north, df_sz_south, df_sz_north]
    df_gap = pd.concat(dfs, axis=1, sort=True)
    df_gap["Report_Date"] = df_gap["Release_Date"] = df_gap.index
    # gap is cumulative (north - south); daily diff turns it into a daily flow gap
    df_gap["gap"] = df_gap["M0329498"] - df_gap["M0329503"] + df_gap["M0329500"] - df_gap["M0329507"]
    df_gap["Northbound_Southbound_Gap"] = df_gap["gap"].diff(1)
    df_gap['avg'] = df_gap['Northbound_Southbound_Gap'].rolling(window=5, min_periods=1).mean()  # 5-day rolling mean of daily net inflow gap
    df_gap_weekly = df_gap[['avg',"Release_Date"]].resample('W').last()
    df_gap_weekly["Report_Date"] = df_gap_weekly.index
    #
    print(df_gap_weekly)
    Save_Systematic_Factor_To_Database(database, df_gap, save_name="Northbound_Southbound_Gap")
    Save_Systematic_Factor_To_Database(database, df_gap_weekly, save_name="Northbound_Southbound_Gap_Weekly",
                                       field_name="avg")

    # ---Northbound net buy---
    df = pd.concat([df_net_buy_aggr, df_net_buy, df_total_amt, df_sh_amt, df_sz_amt, df_sh_cap, df_sz_cap], axis=1, sort=True)
    df["Report_Date"] = df["Release_Date"] = df.index
    #
    df['Northbound_Amt_To_TotalAmt'] = df['total_amt'] / (df['amt_sh'] + df['amt_sz'])  # daily northbound turnover / total A-share turnover
    df['Northbound_NetBuy_To_Cap'] = df['net_buy_aggr'] / (df['cap_sh'] + df['cap_sz'])  # cumulative net buy / total market cap
    #
    # Weekly: sum the daily net buy within the week, keep the last release date
    df_weekly = df[["net_buy", "Release_Date"]].resample("W").agg({"net_buy": "sum", "Release_Date": "last"})
    df_weekly["Report_Date"] = df_weekly.index
    df_weekly["Northbound_NetBuy_Weekly"] = df_weekly["net_buy"]
    df_weekly["Northbound_NetBuy_Weekly_Dif"] = df_weekly["net_buy"].diff(1)
    #
    print(df[["Northbound_Amt_To_TotalAmt", "Northbound_NetBuy_To_Cap"]])
    print(df_weekly)

    # ---Save To Database---
    Save_Systematic_Factor_To_Database(database, df, save_name="Northbound_NetBuy_Aggr", field_name="net_buy_aggr")
    Save_Systematic_Factor_To_Database(database, df, save_name="Northbound_NetBuy", field_name="net_buy")
    Save_Systematic_Factor_To_Database(database, df, save_name="Northbound_Amt_To_TotalAmt")
    Save_Systematic_Factor_To_Database(database, df, save_name="Northbound_NetBuy_To_Cap")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="Northbound_NetBuy_Weekly")
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name="Northbound_NetBuy_Weekly_Dif")


# NOTE: depends on the Calc_UnRestrict function having populated "UnRestrict_Amt"
def Calc_UnRestrict_Ratio(database, datetime1, datetime2):
    """Compute weekly share-unlock amount and its ratio to total market cap.

    Reads the previously stored "UnRestrict_Amt" factor, aggregates it weekly
    (padding weeks without unlock data via a standard calendar), then divides
    by the combined SSE+SZSE free-float market cap to get a weekly unlock
    ratio. Saves four factors: UnRestrict_Amt_Weekly(_Dif) and
    UnRestrict_Ratio_Weekly(_Dif).

    Parameters
    ----------
    database : project database handle.
    datetime1, datetime2 : datetime.datetime factor window.
    """
    #
    df = Load_Systematic_Factor(database, "UnRestrict_Amt", datetime1, datetime2)
    df.set_index("date", inplace=True)
    df.index = pd.to_datetime(df.index)
    # print(df)
    #
    # Weekly: sum the unlock amount within each week, keep the last release date
    df_weekly = df.resample("w").agg({"UnRestrict_Amt": "sum", "release_date": "last"})
    # print(df_weekly)

    # Weeks with no unlock data: pad with a standard calendar so the series is gapless
    df_calender = Gadget.Generate_Calender_Days_DataFrame(datetime1, datetime2, date_field_name="date",
                                                          date_as_index=True)
    df_calender = df_calender.resample("w").last()
    df_weekly = pd.merge(df_calender, df_weekly, how="left", left_index=True, right_index=True)
    df_weekly["Report_Date"] = df_weekly.index
    # Padded rows have NaN release dates; fall back to the report date
    df_weekly['release_date'] = df_weekly.apply(lambda x: Fix_Nan_DateTime(x['release_date'], x["Report_Date"]), axis=1)
    #
    df_weekly["UnRestrict_Amt_Weekly_Dif"] = df_weekly["UnRestrict_Amt"].diff(1)
    # print(df_weekly)
    # print(df_weekly[["UnRestrict_Amt", "UnRestrict_Amt_Weekly_Dif"]])

    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='UnRestrict_Amt_Weekly',
                                       field_name='UnRestrict_Amt')
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='UnRestrict_Amt_Weekly_Dif')
    #
    df1 = EDB('G8324465', datetime1, datetime2, 'sh_cap')  # SSE: A-share total free-float market cap
    df2 = EDB('G8324478', datetime1, datetime2, 'sz_cap')  # SZSE: A-share total free-float market cap
    df_cap = pd.concat([df1, df2], axis=1, sort=True)
    df_cap["cap_sum"] = df_cap["sh_cap"] + df_cap["sz_cap"]
    # print(df_cap)
    df_cap = df_cap[["cap_sum"]]
    df_weekly_cap = df_cap.resample("w").agg({"cap_sum": "mean"})  # weekly average market cap
    df_weekly = pd.merge(df_weekly, df_weekly_cap, how="inner", left_index=True, right_index=True)
    # Divide by 1e8 — presumably converts CNY to hundred-million CNY to match
    # the cap_sum units; TODO confirm against the EDB series' units.
    df_weekly["UnRestrict_Amt_tmp"] = df_weekly["UnRestrict_Amt"] / 100000000
    df_weekly["UnRestrict_Ratio"] = df_weekly["UnRestrict_Amt_tmp"] / df_weekly["cap_sum"]
    df_weekly["UnRestrict_Ratio_Weekly_Dif"] = df_weekly["UnRestrict_Ratio"].diff(1)
    df_weekly.rename(columns={"UnRestrict_Ratio": "UnRestrict_Ratio_Weekly"}, inplace=True)

    #
    # print(df_weekly.dtypes)
    # print(df_weekly[["Release_Date", "cap_sum", "UnRestrict_Amt"]])
    # print(df_weekly[["Release_Date", "UnRestrict_Ratio_Weekly", "UnRestrict_Ratio_Weekly_Dif"]])

    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='UnRestrict_Ratio_Weekly')
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='UnRestrict_Ratio_Weekly_Dif')


# get_SAVE_MKTVL
def Calc_Deposit_To_Market(database, datetime1, datetime2):
    """Compute the weekly deposits-to-market-cap factor and its weekly diff.

    Loads corporate deposits (M0043410), household savings (M0043413) and
    money-market fund NAV (M5207875), shifts their monthly report dates to
    actual release dates (released around the 18th of the following month),
    aligns everything on a daily calendar with the SSE+SZSE free-float market
    cap (G8324465 + G8324478), forward-fills, then resamples weekly and saves:
      - Deposit_To_Market = (deposits + MMF NAV) / total market cap
      - Deposit_To_Market_Weekly_Dif = its weekly first difference

    Parameters
    ----------
    database : project database handle.
    datetime1, datetime2 : datetime.datetime factor window.
    """
    # Deposit series are monthly and released with a lag, so fetch extra
    # history before datetime1 to have a value available on the first weeks.
    early_date1 = datetime1 + datetime.timedelta(days=-60)
    early_date2 = datetime1 + datetime.timedelta(days=-30)
    #
    df1 = Query_Data(database, 'M0043410', early_date1, datetime2)  # financial institutions: corporate deposit balance
    df2 = Query_Data(database, 'M0043413', early_date1, datetime2)  # financial institutions: household savings balance
    df3 = Query_Data(database, 'M5207875', early_date1, datetime2)  # money-market fund NAV
    df4 = Query_Data(database, 'G8324465', early_date2, datetime2)  # SSE A-share total free-float market cap
    df5 = Query_Data(database, 'G8324478', early_date2, datetime2)  # SZSE A-share total free-float market cap

    df_calender = Gadget.Generate_Calender_Days_DataFrame(early_date1, datetime2, date_field_name="date",
                                                          date_as_index=False)

    # ---Deposits: shift monthly report dates to actual release dates---
    df_save = pd.concat([df1, df2, df3], axis=1, sort=True)
    df_save["Report_Date"] = df_save.index
    # MMF NAV is published by the fund association around the same time,
    # so one lag rule (next month, day 18) covers all three series.
    Fill_ReleaseDate(df_save, lag_release_month=1, release_day=18)
    #
    df = pd.merge(df_calender, df_save, how="left", left_on="date", right_on="Release_Date")

    # ---Market cap (daily series, joined on calendar date)---
    df_market = pd.concat([df4, df5], axis=1, sort=True)
    df_market["date"] = df_market.index
    df = pd.merge(df, df_market, how="left", left_on="date", right_on="date")
    #
    # Forward-fill: deposit values stay at the last released figure between
    # releases. df.ffill replaces the deprecated df.fillna(method="ffill").
    df.ffill(inplace=True)
    df.index = df["date"]
    #
    df_weekly = df.resample("W").last()
    df_weekly["Release_Date"] = df_weekly["date"]
    df_weekly["Report_Date"] = df_weekly.index
    #
    df_weekly["Total_Save"] = df_weekly["M0043410"] + df_weekly["M0043413"] + df_weekly["M5207875"]
    df_weekly["Total_Market"] = df_weekly["G8324465"] + df_weekly["G8324478"]
    df_weekly["Deposit_To_Market"] = df_weekly["Total_Save"] / df_weekly["Total_Market"]
    df_weekly["Deposit_To_Market_Weekly_Dif"] = df_weekly["Deposit_To_Market"].diff(1)
    #
    print(df_weekly[["Release_Date", "Total_Save", "Total_Market"]])
    print(df_weekly[["Release_Date", "Deposit_To_Market"]])
    print(df_weekly[["Release_Date", "Deposit_To_Market_Weekly_Dif"]])
    df_weekly = df_weekly[["Release_Date", "Report_Date", "Deposit_To_Market", "Deposit_To_Market_Weekly_Dif"]]
    #
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='Deposit_To_Market')
    Save_Systematic_Factor_To_Database(database, df_weekly, save_name='Deposit_To_Market_Weekly_Dif')


def Calc_Capital_In(database, datetime1, datetime2):
    """Compute the weekly HS300 net active capital inflow factor and its diff.

    Loads the daily "net active buy amount: CSI 300" series (M0062054),
    sums it per calendar week, and saves both the weekly sum
    (HS300_Capital_Inflow_Weekly) and its weekly first difference
    (HS300_Capital_Inflow_Weekly_Dif).

    Parameters
    ----------
    database : project database handle.
    datetime1, datetime2 : datetime.datetime factor window.
    """
    data = EDB('M0062054', datetime1, datetime2, "HS300_Capital_Inflow")  # net active buy amount: CSI 300
    #
    data["Release_Date"] = data.index
    print(data)

    # Weekly: sum the daily inflow within the week, keep the last release date.
    # (A previous W-SUN/W-FRI resample here was dead code — its result was
    # immediately overwritten — and has been removed.)
    CAPITALIN = data.resample("W").agg({"HS300_Capital_Inflow": "sum", "Release_Date": "last"})
    CAPITALIN['Report_Date'] = CAPITALIN.index
    CAPITALIN['HS300_Capital_Inflow_Weekly_Dif'] = CAPITALIN['HS300_Capital_Inflow'].diff(1)
    print(CAPITALIN)
    Save_Systematic_Factor_To_Database(database, CAPITALIN, save_name='HS300_Capital_Inflow_Weekly',
                                       field_name="HS300_Capital_Inflow")
    Save_Systematic_Factor_To_Database(database, CAPITALIN, save_name='HS300_Capital_Inflow_Weekly_Dif')


def download_money_flow(database, datetime1, datetime2):
    """Download Wind money-flow series for the CSI 300 index and persist each
    one as a systematic factor.

    Two series are fetched:
      - mfd_inflow_m: main-force net inflow (no traderType parameter)
      - mfd_buyamt_d with traderType=1: institutional buy amount
        (Wind calls it "inflow amount" with the institution trader type)
    unit=1 requests values in CNY.
    """
    symbol = "000300.SH"

    # (wind field, wind request params, factor save name, saved field)
    requests = [
        ("mfd_inflow_m", "unit=1", 'Money_Flow_Big_Player_Inflow_HS300', "mfd_inflow_m"),
        ("mfd_buyamt_d", "unit=1;traderType=1", 'Money_Flow_Institute_Buy_Amt_HS300', "mfd_buyamt_d"),
    ]
    for wind_field, wind_param, factor_name, save_field in requests:
        frame = WSD_Multi_Fields(symbol, datetime1, datetime2, fields=[wind_field], param=wind_param)
        Save_Systematic_Factor_To_Database(database, frame, save_name=factor_name, field_name=save_field)


if __name__ == '__main__':
    # Build the path to the local DB config. os.path.join avoids the old
    # hard-coded "\..\Config\config_local.json" string, whose "\." and "\C"
    # are invalid escape sequences (SyntaxWarning on Python >= 3.12) and
    # which only worked with Windows separators.
    path_filename = os.path.join(os.getcwd(), "..", "Config", "config_local.json")
    database = Config.create_database(database_type="MySQL", config_file=path_filename, config_field="MySQL")

    # Start the Wind API session required by the data-download helpers.
    w.start()
    #
    datetime1 = datetime.datetime(2000, 1, 1)
    datetime2 = datetime.datetime(2020, 8, 31)
    #
    # Calc_SEO(database, datetime1, datetime2)
    # Calc_IPO(database, datetime1, datetime2)

    datetime1 = datetime.datetime(2010, 1, 1)
    datetime2 = datetime.datetime(2020, 11, 1)
    # Calc_Equity_Issue(database, datetime1, datetime2)
    # Calc_New_Setup_Funds(database, datetime1, datetime2)
    # Calc_Big_HedgeFund(database, datetime1, datetime2)
    # Calc_New_MF_Equity_Shares(database, datetime1, datetime2)
    # Calc_Fund_Aum(database, datetime1, datetime2)
    # Calc_New_Investor(database, datetime1, datetime2)
    # Calc_New_MarginTrade_Investor(database, datetime1, datetime2)
    # Calc_UnRestrict_Ratio(database, datetime1, datetime2)
    # Calc_Deposit_To_Market(database, datetime1, datetime2)

    #
    datetime1 = datetime.datetime(2000, 1, 1)
    datetime2 = datetime.datetime(2020, 11, 30)
    # Calc_Deposit_Reserve_Ratio(database, datetime1, datetime2)
    # Calc_Investor_Confidence(database, datetime1, datetime2)
    # Calc_CHN_US_IR_Spread(database, datetime1, datetime2)
    # Calc_Stock_Connect(database, datetime1, datetime2)
    # Calc_Capital_In(database, datetime1, datetime2)
    # Test_SEO_Issue(database)
    # Test_IPO_Issue(database)

    datetime1 = datetime.datetime(2020, 5, 1)
    datetime2 = datetime.datetime(2020, 7, 31)
    # Calc_Bond_Net_Issue_Sovereign(database, datetime1, datetime2)
    # Calc_Bond_Net_Issue_Credit(database, datetime1, datetime2)

    datetime1 = datetime.datetime(2000, 1, 1)
    datetime2 = datetime.datetime(2022, 2, 1)
    # download_money_flow(database, datetime1, datetime2)

    # Only this call is currently active.
    datetime1 = datetime.datetime(2024, 6, 1)
    datetime2 = datetime.datetime(2024, 6, 10)
    Calc_ShareHoldingChg_Planning_v2(database, datetime1, datetime2, realtime=None)