import datetime
import pandas as pd
import numpy as np
import math
import statsmodels.api as sm
from sklearn.linear_model import LinearRegression


def Regression_SKLearn(x_train, y_train, printResult=True, normalize=True):
    """Fit an sklearn linear regression and return the fitted model.

    x_train : pandas DataFrame of features (column names are used when
        printing coefficients).
    y_train : matching target values.
    printResult : print the intercept and coefficients, sorted by absolute
        magnitude (largest first).
    normalize : standardize features before fitting.

    Returns the fitted LinearRegression instance.
    """
    # sklearn removed the `normalize` constructor argument in 1.2
    # (deprecated since 1.0); fall back to scaling the data ourselves.
    try:
        regressor = LinearRegression(copy_X=True, fit_intercept=True,
                                     n_jobs=1, normalize=normalize)
    except TypeError:
        if normalize:
            # NOTE(review): legacy `normalize=True` scaled columns by their
            # l2-norm, not the std; z-scoring keeps the intent (comparable
            # coefficient magnitudes) but the raw coefficient values differ
            # from what old sklearn versions produced.
            x_train = (x_train - np.mean(x_train)) / np.std(x_train)
        regressor = LinearRegression(copy_X=True, fit_intercept=True, n_jobs=1)
    regressor.fit(x_train, y_train)

    # ---Print coefficients, sorted by absolute value, largest first---
    if printResult:
        features = list(x_train.columns)
        results = [[name, coef, abs(coef)]
                   for name, coef in zip(features, regressor.coef_)]
        results.sort(key=lambda d: d[2], reverse=True)

        print("Intercept :", regressor.intercept_)
        for name, coef, _ in results:
            print("{0} : {1:.4f}".format(name, coef))

    return regressor


def Regression_SM(x, y, normalize=False, printResult=True):
    """Fit a statsmodels GLS of y on x (with an intercept) and return it.

    x : pandas DataFrame of regressors.
    y : target values.
    normalize : z-score each column of x before fitting.
    printResult : print the fitted model's summary2() table.
    """
    factorNames = list(x.columns)

    # Optionally standardize the regressors column-wise.
    if normalize:
        x = (x - np.mean(x)) / np.std(x)

    # Prepend the intercept column, then fit via generalized least squares.
    design = sm.add_constant(x)
    model = sm.GLS(y, design).fit()

    if printResult:
        print(model.summary2())

    return model


# 逐步回归
# Stepwise regression
def StepwiseRegression(dfX, dfY, dropExtereme=False, normalize=True, logit=False):
    """Forward stepwise factor selection driven by t-tests.

    dfX : DataFrame of candidate factor columns.
    dfY : target series.
    dropExtereme : winsorize each factor at its 5%/95% quantiles.
        (parameter name kept as-is, typo included, for existing callers)
    normalize : z-score each factor before regressing.
    logit : use sm.Logit instead of sm.OLS.

    Returns a DataFrame indexed by the selected factors with columns
    ['coef', 'pvalue'] for the final model, or an empty DataFrame when
    no single factor is significant.
    """
    y = dfY
    x = dfX
    fields = list(dfX.columns)

    def t_test(x_list, printModel=False):
        """Regress y on the given factors; return coef/pvalue per factor."""
        xTable = pd.DataFrame()
        for x_name in x_list:
            # .copy() so the winsorizing below cannot mutate the caller's
            # data and does not trip pandas chained-assignment warnings.
            x_data = x[[x_name]].copy()

            # Winsorize: clamp values outside the 5% / 95% quantiles.
            if dropExtereme:
                qt_left = x_data.dropna().quantile(0.05).values[0]
                qt_right = x_data.dropna().quantile(0.95).values[0]
                x_data[x_data < qt_left] = qt_left
                x_data[x_data > qt_right] = qt_right
                print(qt_left, qt_right)

            # Standardize the factor values.
            if normalize:
                x_data = (x_data - np.mean(x_data)) / np.std(x_data)

            xTable = pd.concat([xTable, x_data], axis=1)

        # Add the intercept once, after all factors are assembled.  The
        # original called add_constant on every loop pass; its default
        # has_constant='skip' made the repeats no-ops, so hoisting it here
        # preserves behavior (const stays the first column) while doing
        # the work once.
        xTable = sm.add_constant(xTable)

        # Regression
        if logit:
            model = sm.Logit(y, xTable).fit()
        else:
            model = sm.OLS(y, xTable).fit()

        if printModel:
            print(model.summary2())

        # Collect per-factor coefficient and p-value.
        output = pd.DataFrame(index=x_list, columns=['coef', 'pvalue'])
        for x_name in x_list:
            output.loc[x_name, 'coef'] = model.params[x_name]
            output.loc[x_name, 'pvalue'] = model.pvalues[x_name]
        return output

    # Single-factor regressions.
    regression_table = pd.DataFrame()
    for factor_name in fields:
        regression_table = pd.concat([regression_table, t_test([factor_name])], axis=0)
    print("Single Factor Test")
    print(regression_table)

    # Keep factors that pass the t-test at the 5% level.
    pvalueTable_passed = regression_table[regression_table['pvalue'] < 0.05]
    print("")
    print("Factor Passed P Value < 5%")
    print(pvalueTable_passed)

    if len(pvalueTable_passed) > 0:
        # Order the surviving candidates by |coef|, largest first.
        betaTable_passed = pvalueTable_passed.abs().sort_values(
            by='coef', ascending=False).index.tolist()

        # Forward stepwise: add factors one at a time; drop the newcomer if
        # it makes any coefficient in the joint model insignificant (p > 10%).
        factorList_optimal = []
        for factor in betaTable_passed:
            factorList_optimal.append(factor)
            outPut_data = t_test(factorList_optimal)
            if (outPut_data['pvalue'] > 0.1).sum() != 0:
                factorList_optimal.remove(factor)

        # Final model on the selected factors.
        outPut_data = t_test(factorList_optimal, printModel=True)
    else:
        print('No Significant Factor!')
        outPut_data = pd.DataFrame()

    return outPut_data


if __name__ == '__main__':

    # ---Connect Database---
    # NOTE(review): `os`, `Config` (and presumably `LearningFramework` used
    # below) are expected to come from this star import — confirm that
    # Core.Config actually exports them.
    from Core.Config import *
    cfgPathFilename = os.getcwd() + "/../config.json"
    config = Config(cfgPathFilename)
    #database = config.DataBase("MySQL")
    #realtime = config.RealTime()


    # ---Factors---
    factors = []
    factors.append("ProfitMargin_NetIncome2_LYR")
    factors.append("AssetTurnover_LYR")
    factors.append("Leverage")
    factors.append("ROE_NetIncome2_LYR")
    factors.append("ROE_NetIncome2_TTM")
    factors.append("EarningToPrice_LYR")
    factors.append("PB_LF")
    factors.append("Growth_CAGR_TotalRevenue_1Yr")
    factors.append("Growth_CAGR_NetIncome2_1Yr")
    factors.append("Growth_YoY_TotalRevenue")
    factors.append("Growth_YoY_NetIncome2")
    factors.append("LnCap")

    # Sample window for the multi-period panel.
    datetime1 = datetime.datetime(2007, 5, 1)
    datetime2 = datetime.datetime(2019, 5, 1)

    # Load the factor panel (features + "IsSignificant" label column).
    df = LearningFramework.MultiPeriodData(None, datetime1, datetime2, factors,
                                           profileFolderName="MultiFactor",
                                           rangeReturnFolderName="HalfYear")

    # print(df)

    dfX = df.drop(columns=["IsSignificant"])
    dfY = df["IsSignificant"]

    # ---Regression---
    logit = sm.Logit(dfY, dfX)
    result = logit.fit()
    # print(result.summary())
    print(result.summary2())

    # ---StepWise Regression---
    # Fixed: the original called FactorModel.StepwiseRegression (no
    # `FactorModel` name exists in this module — the function is defined
    # above) and passed (dfY, dfX) against the (dfX, dfY, ...) signature.
    ret = StepwiseRegression(dfX, dfY, False, False, logit=True)