import os
import time

import torch
import torchvision
from torch import nn
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import MNIST, CIFAR10
from torchvision.utils import save_image
import numpy as np
import pandas as pd
import torch
import os
from datetime import datetime
import matplotlib.pyplot as plt
from torch.utils.data import DataLoader
from torch.autograd import Variable
from torch import nn
from torch import optim
from torchvision import datasets
import sys
from opening._MyCode.Modules import *
from opening._MyCode.Utils import *
import math
import xgboost as xgb
from sklearn import datasets
from sklearn.model_selection import train_test_split
from xgboost.sklearn import XGBClassifier
from sklearn.model_selection import GridSearchCV
#from sklearn import cross_validation,metrics
from sklearn import model_selection as cv
from sklearn import metrics
from sklearn.model_selection import KFold, cross_val_score
from sklearn.linear_model import Ridge,RidgeCV
import seaborn as sns
import warnings


# Globally silence all warnings (sklearn/xgboost deprecation noise would
# otherwise clutter the experiment output).
warnings.filterwarnings("ignore")

def extraModel(SAEModel, x):
    """Encode `x` through the first autoencoder layer of `SAEModel`.

    The second positional argument of `ae1` selects encode-only mode
    (returns the hidden representation instead of the reconstruction).
    """
    encoded = SAEModel.ae1(x, True)
    return encoded

def extraModel2(SAEModel, x):
    """Encode `x` through both stacked autoencoder layers: x -> ae1 -> ae2."""
    firstLayer = SAEModel.ae1(x, True)
    secondLayer = SAEModel.ae2(firstLayer, True)
    return secondLayer

def seabornPlot(y_test, preds):
    """Visualize predictions against ground truth.

    Produces two figures: (1) a scatter plot of preds vs. y_test with the
    ideal y = x reference line, and (2) a line plot of both series over the
    test-sample index. Figure titles show the RMSE (via the external `rmse`
    helper).

    Args:
        y_test: true targets, 2-D of shape (n, 1) — assumed; TODO confirm
                against callers, which pass a (n, 1) numpy slice.
        preds:  predictions, 1-D of shape (n,); reshaped here to (n, 1).
    """
    preds = preds.reshape(preds.shape[0], 1)

    seabornDf = pd.DataFrame(np.hstack((y_test, preds)),
                             columns=["y_test", "preds"])

    sns.set_style("whitegrid")
    # Scatter plot: prediction vs. truth.
    plt.figure(figsize=(10, 10))
    sns.scatterplot(x="y_test", y="preds", data=seabornDf)
    plt.xlim(-0.05, 1)
    plt.ylim(-0.05, 1)
    # Ideal y = x reference line. BUG FIX: matplotlib keywords are
    # case-sensitive — `lineWidth` raises "unexpected keyword argument";
    # the correct spelling is `linewidth`.
    plt.plot(np.linspace(-1, 2, 1000), np.linspace(-1, 2, 1000),
             linewidth=4, color="c")
    plt.ylabel("preds", fontsize=30)
    plt.xlabel("y_test", fontsize=30)
    plt.title("rmse: " + str(rmse(y_test[:], preds[:])), fontsize=20)
    plt.show()
    # Line plot: both series over the test-sample index.
    plotLen = len(seabornDf)
    plotX = np.linspace(0, plotLen - 1, plotLen)
    ax1 = sns.lineplot(x=plotX, y="y_test", data=seabornDf)
    plt.plot(preds)
    plt.legend(["y_test", "prediction"])
    plt.title("rmse: " + str(rmse(y_test[:], preds[:])))
    plt.ylabel("prediction", fontsize=15)
    plt.ylim(-0.05, 1.05)
    plt.show()

def _xgbParams(max_depth, gamma, subsample):
    """Shared XGBRegressor hyper-parameters for one (depth, gamma, subsample) combo.

    Centralised so the grid search and the final refit cannot drift apart
    (the original duplicated this dict verbatim in two places).
    """
    return {
        "learning_rate": 0.01,
        "max_depth": max_depth,
        "min_child_weight": 1,
        "gamma": gamma,
        "subsample": subsample,
        "reg_alpha": 0.0,
        "reg_lambda": 0,
        "colsample_bytree": 0.5,
        "objective": 'reg:squarederror',
        "nthread": 4,
        "scale_pos_weight": 0.8,
        "seed": 233,
        "booster": "gbtree"
    }


def xgbPrediction(step = 28, hiddenSize1 = 12, hiddenSize2 = 4, isAE = True,
                  isFilter = True, isTwoLayer = True, isBasicModel = False, isDe = True):
    """Train and evaluate an XGBoost regressor on (optionally) SAE-encoded features.

    Loads a pre-trained DeSAE model from disk, builds the differenced dataset,
    extracts features (one or two encoder layers), optionally drops weakly
    correlated features, grid-searches a small hyper-parameter space, then
    refits with the best combo and reports RMSE / R2 plus plots.

    Args:
        step: sliding-window length (also part of the stored model file name).
        hiddenSize1, hiddenSize2: encoder sizes in the stored model file name.
        isAE: use SAE-encoded features instead of the raw window.
        isFilter: keep only features with |corr(feature, target)| > 0.1.
        isTwoLayer: concatenate first- and second-layer encodings.
        isBasicModel: load from ./basicModelStorage instead of ./modelStorage.
        isDe: must be True; the non-differenced pipeline was removed.

    Raises:
        ValueError: if isDe is False (previously this fell through to a
            NameError on the unbound xsDf/ysDf).
    """
    print("\n\n本次预测的条件为: " + str(step) + "to" + str(hiddenSize1) +
          "to" + str(hiddenSize2) + " towLayer:" + str(isTwoLayer) + " basic:"+str(isBasicModel))
    if isBasicModel:
        SAEModel = torch.load(
            "./basicModelStorage/DeSAE" + str(step) + "To" + str(hiddenSize1) + "to" + str(hiddenSize2) + ".pth")
    else:
        SAEModel = torch.load(
            "./modelStorage/DeSAE" + str(step) + "To" + str(hiddenSize1) + "to" + str(hiddenSize2) + ".pth")

    # Train/test split bounds; 2394 is the total sample count before windowing
    # (presumably — TODO confirm against dataProcessDe).
    startPoint = 0
    trainTestBound = 1500
    testSize = 2394 - trainTestBound - step
    endPoint = trainTestBound + testSize

    torch.manual_seed(233)
    if isDe:
        xsDf, ysDf = dataProcessDe(step=step)
    else:
        # BUG FIX: the original only handled isDe=True and fell through to a
        # NameError on xsDf; fail fast with an explicit message instead.
        raise ValueError("isDe=False is not supported: dataProcess() is unavailable")
    xPredi = torch.from_numpy(np.array(xsDf.iloc[:, :]))
    yPredi = torch.from_numpy(np.array(ysDf.iloc[:, :]))

    # Feature extraction through the pre-trained SAE.
    if isAE:
        if isTwoLayer:
            xPredi = torch.cat((extraModel(SAEModel, xPredi), extraModel2(SAEModel, xPredi)), 1)
        else:
            xPredi = extraModel(SAEModel, xPredi)

    # Per-feature Pearson correlation with the target; mask of "keep" flags.
    tempRInfo = []
    preInfo = xPredi.detach().numpy()
    ypreInfo = yPredi.detach().numpy()
    for i in range(preInfo.shape[1]):
        temp1 = np.corrcoef(preInfo[:, i], ypreInfo.flat)[0, 1]
        tempRInfo.append(np.abs(temp1) > 0.1)

    if isFilter:
        xPredi = xPredi[:, tempRInfo]

    X_train = xPredi.detach().numpy()[startPoint:startPoint + trainTestBound, :]
    y_train = yPredi.detach().numpy()[startPoint:startPoint + trainTestBound, :]
    # BUG FIX: the original sliced up to `trainTestBound + startPoint + endPoint`,
    # overshooting the array (numpy clipped it silently). `endPoint` is the
    # intended bound and matches lassoPrediction.
    X_test = xPredi.detach().numpy()[trainTestBound + startPoint:endPoint, :]
    y_test = yPredi.detach().numpy()[trainTestBound + startPoint:endPoint, :]

    # Small grid search; track the best combo by test RMSE.
    minRmse = 1
    minParam = "abaaba"
    bestDepth = 10
    bestGamma = 10
    bestSubsample = 10
    for max_depth in range(4, 5, 1):
        for gamma in np.arange(0.24, 0.26, 0.01):
            for subsample in np.arange(0.07, 0.15, 0.02):
                xgb1 = xgb.XGBRegressor(**_xgbParams(max_depth, gamma, subsample),
                                        n_estimators=1000)
                # NOTE(review): early_stopping_rounds/eval_metric as fit()
                # kwargs were removed in xgboost >= 2.0 (they moved to the
                # constructor) — confirm the pinned xgboost version.
                xgb1.fit(X_train, y_train, verbose=0, eval_set=[(X_test, y_test)],
                         early_stopping_rounds=50, eval_metric="rmse")
                tempRmse = rmse(y_test, xgb1.predict(X_test))
                if tempRmse < minRmse:
                    bestDepth = max_depth
                    bestGamma = gamma
                    bestSubsample = subsample
                    minRmse = tempRmse
                    minParam = "md: " + str(max_depth) + "|gamma: " + str(gamma) + "|subsample" + str(subsample)
    print("xgb调参最小的rmse为: ", minRmse, "\n其参数为: ", minParam)

    # Refit with the best combo (longer early-stopping patience) and report.
    xgb1 = xgb.XGBRegressor(**_xgbParams(bestDepth, bestGamma, bestSubsample),
                            n_estimators=1000)
    xgb1.fit(X_train, y_train, verbose=0, eval_set=[(X_test, y_test)],
             early_stopping_rounds=200, eval_metric="rmse")

    preds = xgb1.predict(X_test)
    print("回归的rmse为: ", rmse(y_test[:], preds[:]))
    print("回归的r2_score为: ", metrics.r2_score(y_test, preds))
    seabornPlot(y_test, preds)



def lassoPrediction(step = 26, hiddenSize1 = 12, hiddenSize2 = 4, isAE = True,
                  isFilter = True, isTwoLayer = False, isDe = True, path = "./modelStorage/SAE26To12to6.pth"):
    """Train and evaluate a Ridge regressor on (optionally) SAE-encoded features.

    Despite the name, this uses Ridge (L2) regression, not Lasso. It loads a
    pre-trained SAE from `path`, extracts features, filters weakly correlated
    ones, sweeps 200 log-spaced regularization strengths, then refits with the
    best lambda and reports RMSE / R2 plus plots.

    BUG FIX: the original body unconditionally forced `isTwoLayer = False`,
    making the parameter dead. The override is removed and the default changed
    True -> False, so every existing call behaves exactly as before while
    explicit `isTwoLayer=True` now actually works.

    Args:
        step: sliding-window length of the dataset.
        hiddenSize1, hiddenSize2: only used in the banner print.
        isAE: use SAE-encoded features instead of the raw window.
        isFilter: keep only features with |corr(feature, target)| > 0.1.
        isTwoLayer: concatenate first- and second-layer encodings.
        isDe: must be True; the non-differenced pipeline was removed.
        path: file path of the stored SAE model.

    Raises:
        ValueError: if isDe is False (previously a NameError on xsDf).
    """
    print("\n\n本次预测的条件为: " + str(step) + "to" + str(hiddenSize1) +
          "to" + str(hiddenSize2) + str(isTwoLayer))

    SAEModel = torch.load(path)

    startPoint = 0
    trainTestBound = 1500
    testSize = 2394 - trainTestBound - step
    endPoint = trainTestBound + testSize

    torch.manual_seed(233)
    if isDe:
        xsDf, ysDf = dataProcessDe(step=step)
    else:
        # BUG FIX: the original only handled isDe=True and fell through to a
        # NameError on xsDf; fail fast with an explicit message instead.
        raise ValueError("isDe=False is not supported: dataProcess() is unavailable")

    xPredi = torch.from_numpy(np.array(xsDf.iloc[:, :]))
    yPredi = torch.from_numpy(np.array(ysDf.iloc[:, :]))

    # Feature extraction through the pre-trained SAE.
    if isAE:
        if isTwoLayer:
            xPredi = torch.cat((extraModel(SAEModel, xPredi), extraModel2(SAEModel, xPredi)), 1)
        else:
            xPredi = extraModel(SAEModel, xPredi)

    # Per-feature Pearson correlation with the target; mask of "keep" flags.
    tempRInfo = []
    preInfo = xPredi.detach().numpy()
    ypreInfo = yPredi.detach().numpy()
    for i in range(preInfo.shape[1]):
        temp1 = np.corrcoef(preInfo[:, i], ypreInfo.flat)[0, 1]
        tempRInfo.append(np.abs(temp1) > 0.1)

    if isFilter:
        xPredi = xPredi[:, tempRInfo]

    X_train = xPredi.detach().numpy()[startPoint:startPoint + trainTestBound, :]
    y_train = yPredi.detach().numpy()[startPoint:startPoint + trainTestBound, :]
    X_test = xPredi.detach().numpy()[trainTestBound + startPoint:endPoint, :]
    y_test = yPredi.detach().numpy()[trainTestBound + startPoint:endPoint, :]

    # Sweep 200 log-spaced regularization strengths, keep the best test RMSE.
    # NOTE(review): Ridge(normalize=True) was removed in scikit-learn >= 1.2;
    # a StandardScaler pipeline is the supported replacement — confirm the
    # pinned sklearn version before upgrading.
    Lambdas = np.logspace(-5, 2, 200)
    bestRmse = 1
    bestLambda = 0
    for Lambda in Lambdas:
        ridge = Ridge(alpha=Lambda, normalize=True)
        ridge.fit(X_train, y_train)
        preds = ridge.predict(X_test)
        tempScore = rmse(y_test[:], preds[:])
        if tempScore < bestRmse:
            bestRmse = tempScore
            bestLambda = Lambda

    # Refit with the best lambda and report.
    ridge = Ridge(alpha=bestLambda, normalize=True)
    ridge.fit(X_train, y_train)
    preds = ridge.predict(X_test)
    print("回归的rmse为: ", rmse(y_test[:], preds[:]))
    print("回归的r2_score为: ", metrics.r2_score(y_test, preds))
    seabornPlot(y_test, preds)



def scoreBarPlot():
    """Bar-chart comparison of RMSE and R2 for six model variants.

    Draws a 2x2 grid: RMSE for the three XGBoost variants, RMSE for the three
    Lasso variants, then the same split for R2. A dashed horizontal line marks
    the best score in each panel (min for RMSE, max for R2).
    """
    sns.set(style="dark", context="talk")
    scores = np.array([
        [0.09536025356860776, 0.7448024960213306],
        [0.10989910270306792, 0.6611968717096408],
        [0.10157146133082537, 0.7108648396826567],
        [0.09435829897511136, 0.7489160717921943],
        [0.12019614983292833, 0.592582365956949],
        [0.11516917865667922, 0.6259486439176356],
    ])
    labels = ["VRAE-xgb", "AE-xgb", "xgb", "VRAE-Lasso", "AE-Lasso", "Lasso"]
    scoreDf = pd.DataFrame(scores, columns=["RMSE", "R2_score"], index=labels)

    f, axGrid = plt.subplots(2, 2, figsize=(12, 9))
    # (axes, row slice, metric column, best-line picker, panel title)
    panels = [
        (axGrid[0][0], slice(0, 3), "RMSE", np.min, "RMSE  XGBoost"),
        (axGrid[0][1], slice(3, 6), "RMSE", np.min, "RMSE  Lasso"),
        (axGrid[1][0], slice(0, 3), "R2_score", np.max, "R2_score  XGBoost"),
        (axGrid[1][1], slice(3, 6), "R2_score", np.max, "R2_score  Lasso"),
    ]
    for ax, rows, metric, pickBest, title in panels:
        sns.barplot(x=labels[rows], y=metric, ax=ax, data=scoreDf.iloc[rows, :])
        metricCol = 0 if metric == "RMSE" else 1
        ax.hlines(pickBest(scores[rows, metricCol]), -0.5, 2.5,
                  colors="black", linestyles="dashed")
        ax.set_title(title, fontsize=20)
    plt.show()






























