from sklearn import linear_model, preprocessing
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np

from sklearn.metrics import mean_squared_error, r2_score, mean_squared_log_error
from sklearn import metrics

import lightgbm as lgb

# ---------------------------------------------------------------------------
# Data loading and train/test preparation.
# Reads the preprocessed sales CSV; the last column is the regression target,
# everything before it is a candidate feature.
# ---------------------------------------------------------------------------
sale_data = pd.read_csv('./data/001488_pre1.csv')

data_X = sale_data.iloc[:, :-1]

print(data_X.columns)

# Drop the features excluded from this experiment in one call instead of
# repeated .pop() — same resulting frame, clearer intent.
data_X = data_X.drop(columns=['temp_hi', 'last_3day', 'last_2day', 'temp_lo', 'out'])
# data_X = data_X.drop(columns=['day_of_week', 'days'])  # candidates kept for now

print('after:', data_X.columns)

# Target: the last column (kept as a single-column frame here).
data_Y = sale_data.iloc[:, -1:]

# Fixed random_state so every trainer below sees the same split.
x_train, x_test, y_train, y_test = train_test_split(data_X, data_Y, test_size=0.25, random_state=0)

# Flatten the single-column target frames into the 1-D arrays that the
# sklearn estimators expect.
y_train = y_train.values.ravel()
y_test = y_test.values.ravel()

def _test_Ridge_alpha(*data):
    """Sweep a fixed list of Ridge alphas and plot test-set score vs. alpha.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    # Candidate regularization strengths (hand-picked, roughly log-spaced).
    alphas = [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500, 1000]
    # Test-set R^2 score for each alpha, in the same order.
    scores = [
        linear_model.Ridge(alpha=a).fit(x_train, y_train).score(x_test, y_test)
        for a in alphas
    ]
    # Plot score as a function of alpha on a log x-axis.
    fig = plt.figure()
    axis = fig.add_subplot(1, 1, 1)
    axis.plot(alphas, scores)
    axis.set_xlabel(r"$\alpha$")
    axis.set_ylabel(r"score")
    axis.set_xscale('log')
    axis.set_title("Ridge")
    plt.show()


# test_Ridge_alpha(x_train,x_test,y_train,y_test)

def train_ridge_cv(*data):
    """Fit a cross-validated Ridge regression and print its test metrics.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    # 200 log-spaced candidate alphas in [1e-10, 1e-2].
    candidate_alphas = np.logspace(-10, -2, 200)
    model = linear_model.RidgeCV(alphas=candidate_alphas, cv=5)
    model.fit(x_train, y_train)

    predictions = model.predict(x_test)

    # R^2 (coefficient of determination): closer to 1 means a better fit.
    print('ridgecv alpha:', model.alpha_)
    print('ridgecv coef:', model.coef_)
    print('ridgecv score:', model.score(x_test, y_test))
    print('ridgecv mse:', mean_squared_error(y_test, predictions))
    print('ridgecv r2:', r2_score(y_test, predictions))


# test_ridge_cv(x_train,x_test,y_train,y_test)


def train_lasso(*data):
    """Fit a cross-validated Lasso and print its coefficients and test metrics.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    # 200 log-spaced candidate alphas in [1e-10, 1e-2].
    candidate_alphas = np.logspace(-10, -2, 200)
    model = linear_model.LassoCV(cv=3, alphas=candidate_alphas)
    model.fit(x_train, y_train)

    predictions = model.predict(x_test)

    # R^2 (coefficient of determination): closer to 1 means a better fit.
    print('lasso coef:', model.coef_)
    print('lasso alpha:', model.alpha_)
    print('lasso score:', model.score(x_test, y_test))
    print('lasso mse:', mean_squared_error(y_test, predictions))
    print('lasso r2:', r2_score(y_test, predictions))


def train_el_net(*data):
    """Fit a cross-validated ElasticNet, print metrics, plot true vs. predicted.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    # 200 log-spaced candidate alphas in [1e-10, 1e-2].
    candidate_alphas = np.logspace(-10, -2, 200)
    model = linear_model.ElasticNetCV(alphas=candidate_alphas, cv=5)
    model.fit(x_train, y_train)
    test_score = model.score(x_test, y_test)
    print('elnet coef:', model.coef_)
    print('elnet alpha:', model.alpha_)
    print('elnet score:', test_score)

    predictions = model.predict(x_test)

    # Overlay ground truth and predictions for a visual sanity check.
    plt.figure()
    plt.plot(y_test, 'go-', label='true value')
    plt.plot(predictions, 'ro-', label='predict value')
    plt.title('score: %f' % test_score)
    plt.legend()
    plt.show()


def train_general_linear_model(*data):
    """Fit an ordinary least-squares baseline and print its test score and coefs.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    model = linear_model.LinearRegression()
    model.fit(x_train, y_train)
    print('line reg score:', model.score(x_test, y_test))
    print('line reg coef:', model.coef_)


def _test_linear_model(*data):
    """Run every linear baseline (lasso, ridge CV, elastic net, OLS) on the split."""
    # Same order as before: lasso, ridge CV, elastic net, plain OLS.
    for trainer in (train_lasso, train_ridge_cv, train_el_net,
                    train_general_linear_model):
        trainer(*data)


from sklearn.pipeline import Pipeline


def train_polynomial_model(*data):
    """Fit a degree-2 polynomial regression and print/plot its test metrics.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    poly_reg = preprocessing.PolynomialFeatures(degree=2)  # degree=2: quadratic expansion
    x_poly_train = poly_reg.fit_transform(x_train)
    # BUG FIX: use transform() here — the feature transformer must be fitted
    # on the training data only, never re-fitted on the test set.
    x_poly_test = poly_reg.transform(x_test)

    line_reg = linear_model.LinearRegression()
    line_reg.fit(x_poly_train, y_train)
    lr_score = line_reg.score(x_poly_test, y_test)

    lr_y_test_pred = line_reg.predict(x_poly_test)

    lr_mse = mean_squared_error(y_test, lr_y_test_pred)
    lr_r2 = r2_score(y_test, lr_y_test_pred)

    print('poly coef:', line_reg.coef_)
    print('poly score:', lr_score)

    print('poly mse:', lr_mse)
    print('poly r2:', lr_r2)

    # Training-set predictions plotted against the raw features.
    # NOTE(review): x_train has multiple columns, so this draws one series per
    # feature — presumably intended only as a rough visual check; confirm.
    plt.plot(x_train, line_reg.predict(x_poly_train), c='purple')
    plt.show()


def train_poly_2(*data):
    """Compare polynomial regressions of increasing degree on the same split.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    for d in (1, 2, 5, 10, 20, 50, 100):
        pipeline = Pipeline([
            ('poly', preprocessing.PolynomialFeatures(degree=d)),
            ('linear', linear_model.LinearRegression(fit_intercept=False)),
        ])
        pipeline.fit(x_train, y_train)
        y_test_pred = pipeline.predict(x_test)

        # Fitted coefficients, if needed for inspection:
        # print(pipeline.named_steps['linear'].coef_)

        # Evaluation: MSE, R^2 and the pipeline's own score for this degree.
        print('level=%d,mse=%.2f, R2=%.2f, clf.score=%.2f' %
              (d,
               mean_squared_error(y_test, y_test_pred),
               r2_score(y_test, y_test_pred),
               pipeline.score(x_test, y_test)))


from sklearn import ensemble


def train_random_forest(*data):
    """Fit a random forest, print metrics and feature importances, plot predictions.

    ``data`` unpacks to (x_train, x_test, y_train, y_test).
    """
    x_train, x_test, y_train, y_test = data
    # 200-tree forest; oob_score adds an out-of-bag generalization estimate.
    forest = ensemble.RandomForestRegressor(n_estimators=200, oob_score=True)
    forest.fit(x_train, y_train)
    test_score = forest.score(x_test, y_test)
    predictions = forest.predict(x_test)
    print('rf score: ', test_score)
    print('r2 score: ', r2_score(y_test, predictions))

    # Rank features by importance, largest first, and print the ranking.
    labels = x_train.columns[:]
    importances = forest.feature_importances_
    order = np.argsort(importances)[::-1]
    for rank, idx in enumerate(order, start=1):
        print("%2d) %-*s %f" % (rank, 30, labels[idx], importances[idx]))

    # Overlay ground truth and predictions for a visual sanity check.
    plt.figure()
    plt.plot(y_test, 'go-', label='true value')
    plt.plot(predictions, 'ro-', label='predict value')
    plt.title('score: %f' % test_score)
    plt.legend()
    plt.show()


def _test_non_linear_model(*data):
    """Run the non-linear baselines (currently only the polynomial model)."""
    train_polynomial_model(*data)


def train_lgdm(*data):
    """Train a LightGBM regressor on log1p-transformed targets and print metrics.

    ``data`` unpacks to (x_train, x_test, y_train, y_test). The target is
    trained in log space (log1p) and predictions are inverted with expm1
    before being compared against the raw y_test.
    """
    x_train, x_test, y_train, y_test = data
    gbm = lgb.LGBMRegressor(objective='regression',
                            num_leaves=31,
                            learning_rate=0.05,
                            n_estimators=20)
    # BUG FIX: the model is trained on log1p(y), so the early-stopping
    # validation target must be in the same log space (was raw y_test).
    gbm.fit(x_train, np.log1p(y_train),
            eval_set=[(x_test, np.log1p(y_test))],
            eval_metric='l1', early_stopping_rounds=5)
    # BUG FIX: predictions come out in log space; invert with expm1 before
    # computing metrics (previously metrics mixed log-space predictions
    # with raw targets — the expm1 line was commented out).
    y_pred = np.expm1(gbm.predict(x_test, num_iteration=gbm.best_iteration_))

    # R^2 reported by the model itself, evaluated in the log space it was
    # trained in, so the comparison is apples-to-apples.
    gbm_score = gbm.score(x_test, np.log1p(y_test))
    error = np.sqrt(mean_squared_log_error(y_test, y_pred))
    r2 = r2_score(y_test, y_pred)

    print('gbm score:', gbm_score)
    print('r2 score:', r2)
    print('error :', error)
    # eval: RMSE on the original target scale
    print('The rmse of prediction is:', mean_squared_error(y_test, y_pred) ** 0.5)

    # feature importances
    print('Feature importances:', list(gbm.feature_importances_))


# _test_linear_model(x_train, x_test, y_train, y_test)
# _test_non_linear_model(x_train,x_test,y_train,y_test)
# train_random_forest(x_train, x_test, y_train, y_test)
# Entry point: only the LightGBM experiment is active in this run; the other
# trainers above are kept commented for quick switching.
train_lgdm(x_train, x_test, y_train, y_test)
