# -*- coding:utf-8 -*-
import sys
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score  # evaluates the performance of regression models

import matplotlib.pyplot as plt
import seaborn as sns
'''
df = pd.read_csv("boston_housing.csv")

y = df['MEDV']
X = df.drop('MEDV', axis=1)
log_y = np.log1p(y)

X["RAD"] = X["RAD"].astype("object")
X_cat = X['RAD']
X_cat = pd.get_dummies(X_cat, prefix="RAD")

X = X.drop("RAD", axis=1)
fe_names = X.columns
ss_X = MinMaxScaler()
ss_y = MinMaxScaler()
ss_log_y = MinMaxScaler()

X = ss_X.fit_transform(X)
y = ss_y.fit_transform(y.values.reshape(-1,1))
log_y = ss_log_y.fit_transform(log_y.values.reshape(-1,1))

fe_data = pd.DataFrame(data=X, columns=fe_names, index=df.index)
fe_data = pd.concat([fe_data, X_cat], axis=1, ignore_index=False)
fe_data['MEDV'] = y
fe_data['log_MEDV'] = log_y
# print(fe_data)

# fe_data.to_csv('FE_boston_housing_minmax.csv', index=False)
'''

# Load the pre-scaled (min-max normalized) feature-engineered dataset.
df = pd.read_csv("FE_boston_housing_minmax.csv")
y = df["MEDV"]
# Drop both target columns so neither the raw nor the log target leaks into X.
X = df.drop(["MEDV", "log_MEDV"], axis=1)
feat_names = X.columns

# Hold out 20% of rows for testing; fixed random_state for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=2, test_size=0.2)
print(X_train.shape)
# Bug fix: df.info is a bound method — it must be *called* to print the
# column/dtype summary (it prints directly and returns None, so no print() wrapper).
df.info()

'''
# 1. 线性回归
lr = LinearRegression()
lr.fit(X_train, y_train)
y_test_pred_lr = lr.predict(X_test)
y_train_pred_lr = lr.predict(X_train)
# 权重系数
fs = pd.DataFrame({"columns":list(feat_names), "coef": list((lr.coef_.T))})
print(fs.sort_values(by=['coef'], ascending=False))
# r2_score 评估模型性能
print('The r2 score of LinearRegression on test is %.6f'  % r2_score(y_test, y_test_pred_lr))
print('The r2 score of LinearRegression on train is %.6f'  % r2_score(y_train, y_train_pred_lr))
# 观察残差分布，看是否符合：噪声为0均值的高斯分布
f,ax = plt.subplots(figsize=(7,5))
f.tight_layout()
ax.hist(y_train-y_train_pred_lr, bins=40, label='Residuals Linear', color='b', alpha=0.5)
ax.set_title("Histogram of Residuals")
ax.legend(loc='best')
plt.show()
# 观察预测值与真值的散点图
# plt.clf
plt.figure(figsize=(4,3))
plt.scatter(y_train, y_train_pred_lr)
plt.plot([0,1],[0,1], '--k')
plt.axis('tight')
plt.xlabel('true price')
plt.ylabel('predicted price')
plt.tight_layout()
plt.show()
'''

'''
# 2.正则化的线性回归
from sklearn.linear_model import RidgeCV
# 超参数范围
alphas = [0.01, 0.1, 1,10,100]
# 生成ridge实例
ridge = RidgeCV(alphas=alphas, store_cv_values=True)
# 模型训练
ridge.fit(X_train, y_train)
# 预测
y_test_pred_ridge = ridge.predict(X_test)
y_train_pred_ridge = ridge.predict(X_train)
# r2_score 评估模型性能
print('The r2 score of RidgeCV on test is %.6f'  % r2_score(y_test, y_test_pred_ridge))
print('The r2 score of RidgeCV on train is %.6f'  % r2_score(y_train, y_train_pred_ridge))

# 可视化
# print(ridge.cv_values_)
mse_mean = np.mean(ridge.cv_values_, axis=0)
print(np.log10(ridge.alpha_)*np.ones(3))
plt.plot(np.log10(alphas), mse_mean.reshape(len(alphas), 1))
plt.xlabel('log(alpha)')
plt.ylabel('mse')
plt.show()
print('alpha is ', ridge.alpha_)
#在训练集上观察预测残差的分布，看是否符合模型假设：噪声为0均值的高斯噪声
f, ax = plt.subplots(figsize=(7, 5))
f.tight_layout()
ax.hist(y_train - y_train_pred_ridge, bins=40, label='Residuals Ridge', color='b', alpha=.5)
ax.set_title("Histogram of Residuals")
ax.legend(loc='best')
# 观察预测值与真值的散点图
plt.figure(figsize=(4,3))
plt.scatter(y_train, y_train_pred_ridge)
plt.plot([0,1],[0,1], '--k')
plt.axis('tight')
plt.xlabel('true price')
plt.ylabel('predicted price')
plt.tight_layout()
plt.show()

# print(y_train_pred_ridge)
# print(y_train_pred_lr)
'''

from sklearn.linear_model import LassoCV

# 3. Lasso regression with built-in cross-validation over the alpha path.
lasso = LassoCV()
# Bug fix: the model was fitted twice in a row — the second fit was a
# redundant duplicate and has been removed.
lasso.fit(X_train, y_train)
y_test_pred_lasso = lasso.predict(X_test)
y_train_pred_lasso = lasso.predict(X_train)
# r2_score evaluates model performance on held-out and training data.
print('The r2 score of Lasso on test is %.6f' % r2_score(y_test, y_test_pred_lasso))
print('The r2 score of Lasso on train is %.6f' % r2_score(y_train, y_train_pred_lasso))
# Scatter plot of predicted vs. true values on the training set.
plt.figure(figsize=(4, 3))
plt.scatter(y_train, y_train_pred_lasso)
plt.plot([0, 1], [0, 1], '--k')  # ideal-fit reference line (targets are min-max scaled to [0, 1])
plt.axis('tight')
plt.xlabel('true price')
plt.ylabel('predicted price')
plt.tight_layout()
plt.show()

# Mean cross-validated MSE for each candidate alpha along the regularization path.
mses = np.mean(lasso.mse_path_, axis=1)
plt.plot(np.log10(lasso.alphas_), mses)
plt.xlabel('log(alpha)')
plt.ylabel('mse')
plt.show()




