import pandas as pd
from sklearn.linear_model import LinearRegression
import numpy as np

def boston_housing():
    """Fit an ordinary-least-squares model to the Boston housing training set.

    Reads 'data/boston_housing/train.csv', fits sklearn's LinearRegression
    using every column except 'medv' as features, then prints the training
    R^2 score, the learned coefficients, and the absolute residual of each
    training row.

    Side effect: sets pandas 'display.max_rows' to None globally so the
    full residual Series is printed (matches the original behavior).
    """
    train = pd.read_csv('data/boston_housing/train.csv')
    # All leading columns are features; the last column 'medv' (MEDV,
    # median home value) is the regression target.
    df_features = train.drop(['medv'], axis=1)
    df_target = train['medv']

    regression = LinearRegression().fit(df_features, df_target)
    print(regression.score(df_features, df_target))  # R^2 on the training data
    print(regression.coef_)

    pd.set_option('display.max_rows', None)
    # Elementwise absolute residuals |y - y_hat|. The predict() ndarray
    # subtracts from the Series positionally; the original's .tolist()
    # round-trip was redundant, and Series.abs() keeps the index labels.
    print((df_target - regression.predict(df_features)).abs())

# Least-squares method from the linear-regression chapter
def lease_squares_method():
    """Solve the normal equations B = (X^T X)^{-1} X^T y by hand.

    Prints three results: the coefficients from the normal equations, the
    coefficients sklearn's LinearRegression finds on the same data, and the
    coefficients after prepending a column of ones to X (which lets the
    model fit an intercept/bias term as well).
    """
    # np.matrix (and its .I inverse attribute) is deprecated; plain 2-D
    # ndarrays with @ and np.linalg.inv compute the same solution.
    x = np.array([[0, 1], [1, -1], [2, 8]])
    y = np.array([[1.4], [-0.48], [13.2]])

    print("\n系数矩阵B：\n", np.linalg.inv(x.T @ x) @ x.T @ y)

    regression = LinearRegression().fit(x, y)
    print("\nsklearn LinearRegression系数矩阵B：\n", regression.coef_)

    # Same system with a leading all-ones column so an intercept term is
    # estimated alongside the two slope coefficients.
    x2 = np.array([[1, 0, 1], [1, 1, -1], [1, 2, 8]])
    y2 = np.array([[1.4], [-0.48], [13.2]])

    print("\n添加了误差之后的系数矩阵B：\n", np.linalg.inv(x2.T @ x2) @ x2.T @ y2)

# Run the demo only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    lease_squares_method()

