# -*- coding: utf-8 -*-

import numpy as np
from numpy.linalg import inv
from numpy import dot
from numpy import mat
import pandas as pd

# Linear regression machine-learning model

# Theory

# What is linear regression: Y ~ X  (Fig. 1)
# Mathematical formulation of linear regression  (Fig. 2)

# 实现

# 实践
# A = np.mat([1, 1])
# print('A:\n', A)
# print('A.reshape:\n', A.reshape(2, 1))
#
# print('A.T:\n', A.T)
#
# B = np.mat([[1, 2], [3, 4]])
# print('B:\n', B)
# print('B的逆矩阵:\n', inv(B))
# print('B[0行]:\n', B[0, :])
# print('B[0列]:\n', B[:, 0])
#
# # A: 1x2 B: 2x2
# print('A.B:\n', dot(A, B))


# # y = 2x
# x = mat([1, 2, 3]).reshape(3, 1)
# y = 2*x
# # Normal equation: theta = (X'X)^(-1) X'y
# # theta = dot(dot(inv(dot(x.T, x)), x.T), y)
# # Gradient step: theta = theta - alpha*(theta*x - y)*x   alpha: learning rate
# theta = 1.
# alpha = 0.1
#
# for i in range(100):
#     theta = theta + np.sum(alpha*(y - dot(x, theta))*x.reshape(1, 3))/3.
#
# print(theta)    # 2

# Load the training data.
# Raw string avoids the invalid escape sequences ('\p', '\l', '\d') the
# original literal relied on — those are a SyntaxWarning on Python 3.12+.
# The resulting path is byte-identical to the original's runtime value.
dataSet = pd.read_csv(r'D:\projects\ai\linear_regression\linear_regression\data.csv')
# print(dataSet)

# Build the design matrix: take the three feature columns (2:5), add a
# bias column x0 = 1, and reorder so the bias comes first -> [x0, x1, x2, x3].
# .copy() makes an explicit frame so the x0 assignment does not write into
# a view of dataSet (avoids SettingWithCopyWarning).
temp = dataSet.iloc[:, 2:5].copy()
temp['x0'] = 1
x = temp.iloc[:, [3, 0, 1, 2]]
# print(x)

# Target as an (n, 1) column vector; -1 lets numpy infer the row count
# instead of hard-coding 150.
y = dataSet.iloc[:, 1].values.reshape(-1, 1)

# Closed-form solution via the normal equation: theta = (X'X)^(-1) X'y
theta = dot(dot(inv(dot(x.T, x)), x.T), y)
print(theta)

# Batch gradient descent on the same design matrix, as a numerical
# cross-check against the closed-form solution printed above.
theta = np.array([1., 1., 1., 1.]).reshape(4, 1)
alpha = 0.1

# BUG FIX: the original did `temp = theta`, which aliases the SAME ndarray,
# so the intended simultaneous update via a temp buffer silently became a
# sequential in-place update (each component saw the others' new values),
# and `theta = temp` was a no-op. The vectorized step below updates all
# four components simultaneously, computes the residual once per iteration
# instead of four times, and infers the sample count instead of
# hard-coding 150.
X = x.values  # (n, 4) ndarray view of the design-matrix DataFrame
n_samples = X.shape[0]

for i in range(10000):
    # theta <- theta + alpha * X'(y - X theta) / n
    theta = theta + alpha * dot(X.T, y - dot(X, theta)) / n_samples

print(theta)
