import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt
from sklearn.metrics import classification_report

# Load the dataset: two exam-score feature columns and a 0/1 label column.
path = '/home/wdd/Desktop/ML_CODE/Logistic_Regression/ex2data1.txt'
data = np.loadtxt(path, delimiter=',')
x = data[:, :-1]
y = data[:, -1]
# print(x.shape)
# print(y.shape)

# Boolean masks select positive / negative examples directly, replacing the
# manual index-collecting loop (and fixing the `N_insex` typo).
pos = y == 1
neg = ~pos  # everything that is not labeled 1, matching the original else-branch

# Scatter plot of the raw data, one marker style per class.
plt.xlabel('X1')
plt.ylabel('X2')
s1 = plt.scatter(x[pos, 0], x[pos, 1], marker='p')
s2 = plt.scatter(x[neg, 0], x[neg, 1], marker='x')
plt.legend((s1, s2), ('1', '0'), loc='best')
# plt.show()

# Optional feature scaling (would speed up gradient-based iteration);
# left disabled as in the original.
# x[:, 0] = (x[:, 0] - np.mean(x[:, 0])) / np.std(x[:, 0])
# x[:, 1] = (x[:, 1] - np.mean(x[:, 1])) / np.std(x[:, 1])
# Prepend a column of ones so theta[0] acts as the intercept term.
x = np.insert(x, 0, 1, 1)

# One parameter per column of x: intercept + 2 features.
theta = np.zeros(3)
print(theta)


# Logistic regression
def sigmoid(z):
    """Logistic function: map any real (or array) input into (0, 1)."""
    exp_neg = np.exp(-z)
    return 1.0 / (1.0 + exp_neg)


def cost(theta, x, y):
    """Mean cross-entropy (log-loss) of logistic regression.

    Parameters
    ----------
    theta : (n,) parameter vector.
    x : (m, n) design matrix (first column assumed to be ones).
    y : (m,) labels in {0, 1}.

    Returns
    -------
    float : average negative log-likelihood over the m examples.
    """
    # Hoist the hypothesis so sigmoid(x @ theta) is evaluated only once
    # (the original computed it twice per call).
    h = sigmoid(x @ theta)
    return np.mean(-y * np.log(h) - (1 - y) * np.log(1 - h))


# print(cost(theta, x, y))

def gradient(theta, x, y):
    """Gradient of the logistic cost w.r.t. theta: (1/m) * X^T (h - y)."""
    residual = sigmoid(x @ theta) - y
    m = len(x)
    return x.T @ residual / m


# print(gradient(theta, x, y))
# Fit theta with Newton-CG; supplying the analytic gradient (jac=gradient)
# avoids numerical differentiation inside the optimizer.
res = opt.minimize(fun=cost, x0=theta, args=(x, y), method='Newton-CG', jac=gradient)
print(res)


# Predict with the trained parameters and validate.
def predict(x, theta):
    """Return a list of 0/1 predictions, one per row of x.

    A row is classified 1 when sigmoid(x @ theta) reaches the 0.5
    decision threshold.
    """
    prob = sigmoid(x @ theta)
    # Loop variable renamed so it no longer shadows the parameter `x`;
    # the original's unreachable second `return prob` is removed.
    return [1 if p >= 0.5 else 0 for p in prob]
final_theta = res.x
y_pred = predict(x, final_theta)
print(y_pred)
# Evaluate on the training set — uses classification_report, which was
# imported but previously never called.
print(classification_report(y, y_pred))
'''
 On the decision boundary x @ theta = 0, i.e.
 theta[0] + theta[1]*x1 + theta[2]*x2 = 0
 =>  x2 = -(theta[0] + theta[1]*x1) / theta[2]
'''

# Dividing by res.x[2] turns the boundary into x2 = coef[0] + coef[1] * x1.
coef = -(res.x / res.x[2])
print(coef)
# Dedicated names for the boundary line so the training data x/y are not shadowed.
x1_line = np.linspace(30, 100, 100)
# x1_line = np.arange(130, step=0.1)
x2_line = coef[0] + coef[1] * x1_line
plt.plot(x1_line, x2_line)
plt.show()
