import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.preprocessing import StandardScaler
from common import plot_utils

# Sigmoid function: predict 1 when sigmoid(theta^T x) >= 0.5,
#                   predict 0 when sigmoid(theta^T x) <  0.5.
# The decision boundary is the set where theta^T x == 0.


# def sigmoid(x):
#     return 1 / (1 + np.exp(-x))
#
#
# x = np.linspace(-10, 10, 50)
#
# y = sigmoid(x)
#
# plt.plot(x, y)
# plt.show()


########################  Irregular decision boundary  ####################################
# Build a 2-D toy set: points inside the circle x1^2 + x2^2 < 1.5 get label 1,
# then force 20 randomly chosen labels to 1 as noise.
# NOTE: the RNG call order (seed -> normal -> 20x randint) is deliberate; it
# fixes the generated data set exactly.
np.random.seed(666)
x = np.random.normal(0, 1, size=(200, 2))
y = (x[:, 0] ** 2 + x[:, 1] ** 2 < 1.5).astype('int')
for _ in range(20):
    y[np.random.randint(200)] = 1

# Visualize the two classes.
for label in (0, 1):
    plt.scatter(x[y == label, 0], x[y == label, 1])
plt.show()

x_train, x_test, y_train, y_test = train_test_split(x, y)

# A plain (linear) logistic regression cannot follow the circular boundary,
# so both scores stay modest.
logic_reg = LogisticRegression(solver='liblinear')
logic_reg.fit(x_train, y_train)
print(logic_reg.score(x_train, y_train))
print(logic_reg.score(x_test, y_test))
# plot_utils.plot_decision_boundary(logic_reg, axis=[-4, 4, -4, 4])


# Add polynomial features in front of the logistic regression.
print("==========添加多项式回归==========")


# C is LogisticRegression's regularization parameter (inverse strength:
# larger C => weaker regularization).
def LogicRegPip(degree=1, C=1):
    """Build a pipeline: polynomial expansion -> standardization -> logistic regression.

    Parameters
    ----------
    degree : degree of the polynomial feature expansion (default 1).
    C : inverse regularization strength for LogisticRegression (default 1).
    """
    steps = [
        ('ploy', PolynomialFeatures(degree=degree)),  # step key kept as-is from the original
        ('std', StandardScaler()),
        ('logic', LogisticRegression(solver='liblinear', C=C))
    ]
    return Pipeline(steps)


# degree=2 matches the true (quadratic) boundary well.
logic_pip = LogicRegPip(degree=2)
logic_pip.fit(x_train, y_train)
print(logic_pip.score(x_test, y_test))

print("==========添加多项式回归(过拟合)==========")
# High degree plus weak regularization (large C) -> overfitting demo.
logic_pip2 = LogicRegPip(degree=20, C=10)
logic_pip2.fit(x_train, y_train)
print(logic_pip2.score(x_test, y_test))

# Multiclass logistic regression
from sklearn import datasets

load_iris = datasets.load_iris()
iris_x_train, iris_x_test, iris_y_train, iris_y_test = train_test_split(
    load_iris.data, load_iris.target
)

# OVR (one-vs-rest): fits one binary classifier per class -- O(n) models.
print("==========OVR==========")
logic_reg_ovr = LogisticRegression(multi_class='ovr', solver='liblinear')
logic_reg_ovr.fit(iris_x_train, iris_y_train)
print(logic_reg_ovr.score(iris_x_test, iris_y_test))

# NOTE(review): the original labelled this "OVO", but multi_class='multinomial'
# is softmax (one model over all classes), not true one-vs-one. A real OVO
# scheme fits O(n^2) pairwise classifiers, e.g. via OneVsOneClassifier below.
print("==========OVO==========")
logic_reg_ovo = LogisticRegression(multi_class='multinomial', solver='newton-cg')
logic_reg_ovo.fit(iris_x_train, iris_y_train)
print(logic_reg_ovo.score(iris_x_test, iris_y_test))


###################### sklearn's generic multiclass wrapper classifiers ###########################
from sklearn.multiclass import OneVsRestClassifier
from sklearn.multiclass import OneVsOneClassifier

# Usage sketch: wrap any binary estimator to make it multiclass, e.g.
# ovr = OneVsRestClassifier(LogisticRegression())
# ovr.fit(iris_x_train, iris_y_train)
# ovr.predict(iris_x_test)