import numpy as np
import os

import matplotlib
import matplotlib.pyplot as plt
# Bump default font sizes so axis labels/ticks are readable in the figure.
plt.rcParams['axes.labelsize'] = 14
plt.rcParams['xtick.labelsize'] = 12
plt.rcParams['ytick.labelsize'] = 12
import warnings
# Silence all warnings (demo script; hides library deprecation noise).
warnings.filterwarnings('ignore')
np.random.seed(42)  # fixed seed for reproducibility

from sklearn import datasets
iris = datasets.load_iris()  # load the iris dataset (sklearn Bunch)
print(list(iris.keys()))
# print(iris.DESCR)  # uncomment for the full dataset description

# Use petal length and petal width (columns 2 and 3, i.e. the 3rd and 4th
# features) as the two predictors.
# Observed ranges: petal length 1.0-6.9, petal width 0.1-2.5.
X = iris['data'][:, 2:4]
# Binary target: 1 for Iris-Virginica (class index 2), 0 otherwise.
y = (iris['target'] == 2).astype(np.intc)

from sklearn.linear_model import LogisticRegression
# Larger C = weaker regularization (more prone to overfitting).
log_res = LogisticRegression(C=10)
log_res.fit(X, y)

# 构建二维坐标系
x0, x1 = np.meshgrid(np.linspace(2.9, 7, 500).reshape(-1, 1),
                     np.linspace(0.8, 2.7, 500).reshape(-1, 1))
X_new = np.c_[x0.ravel(), x1.ravel()]
# print(X_new)

y_proba = log_res.predict_proba(X_new)
# print(y_proba)

plt.figure(figsize=(10, 4))
# Training points: blue squares = not virginica, green triangles = virginica.
neg = (y == 0)
plt.plot(X[neg, 0], X[neg, 1], 'bs')    # Not Iris-Virginica
plt.plot(X[~neg, 0], X[~neg, 1], 'g^')  # Iris-Virginica
# Contour lines of P(virginica) over the grid, labeled in-line.
zz = y_proba[:, 1].reshape(x0.shape)
contour = plt.contour(x0, x1, zz, cmap=plt.cm.brg)
plt.clabel(contour, inline=1)
plt.axis([2.9, 7, 0.8, 2.7])
plt.text(3.5, 1.5, 'NOT Vir', fontsize=16, color='b')
plt.text(6.5, 2.3, 'Vir', fontsize=16, color='g')
plt.show()