from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.tree import DecisionTreeClassifier  # 导入决策树分类器
import numpy as np
import pandas as pd
from sklearn.metrics import RocCurveDisplay
from sklearn.decomposition import PCA
from matplotlib import pyplot as plt
from mlxtend.plotting import plot_decision_regions
from sklearn.tree import plot_tree
from joblib import dump
# Load the dataset: the '标签' (label) column is the target, all other columns are features.
# NOTE(review): the original comment said "generate example data", but this reads a CSV.
# The path is Windows-style ('ML\\data.csv'); on POSIX the backslash is a literal character.
data = pd.read_csv('ML\\data.csv')
y = data['标签']
X = data.drop('标签', axis=1)
print(X)

# Configure matplotlib so Chinese glyphs and the minus sign render correctly.
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False

# Hold out 20% of the rows as a test split (fixed seed for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

# Instantiate the three classifiers (all seeded for reproducibility).
SVM_model = SVC(kernel='linear', C=0.1, random_state=42)
# NOTE(review): "Loistic" is a long-standing typo for "Logistic"; the name and the
# saved filename below are kept as-is so existing callers/artifacts keep working.
Loistic_model = LogisticRegression(C=0.1, random_state=42, max_iter=1000)
DecisionTree_model = DecisionTreeClassifier(criterion='gini', max_depth=10, random_state=42)

# Fit each model on the training split and persist it with joblib.
for _model, _path in (
    (SVM_model, 'SVM_model.joblib'),
    (Loistic_model, 'Loistic_model.joblib'),
    (DecisionTree_model, 'DecisionTree_model.joblib'),
):
    _model.fit(X_train, y_train)
    dump(_model, _path)

print("模型已成功保存为joblib文件")

# Positive-class probabilities from the logistic-regression model.
# Fix: the original code evaluated on the FULL dataset X (which includes the
# training rows) even though its own comment said the test set should be used
# to avoid over-confident estimates. Use the held-out test split instead.
y_pred_prob_Loistic = Loistic_model.predict_proba(X_test)[:, 1]
print("对率回归模型预测概率:")
for i, prob in enumerate(y_pred_prob_Loistic):
    print(f"样本{i+1}: {prob*100:.2f}%")

# SVM decision-function values (signed distance to the separating hyperplane —
# not a probability), computed on the same held-out test split for consistency.
y_decision_SVM = SVM_model.decision_function(X_test)
print("\n支持向量机模型决策函数值:")
for i, val in enumerate(y_decision_SVM):
    print(f"样本{i+1}: {val:.4f}")

# Score each classifier on the held-out test split: predict, then compare
# against the true labels. Accuracies are reported to two decimal places.
y_pred_SVM = SVM_model.predict(X_test)
accuracy_SVM = accuracy_score(y_test, y_pred_SVM)

y_pred_Loistic = Loistic_model.predict(X_test)
accuracy_Loistic = accuracy_score(y_test, y_pred_Loistic)

y_pred_DecisionTree = DecisionTree_model.predict(X_test)
accuracy_DecisionTree = accuracy_score(y_test, y_pred_DecisionTree)

print(f"支持向量机模型准确率: {accuracy_SVM:.2f}")
print(f"对率回归模型准确率: {accuracy_Loistic:.2f}")
print(f"决策树模型准确率: {accuracy_DecisionTree:.2f}")
# Plot the decision boundary of each fitted model over the full dataset.
# NOTE(review): plot_decision_regions assumes exactly two feature columns;
# the feature count of data.csv is not visible here — confirm before relying on this.
def _show_regions(model, title, figsize=None):
    """Render one model's decision regions with shared axis labels, then show it."""
    if figsize is not None:
        plt.figure(figsize=figsize)
    plot_decision_regions(X.values, y.values, clf=model, legend=2)
    plt.title(title)
    plt.xlabel('特征1')
    plt.ylabel('特征2')
    plt.show()

# Only the first plot had an explicit 10x6 figure in the original; keep that quirk.
_show_regions(SVM_model, '支持向量机决策边界', figsize=(10, 6))
_show_regions(Loistic_model, '对率回归决策边界')
_show_regions(DecisionTree_model, '决策树决策边界')

# Render the fitted decision tree on a large canvas so deep/multi-feature
# trees stay legible. Nodes are colored by majority class (filled=True).
plt.figure(figsize=(10, 10))
plot_tree(
    DecisionTree_model,
    feature_names=list(X.columns),
    class_names=['0', '1'],
    filled=True,
)
plt.title('决策树结构')
plt.show()