import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split


class SimpleSVM:
    """Linear soft-margin SVM trained with per-sample (sub)gradient descent.

    Decision function: f(x) = w . x - b; predictions are sign(f(x)).
    Minimizes the regularized hinge loss
        lambda * ||w||^2 + mean(max(0, 1 - y * (w . x - b)))
    one sample at a time.
    """

    def __init__(self, learning_rate=0.01, lambda_param=0.01, n_iters=1000):
        """
        Initialize the simple SVM model.
        :param learning_rate: gradient-descent step size
        :param lambda_param: L2 regularization strength
        :param n_iters: number of full passes over the training set
        """
        self.lr = learning_rate
        self.lambda_param = lambda_param
        self.n_iters = n_iters
        self.w = None
        self.b = None

    def fit(self, X, y):
        """
        Train the model.
        :param X: feature matrix, shape (n_samples, n_features)
        :param y: label vector; {0, 1} or {-1, 1} accepted
        :return: self (allows chaining)
        """
        n_samples, n_features = X.shape

        # Map labels to -1 / +1 as required by the hinge loss.
        y_ = np.where(y <= 0, -1, 1)

        # Initialize parameters (b as float so in-place updates stay float).
        self.w = np.zeros(n_features)
        self.b = 0.0

        # Per-sample subgradient descent.
        for _ in range(self.n_iters):
            for idx, x_i in enumerate(X):
                condition = y_[idx] * (np.dot(x_i, self.w) - self.b) >= 1

                if condition:
                    # Correctly classified beyond the margin: only the
                    # regularization term contributes to the gradient.
                    self.w -= self.lr * (2 * self.lambda_param * self.w)
                else:
                    # Misclassified or inside the margin: hinge term active.
                    self.w -= self.lr * \
                        (2 * self.lambda_param * self.w - x_i * y_[idx])
                    self.b -= self.lr * y_[idx]

        return self

    def predict(self, X):
        """
        Predict labels for new samples.
        :param X: feature matrix
        :return: array of -1 / +1 predictions (0 exactly on the boundary)
        """
        approx = np.dot(X, self.w) - self.b
        return np.sign(approx)

    def score(self, X, y):
        """
        Compute classification accuracy.
        :param X: feature matrix
        :param y: label vector; {0, 1} or {-1, 1} accepted
        :return: fraction of correctly predicted samples
        """
        y_pred = self.predict(X)
        # Map labels to -1 / +1 to match predict()'s output.
        y_ = np.where(y <= 0, -1, 1)
        return np.mean(y_pred == y_)

    def plot_decision_boundary(self, X, y, title='Simple SVM Decision Boundary'):
        """
        Plot the decision boundary (2-D data only).
        :param X: feature matrix with exactly 2 features
        :param y: label vector
        :param title: figure title
        """
        # Only meaningful for two-dimensional data.
        if X.shape[1] != 2:
            print("只能绘制二维数据的决策边界")
            return

        plt.figure(figsize=(10, 8))

        # Map labels to -1 / +1 for consistent coloring.
        y_ = np.where(y <= 0, -1, 1)

        # Build a grid covering the data range for the decision surface.
        x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
        y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
        xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                             np.arange(y_min, y_max, 0.1))

        # Predict over the grid.
        Z = self.predict(np.c_[xx.ravel(), yy.ravel()])
        Z = Z.reshape(xx.shape)

        # Shade the two decision regions and draw the boundary.
        plt.contourf(xx, yy, Z, alpha=0.3, cmap=plt.cm.RdBu)
        plt.contour(xx, yy, Z, colors='k', linestyles='-', linewidths=2)

        # Plot the data points.
        plt.scatter(X[:, 0], X[:, 1], c=y_, cmap=plt.cm.RdBu, edgecolors='k')

        # Highlight approximate support vectors: points near the margin
        # lines |w.x - b| = 1. (Fixed: the previous test `abs(f) - 1 < 0.1`
        # also matched every point with |f| < 1.1, including misclassified
        # points near the boundary.)
        margin_points = []
        for idx, x_i in enumerate(X):
            if abs(abs(np.dot(x_i, self.w) - self.b) - 1) < 0.1:
                margin_points.append(idx)

        if margin_points:
            plt.scatter(X[margin_points, 0], X[margin_points, 1], s=100,
                        facecolors='none', edgecolors='k', linewidths=2)

        # Hyperplane and margins:
        # w0*x1 + w1*x2 - b = offset  =>  x2 = (-w0*x1 + b + offset) / w1
        def get_hyperplane_value(x, w, b, offset):
            return (-w[0] * x - b + offset) / w[1]

        plt.title(title)
        plt.xlabel('Feature 1')
        plt.ylabel('Feature 2')

        # Only drawable when w[1] != 0 (otherwise the boundary is vertical).
        if self.w[1] != 0:
            x0_1 = np.min(X[:, 0])
            x0_2 = np.max(X[:, 0])

            # Decision boundary (offset 0).
            y0_1 = get_hyperplane_value(x0_1, self.w, self.b, 0)
            y0_2 = get_hyperplane_value(x0_2, self.w, self.b, 0)
            plt.plot([x0_1, x0_2], [y0_1, y0_2], 'k-',
                     lw=2, label='Decision Boundary')

            # Positive margin (offset +1).
            y1_1 = get_hyperplane_value(x0_1, self.w, self.b, 1)
            y1_2 = get_hyperplane_value(x0_2, self.w, self.b, 1)
            plt.plot([x0_1, x0_2], [y1_1, y1_2], 'k--', lw=1, label='Margin')

            # Negative margin (offset -1).
            y2_1 = get_hyperplane_value(x0_1, self.w, self.b, -1)
            y2_2 = get_hyperplane_value(x0_2, self.w, self.b, -1)
            plt.plot([x0_1, x0_2], [y2_1, y2_2], 'k--', lw=1)

        plt.legend()
        plt.tight_layout()
        plt.show()

# Generate linearly separable data and demonstrate the model


def main():
    """Demo: train SimpleSVM on synthetic blobs, report accuracy on both
    splits, plot the decision boundary, then sweep hyper-parameters."""
    # Two well-separated Gaussian clusters form a linearly separable set.
    X, y = make_blobs(n_samples=100, centers=2,
                      random_state=42, cluster_std=1.2)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42)

    # Baseline model: fit() returns self, so fitting can be chained.
    model = SimpleSVM(learning_rate=0.01, lambda_param=0.01,
                      n_iters=1000).fit(X_train, y_train)

    # Accuracy on the training and held-out splits.
    train_accuracy = model.score(X_train, y_train)
    test_accuracy = model.score(X_test, y_test)
    print(f"训练集准确率: {train_accuracy:.4f}")
    print(f"测试集准确率: {test_accuracy:.4f}")

    # Visualize the learned boundary over the full dataset.
    model.plot_decision_boundary(X, y, title="Simple SVM with Gradient Descent")

    # Grid sweep over learning rate and regularization strength.
    print("\n尝试不同的参数:")
    grid = [0.001, 0.01, 0.1]
    for lr in grid:
        for lambda_param in grid:
            candidate = SimpleSVM(learning_rate=lr,
                                  lambda_param=lambda_param,
                                  n_iters=1000).fit(X_train, y_train)
            train_acc = candidate.score(X_train, y_train)
            test_acc = candidate.score(X_test, y_test)
            print(
                f"学习率={lr}, lambda={lambda_param} => 训练准确率: {train_acc:.4f}, 测试准确率: {test_acc:.4f}")


# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
