{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "3ba5c23b-ed21-4bb3-9bbf-f24a23ab4799",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集样本数量: 60000\n",
      "测试集样本数量: 10000\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# Preprocessing pipeline: convert PIL images to tensors, then normalize to [-1, 1].\n",
    "transform = transforms.Compose(\n",
    "    [transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))]\n",
    ")\n",
    "\n",
    "# Download the MNIST training and test splits (cached under ./data).\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# Wrap the datasets in mini-batch loaders; only the training split is shuffled.\n",
    "train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=64, shuffle=True)\n",
    "test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=64, shuffle=False)\n",
    "\n",
    "# Report how many samples each split contains.\n",
    "print(f'训练集样本数量: {len(train_dataset)}')\n",
    "print(f'测试集样本数量: {len(test_dataset)}')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "efd782b1-6eff-4890-bdd1-1a6c61641fff",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集标签分布: [30596 29404]\n",
      "测试集标签分布: [5139 4861]\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "import numpy as np\n",
    "from torchvision import datasets, transforms\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "\n",
    "# Dataset wrapper that remaps MNIST digit labels to binary classes:\n",
    "# digits 0-4 -> 0 (\"small\"), digits 5-9 -> 1 (\"large\").\n",
    "class MNISTBinaryClassificationDataset(Dataset):\n",
    "    def __init__(self, original_dataset):\n",
    "        self.original_dataset = original_dataset\n",
    "        # Precompute every binary label once; `targets` holds the digit labels.\n",
    "        self.labels = [1 if int(label) >= 5 else 0 for label in original_dataset.targets]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.original_dataset)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # Reuse the transformed image but substitute the binary label.\n",
    "        image, _ = self.original_dataset[idx]\n",
    "        return image, self.labels[idx]\n",
    "\n",
    "# Preprocessing: tensor conversion + normalization to [-1, 1].\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5,), (0.5,))\n",
    "])\n",
    "\n",
    "# Download the raw MNIST splits.\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# Wrap both splits with the binary-label view.\n",
    "train_binary_dataset = MNISTBinaryClassificationDataset(train_dataset)\n",
    "test_binary_dataset = MNISTBinaryClassificationDataset(test_dataset)\n",
    "\n",
    "# Mini-batch loaders (training split shuffled).\n",
    "train_loader = DataLoader(train_binary_dataset, batch_size=64, shuffle=True)\n",
    "test_loader = DataLoader(test_binary_dataset, batch_size=64, shuffle=False)\n",
    "\n",
    "# Label distribution: read the precomputed labels directly instead of\n",
    "# iterating the datasets, which would load and transform all 70k images.\n",
    "train_labels = np.array(train_binary_dataset.labels)\n",
    "test_labels = np.array(test_binary_dataset.labels)\n",
    "print(f'训练集标签分布: {np.bincount(train_labels)}')\n",
    "print(f'测试集标签分布: {np.bincount(test_labels)}')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "7ce6b868-5ed4-4d32-ae5c-8c1db9c79db0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "逻辑回归模型在测试集上的准确率: 86.96%\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/linear_model/_logistic.py:460: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
      "STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
      "\n",
      "Increase the number of iterations (max_iter) or scale the data as shown in:\n",
      "    https://scikit-learn.org/stable/modules/preprocessing.html\n",
      "Please also refer to the documentation for alternative solver options:\n",
      "    https://scikit-learn.org/stable/modules/linear_model.html#logistic-regression\n",
      "  n_iter_i = _check_optimize_result(\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import torch\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.metrics import accuracy_score\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# Dataset wrapper: flattens each image to a 784-dim vector and maps the\n",
    "# digit label to a binary class (0-4 -> 0, 5-9 -> 1).\n",
    "class MNISTBinaryClassificationDataset(torch.utils.data.Dataset):\n",
    "    def __init__(self, original_dataset):\n",
    "        self.original_dataset = original_dataset\n",
    "        self.labels = [1 if int(label) >= 5 else 0 for label in original_dataset.targets]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.original_dataset)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # Flatten to 1-D so scikit-learn can consume the sample directly.\n",
    "        image, _ = self.original_dataset[idx]\n",
    "        return image.view(-1).numpy(), self.labels[idx]\n",
    "\n",
    "# Preprocessing: tensor conversion + normalization to [-1, 1].\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5,), (0.5,))\n",
    "])\n",
    "\n",
    "# Download the raw MNIST splits.\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# Apply the binary-label view.\n",
    "train_binary_dataset = MNISTBinaryClassificationDataset(train_dataset)\n",
    "test_binary_dataset = MNISTBinaryClassificationDataset(test_dataset)\n",
    "\n",
    "# Materialize features and labels as numpy arrays.\n",
    "train_images, train_labels = zip(*train_binary_dataset)\n",
    "test_images, test_labels = zip(*test_binary_dataset)\n",
    "train_images = np.array(train_images)\n",
    "train_labels = np.array(train_labels)\n",
    "test_images = np.array(test_images)\n",
    "test_labels = np.array(test_labels)\n",
    "\n",
    "# BUG FIX: the previous version multiplied the features by 255, claiming to\n",
    "# restore the original pixel range. After Normalize((0.5,), (0.5,)) the pixels\n",
    "# lie in [-1, 1] (not [0, 1]), so scaling by 255 produced values in\n",
    "# [-255, 255], inflated the feature scale, and made the lbfgs solver fail to\n",
    "# converge (see the recorded ConvergenceWarning). Keep the normalized scale.\n",
    "\n",
    "# Binary logistic regression.\n",
    "logreg = LogisticRegression(max_iter=1000)\n",
    "logreg.fit(train_images, train_labels)\n",
    "\n",
    "# Evaluate on the held-out test split.\n",
    "test_preds = logreg.predict(test_images)\n",
    "accuracy = accuracy_score(test_labels, test_preds)\n",
    "print(f'逻辑回归模型在测试集上的准确率: {accuracy * 100:.2f}%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "400dae3e-b341-47c5-91b6-d8b3048be5ff",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Softmax回归模型在测试集上的准确率: 92.11%\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# Preprocessing: tensor conversion + normalization to [-1, 1].\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5,), (0.5,))\n",
    "])\n",
    "\n",
    "# Download the raw MNIST splits.\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "def _to_arrays(dataset):\n",
    "    # Materialize a torchvision dataset as (n_samples, 784) features + labels.\n",
    "    flat = [(img.view(-1).numpy(), lbl) for img, lbl in dataset]\n",
    "    xs, ys = zip(*flat)\n",
    "    return np.array(xs), np.array(ys)\n",
    "\n",
    "train_images, train_labels = _to_arrays(train_dataset)\n",
    "test_images, test_labels = _to_arrays(test_dataset)\n",
    "\n",
    "# Standardize features; the scaler is fit on the training split only.\n",
    "scaler = StandardScaler()\n",
    "train_images = scaler.fit_transform(train_images)\n",
    "test_images = scaler.transform(test_images)\n",
    "\n",
    "# Multinomial (softmax) logistic regression over the ten digit classes.\n",
    "softmax_regressor = LogisticRegression(multi_class='multinomial', solver='lbfgs', max_iter=1000)\n",
    "softmax_regressor.fit(train_images, train_labels)\n",
    "\n",
    "# Evaluate on the held-out test split.\n",
    "test_preds = softmax_regressor.predict(test_images)\n",
    "accuracy = accuracy_score(test_labels, test_preds)\n",
    "print(f'Softmax回归模型在测试集上的准确率: {accuracy * 100:.2f}%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "8e712b52-fefb-4991-abe5-3acfc3011095",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LibSVM]*\n",
      "optimization finished, #iter = 651\n",
      "obj = -98.539456, rho = -0.177579\n",
      "nSV = 341, nBSV = 73\n",
      ".*.*\n",
      "optimization finished, #iter = 2034\n",
      "obj = -304.337283, rho = 0.744255\n",
      "nSV = 1111, nBSV = 261\n",
      ".*\n",
      "optimization finished, #iter = 1756\n",
      "obj = -249.078382, rho = 0.590159\n",
      "nSV = 909, nBSV = 203\n",
      "*.*\n",
      "optimization finished, #iter = 1308\n",
      "obj = -178.811637, rho = 0.363249\n",
      "nSV = 684, nBSV = 132\n",
      ".*\n",
      "optimization finished, #iter = 1843\n",
      "obj = -320.934670, rho = 0.484167\n",
      "nSV = 972, nBSV = 292\n",
      ".*\n",
      "optimization finished, #iter = 1997\n",
      "obj = -325.169978, rho = 0.698235\n",
      "nSV = 1069, nBSV = 291\n",
      ".*\n",
      "optimization finished, #iter = 1527\n",
      "obj = -182.845134, rho = 0.725665\n",
      "nSV = 868, nBSV = 133\n",
      ".*\n",
      "optimization finished, #iter = 1598\n",
      "obj = -283.925954, rho = 0.262827\n",
      "nSV = 834, nBSV = 242\n",
      "*.*\n",
      "optimization finished, #iter = 1244\n",
      "obj = -229.366396, rho = 0.439921\n",
      "nSV = 696, nBSV = 206\n",
      ".*\n",
      "optimization finished, #iter = 1394\n",
      "obj = -286.598849, rho = 0.833230\n",
      "nSV = 885, nBSV = 302\n",
      "*.*\n",
      "optimization finished, #iter = 1330\n",
      "obj = -275.680069, rho = 0.735476\n",
      "nSV = 770, nBSV = 293\n",
      "*\n",
      "optimization finished, #iter = 927\n",
      "obj = -200.558614, rho = 0.445394\n",
      "nSV = 534, nBSV = 195\n",
      "*.*\n",
      "optimization finished, #iter = 1039\n",
      "obj = -219.716232, rho = 0.623655\n",
      "nSV = 615, nBSV = 241\n",
      "*.*\n",
      "optimization finished, #iter = 1192\n",
      "obj = -173.821824, rho = 0.764082\n",
      "nSV = 678, nBSV = 167\n",
      "*.*\n",
      "optimization finished, #iter = 1215\n",
      "obj = -254.063831, rho = 0.812841\n",
      "nSV = 775, nBSV = 263\n",
      "*.*\n",
      "optimization finished, #iter = 1356\n",
      "obj = -395.413969, rho = 0.337467\n",
      "nSV = 836, nBSV = 456\n",
      "*\n",
      "optimization finished, #iter = 932\n",
      "obj = -218.805348, rho = 0.570576\n",
      "nSV = 573, nBSV = 220\n",
      "..*.*\n",
      "optimization finished, #iter = 3634\n",
      "obj = -696.777561, rho = -0.264201\n",
      "nSV = 1983, nBSV = 604\n",
      ".*.*\n",
      "optimization finished, #iter = 2433\n",
      "obj = -384.170630, rho = -0.552269\n",
      "nSV = 1311, nBSV = 299\n",
      ".*.*\n",
      "optimization finished, #iter = 2605\n",
      "obj = -392.595875, rho = -0.425631\n",
      "nSV = 1389, nBSV = 309\n",
      "..*.*\n",
      "optimization finished, #iter = 3083\n",
      "obj = -446.026474, rho = -0.121161\n",
      "nSV = 1605, nBSV = 308\n",
      ".*.*\n",
      "optimization finished, #iter = 2838\n",
      "obj = -511.404675, rho = 0.046053\n",
      "nSV = 1605, nBSV = 358\n",
      "..*\n",
      "optimization finished, #iter = 2901\n",
      "obj = -569.897976, rho = -0.610143\n",
      "nSV = 1611, nBSV = 547\n",
      ".*.*\n",
      "optimization finished, #iter = 2025\n",
      "obj = -353.730415, rho = -0.420543\n",
      "nSV = 1172, nBSV = 286\n",
      ".*\n",
      "optimization finished, #iter = 1742\n",
      "obj = -284.216606, rho = -0.307215\n",
      "nSV = 965, nBSV = 227\n",
      "..*.*\n",
      "optimization finished, #iter = 3412\n",
      "obj = -825.885807, rho = -0.207400\n",
      "nSV = 1942, nBSV = 854\n",
      ".*.*\n",
      "optimization finished, #iter = 2138\n",
      "obj = -294.393621, rho = 0.120908\n",
      "nSV = 1140, nBSV = 194\n",
      ".*.*\n",
      "optimization finished, #iter = 2794\n",
      "obj = -483.439745, rho = 0.299461\n",
      "nSV = 1517, nBSV = 398\n",
      "..*.*\n",
      "optimization finished, #iter = 3169\n",
      "obj = -784.662254, rho = -0.445743\n",
      "nSV = 1809, nBSV = 838\n",
      ".*.*\n",
      "optimization finished, #iter = 2323\n",
      "obj = -498.884558, rho = -0.200543\n",
      "nSV = 1279, nBSV = 450\n",
      ".*\n",
      "optimization finished, #iter = 1985\n",
      "obj = -337.011835, rho = 0.152061\n",
      "nSV = 1037, nBSV = 283\n",
      ".*.*\n",
      "optimization finished, #iter = 2028\n",
      "obj = -324.458673, rho = 0.431863\n",
      "nSV = 1095, nBSV = 260\n",
      ".*.*\n",
      "optimization finished, #iter = 2469\n",
      "obj = -454.058140, rho = 0.539362\n",
      "nSV = 1332, nBSV = 421\n",
      ".*\n",
      "optimization finished, #iter = 1821\n",
      "obj = -352.837243, rho = -0.102087\n",
      "nSV = 993, nBSV = 307\n",
      "..*.*\n",
      "optimization finished, #iter = 3131\n",
      "obj = -890.255009, rho = 0.199416\n",
      "nSV = 1801, nBSV = 961\n",
      ".*.*\n",
      "optimization finished, #iter = 2476\n",
      "obj = -503.867638, rho = 0.309440\n",
      "nSV = 1383, nBSV = 462\n",
      ".*.*\n",
      "optimization finished, #iter = 2172\n",
      "obj = -319.292477, rho = 0.453331\n",
      "nSV = 1185, nBSV = 252\n",
      "..*.*\n",
      "optimization finished, #iter = 3273\n",
      "obj = -689.305942, rho = -0.368987\n",
      "nSV = 1716, nBSV = 698\n",
      ".*.*\n",
      "optimization finished, #iter = 2016\n",
      "obj = -444.922193, rho = -0.013559\n",
      "nSV = 1152, nBSV = 411\n",
      ".*.*\n",
      "optimization finished, #iter = 2152\n",
      "obj = -254.735108, rho = 0.149883\n",
      "nSV = 1154, nBSV = 109\n",
      ".*\n",
      "optimization finished, #iter = 1978\n",
      "obj = -368.637203, rho = -0.502090\n",
      "nSV = 1127, nBSV = 342\n",
      ".*\n",
      "optimization finished, #iter = 1712\n",
      "obj = -213.028188, rho = -0.297123\n",
      "nSV = 905, nBSV = 125\n",
      ".*.*\n",
      "optimization finished, #iter = 2159\n",
      "obj = -354.611149, rho = -0.605958\n",
      "nSV = 1220, nBSV = 312\n",
      "..*.*\n",
      "optimization finished, #iter = 3930\n",
      "obj = -1015.046255, rho = -0.523489\n",
      "nSV = 2223, nBSV = 1097\n",
      ".*.*\n",
      "optimization finished, #iter = 2250\n",
      "obj = -526.305848, rho = 0.225726\n",
      "nSV = 1265, nBSV = 528\n",
      "Total nSV = 15549\n",
      "SVM模型在测试集上的准确率: 96.61%\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "from sklearn.svm import SVC\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# Preprocessing: tensor conversion + normalization to [-1, 1].\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5,), (0.5,))\n",
    "])\n",
    "\n",
    "# Download the raw MNIST splits.\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "def _to_arrays(dataset):\n",
    "    # Materialize a torchvision dataset as (n_samples, 784) features + labels.\n",
    "    flat = [(img.view(-1).numpy(), lbl) for img, lbl in dataset]\n",
    "    xs, ys = zip(*flat)\n",
    "    return np.array(xs), np.array(ys)\n",
    "\n",
    "train_images, train_labels = _to_arrays(train_dataset)\n",
    "test_images, test_labels = _to_arrays(test_dataset)\n",
    "\n",
    "# Standardize features; the scaler is fit on the training split only.\n",
    "scaler = StandardScaler()\n",
    "train_images = scaler.fit_transform(train_images)\n",
    "test_images = scaler.transform(test_images)\n",
    "\n",
    "# RBF-kernel SVM. NOTE: SVC's default max_iter is -1 (unlimited), so\n",
    "# max_iter=5000 is a cap on iterations, not an increase; verbose=1 makes\n",
    "# libsvm print its optimization progress.\n",
    "svm_model = SVC(kernel='rbf', max_iter=5000, verbose=1)\n",
    "svm_model.fit(train_images, train_labels)\n",
    "\n",
    "# Evaluate on the held-out test split.\n",
    "test_preds = svm_model.predict(test_images)\n",
    "accuracy = accuracy_score(test_labels, test_preds)\n",
    "print(f'SVM模型在测试集上的准确率: {accuracy * 100:.2f}%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "23e84d3c-d607-4c06-b907-5d652dcf869f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LibSVM]...*.*\n",
      "optimization finished, #iter = 4826\n",
      "obj = -787.973896, rho = -0.964977\n",
      "nSV = 2793, nBSV = 780\n",
      "Total nSV = 2793\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3538\n",
      "obj = -834.890127, rho = -0.960058\n",
      "nSV = 2372, nBSV = 980\n",
      "Total nSV = 2372\n",
      "[LibSVM]....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -1736.707649, rho = -0.673455\n",
      "nSV = 4713, nBSV = 1676\n",
      "Total nSV = 4713\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -2151.926529, rho = -0.802465\n",
      "nSV = 5086, nBSV = 2202\n",
      "Total nSV = 5086\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -1659.740050, rho = -0.934121\n",
      "nSV = 4202, nBSV = 1833\n",
      "Total nSV = 4202\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -1957.321172, rho = -0.875684\n",
      "nSV = 4797, nBSV = 2078\n",
      "Total nSV = 4797\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -1247.459344, rho = -0.704357\n",
      "nSV = 3655, nBSV = 1187\n",
      "Total nSV = 3655\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -1815.734867, rho = -0.587091\n",
      "nSV = 4412, nBSV = 1864\n",
      "Total nSV = 4412\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -2586.326069, rho = -0.971318\n",
      "nSV = 5300, nBSV = 2917\n",
      "Total nSV = 5300\n",
      "[LibSVM]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "....WARN: libsvm Solver reached max_iter\n",
      "optimization finished, #iter = 5000\n",
      "obj = -2813.096691, rho = -0.904367\n",
      "nSV = 5533, nBSV = 3209\n",
      "Total nSV = 5533\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/environment/miniconda3/lib/python3.10/site-packages/sklearn/svm/_base.py:297: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "一对多方法（OvR）SVM模型在测试集上的准确率: 96.82%\n",
      "[LibSVM]*\n",
      "optimization finished, #iter = 756\n",
      "obj = -101.513549, rho = -0.323958\n",
      "nSV = 421, nBSV = 69\n",
      "Total nSV = 421\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1791\n",
      "obj = -314.647466, rho = 0.745381\n",
      "nSV = 995, nBSV = 296\n",
      "Total nSV = 995\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1667\n",
      "obj = -250.567588, rho = 0.592422\n",
      "nSV = 879, nBSV = 211\n",
      "Total nSV = 879\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1401\n",
      "obj = -182.251387, rho = 0.365140\n",
      "nSV = 759, nBSV = 116\n",
      "Total nSV = 759\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1866\n",
      "obj = -319.365888, rho = 0.481641\n",
      "nSV = 985, nBSV = 283\n",
      "Total nSV = 985\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1802\n",
      "obj = -333.461123, rho = 0.710524\n",
      "nSV = 986, nBSV = 323\n",
      "Total nSV = 986\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1300\n",
      "obj = -182.436114, rho = 0.734344\n",
      "nSV = 745, nBSV = 157\n",
      "Total nSV = 745\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1751\n",
      "obj = -279.514383, rho = 0.259171\n",
      "nSV = 913, nBSV = 212\n",
      "Total nSV = 913\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1356\n",
      "obj = -228.822343, rho = 0.432579\n",
      "nSV = 742, nBSV = 192\n",
      "Total nSV = 742\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1500\n",
      "obj = -278.485584, rho = 0.841275\n",
      "nSV = 922, nBSV = 284\n",
      "Total nSV = 922\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1491\n",
      "obj = -251.416252, rho = 0.771758\n",
      "nSV = 861, nBSV = 237\n",
      "Total nSV = 861\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1247\n",
      "obj = -192.531045, rho = 0.643948\n",
      "nSV = 728, nBSV = 155\n",
      "Total nSV = 728\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1384\n",
      "obj = -193.880480, rho = 0.696309\n",
      "nSV = 763, nBSV = 170\n",
      "Total nSV = 763\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1248\n",
      "obj = -170.359831, rho = 0.775877\n",
      "nSV = 711, nBSV = 160\n",
      "Total nSV = 711\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1294\n",
      "obj = -249.668722, rho = 0.819409\n",
      "nSV = 802, nBSV = 252\n",
      "Total nSV = 802\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1754\n",
      "obj = -310.814272, rho = 0.566504\n",
      "nSV = 965, nBSV = 284\n",
      "Total nSV = 965\n",
      "[LibSVM]*.*\n",
      "optimization finished, #iter = 1274\n",
      "obj = -201.122680, rho = 0.675586\n",
      "nSV = 714, nBSV = 169\n",
      "Total nSV = 714\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3216\n",
      "obj = -726.487595, rho = -0.251463\n",
      "nSV = 1795, nBSV = 723\n",
      "Total nSV = 1795\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2301\n",
      "obj = -385.180963, rho = -0.558160\n",
      "nSV = 1254, nBSV = 317\n",
      "Total nSV = 1254\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2278\n",
      "obj = -403.704462, rho = -0.405641\n",
      "nSV = 1257, nBSV = 362\n",
      "Total nSV = 1257\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2520\n",
      "obj = -456.566100, rho = -0.072477\n",
      "nSV = 1334, nBSV = 419\n",
      "Total nSV = 1334\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2224\n",
      "obj = -502.904267, rho = 0.087532\n",
      "nSV = 1290, nBSV = 445\n",
      "Total nSV = 1290\n",
      "[LibSVM]..*\n",
      "optimization finished, #iter = 2863\n",
      "obj = -580.375840, rho = -0.610737\n",
      "nSV = 1568, nBSV = 578\n",
      "Total nSV = 1568\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1922\n",
      "obj = -353.249455, rho = -0.416585\n",
      "nSV = 1092, nBSV = 301\n",
      "Total nSV = 1092\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1842\n",
      "obj = -286.819574, rho = -0.298260\n",
      "nSV = 1034, nBSV = 215\n",
      "Total nSV = 1034\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3372\n",
      "obj = -834.884292, rho = -0.204873\n",
      "nSV = 1917, nBSV = 871\n",
      "Total nSV = 1917\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1865\n",
      "obj = -288.167935, rho = 0.137316\n",
      "nSV = 980, nBSV = 229\n",
      "Total nSV = 980\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2432\n",
      "obj = -488.690566, rho = 0.321038\n",
      "nSV = 1345, nBSV = 449\n",
      "Total nSV = 1345\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3329\n",
      "obj = -763.220247, rho = -0.440286\n",
      "nSV = 1866, nBSV = 788\n",
      "Total nSV = 1866\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2354\n",
      "obj = -496.441064, rho = -0.199581\n",
      "nSV = 1301, nBSV = 438\n",
      "Total nSV = 1301\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2161\n",
      "obj = -334.556618, rho = 0.114476\n",
      "nSV = 1168, nBSV = 228\n",
      "Total nSV = 1168\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1980\n",
      "obj = -323.820010, rho = 0.439719\n",
      "nSV = 1069, nBSV = 267\n",
      "Total nSV = 1069\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2314\n",
      "obj = -456.289185, rho = 0.548228\n",
      "nSV = 1288, nBSV = 446\n",
      "Total nSV = 1288\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2295\n",
      "obj = -358.567928, rho = -0.109898\n",
      "nSV = 1229, nBSV = 229\n",
      "Total nSV = 1229\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3411\n",
      "obj = -831.053669, rho = 0.146028\n",
      "nSV = 1907, nBSV = 816\n",
      "Total nSV = 1907\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2246\n",
      "obj = -519.855314, rho = 0.308401\n",
      "nSV = 1284, nBSV = 514\n",
      "Total nSV = 1284\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1917\n",
      "obj = -320.876131, rho = 0.452857\n",
      "nSV = 1064, nBSV = 294\n",
      "Total nSV = 1064\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3467\n",
      "obj = -656.952605, rho = -0.331186\n",
      "nSV = 1820, nBSV = 605\n",
      "Total nSV = 1820\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2137\n",
      "obj = -437.340146, rho = -0.006093\n",
      "nSV = 1217, nBSV = 372\n",
      "Total nSV = 1217\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1695\n",
      "obj = -221.729037, rho = 0.142746\n",
      "nSV = 899, nBSV = 133\n",
      "Total nSV = 899\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1892\n",
      "obj = -370.357290, rho = -0.506539\n",
      "nSV = 1089, nBSV = 351\n",
      "Total nSV = 1089\n",
      "[LibSVM].*\n",
      "optimization finished, #iter = 1605\n",
      "obj = -206.824194, rho = -0.304941\n",
      "nSV = 852, nBSV = 131\n",
      "Total nSV = 852\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2041\n",
      "obj = -356.346938, rho = -0.611331\n",
      "nSV = 1146, nBSV = 325\n",
      "Total nSV = 1146\n",
      "[LibSVM]..*.*\n",
      "optimization finished, #iter = 3567\n",
      "obj = -1047.816402, rho = -0.535868\n",
      "nSV = 2159, nBSV = 1163\n",
      "Total nSV = 2159\n",
      "[LibSVM].*.*\n",
      "optimization finished, #iter = 2586\n",
      "obj = -500.097963, rho = 0.202277\n",
      "nSV = 1412, nBSV = 441\n",
      "Total nSV = 1412\n",
      "一对一方法（OvO）SVM模型在测试集上的准确率: 96.77%\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "from sklearn.svm import SVC\n",
    "from sklearn.multiclass import OneVsRestClassifier, OneVsOneClassifier\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# Preprocessing: tensor conversion + normalization to [-1, 1].\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5,), (0.5,))\n",
    "])\n",
    "\n",
    "# Download the raw MNIST splits.\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "def _to_arrays(dataset):\n",
    "    # Materialize a torchvision dataset as (n_samples, 784) features + labels.\n",
    "    flat = [(img.view(-1).numpy(), lbl) for img, lbl in dataset]\n",
    "    xs, ys = zip(*flat)\n",
    "    return np.array(xs), np.array(ys)\n",
    "\n",
    "train_images, train_labels = _to_arrays(train_dataset)\n",
    "test_images, test_labels = _to_arrays(test_dataset)\n",
    "\n",
    "# Standardize features; the scaler is fit on the training split only.\n",
    "scaler = StandardScaler()\n",
    "train_images = scaler.fit_transform(train_images)\n",
    "test_images = scaler.transform(test_images)\n",
    "\n",
    "# One-vs-rest: trains one binary RBF SVM per digit class (10 models).\n",
    "svm_ovr = SVC(kernel='rbf', max_iter=5000, verbose=1)\n",
    "ovr_model = OneVsRestClassifier(svm_ovr)\n",
    "ovr_model.fit(train_images, train_labels)\n",
    "\n",
    "test_preds_ovr = ovr_model.predict(test_images)\n",
    "accuracy_ovr = accuracy_score(test_labels, test_preds_ovr)\n",
    "print(f'一对多方法（OvR）SVM模型在测试集上的准确率: {accuracy_ovr * 100:.2f}%')\n",
    "\n",
    "# One-vs-one: trains one binary RBF SVM per pair of classes (45 models).\n",
    "svm_ovo = SVC(kernel='rbf', max_iter=5000, verbose=1)\n",
    "ovo_model = OneVsOneClassifier(svm_ovo)\n",
    "ovo_model.fit(train_images, train_labels)\n",
    "\n",
    "test_preds_ovo = ovo_model.predict(test_images)\n",
    "accuracy_ovo = accuracy_score(test_labels, test_preds_ovo)\n",
    "print(f'一对一方法（OvO）SVM模型在测试集上的准确率: {accuracy_ovo * 100:.2f}%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "b8abe4cd-dff8-488c-9969-fc5b51cfb7b6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "决策树模型在测试集上的准确率: 86.61%\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# 定义数据预处理步骤\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),  # 将图像转换为Tensor\n",
    "    transforms.Normalize((0.5,), (0.5,))  # 归一化\n",
    "])\n",
    "\n",
    "# 下载原始MNIST数据集\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# 获取训练集和测试集的图像数据和标签\n",
    "train_images, train_labels = zip(*[(image.view(-1).numpy(), label) for image, label in train_dataset])\n",
    "test_images, test_labels = zip(*[(image.view(-1).numpy(), label) for image, label in test_dataset])\n",
    "\n",
    "# 转换为numpy数组\n",
    "train_images = np.array(train_images)\n",
    "train_labels = np.array(train_labels)\n",
    "test_images = np.array(test_images)\n",
    "test_labels = np.array(test_labels)\n",
    "\n",
    "# 数据标准化\n",
    "scaler = StandardScaler()\n",
    "train_images = scaler.fit_transform(train_images)\n",
    "test_images = scaler.transform(test_images)\n",
    "\n",
    "# 创建决策树分类器\n",
    "dt_classifier = DecisionTreeClassifier(max_depth=10, random_state=42)  # 设置最大树深度，防止过拟合\n",
    "\n",
    "# 训练决策树模型\n",
    "dt_classifier.fit(train_images, train_labels)\n",
    "\n",
    "# 在测试集上进行预测\n",
    "test_preds = dt_classifier.predict(test_images)\n",
    "\n",
    "# 计算准确率\n",
    "accuracy = accuracy_score(test_labels, test_preds)\n",
    "print(f'决策树模型在测试集上的准确率: {accuracy * 100:.2f}%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4f5036e0-126c-4dc9-b0a4-c6909ce062e3",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练包含 10 棵树的随机森林模型...\n",
      "随机森林模型（子树数量=10）在测试集上的准确率: 94.92%\n",
      "\n",
      "训练包含 50 棵树的随机森林模型...\n",
      "随机森林模型（子树数量=50）在测试集上的准确率: 96.72%\n",
      "\n",
      "训练包含 100 棵树的随机森林模型...\n",
      "随机森林模型（子树数量=100）在测试集上的准确率: 97.04%\n",
      "\n",
      "训练包含 200 棵树的随机森林模型...\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from torchvision import datasets, transforms\n",
    "\n",
    "# 定义数据预处理步骤\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),  # 将图像转换为Tensor\n",
    "    transforms.Normalize((0.5,), (0.5,))  # 归一化\n",
    "])\n",
    "\n",
    "# 下载原始MNIST数据集\n",
    "train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# 获取训练集和测试集的图像数据和标签\n",
    "train_images, train_labels = zip(*[(image.view(-1).numpy(), label) for image, label in train_dataset])\n",
    "test_images, test_labels = zip(*[(image.view(-1).numpy(), label) for image, label in test_dataset])\n",
    "\n",
    "# 转换为numpy数组\n",
    "train_images = np.array(train_images)\n",
    "train_labels = np.array(train_labels)\n",
    "test_images = np.array(test_images)\n",
    "test_labels = np.array(test_labels)\n",
    "\n",
    "# 数据标准化\n",
    "scaler = StandardScaler()\n",
    "train_images = scaler.fit_transform(train_images)\n",
    "test_images = scaler.transform(test_images)\n",
    "\n",
    "# 创建随机森林分类器\n",
    "n_estimators_list = [10, 50, 100, 200]  # 尝试不同的子树数量\n",
    "\n",
    "for n_estimators in n_estimators_list:\n",
    "    print(f\"训练包含 {n_estimators} 棵树的随机森林模型...\")\n",
    "    \n",
    "    # 构建并训练随机森林模型\n",
    "    rf_classifier = RandomForestClassifier(n_estimators=n_estimators, random_state=42)\n",
    "    rf_classifier.fit(train_images, train_labels)\n",
    "    \n",
    "    # 在测试集上进行预测\n",
    "    test_preds = rf_classifier.predict(test_images)\n",
    "    \n",
    "    # 计算准确率\n",
    "    accuracy = accuracy_score(test_labels, test_preds)\n",
    "    print(f'随机森林模型（子树数量={n_estimators}）在测试集上的准确率: {accuracy * 100:.2f}%\\n')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e43d7d60-4e3d-4e11-82be-6a9a796c2509",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
