{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "872a4d8c-3575-4f5a-95c2-e3719938bb94",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting torch==1.10.1\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/9a/f5/b76d021f06e50f770d3f6c1a1b50b62a69e587b1f0db7248269c4be21206/torch-1.10.1-cp36-cp36m-manylinux1_x86_64.whl (881.9MB)\n",
      "\u001b[K    100% |████████████████████████████████| 881.9MB 6.6MB/s ta 0:00:0101  2% |▊                               | 18.5MB 127.3MB/s eta 0:00:07    4% |█▍                              | 37.3MB 121.0MB/s eta 0:00:07    23% |███████▍                        | 203.6MB 123.0MB/s eta 0:00:06    31% |██████████                      | 274.2MB 120.4MB/s eta 0:00:06    36% |███████████▋                    | 319.3MB 123.8MB/s eta 0:00:05         | 345.2MB 126.7MB/s eta 0:00:05    51% |████████████████▌               | 455.2MB 126.8MB/s eta 0:00:04    62% |████████████████████            | 551.0MB 136.7MB/s eta 0:00:03 570.2MB 127.0MB/s eta 0:00:03    65% |█████████████████████           | 576.8MB 120.3MB/s eta 0:00:03�█████████████████▏          | 583.0MB 130.0MB/s eta 0:00:03         | 589.3MB 125.5MB/s eta 0:00:03████████████▋          | 596.7MB 125.4MB/s eta 0:00:03█████████████          | 603.3MB 123.8MB/s eta 0:00:03███████████▏         | 609.8MB 119.4MB/s eta 0:00:03███████▋         | 622.7MB 116.8MB/s eta 0:00:03�██████████▉         | 629.6MB 122.6MB/s eta 0:00:03�        | 642.9MB 131.5MB/s eta 0:00:02�███████████████▉        | 655.9MB 117.7MB/s eta 0:00:02    75% |████████████████████████        | 662.7MB 125.4MB/s eta 0:00:02��███████▎       | 669.1MB 122.9MB/s eta 0:00:02███████████▌       | 675.6MB 132.3MB/s eta 0:00:020:02�█████████████       | 688.4MB 123.6MB/s eta 0:00:02�█████████████▎      | 695.4MB 130.2MB/s eta 0:00:02�███████████████████▌      | 701.5MB 120.8MB/s eta 0:00:0202MB/s eta 0:00:02��███      | 719.4MB 118.7MB/s eta 0:00:0225.6MB/s eta 0:00:02/s eta 0:00:02███████████████████▊     | 735.7MB 126.6MB/s eta 0:00:02�█████████████████████     | 741.3MB 119.7MB/s eta 0:00:02�█     | 746.4MB 126.4MB/s eta 0:00:02 eta 0:00:02██████████████████████▌    | 756.9MB 123.5MB/s eta 0:00:02�    | 763.4MB 124.0MB/s eta 0:00:01 | 770.0MB 121.3MB/s eta 0:00:01�██████████████████████▏   | 775.3MB 125.5MB/s eta 0:00:01�████████▎   | 780.8MB 127.8MB/s eta 
0:00:01��███████████████████▌   | 785.6MB 125.5MB/s eta 0:00:01█████████████▊   | 790.9MB 123.2MB/s eta 0:00:01█████████████▉   | 796.1MB 124.6MB/s eta 0:00:01████████████   | 801.0MB 122.0MB/s eta 0:00:01�█████████████████████▎  | 806.0MB 120.2MB/s eta 0:00:01�█████████████████▍  | 810.9MB 126.8MB/s eta 0:00:01████████████████████▋  | 817.0MB 121.9MB/s eta 0:00:01████████████████████████████▉  | 822.6MB 120.6MB/s eta 0:00:01��███████████████████████████  | 827.9MB 119.8MB/s eta 0:00:01█████████████▏ | 832.7MB 130.7MB/s eta 0:00:01��███▍ | 838.8MB 128.0MB/s eta 0:00:01███████████████████▋ | 844.2MB 123.8MB/s eta 0:00:010:01�█████████████ | 854.4MB 125.0MB/s eta 0:00:01  97% |███████████████████████████████▏| 859.2MB 118.2MB/s eta 0:00:01��██████▍| 864.6MB 122.4MB/s eta 0:00:01��████████████████████████████▊| 875.3MB 119.7MB/s eta 0:00:01��███████████████| 880.4MB 120.8MB/s eta 0:00:01\n",
      "\u001b[?25hCollecting torchvision==0.11.2\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/38/03/c963ecdf98fae15286437ae533750e2c39e988b7d8c86fad4dbc73a3a146/torchvision-0.11.2-cp36-cp36m-manylinux1_x86_64.whl (23.3MB)\n",
      "\u001b[K    100% |████████████████████████████████| 23.3MB 131.6MB/s ta 0:00:01�███▊           | 15.1MB 127.8MB/s eta 0:00:01��██████▉| 23.1MB 150.0MB/s eta 0:00:01\n",
      "\u001b[?25hCollecting typing-extensions (from torch==1.10.1)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/45/6b/44f7f8f1e110027cf88956b59f2fad776cca7e1704396d043f89effd3a0e/typing_extensions-4.1.1-py3-none-any.whl\n",
      "Collecting dataclasses; python_version < \"3.7\" (from torch==1.10.1)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/fe/ca/75fac5856ab5cfa51bbbcefa250182e50441074fdc3f803f6e76451fab43/dataclasses-0.8-py3-none-any.whl\n",
      "Requirement already satisfied: numpy in /home/ma-user/anaconda3/envs/Pytorch-1.0.0/lib/python3.6/site-packages (from torchvision==0.11.2)\n",
      "Collecting pillow!=8.3.0,>=5.3.0 (from torchvision==0.11.2)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/7d/2a/2fc11b54e2742db06297f7fa7f420a0e3069fdcf0e4b57dfec33f0b08622/Pillow-8.4.0.tar.gz (49.4MB)\n",
      "\u001b[K    100% |████████████████████████████████| 49.4MB 142.5MB/s ta 0:00:01                 | 10.7MB 126.7MB/s eta 0:00:017.2MB 118.5MB/s eta 0:00:01   44% |██████████████▎                 | 22.0MB 125.6MB/s eta 0:00:01| 28.1MB 140.6MB/s eta 0:00:01█████▋          | 33.3MB 134.0MB/s eta 0:00:01██████████████████████▌      | 39.3MB 135.9MB/s eta 0:00:01█   | 44.8MB 131.1MB/s eta 0:00:01�███████████████████████▉| 49.1MB 153.3MB/s eta 0:00:01\n",
      "\u001b[?25hBuilding wheels for collected packages: pillow\n",
      "  Running setup.py bdist_wheel for pillow ... \u001b[?25ldone\n",
      "\u001b[?25h  Stored in directory: /home/ma-user/.cache/pip/wheels/ff/5e/55/9ce6a14e2a79b1bba1c0cd5087c7b291e4f76dc45a417df5bd\n",
      "Successfully built pillow\n",
      "Installing collected packages: typing-extensions, dataclasses, torch, pillow, torchvision\n",
      "  Found existing installation: torch 1.0.0\n",
      "    Uninstalling torch-1.0.0:\n",
      "      Successfully uninstalled torch-1.0.0\n",
      "  Found existing installation: Pillow 5.0.0\n",
      "    Uninstalling Pillow-5.0.0:\n",
      "      Successfully uninstalled Pillow-5.0.0\n",
      "  Found existing installation: torchvision 0.2.1\n",
      "    Uninstalling torchvision-0.2.1:\n",
      "      Successfully uninstalled torchvision-0.2.1\n",
      "Successfully installed dataclasses-0.8 pillow-8.4.0 torch-1.10.1 torchvision-0.11.2 typing-extensions-4.1.1\n",
      "\u001b[33mYou are using pip version 9.0.1, however version 24.3.1 is available.\n",
      "You should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n",
      "Requirement already satisfied: pillow in /home/ma-user/anaconda3/envs/Pytorch-1.0.0/lib/python3.6/site-packages\n",
      "Collecting nanoid\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/2e/0d/8630f13998638dc01e187fadd2e5c6d42d127d08aeb4943d231664d6e539/nanoid-2.0.0-py3-none-any.whl\n",
      "Requirement already satisfied: tqdm in /home/ma-user/anaconda3/envs/Pytorch-1.0.0/lib/python3.6/site-packages\n",
      "Collecting loguru\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/03/0a/4f6fed21aa246c6b49b561ca55facacc2a44b87d65b8b92362a8e99ba202/loguru-0.7.2-py3-none-any.whl (62kB)\n",
      "\u001b[K    100% |████████████████████████████████| 71kB 25.8MB/s ta 0:00:01\n",
      "\u001b[?25hCollecting aiocontextvars>=0.2.0; python_version < \"3.7\" (from loguru)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/db/c1/7a723e8d988de0a2e623927396e54b6831b68cb80dce468c945b849a9385/aiocontextvars-0.2.2-py2.py3-none-any.whl\n",
      "Collecting contextvars==2.4; python_version < \"3.7\" (from aiocontextvars>=0.2.0; python_version < \"3.7\"->loguru)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/83/96/55b82d9f13763be9d672622e1b8106c85acb83edd7cc2fa5bc67cd9877e9/contextvars-2.4.tar.gz\n",
      "Collecting immutables>=0.9 (from contextvars==2.4; python_version < \"3.7\"->aiocontextvars>=0.2.0; python_version < \"3.7\"->loguru)\n",
      "  Downloading http://repo.myhuaweicloud.com/repository/pypi/packages/fb/ad/154c84dcb517f534c74accd5811d00d41af112ccfe505b7013f32efebb9e/immutables-0.19-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (116kB)\n",
      "\u001b[K    100% |████████████████████████████████| 122kB 24.3MB/s ta 0:00:01\n",
      "\u001b[?25hRequirement already satisfied: typing-extensions>=3.7.4.3; python_version < \"3.8\" in /home/ma-user/anaconda3/envs/Pytorch-1.0.0/lib/python3.6/site-packages (from immutables>=0.9->contextvars==2.4; python_version < \"3.7\"->aiocontextvars>=0.2.0; python_version < \"3.7\"->loguru)\n",
      "Building wheels for collected packages: contextvars\n",
      "  Running setup.py bdist_wheel for contextvars ... \u001b[?25ldone\n",
      "\u001b[?25h  Stored in directory: /home/ma-user/.cache/pip/wheels/af/a1/0f/a32ea5d7589d4fca4e4b506a1338762da61b9853f0cc3202c5\n",
      "Successfully built contextvars\n",
      "Installing collected packages: nanoid, immutables, contextvars, aiocontextvars, loguru\n",
      "Successfully installed aiocontextvars-0.2.2 contextvars-2.4 immutables-0.19 loguru-0.7.2 nanoid-2.0.0\n",
      "\u001b[33mYou are using pip version 9.0.1, however version 24.3.1 is available.\n",
      "You should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "!pip install torch==1.10.1 torchvision==0.11.2\n",
    "!pip install pillow nanoid tqdm loguru`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "427aea50-fe8b-4eab-9433-fde16b63e705",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ca253fb3-fe4a-4945-9000-533d3e49c325",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "bbefe3e4-76bc-4651-9179-e1aaa8f13e56",
   "metadata": {},
   "outputs": [],
   "source": [
    "import json\n",
    "from PIL import Image, ImageDraw, ImageFont,ImageFilter\n",
    "import random\n",
    "from nanoid import generate\n",
    "from tqdm import tqdm\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "73873203-9a77-49c7-9a01-a36ae42ad516",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "total 2808\n",
      "-rw-r--r-- 1 root 704128 May 17  2015 DejaVuSans-Bold.ttf\n",
      "-rw-r--r-- 1 root 331536 May 17  2015 DejaVuSansMono-Bold.ttf\n",
      "-rw-r--r-- 1 root 340240 May 17  2015 DejaVuSansMono.ttf\n",
      "-rw-r--r-- 1 root 756072 May 17  2015 DejaVuSans.ttf\n",
      "-rw-r--r-- 1 root 355692 May 17  2015 DejaVuSerif-Bold.ttf\n",
      "-rw-r--r-- 1 root 379740 May 17  2015 DejaVuSerif.ttf\n"
     ]
    }
   ],
   "source": [
    "ll /usr/share/fonts/truetype/dejavu/"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "066a976f-f2a1-4e79-bbb4-f1c4c52e6b85",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.image.AxesImage at 0x7f2e4c29feb8>"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPwAAAD6CAYAAACF8ip6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAADTNJREFUeJzt3W+MVXV+x/HPR/4kLlCDcEuA4EIcSNdkJVkmCpSNNCkSkAfCatno1iYaSWpiTOqD1dgHstn2QR+4Ro1sSG1jyMqGkNhgqxIQJ2WL2+ydZEU3ulIN7C5IMiQrUzRaF799MJcwOzPnzuVw7h/4vl/JZM6933vP75uT85nfuefce8cRIQA5XNPtBgB0DoEHEiHwQCIEHkiEwAOJEHggEQIPJELggUQIPJDI1HYPMHfu3Fi8eHG7hwFSGxwcPBMRtckeVzrwtl+Q9A1Jr0bED4set3jxYtXr9bLDAGiB7ROtPK7UIb3tLZKmRMRqSQtsLy2zHgCdVfY1/FpJexrLhyStqaQbAG1VNvAzJJ1sLA9Lmje6aHub7brt+tDQ0OX0B6BCZQN/TtK1jeWZY9cTETsjoj8i+mu1Sc8jAOiQsoEf1MXD+OWSjlfSDYC2KnuW/t8kHba9QNIGSSurawlAu5Sa4SNiWCMn7n4u6S8i4myVTQFoj9LX4SPi97p4ph7AFYC31gKJEHggEQIPJELggUQIPJAIgQcSIfBAIgQeSITAA4kQeCARAg8kQuCBRAg8kAiBBxIh8EAiBB5IhMADiRB4IBECDyRC4IFECDyQCIEHEiHwQCIEHkiEwAOJEHggEQIPJHLJgbc91fZvbA80fr7ZjsYAVK/MP5O8WdLuiPh+1c0AaK8yh/QrJW22/TPbP7Fd+j/QAuisMoH/haTbImKNpE8kbRz7ANvbbNdt14eGhi63RwAVKRP4oxHxcWP5fUlLxz4gInZGRH9E9NdqtctqEEB1ygR+l+3ltqdI2izp7Yp7AtAmZV5//0DSS5IsaV9EHKy2JQDtcsmBj4h3NXKmHqN88MEHhbV9+/Y1fe7+/fsLa8ePHy+snTp1qrBmu+mYS5YsKaytX7++sPbII480Xe+iRYua1tFdvPEGSITAA4kQeCARAg8kQuCBRAg8kIgjoq0D9Pf3R71eb+sYvWCyy2BXi1mzZjWt7969u7B2xx13VN0OGmwPRkT/ZI9jhgcSIfBAIgQeSITAA4kQeCARAg8kwtdTVaSvr6+wtnHjuC8F+iPNPp124403FtYWLlxYWPv000+bjjkwMFBYe/TRRwtrJ0+ebLreu+66q7D2zjvvFNaabT9UhxkeSITAA4kQeCARAg8kQuCBRAg8kAiX5Spy7NixbrfwR2bOnNm0vnXr1sJas0tkt956a9P1fv7554W15557rrD29NNPN10vqsEMDyRC4IFECDyQCIEHEiHwQCIEHkiEwAOJcB0e46xYsaKwtmzZsqbPfe+99wprg4ODpXtCNVqa4W3Ps324sTzN9r/bPmL7/va2B6BKkwbe9mxJL0qa0bjrYUn1iFgtaZPt5l9UDqBntDLDn5e0VdJw4/ZaSXsay0ckjfvye9vbbNdt14eGhqroE0AFJg18RAxHxNlRd82QdOF7joYlzZvgOTsjoj8i+mu1WjWdArhsZc7Sn5N0bWN5Zsl1AOiCMmEdlLSmsbxc0vHKugHQVmUuy70o6VXb35Z0k6T/rrYl9LI5c+aUfu6UKVMq7ARltDzDR8Taxu8TktZJ+i9JfxkR59vTGoCqlXrjTUSc0sUz9QCuEJxwAxIh8EAiBB5IhMADifBpOYzT7Jtnjx49Wnq9t9xyS+nnohrM8EAiBB5IhMADiRB4IBECDyRC4IFEuCyHcZ555pnC2vDwcGFNkqZNm1ZYe/DBB0v3hGowwwOJEHggEQIPJELggUQIPJAIgQcSIfBAIlyHT6rZx1y3b99eer2PPfZY
YW3p0qWl14tqMMMDiRB4IBECDyRC4IFECDyQCIEHEuGy3FVqaGioaX3Lli2Ftc8++6ywdvvttzdd75NPPtm0ju5qaYa3Pc/24cbyQtu/sz3Q+Km1t0UAVZl0hrc9WyP/InpG465bJf1DROxoZ2MAqtfKDH9e0lZJF77qZKWkh2y/ZftHbesMQOUmDXxEDEfE2VF3vSZpdUSskrTM9s1jn2N7m+267fpkryUBdE6Zs/RHIuJ/G8vvSxr3BumI2BkR/RHRX6vxEh/oFWUCv9/2fNtfk7Re0rsV9wSgTcpcltsu6U1J/yfpxxHx62pbQquafYPshg0bmj73ww8/LKytWLGisLZ3796m673mGt7a0ctaDnxErG38flPSn7WrIQDtw59jIBECDyRC4IFECDyQCIEHEiHwQCJ8PLbHNfuo6qZNmwprg4ODTdd70003FdZef/31wtqsWbOarhe9jRkeSITAA4kQeCARAg8kQuCBRAg8kAiX5XrAF198UVi78847C2uHDx8urPX19TUd8+DBg4W1uXPnNn0urlzM8EAiBB5IhMADiRB4IBECDyRC4IFEuCzXAV9++WXT+t13311YO3DgQGFt8eLFhbU33nij6Zjz589vWsfViRkeSITAA4kQeCARAg8kQuCBRAg8kAiX5Spy/vz5wto999zT9LmvvPJKYW3RokWFtUOHDhXWbrjhhqZjIqdJZ3jb19l+zfYB2y/bnm77BdtHbP99J5oEUI1WDunvlfRURKyTdFrSdyVNiYjVkhbYXtrOBgFUZ9JD+oh4ftTNmqTvSXq6cfuQpDWSjlXfGoCqtXzSzvYqSbMl/VbSycbdw5LmTfDYbbbrtutDQ0OVNArg8rUUeNvXS3pW0v2Szkm6tlGaOdE6ImJnRPRHRH+tVquqVwCXqZWTdtMl7ZH0eESckDSokcN4SVou6XjbugNQqVZm+AckrZD0hO0BSZb017afkvRXkv6jfe0BqFIrJ+12SNox+j7b+yStk/RPEXG2Tb31nK+++qqwdt999xXW9u7d23S9CxYsKKw1u9a+ZMmSpusFxir1xpuI+L1GDvMBXEF4ay2QCIEHEiHwQCIEHkiEwAOJ8PHYS/DRRx8V1l566aXS6z116lRhbenSq+ezSXPmzCmsnTlzpoOd5MUMDyRC4IFECDyQCIEHEiHwQCIEHkiEwAOJEHggEQIPJELggUQIPJAIgQcSIfBAInxa7hL09fUV1iKig50A5TDDA4kQeCARAg8kQuCBRAg8kAiBBxIh8EAik16Ht32dpJ82HntO0lZJ/yPpwle4PhwR77StQwCVaWWGv1fSUxGxTtJpSY9J2h0Raxs/hB24QrTy76KfH3WzJum3kjbb/nNJJyT9TUT8oU39AahQy6/hba+SNFvSAUm3RcQaSZ9I2jjBY7fZrtuuDw0NVdYsgMvTUuBtXy/pWUn3SzoaER83Su9LGvevUSJiZ0T0R0R/rVarrFkAl2fSwNueLmmPpMcj4oSkXbaX254iabOkt9vcI4CKtDLDPyBphaQnbA9I+pWkXZJ+KemtiDjYvvYAVKmVk3Y7JO0Yc/f29rQDoJ144w2QCIEHEiHwQCIEHkiEwAOJEHggEQIPJELggUQIPJAIgQcSIfBAIgQeSITAA4kQeCARAg8kQuCBRAg8kAiBBxIh8EAiBB5IhMADiRB4IBECDyRC4IFECDyQCIEHEiHwQCIEHkjEEdHeAewhSSdG3TVX0pm2Dnpp6GdyvdYT/Yz39YioTfagtgd+3IB2PSL6OzpoE/QzuV7riX7K45AeSITAA4l0I/A7uzBmM/QzuV7riX5K6vhreADdwyE9kEjawNueavs3tgcaP9/sdk+9xPY824cbywtt/27Utpr08s/VyvZ1tl+zfcD2y7anX0n7UUcP6W2/IOkbkl6NiB92bOCJe/mWpK0R8f1u9tHoZZ6kvRHxbdvTJL0s6XpJ/xwR/9KFfmZL2i3pTyPiW7a3SJoXETu60Mt1
kn4qaaqkc5K2StqhLu1Hth+SdCwiDtjeIeljSTN6YT9qRcdm+MZOMyUiVktaYHtpp8YusFLSZts/s/0T21O70UQjXC9KmtG462FJ9cZ22mR7VhfaOq+RYA03bq+U9JDtt2z/qMO93CvpqYhYJ+m0pO+qi/tRRDwfEQcaN2uS/qAe2I9a1clD+rWS9jSWD0la08GxJ/ILSbdFxBpJn0ja2KU+xoZrrS5upyOSOv6GjogYjoizo+56TdLqiFglaZntmzvYy9iAfU89sB/ZXiVptqQD6o39qCWd/Gs0Q9LJxvKwpL4Ojj2RoxHxRWP5fUldOeKIiGFJsn3hrrHbaV4X2hrryATb6mgnGxgVsOPq8n5k+3pJz0r6jqTTvbAftaqTM/w5Sdc2lmd2eOyJ7LK93PYUSZslvd3lfi7ote0kSfttz7f9NUnrJb3bycFHBex+dXn72J6ukSOMxyPihHp3P5pQJzfWoC4efi3XyF/qbvqBpF2SfinprYg42OV+Lui17SRJ2yW9Kennkn4cEb/u1MATBKzb2+cBSSskPWF7QNKv1Jv70YQ6dpbe9p9IOizpDUkbJK0c8zoxNdsDEbHW9tclvSrpoKTVGtlO57vbXffY/ltJ/6iLM+e/Svo7sR+V0unLcrMlrZP0nxFxumMDX2FsL9DILLafnXk89qPyeGstkEgvnBAC0CEEHkiEwAOJEHggEQIPJPL/O1NIiy9KY08AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
     "import matplotlib.pyplot as plt\n",
     "\n",
     "# Quick visual check: render a single character the same way the dataset\n",
     "# generator below will (30x30 white RGBA canvas, DejaVu Sans Mono, size 25).\n",
     "img = Image.new(\"RGBA\", (30, 30), (255, 255, 255, 255))\n",
     "draw = ImageDraw.Draw(img)\n",
     "font = ImageFont.truetype(\"/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf\", 25)\n",
     "draw.text(xy=(6, 0), font=font, text=\"2\", fill=(0, 0, 0))\n",
     "\n",
     "plt.imshow(img)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "c53e63f2-4828-4328-bb66-ef4c9b4677c8",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']\n"
     ]
    }
   ],
   "source": [
     "def make_alpha_num():\n",
     "    \"\"\"Return the 62 label characters: digits 0-9, then A-Z, then a-z.\"\"\"\n",
     "    # digits (48-57), uppercase letters (65-90), lowercase letters (97-122)\n",
     "    return [chr(i) for i in range(48,58)] + [chr(i) for i in range(65,91)] + [chr(i) for i in range(97,123)] \n",
     "\n",
     "alpha_num = make_alpha_num()\n",
     "print(alpha_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "b48c8b6e-844f-4e0b-a066-be862947f27c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.image.AxesImage at 0x7f2dac5ec438>"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPwAAAD6CAYAAACF8ip6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAADspJREFUeJzt3X+MFHWexvHnOVREQIM6ElkImByrSxSidDxEjGNy46pZTVjjzkbXXPwFasQfFxPPePyxZs9E/2DP+AOF8xTJnoqXaDYehIAKi4d7oSdZwVU3ezlFxxOdTVbmSAyy5HN/TBtwmKpuarq6e/y+X8nEmv501fdjpx6qpr7d1Y4IAUjDX7W7AQCtQ+CBhBB4ICEEHkgIgQcSQuCBhBB4ICEEHkgIgQcSckzZA5x66qkxa9assocBktbX1/eniOiq97zCgbf9jKQfSFofEb/Iet6sWbNUrVaLDgOgAbZ3N/K8Qqf0tn8saVxELJQ0zfbsItsB0FpF/4bvlrSutvyGpEVN6QZAqYoGfqKkT2vLg5KmHl60vcR21XZ1YGBgNP0BaKKigd8naUJtedLw7UTEqoioRESlq6vudQQALVI08H06dBo/T9JHTekGQKmKXqV/VdI229MkXS5pQfNaAlCWQkf4iBjU0IW730q6JCL2NrMpAOUoPA8fEX/WoSv1AMYA3loLJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBCjjrwto+x/bHtLbWfc8poDEDzFfkyybmSXoiI+5rdDIByFTmlXyBpse23bP/KduFvoAXQWkUCv0PSxRGxSNKXkq4Y/gTbS2xXbVcHBgZG2yOAJikS+J0R8Vlt+QNJs4c/ISJWRUQlIipdXV2jahBA8xQJ/Frb82yPk7RY0jtN7glASYr8/f2gpH+TZEm/jojNzW0JQFmOOvAR8a6GrtSjQQcOHMitr169OrP20ksvZda2bt1auCekiTfeAAkh8EBCCDyQEAIPJITAAwkh8EBCeB/8Udi7d2+h9U444YQmdzLklFNOyawNDg7mrjtp0qTM2pw5cwr3dM0112TWbrvttsza+PHjC4+JxnGEBxJC4IGEEHggIQQeSAiBBxJC4IGEOCJKHaBSqUS1Wi11jFYpOi3X09OTW9+xY0eh7Y4155yTfb/T119/PbPGTVTqs90XEZV6z+MIDySEwAMJIfBAQgg8kBACDySEwAMJ4dNyR2H58uWF1hvNtNtdd92VWbvjjjsya9OnT8/d7p49ewr1k3fDTUl66KGHMmu7du3KrN17772ZtTVr1tRvDA3hCA8khMADCSHwQEIIPJAQAg8khMADCSHwQEL4eOxRmDFjRqH1+vv7c+uXXnppZm3jxo2FxmyX3t7ezNq6desya5MnT86s1bsDL5r88VjbU21vqy0fa/s129tt3zjaRgG0Tt3A254iaY2kibWHlkmqRsRCST+ynf1PM4CO0sgR/qCkXknfnFd1S/rm3Gy7pCNOI2wvsV21XR0YGGhGnwCaoG7gI2IwIg6/t9NESZ/WlgclTR1hnVURUYmICrcnAjpHkav0+yRNqC1PKrgNAG1QJKx9khbVludJ+qhp3QAoVZGPx66RtN72RZLmSPqv5rbUufbv31/Kds8777xSttsO5557bmYtb1ruwIEDmbXRTB3bLrzud1HDR/iI6K79d7ekHkn/KelvI+JgOa0BaLZCN8CIiP/VoSv1AMYILrgBCSHwQEIIPJAQAg8khLvWHoUvvvgis3bllVdm1l577bXc7e7cubNwT52m6P/L+eef3+ROMBKO8EBCCDyQEAIPJITAAwkh8EBCCDyQEKblmuThhx/OrG3fvj133fXr12fW7rvvvszarbfe
mlmbNm1a7piff/55bj3Ls88+W2i90eju7s6tb926tTWNfAdwhAcSQuCBhBB4ICEEHkgIgQcSQuCBhBB4ICHMwzfJnDlzMmt9fX256z744IOZtSeeeCKz9sgjj9RvrIDdu3cXXvfFF1/MrJ144omZtRUrVhQeE43jCA8khMADCSHwQEIIPJAQAg8khMADCWFargX6+/sL17/66qtmt1PXzJkzM2vHH3987rrXXnttZm358uWZtTPPPLN+Yxi1ho7wtqfa3lZb/p7tfttbaj9d5bYIoFnqHuFtT9HQV0RPrD30N5L+KSJWltkYgOZr5Ah/UFKvpMHa7wsk3W77bdu/LK0zAE1XN/ARMRgRew97aIOkhRFxgaTv2547fB3bS2xXbVcHBgaa2C6A0ShylX57RPxfbfkDSbOHPyEiVkVEJSIqXV38iQ90iiKB32j7dNsnSPqhpHeb3BOAkhSZlvu5pDclfS3pqYj4Q3NbGpteffXVzFpvb2/uugcOHMis2S7cUxn279+fW8+bYvzkk08ya0zLtUbDgY+I7tp/35R0VlkNASgP77QDEkLggYQQeCAhBB5ICIEHEkLggYTw8dhh6s0zZ7nzzjsza19//XXuunlz7UuXLs2sLVu2LLOW9xHXej7++OPM2sqV+Z+ZevzxxzNrl112WWbt+eefr99YhryP5OLbOMIDCSHwQEIIPJAQAg8khMADCSHwQEKYlhtmx44dLR/zhhtuyKw9+eSTLexkyFlnZX8Y8tFHH81d9+DBg5m1vC/GvPvuu+s3luGqq67KrE2aNKnwdr+LOMIDCSHwQEIIPJAQAg8khMADCSHwQEKYlhvmww8/zKydccYZmbW8O7LWc/PNNxdet9PccsstmbW8abm8Lyzhuw2ahyM8kBACDySEwAMJIfBAQgg8kBACDySEablhrr/++sza2rVrSxlz+vTppWz3uyJvyk6S3nrrrcxa3o0zU1T3CG/7JNsbbG+y/Yrt42w/Y3u77X9sRZMAmqORU/rrJK2IiB5JeyT9VNK4iFgoaZrt2WU2CKB56p7SR8Thd2DokvQzSf9c+/0NSYsk/bH5rQFotoYv2tm+QNIUSZ9I+rT28KCkqSM8d4ntqu1qvb+/ALROQ4G3fbKkxyTdKGmfpAm10qSRthERqyKiEhEV3gcNdI5GLtodJ2mdpPsjYrekPg2dxkvSPEkfldYdgKZq5Ah/k6T5kh6wvUWSJV1ve4Wkn0j6j/LaA9BMjVy0WynpW98gaPvXknokPRIRe0vqreMce+yxpWw378sbZ8yYUcqYZWlHv3kfaca3FXrjTUT8WUOn+QDGEN5aCySEwAMJIfBAQgg8kBACDySEj8cehfnz55ey3dWrV2fWLrzwwlLGLMto7t5b1Pjx41s+5ljFER5ICIEHEkLggYQQeCAhBB5ICIEHEsK03FGYPTv79n2XX355Zm3Dhg25212zZk1mbcKECZm1lStXZtbaJW+KsSyVSqXlY45VHOGBhBB4ICEEHkgIgQcSQuCBhBB4ICFMyzXJc889l1nr6enJXXfnzp2Fxnz//fcLrSdJM2fOzKz19/dn1p5++unc7ZYxVXjJJZc0fZup4ggPJITAAwkh8EBCCDyQEAIPJITAAwkh8EBC6s7D2z5J0ou15+6T1CvpvyX9T+0pyyJiV2kdjhGnnXZaKdt96qmnCtU60Zw5czJr7733XuHtzp07t/C6qWnkCH+dpBUR0SNpj6R/kPRCRHTXfpIPOzBWNPJ10U8e9muXpE8kLbZ9oaTdkv4uIv5SUn8Amqjhv+FtXyBpiqRNki6OiEWSvpR0xQjPXWK7ars6MDDQtGYBjE5Dgbd9sqTHJN0oaWdEfFYrfSDpiPs+RcSqiKhERKWrq6tpzQIYnbqBt32cpHWS7o+I3ZLW2p5ne5ykxZLeKblHAE3SyBH+JknzJT1ge4uk30taK+l3kt6OiM3ltQegmRwRpQ5QqVSiWq2WOkan279/f249706vL7/8
cmZt167iEySDg4OZtcmTJ2fWzj777NztXn311Zm1e+65J7OWN2W3eXP+MeX000/PrafAdl9E1L19L2+8ARJC4IGEEHggIQQeSAiBBxJC4IGEMC2Ho3LRRRfl1vOm5fIsXbo0s5b3hZoYwrQcgCMQeCAhBB5ICIEHEkLggYQQeCAhfJkkjsq2bdva3QJGgSM8kBACDySEwAMJIfBAQgg8kBACDySEwAMJIfBAQgg8kBACDySEwAMJIfBAQgg8kBACDySk9LvW2h6QtPuwh06V9KdSBz069FNfp/VEP0eaGRFd9Z5UeuCPGNCuNnI73Vahn/o6rSf6KY5TeiAhBB5ISDsCv6oNY+ahn/o6rSf6Kajlf8MDaB9O6YGEJBt428fY/tj2ltrPOe3uqZPYnmp7W235e7b7D3ut6k7/fFfZPsn2BtubbL9i+7ixtB+19JTe9jOSfiBpfUT8omUDj9zLeZJ6I+K+dvZR62WqpH+PiItsHyvpFUknS/qXiPjXNvQzRdILkk6LiPNs/1jS1IhY2YZeTpL0ooZuqb5PUq+klWrTfmT7dkl/jIhNtldK+kzSxE7YjxrRsiN8bacZFxELJU2zPbtVY2dYIGmx7bds/8p2W+7RXwvXGkkTaw8tk1StvU4/sj25DW0d1FCwBmu/L5B0u+23bf+yxb1cJ2lFRPRI2iPpp2rjfhQRT0bEptqvXZL+og7YjxrVylP6bknrastvSFrUwrFHskPSxRGxSNKXkq5oUx/Dw9WtQ6/Tdkktf0NHRAxGxN7DHtogaWFEXCDp+7bntrCX4QH7mTpgP7J9gaQpkjapM/ajhrTyX6OJkj6tLQ9K+usWjj2SnRGxv7b8gaS2nHFExKAk2f7moeGv09Q2tDXc9hFeq52tbOCwgH2kNu9Htk+W9JikqyXt6YT9qFGtPMLvkzShtjypxWOPZK3tebbHSVos6Z029/ONTnudJGmj7dNtnyDph5LebeXghwXsRrX59bF9nIbOMO6PiN3q3P1oRK18sfp06PRrnob+pW6nByWtlfQ7SW9HxOY29/ONTnudJOnnkt6U9FtJT0XEH1o18AgBa/frc5Ok+ZIesL1F0u/VmfvRiFp2ld72iZK2SXpd0uWSFgz7OzFptrdERLftmZLWS9osaaGGXqeD7e2ufWzfJukhHTpyPivp78V+VEirp+WmSOqR9JuI2NOygccY29M0dBTbyM58JPaj4nhrLZCQTrggBKBFCDyQEAIPJITAAwkh8EBC/h/AYtit94qKiwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "def make_img(alpha_num, is_rotate=False,is_twist=False, is_blur=False):\n",
    "    \"\"\"\n",
    "    生成字母/数字图片\n",
    "    \"\"\"\n",
    "    img = Image.new(\"RGBA\", (30, 30), (255, 255, 255, 255))\n",
    "    img2 = Image.new(\"RGBA\", (30, 30), (255, 255, 255, 255))\n",
    "    draw = ImageDraw.Draw(img)\n",
    "    font = ImageFont.truetype(\"/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf\", 25)\n",
    "    draw.text(xy=(5, 0), font=font, text=str(alpha_num), fill=(0, 0, 0))\n",
    "    if is_rotate:\n",
    "        angle = random.randint(-10, 10)\n",
    "        img = img.rotate(angle)\n",
    "        _, _, _, a = img.split()\n",
    "        img2.paste(img, mask=a)\n",
    "        img = img2\n",
    "    if is_twist: # 图片透视变幻\n",
    "        perspective_matrix = [\n",
    "            1 - float(random.randint(1, 2)) / 100,\n",
    "            0,\n",
    "            0,\n",
    "            0,\n",
    "            1 - float(random.randint(1, 10)) / 100,\n",
    "            float(random.randint(1, 10)) / 500,\n",
    "            0.005,\n",
    "            float(random.randint(1, 2)) / 500,\n",
    "        ]\n",
    "        img = img.transform((30, 30), Image.PERSPECTIVE, perspective_matrix)\n",
    "    if is_blur:\n",
    "        radius = random.choice([0,1,2]*100)\n",
    "        img = img.filter(ImageFilter.GaussianBlur(radius=radius))\n",
    "    img = img.convert(\"RGB\")\n",
    "    return img\n",
    "\n",
    "plt.imshow(make_img(8,is_rotate=True,is_twist=True,is_blur=False))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "72231a64-5efb-4cf6-83ab-3d02e638e1c4",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 10000/10000 [01:21<00:00, 123.39it/s]\n"
     ]
    }
   ],
   "source": [
     "os.makedirs(\"datasets\",exist_ok=True) # create the dataset directory\n",
     "alpha_num = make_alpha_num()\n",
     "# Generate 10,000 augmented character images; the label is encoded in the\n",
     "# file name as \"<char>_<random nanoid>.jpg\" and recovered later by the Dataset.\n",
     "for _ in tqdm(range(10000)):\n",
     "    an = random.choice(alpha_num)\n",
     "    img = make_img(an, is_rotate=True, is_twist=True, is_blur=True)\n",
     "    img.save(f\"datasets/{an}_{generate()}.jpg\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "dc2ff06e-6bd5-495c-b3c2-0b993bbeacae",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([1, 62])\n"
     ]
    }
   ],
   "source": [
     "import torch\n",
     "from torch import nn\n",
     " \n",
     " \n",
     "\n",
     "class LeNetConv(nn.Module):\n",
     "    \"\"\"LeNet-5-style CNN mapping a 3-channel image to 62 class logits.\"\"\"\n",
     "    def __init__(self):\n",
     "        super(LeNetConv, self).__init__()\n",
     "        # conv1 keeps the spatial size (5x5 kernel with padding=2)\n",
     "        self.conv1 = nn.Conv2d(in_channels=3, out_channels=6, kernel_size=5, padding=2)\n",
     "        self.Sigmoid = nn.Sigmoid()  # shared activation module (no parameters)\n",
     "        self.pool2 = nn.AvgPool2d(kernel_size=2, stride=2)\n",
     "        self.conv3 = nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5)\n",
     "        self.pool4 = nn.AvgPool2d(kernel_size=2, stride=2)\n",
     "        # for a 28x28 input the feature map is 5x5 here, so conv5 yields 1x1x120\n",
     "        self.conv5 = nn.Conv2d(in_channels=16, out_channels=120, kernel_size=5)\n",
     "        self.flatten = nn.Flatten()\n",
     "        self.linear6 = nn.Linear(120, 84)\n",
     "        self.output = nn.Linear(84, 62)  # 10 digits + 26 upper + 26 lower\n",
     " \n",
     "    def forward(self, x):\n",
     "        # x: (N, 3, H, W); returns raw logits (CrossEntropyLoss applies log-softmax)\n",
     "        x = self.Sigmoid(self.conv1(x))\n",
     "        x = self.pool2(x)\n",
     "        x = self.Sigmoid(self.conv3(x))\n",
     "        x = self.pool4(x)\n",
     "        # NOTE(review): no activation after conv5/linear6 — classic LeNet-5\n",
     "        # applies one at each stage; confirm this is intended\n",
     "        x = self.conv5(x)\n",
     "        x = self.flatten(x)\n",
     "        x = self.linear6(x)\n",
     "        x = self.output(x)\n",
     "        return x\n",
     " \n",
     "\n",
     "if __name__ == \"__main__\":\n",
     "    x = torch.rand([1, 3, 28, 28])\n",
     "    model = LeNetConv()\n",
     "    print(model(x).shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "e19b1a16-6cbd-41dc-a668-a17f1b87f6f7",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[32m2024-12-05 14:52:56.175\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m14\u001b[0m - \u001b[1mpic num:10000\u001b[0m\n",
      "\u001b[32m2024-12-05 14:52:56.176\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m21\u001b[0m - \u001b[1mtrainset num:9000\u001b[0m\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "9000\n"
     ]
    }
   ],
   "source": [
    "from torch.utils import data\n",
    "from glob import glob\n",
    "import random\n",
    "import os\n",
    "from loguru import logger\n",
    "from PIL import Image\n",
    "from torch.utils.data import DataLoader\n",
    "\n",
    "\n",
    "class NumAlphabetDataset(data.Dataset):\n",
    "    def __init__(self,root_dir,transform,mode=\"train\",train_split=0.9):\n",
    "        datas = glob(os.path.join(root_dir,\"*.jpg\"))\n",
    "        random.shuffle(datas)\n",
    "        logger.info(\"pic num:{}\",len(datas))\n",
    "        train_num = int(len(datas) * train_split)\n",
    "        test_num = len(datas) - train_num\n",
    "        self.transform = transform\n",
    "        self.labels = make_alpha_num()\n",
    "        if mode == \"train\":\n",
    "            self.data = datas[:train_num]\n",
    "            logger.info(\"trainset num:{}\",train_num)\n",
    "        else:\n",
    "            self.data = datas[train_num:]\n",
    "            logger.info(\"testset num:{}\",test_num)\n",
    "    \n",
    "    def __getitem__(self,index):\n",
    "        img = Image.open(self.data[index])\n",
    "        name = os.path.splitext(os.path.basename(self.data[index]))[0].split(\"_\")[0]\n",
    "        label = self.labels.index(name)\n",
    "        img = self.transform(img)\n",
    "        return img,label\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.data)\n",
    "    \n",
    "train_set = NumAlphabetDataset(\"./datasets\",None,\"train\")\n",
    "print(len(train_set))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "97ffc329-d6d8-48ac-959d-6dfe36192674",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[32m2024-12-05 14:53:17.178\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m14\u001b[0m - \u001b[1mpic num:10000\u001b[0m\n",
      "\u001b[32m2024-12-05 14:53:17.179\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m21\u001b[0m - \u001b[1mtrainset num:9000\u001b[0m\n",
      "\u001b[32m2024-12-05 14:53:17.211\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m14\u001b[0m - \u001b[1mpic num:10000\u001b[0m\n",
      "\u001b[32m2024-12-05 14:53:17.212\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m24\u001b[0m - \u001b[1mtestset num:1000\u001b[0m\n"
     ]
    }
   ],
   "source": [
     "import torch\n",
     "from torch import nn\n",
     "from torch.utils.data import DataLoader\n",
     "from torchvision import transforms\n",
     "from loguru import logger\n",
     "from torch.optim import lr_scheduler\n",
     "import os\n",
     "\n",
     "epochs = 12 \n",
     "\n",
     "\n",
     "# Only ToTensor (scales pixel values to [0, 1]); no normalization/augmentation\n",
     "lenet_transform = transforms.Compose([\n",
     "    transforms.ToTensor()\n",
     "])\n",
     "\n",
     "train_set = NumAlphabetDataset(\"./datasets\",lenet_transform,\"train\")\n",
     "test_set = NumAlphabetDataset(\"./datasets\",lenet_transform,\"test\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "bf058a86-e010-4923-af19-cc0ded185330",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "563 63\n"
     ]
    }
   ],
   "source": [
     "train_loader = DataLoader(dataset=train_set,batch_size=16,shuffle=True)\n",
     "# NOTE(review): shuffle=True on the test loader is harmless for evaluation\n",
     "# but unnecessary; consider shuffle=False\n",
     "test_loader = DataLoader(dataset=test_set,batch_size=16,shuffle=True)\n",
     "\n",
     "print(len(train_loader),len(test_loader))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "39f2a26a-7814-4c29-bae4-b32cf05fa1ea",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "SGD (\n",
      "Parameter Group 0\n",
      "    dampening: 0\n",
      "    initial_lr: 0.01\n",
      "    lr: 0.01\n",
      "    momentum: 0.9\n",
      "    nesterov: False\n",
      "    weight_decay: 0\n",
      ")\n"
     ]
    }
   ],
   "source": [
     "mln = LeNetConv()\n",
     "loss_fn = nn.CrossEntropyLoss()  # expects raw logits from the model\n",
     "optimizer = torch.optim.SGD(mln.parameters(), lr=1e-2, momentum=0.9)\n",
     "# decay the learning rate by a factor of 10 every 10 epochs\n",
     "lr_scheduler2 = lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)\n",
     "print(optimizer)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "dcc38ce7-dea3-4795-9a9f-6d61665001ed",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[32m2024-12-05 14:55:36.911\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m1 epoch: train --loss:4.134932054719536,--acc:0.018761101243339254\u001b[0m\n",
      "\u001b[32m2024-12-05 14:55:42.503\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m1 epoch: eval --loss:4.125249908083961,--acc:0.01984126984126984\u001b[0m\n",
      "\u001b[32m2024-12-05 14:57:14.702\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m2 epoch: train --loss:4.125795403339935,--acc:0.019538188277087035\u001b[0m\n",
      "\u001b[32m2024-12-05 14:57:20.205\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m2 epoch: eval --loss:4.122239688086132,--acc:0.015873015873015872\u001b[0m\n",
      "\u001b[32m2024-12-05 14:58:51.102\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m3 epoch: train --loss:4.120649637591564,--acc:0.020537300177619892\u001b[0m\n",
      "\u001b[32m2024-12-05 14:58:56.604\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m3 epoch: eval --loss:4.10623001673865,--acc:0.015873015873015872\u001b[0m\n",
      "\u001b[32m2024-12-05 15:00:27.103\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m4 epoch: train --loss:3.1952324363520472,--acc:0.16751776198934282\u001b[0m\n",
      "\u001b[32m2024-12-05 15:00:32.904\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m4 epoch: eval --loss:1.3115985138075692,--acc:0.6121031746031746\u001b[0m\n",
      "\u001b[32m2024-12-05 15:02:04.703\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m5 epoch: train --loss:0.6822579720224623,--acc:0.7678730017761989\u001b[0m\n",
      "\u001b[32m2024-12-05 15:02:10.203\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m5 epoch: eval --loss:0.30933043567670715,--acc:0.8829365079365079\u001b[0m\n",
      "\u001b[32m2024-12-05 15:03:40.603\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m6 epoch: train --loss:0.2963584871271205,--acc:0.8877664298401421\u001b[0m\n",
      "\u001b[32m2024-12-05 15:03:46.104\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m6 epoch: eval --loss:0.21274207641799298,--acc:0.9265873015873016\u001b[0m\n",
      "\u001b[32m2024-12-05 15:05:18.303\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m7 epoch: train --loss:0.21559350427610677,--acc:0.9198490230905861\u001b[0m\n",
      "\u001b[32m2024-12-05 15:05:23.904\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m7 epoch: eval --loss:0.21114975287919008,--acc:0.9325396825396826\u001b[0m\n",
      "\u001b[32m2024-12-05 15:06:55.503\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m8 epoch: train --loss:0.15934336626758305,--acc:0.9389431616341031\u001b[0m\n",
      "\u001b[32m2024-12-05 15:07:00.911\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m8 epoch: eval --loss:0.127989553173797,--acc:0.9454365079365079\u001b[0m\n",
      "\u001b[32m2024-12-05 15:08:33.201\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m9 epoch: train --loss:0.1388965015461731,--acc:0.9437166962699822\u001b[0m\n",
      "\u001b[32m2024-12-05 15:08:38.800\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m9 epoch: eval --loss:0.10008359548916655,--acc:0.9523809523809523\u001b[0m\n",
      "\u001b[32m2024-12-05 15:10:10.205\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m10 epoch: train --loss:0.12323397765478672,--acc:0.9463809946714032\u001b[0m\n",
      "\u001b[32m2024-12-05 15:10:15.805\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m10 epoch: eval --loss:0.11827716714007751,--acc:0.9533730158730159\u001b[0m\n",
      "\u001b[32m2024-12-05 15:11:47.702\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m11 epoch: train --loss:0.11683155421447174,--acc:0.9528197158081705\u001b[0m\n",
      "\u001b[32m2024-12-05 15:11:53.204\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m11 epoch: eval --loss:0.0861427189471821,--acc:0.9613095238095238\u001b[0m\n",
      "\u001b[32m2024-12-05 15:13:24.603\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m17\u001b[0m - \u001b[1m12 epoch: train --loss:0.09503267550047778,--acc:0.9587033747779752\u001b[0m\n",
      "\u001b[32m2024-12-05 15:13:30.104\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m32\u001b[0m - \u001b[1m12 epoch: eval --loss:0.06663455673131263,--acc:0.9702380952380952\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "for i in range(epochs):\n",
    "    # ---- train ----\n",
    "    loss_sum, correct, n_batches, n_samples = 0.0, 0, 0, 0\n",
    "    mln.train()\n",
    "    for img, label in train_loader:\n",
    "        output = mln(img)\n",
    "        mini_loss = loss_fn(output, label)\n",
    "        _, pred = torch.max(output, dim=1)\n",
    "        optimizer.zero_grad()\n",
    "        mini_loss.backward()\n",
    "        optimizer.step()\n",
    "        loss_sum += mini_loss.item()\n",
    "        # Count correct predictions per sample so the accuracy is not biased\n",
    "        # by the smaller final batch (a per-batch average of batch accuracies\n",
    "        # over-weights the last, partial batch).\n",
    "        correct += torch.sum(label == pred).item()\n",
    "        n_batches += 1\n",
    "        n_samples += output.shape[0]\n",
    "    train_loss = loss_sum / n_batches\n",
    "    train_acc = correct / n_samples\n",
    "    logger.info(\"{} epoch: train --loss:{},--acc:{}\", i + 1, train_loss, train_acc)\n",
    "\n",
    "    # ---- eval ----\n",
    "    mln.eval()\n",
    "    loss_sum, correct, n_batches, n_samples = 0.0, 0, 0, 0\n",
    "    with torch.no_grad():\n",
    "        for img, label in test_loader:\n",
    "            output = mln(img)\n",
    "            loss_sum += loss_fn(output, label).item()\n",
    "            _, pred = torch.max(output, dim=1)\n",
    "            correct += torch.sum(label == pred).item()\n",
    "            n_batches += 1\n",
    "            n_samples += output.shape[0]\n",
    "    val_loss = loss_sum / n_batches\n",
    "    val_acc = correct / n_samples\n",
    "    logger.info(\"{} epoch: eval --loss:{},--acc:{}\", i + 1, val_loss, val_acc)\n",
    "\n",
    "    # Bug fix: lr_scheduler2 (StepLR, defined in the cell above) was created\n",
    "    # but never stepped, so the scheduled LR decay never took effect.\n",
    "    lr_scheduler2.step()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "dcd4c0fa-6803-451d-8a3c-a742158cdbec",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[32m2024-12-05 15:17:40.540\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m<module>\u001b[0m:\u001b[36m3\u001b[0m - \u001b[1mtrain over, test acc:0.9702380952380952,train acc:0.9587033747779752\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "# exist_ok=True: re-running this cell must not crash with FileExistsError\n",
    "# when the directory already exists.\n",
    "os.makedirs(\"models_weights\", exist_ok=True)\n",
    "torch.save(mln.state_dict(), os.path.join(\"models_weights\", \"best.pth\"))\n",
    "logger.info(\"train over, test acc:{},train acc:{}\", val_acc, train_acc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "844e00b4-b858-4b02-9dd3-cecfd53bcd9f",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[32m2024-12-05 15:19:47.791\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m14\u001b[0m - \u001b[1mpic num:10000\u001b[0m\n",
      "\u001b[32m2024-12-05 15:19:47.792\u001b[0m | \u001b[1mINFO    \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m24\u001b[0m - \u001b[1mtestset num:1000\u001b[0m\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.image.AxesImage at 0x7f2d0e135518>"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPwAAAD6CAYAAACF8ip6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAEaBJREFUeJzt3V2MVGWaB/D/X6RBG2kaKBtbk+FDcAc/GodORJaJbLJkMmZumE2WSZzJJpqQrIk3ezNr3Bsns3uxF84mJjIh624MmXVCNnEz2dUYXO0Ms8jGwigfChFGGoYPaUBoG/nm2Ysulp6m3+eUp+qcqub5/xJiwVNd79un6++prqfe99DMICIx3NbqCYhIeRR4kUAUeJFAFHiRQBR4kUAUeJFAFHiRQBR4kUAUeJFAbi96gLlz59r8+fOLHkYktB07dpw0s0rW/XIHnuSrAL4N4E0z+3nqfvPnz0e1Ws07jIjUgeRgPffL9ZKe5A8BTDGzlQB6SS7O8zgiUq68v8OvBrC5dvtdAKuaMhsRKVTewHcCOFK7PQygZ2yR5HqSVZLVoaGhRuYnIk2UN/AjAO6o3Z4x/nHMbKOZ9ZtZf6WS+T6CiJQkb+B34MbL+D4AB5syGxEpVN536f8DwFaSvQC+D2BF86YkIkXJdYY3s2GMvnG3HcCfmdnZZk5KRIqRuw9vZl/ixjv1IjIJ6KO1IoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigSjwIoEo8CKBKPAigXzjwJO8neQhkgO1Pw8XMTERab48F5N8BMDrZvbTZk9GRIqV5yX9CgBrSf6O5K9I5r4CrYiUK0/gPwDwhJmtAnAGwJPj70ByPckqyerQ0FCjcxSRJskT+J1mdqx2ey+AxePvYGYbzazfzPorlUpDExSR5skT+E0k+0hOAbAWwMdNnpOIFCTP798/A/BvAAjgN2b2TnOnJCJF+caBN7PdGH2nXkQmGX3wRiQQBV4kEAVeJBAFXiQQBV4kEAVeJBAFXiQQBV4kEAVeJBAFXiQQBV4kEAVeJBAFXiSQcNtTXb161a2bWbJGMlmbMmVK7jldunQp13ymTZuWe8zLly8na973cttt/jniypUrydq1a9eStalTpyZr3nEH/GPkjel9L1ljTlY6w4sEosCLBKLAiwSiwIsEosCLBKLAiwRyS7bl8rbWgPztNa+1dvvt/mH2WkcXL15M1rzWWtb3mdWeTMk6Ph0dHblqjSiqXXor0hleJBAFXiQQBV4kEAVeJBAFXiQQBV4kEAVeJJC27sN7/fS8vfas5Z15efM5e/as+7VDQ0PJ2hdffJGsjYyMZE8swftsgFebOXOm+7j33HNPstbT05Osef3yrM8MtOLnPVnVdTRI9pDcWrs9leR/ktxG8ulipycizZQZeJLdAF4D0Fn7p+cAVM1sJYAfkLyrwPmJSBPVc4a/CmAdgOHa31cD2Fy7vQ1A//gvILmeZJVk1Xu5KiLlygy8mQ2b2dhfQjsBHKndHgZw0y9mZrbRzPrNrL9SqTRnpiLSsDzvaIwAuKN2e0bOxxCRFsgT1h0AVtVu9wE42LTZiEih8rTlXgPwJsnvAlgK4H+bO6UbvHaL1wZrhPe4XnvIW8a6d+9ed8w9e/Yka4ODg8nakSNHkrWslp23421nZ2ey9sADD7iPu3Tp0mTNWwY8a9asZM3b0RbIv3uvt8Nu1pLmyaruM7yZra79dxDAGgD/A+DPzSzfwmoRKV2u/42Z2VHceKdeRCYJveEmEogCLxKIAi8SiAIvEkgpvYdUO6uRHUW9ll0jFwL0dp89f/58sua1yD766CN3zH379uUa01tJd+jQIXdMb8dbr811+vRp93G9lYEXLlxI1hYs
WJCs9fb2umPmbcvdqheM9OgMLxKIAi8SiAIvEogCLxKIAi8SiAIvEkjhbTkzS66SascL/eVdEXf8+PFkbXh4OFkDgNmzZydr8+bNS9YWLVqUrO3fv98d89SpU8ma1wrMasvt2rUrWTt58mSy5q3CW758uTumt5quq6srWYu4wWW871gkMAVeJBAFXiQQBV4kEAVeJBAFXiQQBV4kkFKWxxaxDNHbAdXrr2bNxftsgPe13i6nXi8dALq7u5O1hQsXJmve5wKWLVvmjuktY/WuFvTJJ5+4j+vtsvvBBx8ka0ePHk3Wsvrl3vGbPn16spZ3We1kpjO8SCAKvEggCrxIIAq8SCAKvEggCrxIIIW35UiW3pZrZEdbry3ntd5mzpyZrN17773umHPmzMlV83bY7enpccf0Lpp57NixZO2uu+5yH/fcuXPJmndRTW9H2yVLlrhjnjlzJlnzljtHVNcZnmQPya212/eS/APJgdqfSrFTFJFmyTzDk+zG6CWir19D+DEAf29mG4qcmIg0Xz1n+KsA1gG4vm3LCgDPknyf5C8Km5mINF1m4M1s2MzGfg7zLQArzexxAEtIPjL+a0iuJ1klWfU+piki5crzLv02M/uqdnsvgMXj72BmG82s38z6KxX9ii/SLvIE/m2S95C8E8D3AOxu8pxEpCB52nIvAngPwCUAvzSz9JUQa1KtLu+ChoDfQvN2KvVaTlm81pvXkvJWxM2YMcMd09vV1tt91lst57W5sr72ypUryZrXsgOAL7/8MlnzVr15x8DbERjwW29e+9b7Pr3nwWRW93dlZqtr/30PwJ8UNSERKY4+aScSiAIvEogCLxKIAi8SiAIvEogCLxJIS5uNjSxV9TTSX/Xm5PV7vR794cOH3TE//PDDZM3rw3tXgPWWzgJ+f9qrebvdAsCBAweSNa9H7/2ssz5T4P28Pe149eKi6QwvEogCLxKIAi8SiAIvEogCLxKIAi8SSCltudRy1ay2XN7dbr3lsVlLZ7261+o6cuRIspZ1Acbdu9NbCng7BnntqKzv01uOeuLEiWTN2yEW8Nt23s/Ta5FlLaP26t5xKGI35XanM7xIIAq8SCAKvEggCrxIIAq8SCAKvEgghbflzCy5yqyotoi3Is7bORXwd3P1Vntt3749Wctqy3mr8B5++OFkrbe3N1mbPn26O6a3E6x30ceslX/eMTp//nyuWtaqtkbasNHoDC8SiAIvEogCLxKIAi8SiAIvEogCLxJI4W05kpmtsGZrZDxvtdfBgweTtT179uR6TABYsGBBsvbYY48la/fdd1+y1tnZ6Y7ptcH6+vqSNW9VIAB8+umnydpnn32WrB06dChZ6+jocMf02rt5W3a36kq6zGSQ7CL5FsktJN8g2UHyVZLbSP5dGZMUkeao51T4FICXzGwNgOMAfgRgipmtBNBLcnGRExSR5sl8SW9mr4z5awXAjwH8U+3v7wJYBSD9Wk1E2kbdv+ySfBxAN4DDAK7/IjcMoGeC+64nWSVZ9XZsEZFy1RV4krMBvAzgaQAjAO6olWZM9BhmttHM+s2sv1KpNGuuItKget606wCwGcDzZjYIYAdGX8YDQB+Ag4XNTkSaqp4z/DMAlgN4geQAAAL4CcmXAPwlgP8qbnoi0kz1vGm3AcCGsf9G8jcA1gD4RzPzm8xorC+e4u3Y6vVQs/qrX331VbLmLSkdHBxM1rwLTQLAokWLkjVveay3BDZrSWlPz01vvfy/7u7uZO3uu+92H9fbDdc7Ro38zNRrr1+uD96Y2ZcYfZkvIpOIPlorEogCLxKIAi8SiAIvEogCLxJIW19MstnjAdntKq/uzffatWvJmncRSsDftbaIpZ9ZvPlkXUzy6NGjuWpeO9SbT1Y97wU3b9WWnc7wIoEo8CKBKPAigSjwIoEo8CKBKPAigbT0YpLeRR+zFLUTrrfba1dXV7LmrT47efKkO+bnn3+erHk7vc6ZMydZmzVrljum18o6depUsrZ/
/373cQ8cOJCseTveehfxzGrLea03teX+mM7wIoEo8CKBKPAigSjwIoEo8CKBKPAigZRyMclG2m8pjayI80ybNi1ZW7w4fVWt+++/P1nzVoIBwMDAQLLmXcDyoYceStayNpucMWNGsuZtupnVYvRaet5KO+/nefr0aXfM3bt3J2vez/PRRx9N1mbOnOmO6V3gMm9LL2uFYzNahTrDiwSiwIsEosCLBKLAiwSiwIsEosCLBKLAiwSS2SAn2QXg17X7jgBYB2A/gN/X7vKcme3yHqOIXWuLWr7o9VfvvPPOZG3evHnJWtbFJL0lsDt37kzWDh8+nKx5c82a0+zZs5O1EydOuI/79ddfJ2veZwPOnz+frB07dswds1qtJmveZ0Dmzp2brC1dutQdc7Kq5wz/FICXzGwNgOMA/hbA62a2uvbHDbuItI96Lhf9ypi/VgAcBrCW5J8CGATwV2aW3mVARNpG3b/Dk3wcQDeALQCeMLNVAM4AeHKC+64nWSVZHRoaatpkRaQxdQWe5GwALwN4GsBOM7v+S9VeADd9wNzMNppZv5n1VyqVpk1WRBqTGXiSHQA2A3jezAYBbCLZR3IKgLUAPi54jiLSJPWc4Z8BsBzACyQHAOwBsAnARwDeN7N3ipueiDRTPW/abQCwYdw/v/hNBkldaLGRZaxF7VrrLaf0lkwuW7YsWctaHnv27NlkzWu9ee+PXL582R3Ta9s1shuut4TYawV63+e+ffvcMb22ptcuHR4eTtaylqp6Fw/N+7wuY6dcffBGJBAFXiQQBV4kEAVeJBAFXiQQBV4kkFIuJpnV4ihizJSs1odXnz59erLmtX8efPBBd0yvxeOtFPN2iM1qBXqryLz22fz5893H9b7Wq+3alV6D5X2fADAyMpKsee3JS5cuJWtlP2fLojO8SCAKvEggCrxIIAq8SCAKvEggCrxIIKVcTLKMVUBjNdJS8ebqrdDr7OxM1hYtWuSO2d3dnax5mzt6G0aeO3fOHTNv+7Gnpyf343qtQG/MrJ+nt+pt4cKFyZq3KvBWpTO8SCAKvEggCrxIIAq8SCAKvEggCrxIIAq8SCCF9+GBxnanbbasnm7epbXebrfexRmB7ItNpnjLaq9evep+rfeZgqlTpyZrXr8cAC5evOjWU7z5escWAC5cuJCsdXV1JWu9vb3JWtmfHSmLzvAigSjwIoEo8CKBKPAigSjwIoEo8CKBsOjdOUkOARgc809zAZwsdNBvRvPJ1m5z0nxu9i0zq2TdqfDA3zQgWTWz/lIHdWg+2dptTppPfnpJLxKIAi8SSCsCv7EFY3o0n2ztNifNJ6fSf4cXkdbRS3qRQMIGnuTtJA+RHKj9ebjVc2onJHtIbq3dvpfkH8Ycq8z2z62KZBfJt0huIfkGyY7J9Dwq9SU9yVcBfBvAm2b289IGnngu3wGwzsx+2sp51ObSA+Dfzey7JKcCeAPAbAD/bGb/0oL5dAN4HcDdZvYdkj8E0GNmG1owly4Av8boUu4RAOsAbECLnkcknwXwmZltIbkBwDEAne3wPKpHaWf42pNmipmtBNBLcnFZYyesALCW5O9I/opkKXsDjFcL12sArm9s/xyAau04/YBkvsXyjbmK0WBd3/B9BYBnSb5P8hclz+UpAC+Z2RoAxwH8CC18HpnZK2a2pfbXCoAraIPnUb3KfEm/GsDm2u13AawqceyJfADgCTNbBeAMgCdbNI/x4VqNG8dpG4DSP9BhZsNmdnbMP70FYKWZPQ5gCclHSpzL+ID9GG3wPCL5OIBuAFvQHs+jupT5f6NOAEdqt4cB3F/i2BPZaWbXt2fZC6AlrzjMbBj4ox1Wxh8n/1Iv5dg2wbHaWeYExgTsIFr8PCI5G8DLAP4CwPF2eB7Vq8wz/AiAO2q3Z5Q89kQ2kewjOQXAWgAft3g+17XbcQKAt0neQ/JOAN8DsLvMwccE7Gm0+PiQ7MDoK4znzWwQ7fs8mlCZB2sHbrz86sPo/6lb6WcA
NgH4CMD7ZvZOi+dzXbsdJwB4EcB7ALYD+KWZ7Str4AkC1urj8wyA5QBeIDkAYA/a83k0odLepSc5E8BWAP8N4PsAVoz7PTE0kgNmtprktwC8CeAdACsxepz8HSlvYST/GsA/4MaZ818B/A30PMql7LZcN4A1AH5rZsdLG3iSIdmL0bPY23oy30zPo/z00VqRQNrhDSERKYkCLxKIAi8SiAIvEogCLxLI/wGd/oLRPNPFTQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# random and matplotlib are not imported by any visible cell above; import\n",
    "# them here so this cell is self-contained on a fresh kernel.\n",
    "import random\n",
    "\n",
    "import matplotlib.pyplot as plt\n",
    "from torchvision.transforms import ToPILImage\n",
    "\n",
    "# Convert a CHW float tensor back to a PIL image for display.\n",
    "show = ToPILImage()\n",
    "lenet_transform = transforms.Compose([\n",
    "    transforms.ToTensor()\n",
    "])\n",
    "test_set = NumAlphabetDataset(\"./datasets\", lenet_transform, \"test\")\n",
    "\n",
    "# Pick one random (image, label) sample; random.choice works because the\n",
    "# dataset supports len() and integer indexing.\n",
    "pic = random.choice(test_set)\n",
    "plt.imshow(show(pic[0]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "5989cae8-253a-4273-b8e4-b38b238305b7",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "For image ,your answer is:  a\n"
     ]
    }
   ],
   "source": [
    "# Class-index -> character lookup (make_alpha_num is defined earlier).\n",
    "num_table = make_alpha_num()\n",
    "model = LeNetConv()\n",
    "# map_location=\"cpu\" lets the checkpoint load even if it was saved on GPU.\n",
    "model.load_state_dict(torch.load(\"./models_weights/best.pth\", map_location=\"cpu\"))\n",
    "model.eval()\n",
    "with torch.no_grad():\n",
    "    # Add a batch dimension: (C, H, W) -> (1, C, H, W).\n",
    "    my_pic = pic[0].unsqueeze(0)\n",
    "    output = model(my_pic)\n",
    "    _, pred = torch.max(output, dim=1)\n",
    "    # Bug fix: index the table with a plain int, not a 1-element tensor.\n",
    "    answer = num_table[pred.item()]\n",
    "    print(\"For image ,your answer is: \", answer)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "85bb3dec-a6ee-47a0-924f-dad9aca2bbfa",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Pytorch-1.0.0",
   "language": "python",
   "name": "pytorch-1.0.0"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
