{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": 2,
      "metadata": {
        "id": "fXWlZ_jkmyR5"
      },
      "outputs": [],
      "source": [
        "# Based on https://github.com/blakechi/ComVEX/blob/master/comvex/coatnet/model.py\n",
        "\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "from MBConv import MBConvForRelativeAttention\n",
        "from Transformer import TransformerWithRelativeAttention, ProjectionHead\n",
        "\n",
        "configs = {\n",
        "    'coatnet-0': {\n",
        "        'num_blocks': [2, 2, 3, 5, 2],\n",
        "        'num_channels': [64, 96, 192, 384, 768],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 64,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    'coatnet-1': {\n",
        "        'num_blocks': [2, 2, 6, 14, 2],\n",
        "        'num_channels': [64, 96, 192, 384, 768],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 32,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    'coatnet-2': {\n",
        "        'num_blocks': [2, 2, 6, 14, 2],\n",
        "        'num_channels': [128, 128, 256, 512, 1026],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 32,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    'coatnet-3': {\n",
        "        'num_blocks': [2, 2, 6, 14, 2],\n",
        "        'num_channels': [192, 192, 384, 768, 1536],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 32,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    'coatnet-4': {\n",
        "        'num_blocks': [2, 2, 12, 28, 2],\n",
        "        'num_channels': [192, 192, 384, 768, 1536],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 32,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    'coatnet-5': {\n",
        "        'num_blocks': [2, 2, 12, 28, 2],\n",
        "        'num_channels': [192, 256, 512, 1280, 2048],\n",
        "        'expand_ratio': [4, 4, 4, 4, 4],\n",
        "        'n_head': 64,\n",
        "        'block_types': ['C', 'C', 'T', 'T']\n",
        "    },\n",
        "    # Something's not right with this one\n",
        "    # 'coatnet-6': {\n",
        "    #     'num_blocks': [2, 2, 4, [8, 42], 2],\n",
        "    #     'num_channels': [192, 192, 384, [768, 1536], 2048],\n",
        "    #     'expand_ratio': [4, 4, 4, 4, 4],\n",
        "    #     'n_head': 128,\n",
        "    #     'block_types': ['C', 'C', 'C-T', 'T']\n",
        "    # },\n",
        "    # 'coatnet-7': {\n",
        "    #     'num_blocks': [2, 2, 4, [8, 42], 2],\n",
        "    #     'num_channels': [192, 256, 512, [1024, 2048], 3072],\n",
        "    #     'expand_ratio': [4, 4, 4, 4, 4],\n",
        "    #     'n_head': 128,\n",
        "    #     'block_types': ['C', 'C', 'C-T', 'T']\n",
        "    # }\n",
        "}\n",
        "\n",
        "blocks = {\n",
        "    'C': MBConvForRelativeAttention,\n",
        "    'T': TransformerWithRelativeAttention\n",
        "}\n",
        "\n",
        "\n",
        "class CoAtNet(nn.Module):\n",
        "    \"\"\"CoAtNet (convolution + attention) image backbone.\n",
        "\n",
        "    Builds a conv stem (s0) followed by four stages (s1-s4) whose block type is\n",
        "    'C' (MBConvForRelativeAttention) or 'T' (TransformerWithRelativeAttention)\n",
        "    according to the chosen config, plus an optional projection head — or\n",
        "    several heads when num_classes is a list.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(self, inp_h, inp_w, in_channels, config='coatnet-0', num_classes=None, head_act_fn='mish',\n",
        "                 head_dropout=0.1):\n",
        "        \"\"\"\n",
        "        :param inp_h: input image height (the >> shifts below assume divisibility by 32)\n",
        "        :param inp_w: input image width (same assumption)\n",
        "        :param in_channels: channel count of the input image (e.g. 3 for RGB)\n",
        "        :param config: key into the module-level `configs` dict\n",
        "        :param num_classes: None for a headless backbone, an int for a single\n",
        "            head, or an iterable of ints for one head per task\n",
        "        :param head_act_fn: activation name forwarded to ProjectionHead\n",
        "        :param head_dropout: feed-forward dropout forwarded to ProjectionHead\n",
        "        \"\"\"\n",
        "        super().__init__()\n",
        "        self.config = configs[config]\n",
        "        block_types = self.config['block_types']\n",
        "        self.s0 = self._make_stem(in_channels)\n",
        "        # Stage k is handed inp >> (k + 1) — presumably its spatial size after\n",
        "        # its own downsampling step; TODO confirm against the block classes.\n",
        "        # NOTE(review): the expand_ratio index lags the other indices by one\n",
        "        # (s1 uses expand_ratio[0] while taking num_blocks[1]/num_channels[1]);\n",
        "        # harmless while every ratio is 4, but it looks like an off-by-one.\n",
        "        self.s1 = self._make_block(block_types[0], inp_h >> 2, inp_w >> 2,\n",
        "                                   self.config['num_channels'][0],\n",
        "                                   self.config['num_channels'][1],\n",
        "                                   self.config['num_blocks'][1],\n",
        "                                   self.config['expand_ratio'][0])\n",
        "        self.s2 = self._make_block(block_types[1], inp_h >> 3, inp_w >> 3,\n",
        "                                   self.config['num_channels'][1],\n",
        "                                   self.config['num_channels'][2],\n",
        "                                   self.config['num_blocks'][2],\n",
        "                                   self.config['expand_ratio'][1])\n",
        "        self.s3 = self._make_block(block_types[2], inp_h >> 4, inp_w >> 4,\n",
        "                                   self.config['num_channels'][2],\n",
        "                                   self.config['num_channels'][3],\n",
        "                                   self.config['num_blocks'][3],\n",
        "                                   self.config['expand_ratio'][2])\n",
        "        self.s4 = self._make_block(block_types[3], inp_h >> 5, inp_w >> 5,\n",
        "                                   self.config['num_channels'][3],\n",
        "                                   self.config['num_channels'][4],\n",
        "                                   self.config['num_blocks'][4],\n",
        "                                   self.config['expand_ratio'][3])\n",
        "        self.include_head = num_classes is not None\n",
        "        if self.include_head:\n",
        "            if isinstance(num_classes, int):\n",
        "                # Normalize the single-class-count case to a one-element list.\n",
        "                self.single_head = True\n",
        "                num_classes = [num_classes]\n",
        "            else:\n",
        "                self.single_head = False\n",
        "            self.heads = nn.ModuleList(\n",
        "                [ProjectionHead(self.config['num_channels'][-1], nc, act_fn=head_act_fn, ff_dropout=head_dropout) for nc\n",
        "                 in num_classes])\n",
        "\n",
        "    def _make_stem(self, in_channels):\n",
        "        \"\"\"Build the S0 stem: num_blocks[0] 3x3 convs, only the first with stride 2.\"\"\"\n",
        "        return nn.Sequential(*[\n",
        "            nn.Conv2d(\n",
        "                in_channels if i == 0 else self.config['num_channels'][0],\n",
        "                self.config['num_channels'][0], kernel_size=3, padding=1,\n",
        "                stride=2 if i == 0 else 1\n",
        "            ) for i in range(self.config['num_blocks'][0])\n",
        "        ])\n",
        "\n",
        "    # 'coatnet-0': {\n",
        "    #     'num_blocks': [2, 2, 3, 5, 2],\n",
        "    #     'num_channels': [64, 96, 192, 384, 768],\n",
        "    #     'expand_ratio': [4, 4, 4, 4, 4],\n",
        "    #     'n_head': 32,\n",
        "    #     'block_types': ['C', 'C', 'T', 'T']\n",
        "    # },\n",
        "\n",
        "    def _make_block(self, block_type, inp_h, inp_w, in_channels, out_channels, depth, expand_ratio):\n",
        "        \"\"\"Build one stage as an nn.Sequential of `depth` blocks.\n",
        "\n",
        "        `block_type` is normally a single key of `blocks` ('C' or 'T'); the\n",
        "        hyphenated form (e.g. 'C-T', with list-valued out_channels/depth)\n",
        "        exists for the disabled coatnet-6/7 configs.\n",
        "        \"\"\"\n",
        "        block_list = []\n",
        "        # A hybrid previous stage hands over a list of widths; the actual\n",
        "        # input width is that list's last entry.\n",
        "        if not isinstance(in_channels, int):\n",
        "            in_channels = in_channels[-1]\n",
        "        if block_type in blocks:\n",
        "            block_cls = blocks[block_type]\n",
        "            # Only the first block switches the channel count and downsamples.\n",
        "            block_list.extend([\n",
        "                block_cls(\n",
        "                    inp_h, inp_w, in_channels if i == 0 else out_channels,\n",
        "                    n_head=self.config['n_head'], out_channels=out_channels,\n",
        "                    expand_ratio=expand_ratio, use_downsampling=(i == 0)\n",
        "                ) for i in range(depth)\n",
        "            ])\n",
        "        else:\n",
        "            # Hybrid stage such as 'C-T': build each sub-stage in turn.\n",
        "            # NOTE(review): use_downsampling fires at j == 0 of *every*\n",
        "            # sub-stage, so a 'C-T' stage would downsample twice — likely why\n",
        "            # the coatnet-6/7 configs are commented out.\n",
        "            for i, _block_type in enumerate(block_type.split('-')):\n",
        "                block_cls = blocks[_block_type]\n",
        "                block_list.extend(\n",
        "                    block_cls(\n",
        "                        inp_h, inp_w,\n",
        "                        in_channels if i == 0 and j == 0 else out_channels[i - 1] if j == 0 else out_channels[i],\n",
        "                        n_head=self.config['n_head'], out_channels=out_channels[i],\n",
        "                        expand_ratio=expand_ratio, use_downsampling=j == 0\n",
        "                    ) for j in range(depth[i])\n",
        "                )\n",
        "        return nn.Sequential(*block_list)\n",
        "\n",
        "    def forward(self, x):\n",
        "        \"\"\"Stem + stages, global average pool, then head(s) if configured.\n",
        "\n",
        "        Returns pooled features (size num_channels[-1]) without heads, a single\n",
        "        logits tensor for one head, or a list of logits for several heads.\n",
        "        \"\"\"\n",
        "        x = self.s0(x)\n",
        "        x = self.s1(x)\n",
        "        x = self.s2(x)\n",
        "        x = self.s3(x)\n",
        "        x = self.s4(x)\n",
        "        x = F.adaptive_avg_pool2d(x, 1).view(x.size(0), -1)\n",
        "        if self.include_head:\n",
        "            if self.single_head:\n",
        "                return self.heads[0](x)\n",
        "            return [head(x) for head in self.heads]\n",
        "        return x\n",
        "\n",
        "\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "uvOs_0Y3Hynl",
        "outputId": "80de780b-eb73-4dbc-ff37-7be1f4ebc9ac"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Collecting thop\n",
            "  Downloading thop-0.1.1.post2209072238-py3-none-any.whl.metadata (2.7 kB)\n",
            "Requirement already satisfied: torch in /usr/local/lib/python3.11/dist-packages (from thop) (2.6.0+cu124)\n",
            "Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from torch->thop) (3.18.0)\n",
            "Requirement already satisfied: typing-extensions>=4.10.0 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (4.13.2)\n",
            "Requirement already satisfied: networkx in /usr/local/lib/python3.11/dist-packages (from torch->thop) (3.4.2)\n",
            "Requirement already satisfied: jinja2 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (3.1.6)\n",
            "Requirement already satisfied: fsspec in /usr/local/lib/python3.11/dist-packages (from torch->thop) (2025.3.2)\n",
            "Collecting nvidia-cuda-nvrtc-cu12==12.4.127 (from torch->thop)\n",
            "  Downloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cuda-runtime-cu12==12.4.127 (from torch->thop)\n",
            "  Downloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cuda-cupti-cu12==12.4.127 (from torch->thop)\n",
            "  Downloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cudnn-cu12==9.1.0.70 (from torch->thop)\n",
            "  Downloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cublas-cu12==12.4.5.8 (from torch->thop)\n",
            "  Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cufft-cu12==11.2.1.3 (from torch->thop)\n",
            "  Downloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-curand-cu12==10.3.5.147 (from torch->thop)\n",
            "  Downloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cusolver-cu12==11.6.1.9 (from torch->thop)\n",
            "  Downloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cusparse-cu12==12.3.1.170 (from torch->thop)\n",
            "  Downloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Requirement already satisfied: nvidia-cusparselt-cu12==0.6.2 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (0.6.2)\n",
            "Requirement already satisfied: nvidia-nccl-cu12==2.21.5 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (2.21.5)\n",
            "Requirement already satisfied: nvidia-nvtx-cu12==12.4.127 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (12.4.127)\n",
            "Collecting nvidia-nvjitlink-cu12==12.4.127 (from torch->thop)\n",
            "  Downloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Requirement already satisfied: triton==3.2.0 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (3.2.0)\n",
            "Requirement already satisfied: sympy==1.13.1 in /usr/local/lib/python3.11/dist-packages (from torch->thop) (1.13.1)\n",
            "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from sympy==1.13.1->torch->thop) (1.3.0)\n",
            "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.11/dist-packages (from jinja2->torch->thop) (3.0.2)\n",
            "Downloading thop-0.1.1.post2209072238-py3-none-any.whl (15 kB)\n",
            "Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl (363.4 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m363.4/363.4 MB\u001b[0m \u001b[31m4.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (13.8 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m13.8/13.8 MB\u001b[0m \u001b[31m41.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (24.6 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m24.6/24.6 MB\u001b[0m \u001b[31m73.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (883 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m883.7/883.7 kB\u001b[0m \u001b[31m40.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl (664.8 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m664.8/664.8 MB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl (211.5 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m211.5/211.5 MB\u001b[0m \u001b[31m5.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl (56.3 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.3/56.3 MB\u001b[0m \u001b[31m15.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl (127.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m127.9/127.9 MB\u001b[0m \u001b[31m7.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl (207.5 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m207.5/207.5 MB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (21.1 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.1/21.1 MB\u001b[0m \u001b[31m86.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hInstalling collected packages: nvidia-nvjitlink-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, thop\n",
            "  Attempting uninstall: nvidia-nvjitlink-cu12\n",
            "    Found existing installation: nvidia-nvjitlink-cu12 12.5.82\n",
            "    Uninstalling nvidia-nvjitlink-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-nvjitlink-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-curand-cu12\n",
            "    Found existing installation: nvidia-curand-cu12 10.3.6.82\n",
            "    Uninstalling nvidia-curand-cu12-10.3.6.82:\n",
            "      Successfully uninstalled nvidia-curand-cu12-10.3.6.82\n",
            "  Attempting uninstall: nvidia-cufft-cu12\n",
            "    Found existing installation: nvidia-cufft-cu12 11.2.3.61\n",
            "    Uninstalling nvidia-cufft-cu12-11.2.3.61:\n",
            "      Successfully uninstalled nvidia-cufft-cu12-11.2.3.61\n",
            "  Attempting uninstall: nvidia-cuda-runtime-cu12\n",
            "    Found existing installation: nvidia-cuda-runtime-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-runtime-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-runtime-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cuda-nvrtc-cu12\n",
            "    Found existing installation: nvidia-cuda-nvrtc-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-nvrtc-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-nvrtc-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cuda-cupti-cu12\n",
            "    Found existing installation: nvidia-cuda-cupti-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-cupti-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-cupti-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cublas-cu12\n",
            "    Found existing installation: nvidia-cublas-cu12 12.5.3.2\n",
            "    Uninstalling nvidia-cublas-cu12-12.5.3.2:\n",
            "      Successfully uninstalled nvidia-cublas-cu12-12.5.3.2\n",
            "  Attempting uninstall: nvidia-cusparse-cu12\n",
            "    Found existing installation: nvidia-cusparse-cu12 12.5.1.3\n",
            "    Uninstalling nvidia-cusparse-cu12-12.5.1.3:\n",
            "      Successfully uninstalled nvidia-cusparse-cu12-12.5.1.3\n",
            "  Attempting uninstall: nvidia-cudnn-cu12\n",
            "    Found existing installation: nvidia-cudnn-cu12 9.3.0.75\n",
            "    Uninstalling nvidia-cudnn-cu12-9.3.0.75:\n",
            "      Successfully uninstalled nvidia-cudnn-cu12-9.3.0.75\n",
            "  Attempting uninstall: nvidia-cusolver-cu12\n",
            "    Found existing installation: nvidia-cusolver-cu12 11.6.3.83\n",
            "    Uninstalling nvidia-cusolver-cu12-11.6.3.83:\n",
            "      Successfully uninstalled nvidia-cusolver-cu12-11.6.3.83\n",
            "Successfully installed nvidia-cublas-cu12-12.4.5.8 nvidia-cuda-cupti-cu12-12.4.127 nvidia-cuda-nvrtc-cu12-12.4.127 nvidia-cuda-runtime-cu12-12.4.127 nvidia-cudnn-cu12-9.1.0.70 nvidia-cufft-cu12-11.2.1.3 nvidia-curand-cu12-10.3.5.147 nvidia-cusolver-cu12-11.6.1.9 nvidia-cusparse-cu12-12.3.1.170 nvidia-nvjitlink-cu12-12.4.127 thop-0.1.1.post2209072238\n"
          ]
        }
      ],
      "source": [
        "# %pip (unlike !pip) is guaranteed to install into the running kernel's environment\n",
        "%pip install thop"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "9VNkI4RbHHFQ",
        "outputId": "6b51536a-0b46-43af-cc65-d15a133ab00d"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "[CoAtNet-112x112] Params: 23.039M, FLOPs: 692.163M, Avg Inference time (128 images): 20.3671s\n"
          ]
        }
      ],
      "source": [
        "import torch\n",
        "from thop import profile, clever_format\n",
        "import time\n",
        "import torch.nn.functional as F\n",
        "\n",
        "def profile_model(model, device, model_name):\n",
        "    \"\"\"Profile parameter count, FLOPs (via thop) and average inference time.\n",
        "\n",
        "    :param model: nn.Module to profile (switched to eval mode here)\n",
        "    :param device: device string/object the model already lives on\n",
        "    :param model_name: label used in the printed summary line\n",
        "    :return: (params, macs, infer_time) — params/macs are thop-formatted\n",
        "        strings, infer_time is mean seconds per 128-image batch over 10 runs\n",
        "    \"\"\"\n",
        "    model.eval()\n",
        "\n",
        "    # FLOPs and parameter-count analysis (thop)\n",
        "    dummy_input = torch.randn(1, 3, 112, 112).to(device)\n",
        "    macs, params = profile(model, inputs=(dummy_input,), verbose=False)\n",
        "    macs, params = clever_format([macs, params], \"%.3f\")\n",
        "\n",
        "    # Inference-time measurement\n",
        "    use_cuda_sync = torch.cuda.is_available() and str(device).startswith(\"cuda\")\n",
        "    with torch.no_grad():\n",
        "        dummy_batch = torch.randn(128, 3, 112, 112).to(device)\n",
        "\n",
        "        # Warm-up pass so one-off CUDA/cuDNN initialization is not timed.\n",
        "        _ = model(dummy_batch)\n",
        "        if use_cuda_sync:\n",
        "            torch.cuda.synchronize()\n",
        "\n",
        "        total_time = 0\n",
        "        runs = 10\n",
        "        for _ in range(runs):\n",
        "            start = time.time()\n",
        "            output = model(dummy_batch)\n",
        "            # If the model returns a list of head outputs, just touch the first.\n",
        "            if isinstance(output, list):\n",
        "                _ = output[0]\n",
        "            else:\n",
        "                _ = output\n",
        "            if use_cuda_sync:\n",
        "                # CUDA launches are asynchronous; without a sync the timer\n",
        "                # would mostly measure kernel-enqueue cost, not execution.\n",
        "                torch.cuda.synchronize()\n",
        "            end = time.time()\n",
        "            total_time += (end - start)\n",
        "\n",
        "        infer_time = total_time / runs\n",
        "\n",
        "    print(f\"[{model_name}] Params: {params}, FLOPs: {macs}, Avg Inference time (128 images): {infer_time:.4f}s\")\n",
        "    return params, macs, infer_time\n",
        "\n",
        "\n",
        "if __name__ == \"__main__\":\n",
        "    import os\n",
        "    # from model import CoAtNet  \n",
        "\n",
        "    # Enable synchronous CUDA launches for easier debugging.\n",
        "    # NOTE(review): this also slows every kernel launch, so timings measured\n",
        "    # with it set will overstate real inference time.\n",
        "    os.environ[\"CUDA_LAUNCH_BLOCKING\"] = \"1\"\n",
        "    device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n",
        "\n",
        "    model = CoAtNet(\n",
        "        inp_h=112, inp_w=112,\n",
        "        in_channels=3,\n",
        "        config='coatnet-0',\n",
        "        num_classes=100\n",
        "    ).to(device)\n",
        "\n",
        "    try:\n",
        "        params, macs, infer_time = profile_model(model, device, \"CoAtNet-112x112\")\n",
        "    except RuntimeError as e:\n",
        "        # Message translates to: model profiling failed (possible shape\n",
        "        # mismatch, softmax out of range, etc.)\n",
        "        print(\"❌ 模型分析出错（可能维度不匹配、softmax 越界等）：\")\n",
        "        print(e)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 373
        },
        "id": "6Di_JxBqNjqL",
        "outputId": "e0c1381b-109a-4bef-f51e-fe7855faa064"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Finding LR:   0%|          | 2/782 [01:29<9:44:50, 44.99s/it]\n"
          ]
        },
        {
          "ename": "KeyboardInterrupt",
          "evalue": "",
          "output_type": "error",
          "traceback": [
            "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
            "\u001b[0;32m<ipython-input-11-736a4936601b>\u001b[0m in \u001b[0;36m<cell line: 0>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     87\u001b[0m     \u001b[0moptimizer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0moptim\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSGD\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparameters\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1e-6\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmomentum\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.9\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight_decay\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5e-4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 89\u001b[0;31m     \u001b[0mlog_lrs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlosses\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfind_lr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrainloader\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcriterion\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     90\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     91\u001b[0m     \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlog_lrs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlosses\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m<ipython-input-11-736a4936601b>\u001b[0m in \u001b[0;36mfind_lr\u001b[0;34m(model, trainloader, optimizer, criterion, device, init_value, final_value, beta)\u001b[0m\n\u001b[1;32m     42\u001b[0m         \u001b[0mlog_lrs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmath\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlog10\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     43\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 44\u001b[0;31m         \u001b[0mloss\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     45\u001b[0m         \u001b[0moptimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     46\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/_tensor.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[1;32m    624\u001b[0m                 \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    625\u001b[0m             )\n\u001b[0;32m--> 626\u001b[0;31m         torch.autograd.backward(\n\u001b[0m\u001b[1;32m    627\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgradient\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mretain_graph\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcreate_graph\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    628\u001b[0m         )\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/autograd/__init__.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[1;32m    345\u001b[0m     \u001b[0;31m# some Python versions print out the first line of a multi-line function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    346\u001b[0m     \u001b[0;31m# calls in the traceback and some print out the last line\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 347\u001b[0;31m     _engine_run_backward(\n\u001b[0m\u001b[1;32m    348\u001b[0m         \u001b[0mtensors\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    349\u001b[0m         \u001b[0mgrad_tensors_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/autograd/graph.py\u001b[0m in \u001b[0;36m_engine_run_backward\u001b[0;34m(t_outputs, *args, **kwargs)\u001b[0m\n\u001b[1;32m    821\u001b[0m         \u001b[0munregister_hooks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_register_logging_hooks_on_whole_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mt_outputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    822\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 823\u001b[0;31m         return Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass\n\u001b[0m\u001b[1;32m    824\u001b[0m             \u001b[0mt_outputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    825\u001b[0m         )  # Calls into the C++ engine to run the backward pass\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/autograd/function.py\u001b[0m in \u001b[0;36mapply\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m    305\u001b[0m             )\n\u001b[1;32m    306\u001b[0m         \u001b[0muser_fn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mvjp_fn\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mvjp_fn\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mFunction\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvjp\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mbackward_fn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 307\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0muser_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    308\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    309\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mapply_jvp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/content/activations.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(ctx, grad_output)\u001b[0m\n\u001b[1;32m     36\u001b[0m         \u001b[0mh\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlog\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     37\u001b[0m         \u001b[0mgrad_gh\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1.\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mh\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcosh\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpow_\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 38\u001b[0;31m         \u001b[0mgrad_hx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msigmoid\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     39\u001b[0m         \u001b[0mgrad_gx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgrad_gh\u001b[0m \u001b[0;34m*\u001b[0m  \u001b[0mgrad_hx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     40\u001b[0m         \u001b[0mgrad_f\u001b[0m \u001b[0;34m=\u001b[0m  \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtanh\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mF\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msoftplus\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mi\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mgrad_gx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
          ]
        }
      ],
      "source": [
        "import matplotlib.pyplot as plt\n",
        "import math\n",
        "from tqdm import tqdm\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.optim as optim\n",
        "from torch.utils.data import DataLoader\n",
        "from torchvision import datasets, transforms\n",
        "\n",
        "def find_lr(model, trainloader, optimizer, criterion, device, init_value=1e-6, final_value=10., beta=0.98):\n",
        "    \"\"\"LR range test (Smith, 2017): sweep the learning rate exponentially\n",
        "    from init_value to final_value over one pass of trainloader, recording\n",
        "    an exponentially smoothed loss (decay factor beta) at each step.\n",
        "\n",
        "    Returns (log_lrs, losses): log10 of each tried LR and the matching\n",
        "    smoothed loss. Stops early once the smoothed loss exceeds 4x the best\n",
        "    loss seen so far. NOTE: trains (mutates) the model and overwrites the\n",
        "    optimizer's lr, so pass a throwaway model/optimizer.\n",
        "    \"\"\"\n",
        "    num = len(trainloader) - 1\n",
        "    # Per-batch multiplicative LR step so lr reaches final_value by the last batch\n",
        "    mult = (final_value / init_value) ** (1/num)\n",
        "    lr = init_value\n",
        "    for param_group in optimizer.param_groups:\n",
        "        param_group['lr'] = lr\n",
        "\n",
        "    avg_loss = 0.\n",
        "    best_loss = float('inf')\n",
        "    batch_num = 0\n",
        "    losses = []\n",
        "    log_lrs = []\n",
        "\n",
        "    model.train()\n",
        "    for inputs, targets in tqdm(trainloader, desc=\"Finding LR\"):\n",
        "        batch_num += 1\n",
        "        inputs, targets = inputs.to(device), targets.to(device)\n",
        "\n",
        "        optimizer.zero_grad()\n",
        "        outputs = model(inputs)\n",
        "        loss = criterion(outputs, targets)\n",
        "\n",
        "        # Bias-corrected exponential moving average of the raw loss\n",
        "        avg_loss = beta * avg_loss + (1 - beta) * loss.item()\n",
        "        smoothed_loss = avg_loss / (1 - beta ** batch_num)\n",
        "\n",
        "        if smoothed_loss < best_loss or batch_num == 1:\n",
        "            best_loss = smoothed_loss\n",
        "\n",
        "        # Divergence guard: stop the sweep once the loss blows up\n",
        "        if smoothed_loss > 4 * best_loss:\n",
        "            break\n",
        "\n",
        "        losses.append(smoothed_loss)\n",
        "        log_lrs.append(math.log10(lr))\n",
        "\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "\n",
        "        lr *= mult\n",
        "        for param_group in optimizer.param_groups:\n",
        "            param_group['lr'] = lr\n",
        "\n",
        "    return log_lrs, losses\n",
        "\n",
        "if __name__ == \"__main__\":\n",
        "    device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n",
        "\n",
        "    # CoAtNet is defined in another cell of this notebook\n",
        "    model = CoAtNet(\n",
        "        inp_h=112, inp_w=112,\n",
        "        in_channels=3,\n",
        "        config='coatnet-0',\n",
        "        num_classes=100\n",
        "    ).to(device)\n",
        "\n",
        "    train_transform = transforms.Compose([\n",
        "        transforms.Resize(128),  # upscale to 128x128 (or larger) first\n",
        "        transforms.RandomCrop(112, padding=4),\n",
        "        transforms.RandomHorizontalFlip(),\n",
        "        transforms.AutoAugment(policy=transforms.AutoAugmentPolicy.CIFAR10),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize(mean=[0.507, 0.487, 0.441], std=[0.267, 0.256, 0.276])\n",
        "    ])\n",
        "\n",
        "\n",
        "    transform_test = transforms.Compose([\n",
        "        transforms.Resize((112, 112)),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize((0.507, 0.487, 0.441), (0.267, 0.256, 0.276)),\n",
        "    ])\n",
        "\n",
        "    train_dataset = datasets.CIFAR100(root='./data', train=True, download=True, transform=train_transform)\n",
        "    test_dataset = datasets.CIFAR100(root='./data', train=False, download=True, transform=transform_test)\n",
        "\n",
        "    trainloader = DataLoader(train_dataset, batch_size=64, shuffle=True, num_workers=2)\n",
        "    testloader = DataLoader(test_dataset, batch_size=64, shuffle=False, num_workers=2)\n",
        "\n",
        "    criterion = nn.CrossEntropyLoss()\n",
        "    optimizer = optim.SGD(model.parameters(), lr=1e-6, momentum=0.9, weight_decay=5e-4)\n",
        "\n",
        "    log_lrs, losses = find_lr(model, trainloader, optimizer, criterion, device)\n",
        "\n",
        "    plt.plot(log_lrs, losses)\n",
        "    plt.xlabel(\"Log10 Learning Rate\")\n",
        "    plt.ylabel(\"Loss\")\n",
        "    plt.title(\"Learning Rate Finder\")\n",
        "    plt.grid(True)\n",
        "    plt.show()\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "nKQINCShmKns",
        "outputId": "afef70c1-a54d-43d3-835a-3bd197c69c66"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [1/10], Loss: 3.8828\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [1/10], Loss: 3.8828, Accuracy: 15.25%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [2/10], Loss: 3.2523\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [2/10], Loss: 3.2523, Accuracy: 25.27%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [3/10], Loss: 2.8177\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [3/10], Loss: 2.8177, Accuracy: 33.47%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [4/10], Loss: 2.5085\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [4/10], Loss: 2.5085, Accuracy: 38.31%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [5/10], Loss: 2.2903\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [5/10], Loss: 2.2903, Accuracy: 44.24%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [6/10], Loss: 2.1183\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [6/10], Loss: 2.1183, Accuracy: 44.57%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [7/10], Loss: 1.9699\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [7/10], Loss: 1.9699, Accuracy: 49.95%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                         "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [8/10], Loss: 1.8492\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [8/10], Loss: 1.8492, Accuracy: 51.18%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                          "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [9/10], Loss: 1.7347\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [9/10], Loss: 1.7347, Accuracy: 52.67%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                           "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [10/10], Loss: 1.6473\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [10/10], Loss: 1.6473, Accuracy: 54.41%\n"
          ]
        }
      ],
      "source": [
        "import os\n",
        "import pickle\n",
        "import numpy as np\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.optim as optim\n",
        "from torch.utils.data import Dataset, DataLoader\n",
        "from torchvision import datasets, transforms\n",
        "import json\n",
        "from tqdm import tqdm\n",
        "\n",
        "# Loader for a local copy of the CIFAR-100 python-pickle distribution.\n",
        "# Not used by the run below (torchvision's datasets.CIFAR100 downloads its\n",
        "# own copy); kept for offline environments such as Kaggle.\n",
        "class CIFAR100Local(Dataset):\n",
        "    def __init__(self, root, train=True, transform=None):\n",
        "        self.transform = transform\n",
        "        self.train = train\n",
        "        self.data = []\n",
        "        self.targets = []\n",
        "\n",
        "        file = 'train' if train else 'test'\n",
        "        path = os.path.join(root, file)\n",
        "        with open(path, 'rb') as f:\n",
        "            # NOTE: pickle.load can execute arbitrary code; only open trusted archives.\n",
        "            entry = pickle.load(f, encoding='latin1')\n",
        "            self.data = entry['data']\n",
        "            self.targets = entry['fine_labels']\n",
        "\n",
        "        # (N, 3072) -> (N, 32, 32, 3) HWC layout for PIL conversion\n",
        "        self.data = np.reshape(self.data, (-1, 3, 32, 32)).transpose((0, 2, 3, 1))\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.data)\n",
        "\n",
        "    def __getitem__(self, index):\n",
        "        img, target = self.data[index], self.targets[index]\n",
        "        img = transforms.ToPILImage()(img)\n",
        "        if self.transform is not None:\n",
        "            img = self.transform(img)\n",
        "        return img, target\n",
        "\n",
        "# Training configuration\n",
        "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
        "batch_size = 64\n",
        "epochs = 10\n",
        "learning_rate = 0.0001\n",
        "save_path = 'training_log.json'\n",
        "# Local dataset path (absolute); only needed when using CIFAR100Local above.\n",
        "data_root = '/kaggle/input/cifar1001/cifar-100-python/cifar-100-python'\n",
        "\n",
        "\n",
        "train_transform = transforms.Compose([\n",
        "    transforms.Resize(128),  # BUGFIX: upscale the 32x32 CIFAR image first, otherwise RandomCrop(112) raises\n",
        "    transforms.RandomCrop(112, padding=4),  # pad 4px, then random 112x112 crop for translation robustness\n",
        "    transforms.RandomHorizontalFlip(),  # flip left/right with p=0.5\n",
        "    transforms.AutoAugment(policy=transforms.AutoAugmentPolicy.CIFAR10),  # learned augmentation policy\n",
        "    # The CIFAR10 AutoAugment policy transfers well to CIFAR-100 and similar\n",
        "    # low-resolution datasets; torchvision ships no CIFAR-100-specific policy.\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize(mean=[0.507, 0.487, 0.441], std=[0.267, 0.256, 0.276])  # CIFAR-100 channel stats\n",
        "])\n",
        "\n",
        "transform_test = transforms.Compose([\n",
        "    transforms.Resize((112, 112)),\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize((0.507, 0.487, 0.441), (0.267, 0.256, 0.276)),\n",
        "])\n",
        "\n",
        "# BUGFIX: was transform=transform_train (undefined name -> NameError); the\n",
        "# pipeline defined above is train_transform. The `datasets` import was also\n",
        "# missing from this cell's import block.\n",
        "train_dataset = datasets.CIFAR100(root='./data', train=True, download=True, transform=train_transform)\n",
        "test_dataset = datasets.CIFAR100(root='./data', train=False, download=True, transform=transform_test)\n",
        "\n",
        "train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=2)\n",
        "test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=2)\n",
        "\n",
        "\n",
        "# Model definition (CoAtNet comes from an earlier cell of this notebook)\n",
        "model = CoAtNet(112, 112, 3, config='coatnet-0', num_classes=100)\n",
        "\n",
        "\n",
        "def replace_bn_with_ln(model):\n",
        "    \"\"\"Recursively swap every BatchNorm2d for GroupNorm(1, C).\n",
        "\n",
        "    GroupNorm with one group normalizes over (C, H, W) like LayerNorm but\n",
        "    operates directly on NCHW tensors, so no permute is required.\n",
        "    \"\"\"\n",
        "    for name, module in model.named_children():\n",
        "        if isinstance(module, nn.BatchNorm2d):\n",
        "            ln = nn.GroupNorm(1, module.num_features)\n",
        "            setattr(model, name, ln)\n",
        "        else:\n",
        "            replace_bn_with_ln(module)\n",
        "\n",
        "replace_bn_with_ln(model)\n",
        "model = model.to(device)\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "optimizer = optim.Adam(model.parameters(), lr=learning_rate)\n",
        "# Training log, dumped to save_path at the end\n",
        "history = {\"train_loss\": [], \"test_acc\": []}\n",
        "\n",
        "\n",
        "for epoch in range(epochs):\n",
        "    model.train()\n",
        "    running_loss = 0.0\n",
        "    progress_bar = tqdm(train_loader, desc=f\"Epoch [{epoch+1}/{epochs}]\", leave=False)\n",
        "\n",
        "    for images, labels in progress_bar:\n",
        "        images, labels = images.to(device), labels.to(device)\n",
        "        optimizer.zero_grad()\n",
        "        outputs = model(images)\n",
        "        loss = criterion(outputs, labels)\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "        running_loss += loss.item()\n",
        "\n",
        "        # Live loss readout on the progress bar\n",
        "        progress_bar.set_postfix(loss=loss.item())\n",
        "\n",
        "    avg_loss = running_loss / len(train_loader)\n",
        "    history[\"train_loss\"].append(avg_loss)\n",
        "\n",
        "    # Per-epoch training summary\n",
        "    print(f\"Epoch [{epoch+1}/{epochs}], Loss: {avg_loss:.4f}\")\n",
        "\n",
        "    # Test-set accuracy\n",
        "    model.eval()\n",
        "    correct = total = 0\n",
        "    with torch.no_grad():\n",
        "        for images, labels in test_loader:\n",
        "            images, labels = images.to(device), labels.to(device)\n",
        "            outputs = model(images)\n",
        "            _, predicted = torch.max(outputs, 1)\n",
        "            total += labels.size(0)\n",
        "            correct += (predicted == labels).sum().item()\n",
        "    acc = 100 * correct / total\n",
        "    history[\"test_acc\"].append(acc)\n",
        "\n",
        "    print(f\"Epoch [{epoch+1}/{epochs}], Loss: {avg_loss:.4f}, Accuracy: {acc:.2f}%\")\n",
        "\n",
        "# Save the log\n",
        "with open(save_path, 'w') as f:\n",
        "    json.dump(history, f)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "Wsi0_UmCSQyS"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "from MBConv import MBConvForRelativeAttention\n",
        "from Transformer import TransformerWithRelativeAttention, ProjectionHead\n",
        "\n",
        "class CoAtNetCifar(nn.Module):\n",
        "    \"\"\"Compact CoAtNet-style hybrid for 112x112 inputs.\n",
        "\n",
        "    Layout: stride-1 conv stem -> two MBConv stages -> two relative-attention\n",
        "    Transformer stages -> global average pool -> projection head. Spatial\n",
        "    resolution halves at every block constructed with use_downsampling=True.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(self, inp_h=112, inp_w=112, in_channels=3, num_classes=100, head_act_fn='relu', head_dropout=0.1):\n",
        "        super().__init__()\n",
        "        # Resolutions seen after each downsampling step\n",
        "        half_h, half_w = inp_h // 2, inp_w // 2\n",
        "        quart_h, quart_w = inp_h // 4, inp_w // 4\n",
        "        eighth_h, eighth_w = inp_h // 8, inp_w // 8\n",
        "\n",
        "        # Stem keeps full resolution; all downsampling happens inside the stages.\n",
        "        self.stem = nn.Sequential(\n",
        "            nn.Conv2d(in_channels, 64, kernel_size=3, stride=1, padding=1),\n",
        "            nn.BatchNorm2d(64),\n",
        "            nn.ReLU()\n",
        "        )\n",
        "        # Convolution-only stage (64 -> 96 channels, halves resolution).\n",
        "        self.stage1 = nn.Sequential(\n",
        "            MBConvForRelativeAttention(inp_h, inp_w, 64, 64, expand_ratio=2, use_downsampling=False),\n",
        "            MBConvForRelativeAttention(inp_h, inp_w, 64, 96, expand_ratio=2, use_downsampling=True),\n",
        "        )\n",
        "        # Hybrid stage: MBConv downsamples, then relative attention at 1/4 scale.\n",
        "        self.stage2 = nn.Sequential(\n",
        "            MBConvForRelativeAttention(half_h, half_w, 96, 128, expand_ratio=2, use_downsampling=True),\n",
        "            TransformerWithRelativeAttention(quart_h, quart_w, 128, 128, n_head=4, expand_ratio=2, use_downsampling=False)\n",
        "        )\n",
        "        # Attention-only final stage (192 -> 256 channels at 1/8 scale).\n",
        "        self.stage3 = nn.Sequential(\n",
        "            TransformerWithRelativeAttention(quart_h, quart_w, 128, 192, n_head=4, expand_ratio=2, use_downsampling=True),\n",
        "            TransformerWithRelativeAttention(eighth_h, eighth_w, 192, 256, n_head=4, expand_ratio=2, use_downsampling=False)\n",
        "        )\n",
        "\n",
        "        self.pool = nn.AdaptiveAvgPool2d(1)\n",
        "        self.head = ProjectionHead(256, num_classes, act_fn=head_act_fn, ff_dropout=head_dropout)\n",
        "\n",
        "    def forward(self, x):\n",
        "        feats = self.stem(x)\n",
        "        for stage in (self.stage1, self.stage2, self.stage3):\n",
        "            feats = stage(feats)\n",
        "        pooled = self.pool(feats).flatten(1)  # (B, 256, 1, 1) -> (B, 256)\n",
        "        return self.head(pooled)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "u7TNF9XgT0ho",
        "outputId": "977721d5-69b5-4f30-de90-7bb9f6d4e6e1"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Requirement already satisfied: torch in /usr/local/lib/python3.11/dist-packages (2.6.0+cu124)\n",
            "Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from torch) (3.18.0)\n",
            "Requirement already satisfied: typing-extensions>=4.10.0 in /usr/local/lib/python3.11/dist-packages (from torch) (4.13.2)\n",
            "Requirement already satisfied: networkx in /usr/local/lib/python3.11/dist-packages (from torch) (3.4.2)\n",
            "Requirement already satisfied: jinja2 in /usr/local/lib/python3.11/dist-packages (from torch) (3.1.6)\n",
            "Requirement already satisfied: fsspec in /usr/local/lib/python3.11/dist-packages (from torch) (2025.3.2)\n",
            "Collecting nvidia-cuda-nvrtc-cu12==12.4.127 (from torch)\n",
            "  Downloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cuda-runtime-cu12==12.4.127 (from torch)\n",
            "  Downloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cuda-cupti-cu12==12.4.127 (from torch)\n",
            "  Downloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cudnn-cu12==9.1.0.70 (from torch)\n",
            "  Downloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cublas-cu12==12.4.5.8 (from torch)\n",
            "  Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cufft-cu12==11.2.1.3 (from torch)\n",
            "  Downloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-curand-cu12==10.3.5.147 (from torch)\n",
            "  Downloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Collecting nvidia-cusolver-cu12==11.6.1.9 (from torch)\n",
            "  Downloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Collecting nvidia-cusparse-cu12==12.3.1.170 (from torch)\n",
            "  Downloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl.metadata (1.6 kB)\n",
            "Requirement already satisfied: nvidia-cusparselt-cu12==0.6.2 in /usr/local/lib/python3.11/dist-packages (from torch) (0.6.2)\n",
            "Requirement already satisfied: nvidia-nccl-cu12==2.21.5 in /usr/local/lib/python3.11/dist-packages (from torch) (2.21.5)\n",
            "Requirement already satisfied: nvidia-nvtx-cu12==12.4.127 in /usr/local/lib/python3.11/dist-packages (from torch) (12.4.127)\n",
            "Collecting nvidia-nvjitlink-cu12==12.4.127 (from torch)\n",
            "  Downloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl.metadata (1.5 kB)\n",
            "Requirement already satisfied: triton==3.2.0 in /usr/local/lib/python3.11/dist-packages (from torch) (3.2.0)\n",
            "Requirement already satisfied: sympy==1.13.1 in /usr/local/lib/python3.11/dist-packages (from torch) (1.13.1)\n",
            "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.11/dist-packages (from sympy==1.13.1->torch) (1.3.0)\n",
            "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.11/dist-packages (from jinja2->torch) (3.0.2)\n",
            "Downloading nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl (363.4 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m363.4/363.4 MB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (13.8 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m13.8/13.8 MB\u001b[0m \u001b[31m109.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (24.6 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m24.6/24.6 MB\u001b[0m \u001b[31m56.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (883 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m883.7/883.7 kB\u001b[0m \u001b[31m54.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl (664.8 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m664.8/664.8 MB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl (211.5 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m211.5/211.5 MB\u001b[0m \u001b[31m5.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl (56.3 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.3/56.3 MB\u001b[0m \u001b[31m17.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl (127.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m127.9/127.9 MB\u001b[0m \u001b[31m11.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl (207.5 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m207.5/207.5 MB\u001b[0m \u001b[31m5.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl (21.1 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.1/21.1 MB\u001b[0m \u001b[31m86.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hInstalling collected packages: nvidia-nvjitlink-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12\n",
            "  Attempting uninstall: nvidia-nvjitlink-cu12\n",
            "    Found existing installation: nvidia-nvjitlink-cu12 12.5.82\n",
            "    Uninstalling nvidia-nvjitlink-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-nvjitlink-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-curand-cu12\n",
            "    Found existing installation: nvidia-curand-cu12 10.3.6.82\n",
            "    Uninstalling nvidia-curand-cu12-10.3.6.82:\n",
            "      Successfully uninstalled nvidia-curand-cu12-10.3.6.82\n",
            "  Attempting uninstall: nvidia-cufft-cu12\n",
            "    Found existing installation: nvidia-cufft-cu12 11.2.3.61\n",
            "    Uninstalling nvidia-cufft-cu12-11.2.3.61:\n",
            "      Successfully uninstalled nvidia-cufft-cu12-11.2.3.61\n",
            "  Attempting uninstall: nvidia-cuda-runtime-cu12\n",
            "    Found existing installation: nvidia-cuda-runtime-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-runtime-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-runtime-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cuda-nvrtc-cu12\n",
            "    Found existing installation: nvidia-cuda-nvrtc-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-nvrtc-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-nvrtc-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cuda-cupti-cu12\n",
            "    Found existing installation: nvidia-cuda-cupti-cu12 12.5.82\n",
            "    Uninstalling nvidia-cuda-cupti-cu12-12.5.82:\n",
            "      Successfully uninstalled nvidia-cuda-cupti-cu12-12.5.82\n",
            "  Attempting uninstall: nvidia-cublas-cu12\n",
            "    Found existing installation: nvidia-cublas-cu12 12.5.3.2\n",
            "    Uninstalling nvidia-cublas-cu12-12.5.3.2:\n",
            "      Successfully uninstalled nvidia-cublas-cu12-12.5.3.2\n",
            "  Attempting uninstall: nvidia-cusparse-cu12\n",
            "    Found existing installation: nvidia-cusparse-cu12 12.5.1.3\n",
            "    Uninstalling nvidia-cusparse-cu12-12.5.1.3:\n",
            "      Successfully uninstalled nvidia-cusparse-cu12-12.5.1.3\n",
            "  Attempting uninstall: nvidia-cudnn-cu12\n",
            "    Found existing installation: nvidia-cudnn-cu12 9.3.0.75\n",
            "    Uninstalling nvidia-cudnn-cu12-9.3.0.75:\n",
            "      Successfully uninstalled nvidia-cudnn-cu12-9.3.0.75\n",
            "  Attempting uninstall: nvidia-cusolver-cu12\n",
            "    Found existing installation: nvidia-cusolver-cu12 11.6.3.83\n",
            "    Uninstalling nvidia-cusolver-cu12-11.6.3.83:\n",
            "      Successfully uninstalled nvidia-cusolver-cu12-11.6.3.83\n",
            "Successfully installed nvidia-cublas-cu12-12.4.5.8 nvidia-cuda-cupti-cu12-12.4.127 nvidia-cuda-nvrtc-cu12-12.4.127 nvidia-cuda-runtime-cu12-12.4.127 nvidia-cudnn-cu12-9.1.0.70 nvidia-cufft-cu12-11.2.1.3 nvidia-curand-cu12-10.3.5.147 nvidia-cusolver-cu12-11.6.1.9 nvidia-cusparse-cu12-12.3.1.170 nvidia-nvjitlink-cu12-12.4.127\n"
          ]
        }
      ],
      "source": [
        "# %pip (not !pip) guarantees the install targets this kernel's environment\n",
        "%pip install torch"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Rgu8TkIKSnca",
        "outputId": "ff21971e-892c-463a-a484-86df56ad8f7d"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.11/dist-packages/torch/functional.py:539: UserWarning: torch.meshgrid: in an upcoming release, it will be required to pass the indexing argument. (Triggered internally at /pytorch/aten/src/ATen/native/TensorShape.cpp:3637.)\n",
            "  return _VF.meshgrid(tensors, **kwargs)  # type: ignore[attr-defined]\n",
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [1/10], Loss: 4.2009\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [1/10], Loss: 4.2009, Accuracy: 14.11%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                           "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [2/10], Loss: 3.6278\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [2/10], Loss: 3.6278, Accuracy: 23.50%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [3/10], Loss: 3.2517\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [3/10], Loss: 3.2517, Accuracy: 28.98%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [4/10], Loss: 3.0167\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [4/10], Loss: 3.0167, Accuracy: 32.01%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [5/10], Loss: 2.8200\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [5/10], Loss: 2.8200, Accuracy: 37.85%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [6/10], Loss: 2.6659\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [6/10], Loss: 2.6659, Accuracy: 40.60%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [7/10], Loss: 2.5157\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [7/10], Loss: 2.5157, Accuracy: 43.22%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [8/10], Loss: 2.4007\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [8/10], Loss: 2.4007, Accuracy: 45.66%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "                                                                            "
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [9/10], Loss: 2.3146\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch [9/10], Loss: 2.3146, Accuracy: 46.95%\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Epoch [10/10]:  10%|▉         | 154/1563 [00:30<04:47,  4.91it/s, loss=2.03]"
          ]
        }
      ],
      "source": [
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.optim as optim\n",
        "from torchvision import datasets, transforms\n",
        "from torch.utils.data import DataLoader\n",
        "from tqdm import tqdm\n",
        "import json\n",
        "\n",
        "\n",
        "# Training device: use GPU when available, otherwise fall back to CPU\n",
        "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
        "\n",
        "# Hyperparameters\n",
        "batch_size = 32\n",
        "epochs = 10\n",
        "learning_rate = 3e-4\n",
        "weight_decay = 1e-4\n",
        "save_path = 'training_log.json'\n",
        "\n",
        "# Data augmentation and preprocessing\n",
        "# NOTE(review): mean/std below look like CIFAR-100 channel statistics — confirm source\n",
        "train_transform = transforms.Compose([\n",
        "    transforms.Resize((128, 128)),               # upscale first\n",
        "    transforms.RandomCrop(112, padding=4),       # then random-crop to the model input size\n",
        "    transforms.RandomHorizontalFlip(),\n",
        "    transforms.AutoAugment(policy=transforms.AutoAugmentPolicy.CIFAR10),\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize(mean=[0.507, 0.487, 0.441], std=[0.267, 0.256, 0.276])\n",
        "])\n",
        "\n",
        "test_transform = transforms.Compose([\n",
        "    transforms.Resize((112, 112)),\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize(mean=[0.507, 0.487, 0.441], std=[0.267, 0.256, 0.276])\n",
        "])\n",
        "\n",
        "# Load the dataset (downloaded on first run)\n",
        "train_dataset = datasets.CIFAR100(root='./data', train=True, download=True, transform=train_transform)\n",
        "test_dataset = datasets.CIFAR100(root='./data', train=False, download=True, transform=test_transform)\n",
        "\n",
        "train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=2)\n",
        "test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=2)\n",
        "\n",
        "# Initialize the model (CoAtNet is defined in an earlier cell)\n",
        "model = CoAtNet(112, 112, 3, config='coatnet-0', num_classes=100)\n",
        "model = model.to(device)\n",
        "\n",
        "# Loss function, optimizer and learning-rate schedule\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "optimizer = optim.AdamW(model.parameters(), lr=learning_rate, weight_decay=weight_decay)\n",
        "scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=epochs)\n",
        "\n",
        "# Training log: per-epoch average train loss and test accuracy\n",
        "history = {\"train_loss\": [], \"test_acc\": []}\n",
        "\n",
        "for epoch in range(epochs):\n",
        "    # ---- training phase ----\n",
        "    model.train()\n",
        "    running_loss = 0.0\n",
        "    progress_bar = tqdm(train_loader, desc=f\"Epoch [{epoch+1}/{epochs}]\", leave=False)\n",
        "\n",
        "    for images, labels in progress_bar:\n",
        "        images, labels = images.to(device), labels.to(device)\n",
        "        optimizer.zero_grad()\n",
        "        outputs = model(images)\n",
        "        loss = criterion(outputs, labels)\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "        running_loss += loss.item()\n",
        "        progress_bar.set_postfix(loss=loss.item())\n",
        "\n",
        "    scheduler.step()  # one cosine-annealing step per epoch\n",
        "\n",
        "    avg_loss = running_loss / len(train_loader)\n",
        "    history[\"train_loss\"].append(avg_loss)\n",
        "\n",
        "    # ---- evaluation phase ----\n",
        "    model.eval()\n",
        "    correct = 0\n",
        "    total = 0\n",
        "    with torch.no_grad():\n",
        "        for images, labels in test_loader:\n",
        "            images, labels = images.to(device), labels.to(device)\n",
        "            outputs = model(images)\n",
        "            _, predicted = torch.max(outputs, 1)\n",
        "            total += labels.size(0)\n",
        "            correct += (predicted == labels).sum().item()\n",
        "\n",
        "    acc = 100 * correct / total\n",
        "    history[\"test_acc\"].append(acc)\n",
        "    # Single per-epoch summary (previously the loss was printed twice per epoch)\n",
        "    print(f\"Epoch [{epoch+1}/{epochs}], Loss: {avg_loss:.4f}, Accuracy: {acc:.2f}%\")\n",
        "\n",
        "    # Persist the log every epoch so progress survives an interrupted run\n",
        "    with open(save_path, 'w') as f:\n",
        "        json.dump(history, f)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 663
        },
        "id": "0dt5qbjY_FUF",
        "outputId": "b505a1a9-536a-4679-8975-cf070b67d304"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "100%|██████████| 169M/169M [00:12<00:00, 13.2MB/s]\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Epoch 001: Train Acc: 9.15% | Test Acc: 15.97%\n",
            "Epoch 002: Train Acc: 19.80% | Test Acc: 25.14%\n",
            "Epoch 003: Train Acc: 30.20% | Test Acc: 30.86%\n",
            "Epoch 004: Train Acc: 39.13% | Test Acc: 35.97%\n",
            "Epoch 005: Train Acc: 45.78% | Test Acc: 41.49%\n",
            "Epoch 006: Train Acc: 49.86% | Test Acc: 42.84%\n",
            "Epoch 007: Train Acc: 52.63% | Test Acc: 48.72%\n",
            "Epoch 008: Train Acc: 54.84% | Test Acc: 49.38%\n",
            "Epoch 009: Train Acc: 56.54% | Test Acc: 49.71%\n",
            "Epoch 010: Train Acc: 58.50% | Test Acc: 46.69%\n"
          ]
        },
        {
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAHHCAYAAACle7JuAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAd9BJREFUeJzt3Xd0FFUbx/HvphdSCCGBQAi99yqC9F6UJmIDsSsWQFRAEbCBBcSCIEixgCgoiK+NooIiCIQivfeS0NJJstmd94+FlRBKAtnsJvl9ztlD5s7s3GdzA3mY20yGYRiIiIiI5ENuzg5ARERE5EYpkREREZF8S4mMiIiI5FtKZERERCTfUiIjIiIi+ZYSGREREcm3lMiIiIhIvqVERkRERPItJTIiIiKSbymRERERkXxLiYzITZg9ezYmk8n+8vDwoFSpUjzwwAMcO3bMYfWOGTMGk8lEeHg4KSkpWc6XLVuWbt263dC9P/74Y2bPnn3Fc2+88Qa333474eHhmEwmxowZc9X7LFu2jNatWxMaGkpwcDCNGzfmiy++uKGYHCk1NZX33nuPJk2aEBQUhI+PD5UrV+app55i9+7d9usufs9Pnz5tL3vggQcytf+lr19++SVTPX379sVkMvHiiy9eMY4//vgj0/vd3d0JCwujT58+7NixI1ufJSkpidGjR9OpUydCQkIwmUxXbUuAHTt20KlTJ4oUKUJISAj3338/p06dynKd1Wrl7bffply5cvj4+FC7dm2++uqrbMUk4mgezg5ApCB49dVXKVeuHKmpqaxZs4bZs2fz119/sXXrVnx8fBxWb2xsLFOmTOG5557LtXt+/PHHhIaG8sADD2Q59/LLL1OiRAnq1avHr7/+etV7LF68mB49etC0aVN7AvDNN9/Qv39/Tp8+zZAhQ3It3ptx+vRpOnXqRHR0NN26deOee+6hSJEi7Nq1i3nz5jFt2jTS09OveQ9vb28+/fTTLOV16tSxf52QkMAPP/xA2bJl+eqrrxg/fjwmk+mK93vmmWdo1KgRZrOZf//9l6lTp/LHH3+wdetWSpQocd3P8+qrr1KmTBnq1KnDH3/8cdVrjx49SosWLQgKCuLNN98kKSmJd999ly1btrB27Vq8vLzs17700kuMHz+eRx55hEaNGvH9999zzz33YDKZ6Nev3zVjEnE4Q0Ru2KxZswzAWLduXabyF1980QCMr7/+2iH1jh492gCMunXrGuHh4UZKSkqm81FRUUbXrl1v6N41atQwWrZsecVzBw4cMAzDME6dOmUAxujRo694Xfv27Y2IiAgjNTXVXmY2m40KFSoYtWvXvqG4HKFr166Gm5ubsWDBgiznUlNTjeeee85+fPF7furUKXvZgAEDDH9//+vWM3PmTMPT09P47bffDMD4448/slzz+++/G4Axf/78TOVTpkwxAOOtt966bj2pqanGiRMnDMMwjHXr1hmAMWvWrCte+8QTTxi+vr7GoUOH7GVLly41AOOTTz6xlx09etTw9PQ0Bg0aZC+zWq3GbbfdZpQuXdrIyMi4blwijqSuJREHuO222wDYt29fpvKdO3fSp08fQkJC8PHxoWHDhixevDjTNWazmbFjx1KpUiV8fHwoVqwYzZs3Z+nSpVnqeeWVV4iJiWHKlCnXjclqtTJp0iRq1KiBj48P4eHhPPbYY5w7d85+TdmyZdm2bRsrVqywd3G0atUq0/nsSEhIoGjRonh7e9vLPDw8CA0NxdfX97rvz8jI4LXXXqNChQp4e3tTtmxZRo4cSVpaWqbrLnah/fXXXzRu3BgfHx/Kly/P559/ft06/vnnH3788UceeughevfuneW8t7c37777bjY+7fXNmTOH9u3b07p1a6pVq8acOXOy/d6r/Sxdibe393Wf2lz07bff0q1bN8qUKWMva9euHZUrV+abb76xl33//feYzWaefPJJe5nJZOKJJ57g6NGjrF69Or
sfRcQhlMiIOMDBgwcBKFq0qL1s27Zt3HLLLezYsYPhw4czYcIE/P396dGjBwsXLrRfN2bMGMaOHUvr1q356KOPeOmllyhTpgwbNmzIUs9tt91GmzZtePvttzl//vw1Y3rsscd4/vnnadasGe+//z4DBw5kzpw5dOzYEbPZDMCkSZMoXbo0VatW5YsvvuCLL77gpZdeyvHnb9WqFdu2bWPUqFHs3buXffv28dprr7F+/XpeeOGF677/4Ycf5pVXXqF+/fq89957tGzZknHjxl2xG2Pv3r306dOH9u3bM2HCBIoWLcoDDzzAtm3brlnHxQTy/vvvz/Hnu9zp06czveLj4+3njh8/zu+//87dd98NwN13382CBQuu22V10ZV+lm7WsWPHiI2NpWHDhlnONW7cmI0bN9qPN27ciL+/P9WqVcty3cXzIk7l7EdCIvnZxa6lZcuWGadOnTKOHDliLFiwwChevLjh7e1tHDlyxH5t27ZtjVq1amXqbrFarcatt95qVKpUyV5Wp06d63YLXdrNsWLFCgMwJk6caD9/edfSn3/+aQDGnDlzMt3nl19+yVJ+ra6li67XtZSUlGT07dvXMJlMBmAAhp+fn7Fo0aJr3tcwDGPTpk0GYDz88MOZyocNG2YAxm+//ZbpcwLGypUr7WWxsbGGt7d3pm6hK+nZs6cBGOfOnbtuTIZx9a6li5/v0tel3793333X8PX1NRISEgzDMIzdu3cbgLFw4cJM97/YtTRz5kzj1KlTxvHjx41ffvnFqFixomEymYy1a9dmK86LrtW1dPHc559/nuXc888/bwD2n9OuXbsa5cuXz3JdcnKyARjDhw/PUVwiuU1PZERyQbt27ShevDiRkZH06dMHf39/Fi9eTOnSpQE4e/Ysv/32G3379iUxMdH+P/czZ87QsWNH9uzZY5/lFBwczLZt29izZ0+26m7RogWtW7e+5lOZ+fPnExQURPv27TM9OWjQoAFFihTh999/z51vxAXe3t5UrlyZPn368NVXX/Hll1/SsGFD7rvvPtasWXPN9/70008ADB06NFP5xQHNP/74Y6by6tWr27tfAIoXL06VKlXYv3//NetJSEgAICAgIHsf6ip8fHxYunRppteECRPs5+fMmUPXrl3t9VSqVIkGDRpctXvpwQcfpHjx4kRERNCpUyfi4+P54osvaNSo0U3FeamLPyeXdv1d+nkuveb8+fPZuk7EWTRrSSQXTJ48mcqVKxMfH8/MmTNZuXJlpn/89+7di2EYjBo1ilGjRl3xHrGxsZQqVYpXX32VO+64g8qVK1OzZk06derE/fffT+3ata9a/5gxY2jZsiVTp0694oygPXv2EB8fT1hY2FXrzk1PPfUUa9asYcOGDbi52f6/1LdvX2rUqMGzzz7LP//8c9X3Hjp0CDc3NypWrJipvESJEgQHB3Po0KFM5ZeO8bioaNGimcb+XElgYCAAiYmJBAcHZ+djXZG7uzvt2rW74rkdO3awceNG+vfvz969e+3lrVq1YvLkySQkJNjjuOiVV17htttuIykpiYULFzJv3jz79xDAYrFkmSIdEhKSaZbR9Vwcp3T5mCOwTUe/9BpfX99sXSfiLEpkRHJB48aN7eMNevToQfPmzbnnnnvYtWsXRYoUwWq1AjBs2DA6dux4xXtc/MXdokUL9u3bx/fff8+SJUv49NNPee+995g6dSoPP/zwFd/bokULWrVqxdtvv83jjz+e5bzVaiUsLOyqTwGKFy+e4898Nenp6cyYMYMXXngh0y9gT09POnfuzEcffUR6evp1f/FebXry5dzd3a9YbhjGNd9XtWpVALZs2ZLpiU5u+vLLLwEYMmTIFRPMb7/9loEDB2Yqq1Wrlj0x6tGjBykpKTzyyCM0b96cyMhIjhw5Qrly5TK95/fff880KPt6SpYsCcCJEyeynDtx4gQhISH2RLxkyZL8/vvvGIaRqU0uvjciIiLb9Yo4ghIZkVzm7u7OuHHj7I
N1hw8fTvny5QHbL/Or/e/9UiEhIQwcOJCBAweSlJREixYtGDNmzFUTGbA9lWnVqhWffPJJlnMVKlRg2bJlNGvW7Lr/g85uAnE1Z86cISMjA4vFkuWc2WzGarVe8dxFUVFRWK1W9uzZk2mAaUxMDHFxcURFRd1UfBd1796dcePG8eWXXzokkTEMg7lz59K6detMM34ueu2115gzZ06WROZy48ePZ+HChbzxxhtMnTqVEiVKZJnBdumaNdlRqlQpihcvzvr167OcW7t2LXXr1rUf161bl08//ZQdO3ZQvXp1e/nFp2qXXiviDBojI+IArVq1onHjxkyaNInU1FTCwsLsScaV/hd8aVfBmTNnMp0rUqQIFStWvOLj/Uu1bNmSVq1a8dZbb9kf+1/Ut29fLBYLr732Wpb3ZWRkEBcXZz/29/fPdJxTYWFhBAcHs3Dhwkwzc5KSkvjhhx+oWrXqNZOpLl26ALYZVJeaOHEiAF27dr3h2C7VtGlTOnXqxKeffsqiRYuynE9PT2fYsGE3fP9Vq1Zx8OBBBg4cSJ8+fbK87rrrLn7//XeOHz9+zftUqFCB3r17M3v2bE6ePImPjw/t2rXL9LqRGU29e/fmf//7H0eOHLGXLV++nN27d3PnnXfay+644w48PT35+OOP7WWGYTB16lRKlSrFrbfemuO6RXKTnsiIOMjzzz/PnXfeyezZs3n88ceZPHkyzZs3p1atWjzyyCOUL1+emJgYVq9ezdGjR9m8eTNgG7zaqlUrGjRoQEhICOvXr2fBggU89dRT161z9OjRtG7dOkt5y5Yteeyxxxg3bhybNm2iQ4cOeHp6smfPHubPn8/7779Pnz59AGjQoAFTpkzh9ddfp2LFioSFhdGmTRsAvvjiCw4dOmTfFmHlypW8/vrrgG0ac1RUFO7u7gwbNoyXX36ZW265hf79+2OxWJgxYwZHjx61d7dcTZ06dRgwYADTpk0jLi6Oli1bsnbtWj777DN69Ohxxc93oz7//HM6dOhAr1696N69O23btsXf3589e/Ywb948Tpw4ccNrycyZMwd3d/erJl633347L730EvPmzcsysPlyzz//PN988w2TJk1i/Pjx17z2o48+Ii4uzp4g/fDDDxw9ehSAp59+mqCgIABGjhzJ/Pnzad26Nc8++yxJSUm888471KpVK9NTotKlSzN48GDeeecdzGYzjRo1YtGiRfz555/2zyjiVE6dMyWSz11tZV/DMAyLxWJUqFDBqFChgn3103379hn9+/c3SpQoYXh6ehqlSpUyunXrlmll2ddff91o3LixERwcbPj6+hpVq1Y13njjDSM9Pd1+zZWmAl/UsmVLA7jiFO5p06YZDRo0MHx9fY2AgACjVq1axgsvvGAcP37cfs3JkyeNrl27GgEBAVmmEl+895Vev//+e6a65syZk+lzNGnS5Ior6F6J2Ww2xo4da5QrV87w9PQ0IiMjjREjRmSaum4YV1/BuGXLltedQn5RSkqK8e677xqNGjUyihQpYnh5eRmVKlUynn76aWPv3r3263Kysm96erpRrFgx47bbbrtm3eXKlTPq1atnGMbVV/a9qFWrVkZgYKARFxd3zXtenJJ+pdfFlZkv2rp1q9GhQwfDz8/PCA4ONu69917j5MmTWe5psViMN99804iKijK8vLyMGjVqGF9++eU14xDJKybDuM6IOBEREREXpTEyIiIikm8pkREREZF8S4mMiIiI5FtKZERERCTfUiIjIiIi+ZYSGREREcm3CvyCeFarlePHjxMQEHDTS6+LiIhI3jAMg8TERCIiIjLt23a5Ap/IHD9+nMjISGeHISIiIjfgyJEjlC5d+qrnC3wiExAQANi+EYGBgbl2X7PZzJIlS+xLvYvzqU1ci9rDtag9XIva4/oSEhKIjIy0/x6/GqcnMseOHePFF1/k559/JiUlhYoVKzJr1iwaNmwI2B4tjR49munTpxMXF0ezZs2YMmUKlSpVytb9L3
YnBQYG5noi4+fnR2BgoH4IXYTaxLWoPVyL2sO1qD2y73rDQpw62PfcuXM0a9YMT09Pfv75Z7Zv386ECRMy7eT69ttv88EHHzB16lT++ecf/P396dixY5bdfUVERKTwceoTmbfeeovIyEhmzZplLytXrpz9a8MwmDRpEi+//DJ33HEHYNutNjw8nEWLFtGvX788j1lERERch1OfyCxevJiGDRty5513EhYWRr169Zg+fbr9/IEDBzh58iTt2rWzlwUFBdGkSRNWr17tjJBFRETEhTj1icz+/fuZMmUKQ4cOZeTIkaxbt45nnnkGLy8vBgwYwMmTJwEIDw/P9L7w8HD7uculpaWRlpZmP05ISABs/ZFms/mqsVgsFjIyMsjuZuAZGRl4eHiQlJSEh4fThxoVKiaTCQ8PD9zd3TOVX2zfa7Wz5B21h2tRe7gWtcf1Zfd749TfwFarlYYNG/Lmm28CUK9ePbZu3crUqVMZMGDADd1z3LhxjB07Nkv5kiVL8PPzu+J7AgICCAgIuOY89SspUaIE+/fvv6E45eZYrVYSExNJTEzMcm7p0qVOiEiuRu3hWtQerkXtcXUpKSnZus6piUzJkiWpXr16prJq1arx7bffArZEASAmJoaSJUvar4mJiaFu3bpXvOeIESMYOnSo/fji9K0OHTpccdZSTEwMCQkJFC9eHD8/v2wvmmcYBsnJyfj7+2uhvTxmGAYpKSmcOnWKypUr25/Ymc1mli5dSvv27TULwAWoPVyL2sO1qD2u72KPyvU4NZFp1qwZu3btylS2e/duoqKiANvA3xIlSrB8+XJ74pKQkMA///zDE088ccV7ent74+3tnaXc09Mzyw+LxWIhMTGR8PBwihUrlqPYrVYrZrMZX1/fHD/JkZvn7++Pm5sbsbGxlCxZMlM305XaWpxH7eFa1B6uRe1xddn9vjg1kRkyZAi33norb775Jn379mXt2rVMmzaNadOmAbaxEIMHD+b111+nUqVKlCtXjlGjRhEREUGPHj1uuv6L/W9X63IS13ax3cxmc5bxMiIiUjg4NZFp1KgRCxcuZMSIEbz66quUK1eOSZMmce+999qveeGFF0hOTubRRx8lLi6O5s2b88svv+Dj45NrcahrKH9Su4mIiNOn23Tr1o1u3bpd9bzJZOLVV1/l1VdfzcOoREREJD/Q4A4BoGzZskyaNMnZYYiIiOSIEpl8xmQyXfM1ZsyYG7rvunXrePTRR3Mlxq+++gp3d3cGDRqUK/cTERG5GiUy+cyJEyfsr0mTJhEYGJipbNiwYfZrDcMgIyMjW/e9OP08N8yYMYMXXniBr776SntiiYgUYFarwdZj8SSlZe93jSMokclnSpQoYX8FBQVhMpnsxzt37iQgIICff/6ZBg0a4O3tzV9//cW+ffu44447CA8Pp0iRIjRq1Ihly5Zluu/lXUsmk4lPP/2Unj174ufnR6VKlVi8ePF14ztw4AB///03w4cPp3Llynz33XdZrpk5cyY1atTA29ubkiVL8tRTT9nPxcXF8dhjjxEeHo6Pjw81a9bkf//7341/w0REJFcdPpPC3H8OM2jOBhq8vpRuH/7Fn7tPOS0epw/2dSWGYXDebMnWtVarlfPpFjzSM3JlHRlfT/dcm4UzfPhw3n33XcqXL0/RokU5cuQIXbp04Y033sDb25vPP/+c7t27s2vXLsqUKXPV+4wdO5a3336bd955hw8//JB7772XQ4cOERISctX3zJo1i65duxIUFMR9993HjBkzuOeee+znL25JMX78eDp37kx8fDyrVq0CbN/Tzp07k5iYyJdffkmFChXYvn27plaLiDjRueR0/t53hr/2nmbV3tMcPpt5xd0i3h6cTkq7yrsdT4nMJc6bLVR/5Ven1L391Y74eeVOc7z66qu0b9/efhwSEkKdOnXsx6+99hoLFy5k8eLFmZ6GXO6BBx7g7rvvBuDNN9/kgw8+YO3atXTq1OmK11utVmbPns2HH3
4IQL9+/Xjuuec4cOCAfVfz119/neeee45nn33W/r5GjRoBsGzZMtauXcuOHTuoXLkyAOXLl7+Rb4GIiNygVLOFdQfP2hOXbccTuHQbQg83E/XLFKVZxVCaVypG7dLBeLo7r4NHiUwB1LBhw0zHSUlJjBkzhh9//JETJ06QkZHB+fPnOXz48DXvU7t2bfvX/v7+BAYGEhsbe9Xrly5dSnJyMl26dAEgNDSU9u3bM3PmTF577TViY2M5fvw4bdu2veL7N23aROnSpe1JjIiIOJ7FarDteLw9cVl38BzpGdZM11QJD6B5pVCaVwylcbkQ/L1dJ31wnUhcgK+nO9tf7Zita61WK4kJiQQE5nyzyavVnVv8/f0zHQ8bNoylS5fy7rvvUrFiRXx9fenTpw/p6enXvM/ly0ObTCasVutVrrYN8j179iy+vr72MqvVyr///svYsWMzlV/J9c6LiMjNMwyDQ2dS7InL3/vOEH8+807TJQJ97InLrRWLERaQe4vQ5jYlMpcwmUzZ7t6xWq1keLnj5+Xh8nstrVq1igceeICePXsCtic0Bw8ezNU6zpw5w/fff8+8efOoUaOGvdxisdC8eXOWLFlCp06dKFu2LMuXL6d169ZZ7lG7dm2OHj3K7t279VRGRCQXnUlKY9W+M6zac5q/9p7mWNz5TOcDvD1oWqEYzSuF0qxiKOVD88+GyEpkCoFKlSrx3Xff0b17d0wmE6NGjbrmk5Ub8cUXX1CsWDH69u2b5Ye/S5cuzJgxg06dOjFmzBgef/xxwsLC7AN7V61axdNPP03Lli1p0aIFvXv3ZuLEiVSsWJGdO3diMpmuOi5HRESyOp9uYe3Bs6zae5q/9pxm+4nMO0l7utvGuTSvGEqzSqHULhWEhxPHudwMJTKFwMSJE3nwwQe59dZbCQ0N5cUXX8z29ujZNXPmTHr27HnFDL53797cf//9nD59mgEDBpCamsp7773HsGHDCA0NpU+fPvZrv/32W4YNG8bdd99NcnIyFStWZPz48bkaq4hIQWOxGvx7NM6WuOw9zYZDcaRbMv+HtVrJQJpXLEazC+NccmuCibOZDOPSscgFT0JCAkFBQcTHxxMYGJjpXGpqqn1GTU43obRarSQkJBAYGOjyXUsF1eXtZzab+emnn+jSpUu2t38Xx1F7uBa1h2u52fYwDIMDp5Pticvf+86QmJp5UbqIIB97V9GtFUIpHuCdW+HniWv9/r5UwUjHRERECrhTiWn8vc/WVbRq72mOx2deOT3Qx4NbK9i6ippXDKVsMb98M87lZiiRERERcUHJaRmsPXjWnrjsPJmY6byXuxsNooran7rUKhWEu1vBT1wup0RGRETEBWRYrGw+Gm/vLtp4+BxmS+bRHzUiAm0DdCuG0qhsCL5eWvlciYyIiIgTGAbsO5XMmgPn+GvvGf7Zf4bEyzZfLBXsy232cS7FKFYkf41zyQtKZERERPKAxWqw62Qi0YfOsvbAGVbudCd+zapM1wT5etLswsyi5hVDKRNSOMa53AwlMiIiIg6Qkp7BpsNxrD90jvWHzrHx0LnLnriY8PJwo1FZ275Ft1UsTvWIwEI5zuVmKJERERHJBTEJqaw/eI71h84Sfegc244nYLFmHuPi7+VOvTJFqRcZiOXkHp64sy0Bfq67/H9+oERGREQkh6xWg92xiaw/eI7oQ+dYd/AsR8+dz3JdySAfGpYNoWFUURpEFaVqiQA83N0urCOzG59c3GevsFIiIyIich3n0y1sOhJH9KGzrDt4jg2Hz2VZgM7NBFVLBNKwrC1paVg2hFLB2gzX0ZTIiIiIXCY2MZXog+dYd/Ac0YfOsu14AhmXdRP5eblTr0wwDaJsT1zqlQkmwEerJuc1JTL5zPVGr48ePZoxY8bc8L0XLlxIjx49snX9Y489xqeffsq8efO48847b6hOERFns1oN9p5KYt3Bs0QftA3MPXw2Jct1JQJ9aFC2KA2jitIwKoRqJQPy7UaLBY
kSmXzmxIkT9q+//vprXnnlFXbt2mUvK1KkSJ7EkZKSwrx583jhhReYOXOmEhkRyTdSzRe7ic6x/qBtYG7CZd1EJhNUCQ+gYVlb0tKwbFFKBftqKrQLUiKTz5QoUcL+dVBQECaTKVPZp59+yoQJEzhw4ABly5blmWee4cknnwQgPT2doUOH8u2333Lu3DnCw8N5/PHHGTFiBGXLlgWgZ8+eAERFRXHw4MGrxjF//nyqV6/O8OHDiYiI4MiRI0RGRtrPp6Wl8corrzB37lxiY2OJjIxkxIgRPPTQQwBs27aNF198kZUrV2IYBnXr1mX27NlUqFAht75VIiKAbY+i6ENnL8woOse24/FZVsz19XSnbmSwLXEpG0K9MsEEqpsoX1AicynDAHPWx4lXZLXark13h9zY/drTz/ZfgJswZ84cXnnlFT766CPq1avHxo0beeSRR/D392fAgAF88MEHLF68mG+++YYyZcpw5MgRjhw5AsC6desICwtj1qxZdOrUCXf3a4+knzFjBvfddx9BQUF07tyZ2bNnM2rUKPv5/v37s3r1aj744APq1KnDgQMHOH36NADHjh2jRYsWtGrVit9++43AwEBWrVpFRkbG1aoTEckWq9Vg36kk29otF8a3HDyT9d/1sADvTE9bqpUMxFPdRPmSEplLmVPgzYhsXeoGBOdm3SOPg5f/Td1i9OjRTJgwgV69egFQrlw5tm/fzieffMKAAQM4fPgwlSpVonnz5phMJqKiouzvLV68OADBwcGZnvBcyZ49e1izZg3fffcdAPfddx9Dhw7l5ZdfxmQysXv3br755huWLl1Ku3btAChfvrz9/ZMnTyYoKIh58+bZt6+vXLnyTX12ESmcUs0W/j0ab1u75eA5og+fIy7FnOkakwkqh13oJrqQvJQuqm6igkKJTAGRnJzMvn37eOihh3jkkUfs5RkZGQQFBQHwwAMP0L59e6pUqUKnTp3o1q0bHTp0yHFdM2fOpGPHjoSGhgLQpUsXHnroIX777Tfatm3Lpk2bcHd3p2XLlld8/6ZNm7jtttvsSYyISHbFpaTzz4Gz9rVbth7L2k3k4+lm6yaKCqFB2aLUL1OUIF/9e1NQKZG5lKef7clINlitVhISEwkMCMAtt7qWbkJSUhIA06dPp0mTJpnOXewmql+/PgcOHODnn39m2bJl9O3bl3bt2rFgwYJs12OxWPjss884efIkHh4emcpnzpxJ27Zt8fW99roJ1zsvInKpfaeSWL4jhmU7Yok+dC7LarnFA7ztC841LBtCjQh1ExUmSmQuZTJlv3vHagVPi+363EhkblJ4eDgRERHs37+fe++996rXBQYGctddd3HXXXfRp08fOnXqxNmzZwkJCcHT0xOLxXLNen766ScSExPZuHFjpnE0W7duZeDAgcTFxVGrVi2sVisrVqywdy1dqnbt2nz22WeYzWY9lRGRLDIsVtYdPMfyHTEs3xnLgdPJmc5XDCtCowur5TYsW1QbKxZySmQKkLFjx/LMM88QFBREp06dSEtLY/369Zw7d46hQ4cyceJESpYsSb169XBzc2P+/PmUKFGC4OBgAMqWLcvy5ctp1qwZ3t7eFC1aNEsdM2bMoGvXrtSpUydTefXq1RkyZAhz5sxh0KBBDBgwgAcffNA+2PfQoUPExsbSt29fnnrqKT788EP69evHiBEjCAoKYs2aNTRu3JgqVarkxbdKRFxM/HkzK3afYvmOGP7YdYr48/+Nc/F0N3FL+WK0rRpG22rhRIbc3BNsKViUyBQgDz/8MH5+frzzzjs8//zz+Pv7U6tWLQYPHgxAQEAAb7/9Nnv27MHd3Z1GjRrx008/2bvGJkyYwNChQ5k+fTqlSpXKMv06JiaGH3/8kblz52ap283NjZ49ezJjxgwGDRrElClTGDlyJE8++SRnzpyhTJkyjBw5EoBixYrx22+/8fzzz9OyZUvc3d2pW7cuzZo1c+j3R0Rcy8HTySzbEcPyHbGsO3
g208q5Rf08aV01jHbVwrmtUqhWzJWrMhmGYVz/svwrISGBoKAg4uPjCQwMzHQuNTWVAwcOUK5cOXx8crb7qNVqJSEhgcDAwNwZIyM5dnn72TZh+4kuXbqoy8oFqD1ciyu0R4bFyobDcRfGu8Sw71TWLqO21WzJS/0yRXF3K7jdRa7QHq7uWr+/L6UnMiIi4jCJqWZW7j7N8h0x/L4rlnOXTI32cDPRuFwIbauF065aGFHFbm4JCimclMiIiEiuOnI2xd5l9M+BM5mmRwf5etK6SnHaVgunReXimhYtN02JjIiI3BSL1WDTkXMs2xHL8h0x7I5JynS+fHF/2lULp23VMBpEFdVGi5KrlMiIiEiOJadl8OeeUyzbEcvvO2M5k5xuP+fuZqJhVFFb8lItjPLF82YzWymclMgABXy8c4GldhPJW8fiztsXpluz7wzpFqv9XICPB62qhNGuWhgtKxcn2M/LiZFKYVKoE5mLI8VTUlK02mw+lJJi2whOI/5FHMNqNdh8NI7lO2JZtiOGnScTM50vW8yPtheeujQqG6LVdMUpCnUi4+7uTnBwMLGxsQD4+WV/dUir1Up6ejqpqamafp3HDMMgJSWF2NhYgoODr7tTt4hkX0p6Bn/tOc3yHbEs3xnL6aQ0+zk3EzSwdxmFU6G4v1bUFacr1IkMYN/p+WIyk12GYXD+/Hl8fbWDqrNkZ6duEbm+E/HnbYnLjhhW7TtDesZ/XUZFvD1oWbk4bauF0bpKGEX91WUkrqXQJzImk4mSJUsSFhaG2Wy+/hsuMJvNrFy5khYtWqhrwwk8PT31JEbkBlmtBluPx9tnGW07npDpfGSIL22rhtOuWjiNy4Xg5aGnzuK6Cn0ic5G7u3uOfjG6u7uTkZGBj4+PEhkRcXmpZgtbz5n4+/vt/LH7FDEJ/3UZmUxQLzKYdtVtyUulsCJ60iz5hhIZEZECyjAMNhw+x4Loo/yw+QRJae7AUQD8vdy5rdKFLqOqYYQW8XZusCI3SImMiEgBcyL+PN9tOMaC6KMcOP3ffkbBXgbd6pWhfY2S3FI+BG8Pdc9K/qdERkSkAEg1W/h120kWRB/lr72nubjMkp+XO11qlaRHnRKc2r6Gbl2rqTtcChQlMiIi+ZRhGGw8Eneh6+g4iakZ9nNNyoXQp0FputQqib+3h2235R1ODFbEQZTIiIjkMyfjU1m48RgLoo+w79R/XUelgn3p3aA0feqXpkwxPydGKJJ3lMiIiOQDqWYLS7fHsCD6KH/uOYX1QteRr6c7nWuVoE+D0txSrhhubpptJIWLEhkRERdlGAabj8azIPoIizcdJ+GSrqPGZS90HdUuSRFv/VMuhZd++kVEXExswsWuo6PsiU2yl0cE+dC7QWl61y9N2VB/J0Yo4jqUyIiIuIC0DAvLd8Qyf/0RVuz+r+vI28ONzjVLcGfDSJqWV9eRyOWUyIiIOIlhGGw9lsD86CN8v+k48ef/2yalQVRR7rzQdRToo+nSIlejREZEJI/FJqby/cbjLIg+yq6YRHt5ySAfetUvRe/6pSlfvIgTIxTJP5TIiIjkgfQMK7/ttM06+n3XKSwX+o68PdzoWMM266hZxVDc1XUkkiNKZEREHMQwDLYdT2BB9FG+33SMcyn/dR3VKxNMnwal6VY7giBfdR2J3CglMiIiuex0UhqLLsw62nnyv66j8EBvetW3zTqqGKauI5HcoERGRCQXmC1WftsZa+s62hlLxoWuIy8PNzpUD6dPg9LcVqm4uo5EcplTE5kxY8YwduzYTGVVqlRh586dAKSmpvLcc88xb9480tLS6NixIx9//DHh4eHOCFdEJIvtl3QdnUlOt5fXibR1Hd1eO4IgP3UdiTiK05/I1KhRg2XLltmPPTz+C2nIkCH8+OOPzJ8/n6CgIJ566il69erFqlWrnBGqiAgAZ5PT+X7TMeavP8r2Ewn28uIB3vSqV4o+DUpTKTzAiRGKFB5OT2Q8PDwoUaJElvL4+H
hmzJjB3LlzadOmDQCzZs2iWrVqrFmzhltuuSWvQxWRQsxssbJi1ynmRx/ht52xmC0Xuo7c3WhXPYw7G0RyW6VQPNzdnBypSOHi9ERmz549RERE4OPjQ9OmTRk3bhxlypQhOjoas9lMu3bt7NdWrVqVMmXKsHr16qsmMmlpaaSlpdmPExJs/1sym82YzeYrvudGXLxXbt5Tbo7axLUUlPbYG5vE/OhjfL/5RKauo1qlAulVL4KutUpQ1M8LAMNqwWy1OCvUayoo7VFQqD2uL7vfG5NhGIaDY7mqn3/+maSkJKpUqcKJEycYO3Ysx44dY+vWrfzwww8MHDgwU1IC0LhxY1q3bs1bb711xXteadwNwNy5c/Hz07b2IpI98enw42E31p4yYWAboFvE06BRqEHj4lYitNWRZJdhxcOahrs1DQ9Lqu1PayoWN2/ifaPApAHgV5KSksI999xDfHw8gYGBV73OqYnM5eLi4oiKimLixIn4+vreUCJzpScykZGRnD59+prfiJwym80sXbqU9u3b4+mpgXyuQG3iWvJreySnZfDpXweZseog581WANpWLc6dDUrRolIonvm06yi/tkeespghPRnMKRf+TMaU6TjlsuOs50lPxnT5cUbqVau0FquE0WAg1lp3gU9QHn5Y15eQkEBoaOh1Exmndy1dKjg4mMqVK7N3717at29Peno6cXFxBAcH26+JiYm54piai7y9vfH29s5S7unp6ZC/vI66r9w4tYlryS/tkWGxMj/6KBOW7OZ0ku0/Qw2jijKyazXqlynq5OhyT35pj6syDMhIsycSpCdDegqkJ/2XUFx8ZTqfjeutju7mMYFXEfDyw/D0wxJ/DI8ze2DJSNx/fx1q9oZGD0FEPQfHkT9k9+fUpRKZpKQk9u3bx/3330+DBg3w9PRk+fLl9O7dG4Bdu3Zx+PBhmjZt6uRIRaSgMAyDP3afYtxPO9gdkwRA2WJ+DO9clY41SmDSY3/HMwxIjYOkU5AcC0mxkHzqwp+xl5SfspVnnHdsPG4e4OVvSzo8/S58feHl6WdPRmzHF8/5Zf76Su/18LF3I2WYzSz5YQGdSsbjvmE2nNoBG7+wvUo1gIYPQc1e4Onr2M9aADg1kRk2bBjdu3cnKiqK48ePM3r0aNzd3bn77rsJCgrioYceYujQoYSEhBAYGMjTTz9N06ZNNWNJRHLFtuPxjPtpJ3/tPQ1AsJ8nz7atxL1NovDyyJ9dSC7DaoXz566TmFwoTz4FlvTr3/NyHj5Zk4ksx1dLRq6RmHh45f734woy3P2wNuyD+y2PweHVsG4GbP8ejkXbXr+OhLr3QsMHIbRinsSUHzk1kTl69Ch33303Z86coXjx4jRv3pw1a9ZQvHhxAN577z3c3Nzo3bt3pgXxRERuxon480xYsptvNxzFMGxTqAc2K8uTrStq36NrsVog+fS1k5PkU/89OTFyOIPLOwiKFAf/sAt/Xvp1GBQJs5X5hdiSDzd3x3zOvGYyQdSttlfSeNtTmehZEHcY1ky2vcq1tHU7VekC7voZvZRTE5l58+Zd87yPjw+TJ09m8uTJeRSRiBRkSWkZfLJiH9P/3E/qhYG8t9eJ4PmOVYgMKaSzGi3mSxKSazw1SYqFlDNADueH+BbNnIRk+vOSJMW/OHj6OOQj5itFisNtQ6HZs7B3OayfAbt/hQMrbK8iJaDBAKg/AIJKOTtal+BSY2RERBwhw2Ll6/VHeG/pbk4n2bowGpcNYWTXatSNDHZucHnEtPsXah79EveF39kSkosJy/lzOb0T+BW7ekJyablfaJ510xQ4bu5QuYPtFXcYomfDhs8h6SSseAtWvgtVOtu6ncq3BrfC2xWqREZECizDMPh9Vyxv/rSTvbG2gbzlQv0Z3rkqHaqHF46BvBYz/DoSj7XTqABw6grXmNwvJB9XSEYu7+bxKwbu+tWRp4LLQNtXoOVw2PmDbSzNoVWw83+2V0h5W0JT915bt1sho59GES
mQth6L582fdvD3vjMAFPXzZHC7ytzTpEy+XQsmx5Ji4ZsBcPhvAA4Wa0Vk/Xa4B5bMnKj4Fi3U/6PPNzy8bFO0a/aG2B2wfiZsngdn98OSl2H5a7aZTg0fgtINC81Ce0pkRKRAOR53nneX7GLhxmO2gbwebjzYrBxPtq5AoE8hGiR5NBq+vg8Sj4NXABl3fMzmvQalGnfBPT+vIyM2YdWgyzvQdjRsXWB7SnPyX9j8le1VopYtoal1J3gXcXa0DqVERkQKhMRUM1NX7OPTPw+QlmEbyNujbgTDOlahdNFCNpB3wxfw41DblOZilaDfXIzgcrD3J2dHJrnNuwg0eMA2+PdYtC2h2fYdnNwC/xsMS0ZBnX62GU9h1ZwdrUMokRGRfM1ssTJv3REmLd1t39SxcbkQXu5ajdqlg50bXF7LSIdfR8C6T23HVbpAz0/AJxC0OWHBZjLZupNKN4SOb8Cmubaup7P7YN1026vMrbaEplp38Mi6An5+pURGRPIlwzBYviOWcT/vYN+pZADKF/dnROdqtKsWVjgG8l4qMQbmD7AtrAbQaiS0eF5jXwojvxC49Sm45UnblO31M2DnT7axUof/ts0mq38/NBgIRaOcHe1NUyIjIvnOlqPxvPHTdtbsPwtAiL8XQ9pVol/jQjSQ91JH18PX99vGw3gHQq9ptqm5Uri5uUGF1rZXwnHb9O3o2ZB4Av56D/6aBJXa28bSVGqfbxcYVCIjIvnGsbjzvPurbSAvgLeHGw81L8fjrQrZQN5LXToeJrQy9JsLoZWcHZW4msAIaDUcbnsOdv1se0qz/w/Ys8T2CioDDR+Aev1t0+3zESUyIuLyElLNTPljHzP+OkD6hYG8veqV4rmOVSgVXEg31ctIh1+G234hAVTpCj2n2sbDiFyNuydUv932OrPPNo5m45cQfxiWvwq/j7Oda/iQbcuEfNBFq0RGRFyW2WLlq7WHmbRsD2cvDOS9pXwIL3WpTq3SQU6OzokSY+Cb/nBkDWCC1iPhtmEaDyM5U6yCbWBwm5dh20LbjKdj62Hrt7ZX8Wq2hfbq3AU+rvv3TYmMiLgcwzBYuj2G8T/vZP9p20DeCsX9GdmlGm2qFsKBvJc6sg6+ud82zsE7EHpNhyqdnB2V5GeevlD3HtvrxGZbQrNlPpzaAT8/D8vGQK0+thlPJes4O9oslMiIiEvZfCSON37awdoDtoG8xfy9GNK+Mv0aReJRGAfyXir6M/hp2IXxMFUujIep6OyopCApWQdu/wA6vAabv7Z1XZ7aCRs+s71KNbQlNDV62hIgF6BERkRcwpGzKbzz6y4Wbz4O2AbyPnJbeR5rWZ6AwjqQ96KMdPjlRdt4BoCq3WzjYbwDnBuXFFw+QdDkUWj8CBz625bQbF9s63o6th5+HWnb26nhg7YuKidSIiMiThV/3szHf+xl1qqDpGdYMZmgZ71SDOtQhYjCOpD3UoknL4yH+QfbeJiXbDNPNB5G8oLJBGWb2V5JsbDxC1g/2zY4ePVHtlf51tB8CJRv6ZQQlciIiFOkZ1iZ+88h3l++h3MptlVnb61QjJFdqlGzlOsOLMxTR9ba1odJOgneQdB7OlTu6OyopLAqEmZLopsNhr3LbCtI71kK+3+Hql2VyIhI4WAYBr9ui+GtX3Zy4MJA3ophRRjZpSqtqxTygbyXip4NPw4DqxmKV7WNh3HyI3wRwLZwXuWOtte5Q7aF9mrf5bRwlMiISJ7ZePgcb/60g3UHzwEQWsQ2kPeuhhrIa5eRBj+/YEtkwLYvTo8pGg8jrqloFLQd5dQQlMiIiMMdOZvC27/u4ocLA3l9PC8O5K1AEW/9M2SXeNLWlXR0LWCyre9x23P5YlEyEWfRvyAi4jApGTD+l118seYI6RbbQN7e9UvzXIfKlAzSQN5MDv9jG9RrHw/zKVTu4OyoRFyeEhkRyXWGYfDFmsO8u9GdlIxDADSraBvIWyNCA3mzWD8Lfnpe42FEboASGRHJVW
aLleHfbuHbDUcBE5XC/BnZtTqtKhfXQN7LZaTZEpgNn9mOq90OPT7WeBiRHFAiIyK5JiU9g0FzNvD7rlO4meCOKAtvPtAUXx9vZ4fmehJO2LYaOLoOjYcRuXFKZEQkV5xNTufB2evYdCQObw833r+rNmn712s20pUc/seWxCTF2FZQ7T0DKrV3dlQi+ZISGRG5aUfPpdB/5lr2n0omyNeTmQ80pHZEAD/td3ZkLsYwIHoW/PTChfEw1aDfHI2HEbkJSmRE5KbsPJnAgJlriUlIo2SQD58/2JhK4QGYzWZnh+ZaMtJsGz5u+Nx2XP0OuONj8C7i3LhE8jklMiJyw/7Zf4aHP19PYmoGlcKK8PlDjTWt+koSjtvWhzm2HjBB21dse9NoPIzITVMiIyI35JetJ3lm3kbSM6w0jCrKpwMaEuzn5eywXM/hNRfWh7k4HmYmVGrn7KhECgwlMiKSY3P+OcSoRVuxGtCuWjgf3VMPH093Z4flWgwD1s+An18EawaEVbeNhwkp7+zIRAoUJTIikm2GYfD+8j1MWrYHgH6NInm9R03NTLqcOdU2HmbjF7bj6j3gjskaDyPiAEpkRCRbLFaDUd9vZe4/hwF4pk1FhrSvrEXuLpdwHL6+D45Fg8kN2o6GZs9qPIyIgyiREZHrSjVbeHbeRn7dFoPJBK/eXoP7m5Z1dliu59Bq23iY5FjwCYY+M6FiW2dHJVKgKZERkWuKP2/mkc/Ws/bgWbzc3ZjUry5dapV0dliuxTBg3afwy/AL42FqQL8vNR5GJA8okRGRqzoZn8oDs9ay82QiAd4eTOvfkKYVijk7LNdiToUfn4NNX9qOa/S0jYfx8nduXCKFhBIZEbmivbFJDJi5lmNx5yke4M1nAxtTPSLQ2WG5lvhjtvEwxzdoPIyIkyiREZEsNhw+x0Oz13EuxUz5UH8+e7AxkSF+zg7LtRz6+8J4mFMaDyPiREpkRCST33fG8sScaFLNVupEBjNzQEOKFdHu1XaXj4cJrwl3fQkh5ZwdmUihpERGROzmrz/C8O+2YLEatKhcnCn31sffW/9M2GUZD9ML7vhI42FEnEj/QokIhmEwdcV+3vplJwC96pXirT618dRCd/+JP3phPMxG23iYdmPh1qc1HkbEyZTIiBRyVqvBaz9uZ9aqgwA81qI8L3aqipubfkHbHVwF8wfYxsP4FrWNh6nQxtlRiQhKZEQKtbQMC8Pm/8sPm48D8HLXajx8m9Y+AeB8HMRshQMr4c8J/42H6TcHipZ1dnQicoESGZFCKiktg8e/iOavvafxcDPx7p116FGvlLPDyntWK5w7ACe32BKXk1ttf8YfyXxdzT5w+wcaDyPiYpTIiBRCpxLTGDh7LVuPJeDn5c7U+xrQonJxZ4fleGmJELMdYrb8l7DEbAdz8pWvD4q0PYWp2gXq3a/xMCIuSImMSCFz6Ewy/Weu5dCZFIr5ezFrYCNqlw52dli5yzAg7vAlT1guJC7nDlz5endvCKsGJWpCeK0Lf9awjYcREZemREakENl6LJ4HZq3ldFI6kSG+fP5gE8qF5vOuEvN5iN3+3xOWk1shZhukxV/5+oCStqcsJWra/gyvCcUqgrv+ORTJj/Q3V6SQ+GvPaR77Yj3J6Raqlwxk9oONCAvwcXZY2WcYkHD8QrJyyXiWs/vAsGa93s0Tilf9L2G5+Kd/aN7HLiIOo0RGpBBYvPk4z32zCbPF4NYKxfjk/gYE+Hg6O6yry0iDUzsvecqyxfaU5fzZK1/vF3pJwlLL9mdoZfDwytu4RSTPKZERKeBm/nWAV/+3HYCutUsysW8dvD3cnRzVJZJis84YOr3bNt35ciZ3CK10yROWC+NZioRrIK5IIaVERqSAMgyDt3/dxZQ/9gHwwK1leaVbdectdGcxw+k9WbuGkmOvfL1P8H9PV8Jr2BKW4tXAMx91h4mIwymRESmAzBYrw7/dwrcbjgLwfMcqPNmqAqa8fGpxYjPlY3/B/Y
efIXabravIkn6FC01QrELWpyyBpfSURUSuS4mMSAGTkp7BoDkb+H3XKdzdTIzrWYu+jSLzLgCLGZaNwXP1R9QCOHbJOa+A/56uXBzPElZNi8yJyA1TIiNSgJxLTmfg7HVsOhKHj6cbk++pT9tq4XkXQNxhWPAgHF0HwMnAOhSv0wH3iDq2xCU4Cty0EaWI5B4lMiIFxNFzKfSfuZb9p5IJ8vVk5gONaBCVhwu67foZFj4OqXHgE0RGtw/5Zx90adEFd08XniElIvmaEhmRAmDnyQQGzFxLTEIaEUE+fP5QYyqGBeRN5Re6klj9ke24VAPoMwujSATs+ylvYhCRQkuJjEg+98/+Mzz8+XoSUzOoHF6Ezx5sTMkg37yp/LKuJG4ZBO3G2NZvMZvzJgYRKdSUyIjkY79sPckz8zaSnmGlUdmifNq/EUF+edSNc1lXEnd8DNW65U3dIiIXKJERyafm/HOIUYu2YjWgffVwPry7Hj6eebDQ3eVdSRH14c5ZULSs4+sWEbmMy0wfGD9+PCaTicGDB9vLUlNTGTRoEMWKFaNIkSL07t2bmJgY5wUp4gIMw2DSst28tNCWxNzdOJIp99bPmyQm7jDM6vxfEnPLk/Dgr0piRMRpXCKRWbduHZ988gm1a9fOVD5kyBB++OEH5s+fz4oVKzh+/Di9evVyUpQizmexGry0aCuTlu0B4Jm2lXizZy083PPgr/Kun2HqbbbxMD5BcNcc6DRO+xmJiFPlqGvJarWyYsUK/vzzTw4dOkRKSgrFixenXr16tGvXjsjInC+6lZSUxL333sv06dN5/fXX7eXx8fHMmDGDuXPn0qZNGwBmzZpFtWrVWLNmDbfcckuO6xLJz1LNFp6dt5Fft8VgMsGrd9Tk/luiHF+xupJExIVlK5E5f/48EyZMYMqUKZw9e5a6desSERGBr68ve/fuZdGiRTzyyCN06NCBV155JUdJxqBBg+jatSvt2rXLlMhER0djNptp166dvaxq1aqUKVOG1atXX7WOtLQ00tLS7McJCQkAmM1mzLk4i+LivXLznnJzCnKbJJw389icjaw/FIenu4mJd9amU41wx3/W+CO4L3wEt2PrAbA0fgxrm9Hgfv1ZSQW5PfIjtYdrUXtcX3a/N9lKZCpXrkzTpk2ZPn067du3x/MKi1sdOnSIuXPn0q9fP1566SUeeeSR69533rx5bNiwgXXr1mU5d/LkSby8vAgODs5UHh4ezsmTJ696z3HjxjF27Ngs5UuWLMHPz++6MeXU0qVLc/2ecnMKWpvEpcHUne6cSDHh427wSJUMrIei+emQY+sNj99I/UPTcLMkk+7ux8Yyj3DS3AB+XZaj+xS09sjv1B6uRe1xdSkpKdm6LluJzJIlS6hWrdo1r4mKimLEiBEMGzaMw4cPX/eeR44c4dlnn2Xp0qX4+OTebrYjRoxg6NCh9uOEhAQiIyPp0KEDgYGBuVaP2Wxm6dKlV03sJO8VxDbZdyqZBz+L5kRKKmEB3szoX5+qJRy80J3FjNvvr+G+/2MArCXrYer1KfWDc9aNVRDbIz9Te7gWtcf1XexRuZ5sJTLXS2Iu5enpSYUKFa57XXR0NLGxsdSvX99eZrFYWLlyJR999BG//vor6enpxMXFZXoqExMTQ4kSJa56X29vb7y9va8YlyN+WBx1X7lxBaVNNhw+x0Oz13EuxUz5UH8+e7AxkSG5/1QxkywL3D2JW7uxuN3EgN6C0h4FhdrDtag9ri6735cbXkcmIyODTz75hD/++AOLxUKzZs0YNGhQtp+utG3bli1btmQqGzhwIFWrVuXFF18kMjIST09Pli9fTu/evQHYtWsXhw8fpmnTpjcatki+8PvOWJ6YE02q2UqdyGBmPdCIEH8Hzw7SAncikg/dcCLzzDPPsHv3bnr16oXZbObzzz9n/fr1fPXVV9l6f0BAADVr1sxU5u/vT7FixezlDz30EEOHDiUkJITAwECefvppmjZtqhlLUq
AtiD7Ki9/+i8Vq0LJycabcVx8/LweuXalZSSKSj2X7X8eFCxfSs2dP+/GSJUvYtWsX7u62Rbg6duyY6wnGe++9h5ubG7179yYtLY2OHTvy8ccf52odIq7k+03HGDZ/MwC96pfird618XTkGjFX6Eqi3VitDSMi+Ua2E5mZM2fy2Wef8fHHHxMREUH9+vV5/PHH6d27N2azmenTp9OoUaObCuaPP/7IdOzj48PkyZOZPHnyTd1XJD/YfCSOFxb8C8ADt5ZldPfqmEwmx1V4aVeSdxD0UFeSiOQ/2f6v3g8//MDdd99Nq1at+PDDD5k2bRqBgYG89NJLjBo1isjISObOnevIWEUKrJiEVB79Yj1pGVbaVA1jVDcHJjEWMyx5Gb7qZ0tiIurD4yuVxIhIvpSjjve77rqLjh078sILL9CxY0emTp3KhAkTHBWbSKGQarbw6BfRxCSkUSmsCO/3q4u7m4OSGHUliUgBk+MRhMHBwUybNo2VK1fSv39/OnXqxGuvvZara8GIFBaGYTD823/ZfCSOYD9PPh3QkAAfB03FVFeSiBRA2e5aOnz4MH379qVWrVrce++9VKpUiejoaPz8/KhTpw4///yzI+MUKZCmrtjPok3H8XAz8fG99Ykq5p/7lagrSUQKsGwnMv3798fNzY133nmHsLAwHnvsMby8vBg7diyLFi1i3Lhx9O3b15GxihQoy7bH8PavOwEYfXsNbq0QmvuVxB2GWZ3h7w9tx7c8CQ/+qqnVIlJgZLtraf369WzevJkKFSrQsWNHypUrZz9XrVo1Vq5cybRp0xwSpEhBszsmkWfnbcQw4L5byjhmF2t1JYlIIZDtRKZBgwa88sorDBgwgGXLllGrVq0s1zz66KO5GpxIQXQuOZ2HP1tPcrqFpuWLMbp7jdytwGKG5WP/ewqjBe5EpADLdtfS559/TlpaGkOGDOHYsWN88sknjoxLpEAyW6w8MSeaw2dTKBPix8f31s/dBe/ijmTuSmryhLqSRKRAy/YTmaioKBYsWODIWEQKvLE/bGPN/rMU8fbg0wENKZqb+ydl6UqaDNW65979RURcULYSmeTkZPz9sz+bIqfXixQGX6w5xJdrDmMywfv96lI5PCB3bqyuJBEpxLL1TLtixYqMHz+eEydOXPUawzBYunQpnTt35oMPPsi1AEUKgr/3nmbM4m0AvNCxKm2rhefOjdWVJCKFXLaeyPzxxx+MHDmSMWPGUKdOHRo2bEhERAQ+Pj6cO3eO7du3s3r1ajw8PBgxYgSPPfaYo+MWyTcOnUnmybkbsFgNetYrxeMty+fOjdWVJCKSvUSmSpUqfPvttxw+fJj58+fz559/8vfff3P+/HlCQ0OpV68e06dPp3PnzvbdsEUEElPNPPTZeuJSzNSJDGZcr1o3v4eSupJEROxytEVBmTJleO6553juueccFY9IgWGxGjw7bxN7Y5MID/Rm+v0N8PG8yUQ/7ggsGPjfXklNnoD2r2qvJBEptHK815KIZM/bv+7kt52xeHu4Mb1/Q8ICb3I/sl2/wMLH1JUkInIJJTIiDvDdhqN8smI/AG/3qU3t0sE3fjN1JYmIXJUSGZFctvHwOYZ/twWAQa0rcEfdUjd+M3UliYhckxIZkVx0Iv48j34RTXqGlfbVw3mufZUbv5m6kkRErkuJjEguOZ9u4dHPozmVmEaV8ADeu6subm43MEMpS1dSPbhztrqSRESuIMebvJQtW5ZXX32Vw4cPOyIekXzJMAxe+PZfthyLJ8Tfi08HNKSI9w38P+HsAZjVRQvciYhkU44TmcGDB/Pdd99Rvnx52rdvz7x580hLS3NEbCL5xuTf9/LD5uN4uJn4+N76RIb45ewGhgEbPoepzeHoWltX0l1fQufx4OHtmKBFRAqAG0pkNm3axNq1a6lWrRpPP/00JUuW5KmnnmLDhg2OiFHEpf267STvLtkNwKt31OSW8sVydoOkUzDvXlj8NKQnQZlb4fGVGg8jIp
INOU5kLqpfvz4ffPABx48fZ/To0Xz66ac0atSIunXrMnPmTAzDyM04RVzSjhMJDPl6EwADmkZxT5MyObvBzp9gSlPY9SO4eUK7sfDA/9SVJCKSTTc82NdsNrNw4UJmzZrF0qVLueWWW3jooYc4evQoI0eOZNmyZcydOzc3YxVxKWeS0nj4s/WkpFtoVrEYo7pVz/6b0xLh15G27iSAsOrQaxqUqOWYYEVECqgcJzIbNmxg1qxZfPXVV7i5udG/f3/ee+89qlatar+mZ8+eNGrUKFcDFXEl6RlWnvhyA8fizlO2mB+T76mPh3s2H3AeXmObVn3uIGCCW5+C1i+D502u/CsiUgjlOJFp1KgR7du3Z8qUKfTo0QNPT88s15QrV45+/frlSoAirsYwDEYv3srag2cJ8Pbg0wENCfbLxgJ1GenwxzhYNQkMKwRFQs+pULa5w2MWESmocpzI7N+/n6ioqGte4+/vz6xZs244KBFX9tnfB/lq7RFMJvjg7npUDAu4/ptid8B3j8BJ24q/1LnHNiPJJ8ixwYqIFHA5TmRiY2M5efIkTZo0yVT+zz//4O7uTsOGDXMtOBFX89ee07z24w4ARnSuSuuqYdd+g9UK/0yBZWPBkga+IdB9ElS/w/HBiogUAjmetTRo0CCOHDmSpfzYsWMMGjQoV4IScUUHTifz5JxoLFaDXvVL8cht5a/9hrgj8MUdtkG9ljSo1AGeXK0kRkQkF+X4icz27dupX79+lvJ69eqxffv2XAlKxNUkpJp5+LN1JKRmUK9MMG/2rIXJdJXtBwwD/v0Gfnoe0uLB0w86vgENBsLV3iMiIjckx4mMt7c3MTExlC+f+X+jJ06cwMNDWzdJwWOxGjw9dyP7TiVTMsiHT+5vgI+n+5UvTjkL/xsC2xfZjks1tE2rLlYhz+IVESlMcty11KFDB0aMGEF8fLy9LC4ujpEjR9K+fftcDU7EFYz/eQcrdp/Cx9ON6f0bEhZwlWnSe5fBx01tSYybB7R+ybZPkpIYERGHyfEjlHfffZcWLVoQFRVFvXr1ANi0aRPh4eF88cUXuR6giDMtiD7K9D8PAPDunXWoWeoKs4zSU2DpK7Buuu04tDL0/ARKZe2CFRGR3JXjRKZUqVL8+++/zJkzh82bN+Pr68vAgQO5++67r7imjEh+FX3oLCO/s02XfqZNRbrVjsh60dFoWPgonNlrO278GLQbA1453DRSRERuyA0NavH39+fRRx/N7VhEXMbxuPM89sUG0i1WOtYIZ3C7ypkvsJjhzwmw4m0wLBBQEnp8DBXaOCdgEZFC6oZH527fvp3Dhw+Tnp6eqfz222+/6aBEnCklPYNHPl/P6aQ0qpYIYGLfuri5XTLb6PRe21OYY9G245q9ocu74BfinIBFRAqxG1rZt2fPnmzZsgWTyWTf5friVFSLxZK7EYrkIcMweH7+v2w7nkAxfy8+HdAQf2+Piydh/Qz49WXIOG9blbfrRKjVx7lBi4gUYjmetfTss89Srlw5YmNj8fPzY9u2baxcuZKGDRvyxx9/OCBEkbzzwfK9/LjlBJ7uJqbe34DSRS+MdUk4AXP6wI/P2ZKYci3hidVKYkREnCzHT2RWr17Nb7/9RmhoKG5ubri5udG8eXPGjRvHM888w8aNGx0Rp4jD/bzlBO8t2w3A6z1q0qjsha6ibYvgf4Ph/Dnw8IF2Y6Hxo+CW4/8HiIhILstxImOxWAgIsG2SFxoayvHjx6lSpQpRUVHs2rUr1wMUyQvbjscz9JvNAAxsVpa7GpWB1Hj46QX4d57topJ1oNd0KF7FiZGKiMilcpzI1KxZk82bN1OuXDmaNGnC22+/jZeXF9OmTcuy2q9IfnA6KY1HP4/mvNnCbZVCealLNTiwEhY+AQlHweQGtz0HLV4ADy9nhysiIpfIcSLz8ssvk5ycDMCrr75Kt27duO222yhWrBhff/11rgco4khpGRYe/yKaY3HnKR/qz0d3Vsdj2ShY/Z
HtgqLlbFsMRDZ2bqAiInJFOU5kOnbsaP+6YsWK7Ny5k7Nnz1K0aNGrb6In4oIMw2DUoq2sP3SOAB8PPuviQ9AX7eHUDtsFDQZCh9fBu4hzAxURkavK0WhFs9mMh4cHW7duzVQeEhKiJEbynZmrDvLN+qN4mKx8X2ctkQu62ZIY/zC45xvoPklJjIiIi8vRExlPT0/KlCmjtWIk31ux+xRv/LidSFMM88M+o8TmTbYTVbtB9/fBP9Sp8YmISPbkeP7oSy+9xMiRIzl79qwj4hFxuH2nknhqbjR93H5nqe9ISsRvAq8A6DEF7vpSSYyISD6S4zEyH330EXv37iUiIoKoqCj8/f0znd+wYUOuBSeS2+JTzDw/axkTLR/Q3jMarECZW6HnVCga5ezwREQkh3KcyPTo0cMBYYg4XobFyoyZk5mWPIFQ9wQMdy9MbV6Gpk+Bm7uzwxMRkRuQ40Rm9OjRjohDxLHSEvl3+pMMPb0YTJAaUhWfvjOgRE1nRyYiIjfhhne/Fsk3Dq8h6asHqX/+GFbDxMEqD1K+7zjw8HZ2ZCIicpNynMi4ubldc6q1ZjSJy8hIhz/GYayaRBHDylEjlNW13+DO3v2cHZmIiOSSHCcyCxcuzHRsNpvZuHEjn332GWPHjs21wERuSuwO+O4ROLkFEzA/owVrKr/AOz2bOzsyERHJRTlOZO64444sZX369KFGjRp8/fXXPPTQQ7kSmMgNMay4/TMFfn8dLGnEmQJ5Me0hjpZoy4J+t+LmpoUbRUQKklwbI3PLLbfw6KOP5tbtRHIu/ii37n0L9022LQb+9WvCQ2cHYBQJZ3H/hvh6aWaSiEhBkyuJzPnz5/nggw8oVapUbtxOJOd2/IDHoicpnpaA4enHsshneWR7Tbzc3fnq/gZEBPs6O0IREXGAHCcyl28OaRgGiYmJ+Pn58eWXX+ZqcCLXZcmA5WPh7w8wAWf9KrDhlg955Kc4AN7sVYsGUUWdGqKIiDhOjhOZ9957L1Mi4+bmRvHixWnSpAlFi+oXhuShpFhY8CAc/BMAS5MnmXemIR8sTQDgkdvK0adBaWdGKCIiDpbjROaBBx7ItcqnTJnClClTOHjwIAA1atTglVdeoXPnzgCkpqby3HPPMW/ePNLS0ujYsSMff/wx4eHhuRaD5FOH/4H5AyDxBHgVgTsmE1OyHdMm/UGq2UqrKsUZ3rmas6MUEREHy/GmkbNmzWL+/PlZyufPn89nn32Wo3uVLl2a8ePHEx0dzfr162nTpg133HEH27ZtA2DIkCH88MMPzJ8/nxUrVnD8+HF69eqV05ClIDEMWDMVZnexJTGhVeCR30mt3J0nv9pEXLqJ8qH+fHB3Pdw1Q0lEpMDLcSIzbtw4QkOz7g4cFhbGm2++maN7de/enS5dulCpUiUqV67MG2+8QZEiRVizZg3x8fHMmDGDiRMn0qZNGxo0aMCsWbP4+++/WbNmTU7DloIgLQm+fRh+eRGsGVCjFzzyGxSvzPifd7LpSDx+7gaf3FeXQB9PZ0crIiJ5IMddS4cPH6ZcuXJZyqOiojh8+PANB2KxWJg/fz7Jyck0bdqU6OhozGYz7dq1s19TtWpVypQpw+rVq7nllluueJ+0tDTS0tLsxwkJtvESZrMZs9l8w/Fd7uK9cvOecg1n9uDx7UBMp3ZiuHlgbTsWa6NHwWTiz+0nmP33QQDuq2SlVKCX2sUF6O+Ia1F7uBa1x/Vl93uT40QmLCyMf//9l7Jly2Yq37x5M8WKFcvp7diyZQtNmzYlNTWVIkWKsHDhQqpXr86mTZvw8vIiODg40/Xh4eGcPHnyqvcbN27cFVcYXrJkCX5+fjmO73qWLl2a6/eUzErGraPeoemYrKmkegSzrtxTnD0dCT//TJIZ3trsDpi4rYSVGkUNtYmLUXu4FrWHa1F7XF1KSkq2rstxInP33XfzzDPPEBAQQIsWLQBYsWIFzz77LP365XwPmypVqrBp0ybi4+NZsGABAw
YMYMWKFTm+z0UjRoxg6NCh9uOEhAQiIyPp0KEDgYGBN3zfy5nNZpYuXUr79u3x9FQ3hkNYM3D7/TXcD0y2HZa5Ffee07mliG2wt2EYPDl3EwnmU1Qs7s/7Axvw5x+/qU1chP6OuBa1h2tRe1zfxR6V68lxIvPaa69x8OBB2rZti4eH7e1Wq5X+/fvneIwMgJeXFxUrVgSgQYMGrFu3jvfff5+77rqL9PR04uLiMj2ViYmJoUSJEle9n7e3N97eWXc19vT0dMgPi6PuW+glxtimVh/6y3Z869O4tR2Dm/t/P7JfrT3Msp2n8HQ38f7d9Qjw8wHUJq5G7eFa1B6uRe1xddn9vuQ4kfHy8uLrr7/m9ddfZ9OmTfj6+lKrVi2ioqJyHOSVWK1W0tLSaNCgAZ6enixfvpzevXsDsGvXLg4fPkzTpk1zpS5xUYfXwDcDIOkkeAVAj8lQPfMeX/tPJfHqD9sBeL5jFWpEBKmvWUSkELrhLQoqVapEpUqVbqryESNG0LlzZ8qUKUNiYiJz587ljz/+4NdffyUoKIiHHnqIoUOHEhISQmBgIE8//TRNmza96kBfyecMA/6ZCktets1KKl4V7voSQjP/nJktVoZ8vYnzZgu3VijGw83LOylgERFxthwnMr1796Zx48a8+OKLmcrffvtt1q1bd8U1Zq4mNjaW/v37c+LECYKCgqhduza//vor7du3B2yrCLu5udG7d+9MC+JJAZSWBIufhm3f2Y5r9obuH4B3kSyXvr9sD5uPxhPk68mEvnW0o7WISCGW40Rm5cqVjBkzJkt5586dmTBhQo7uNWPGjGue9/HxYfLkyUyePDlH95V85tRu+OZ+OLUT3DygwxvQ5DEwZU1Q1h08y8d/7AXgzZ61KBmkzSBFRAqzHCcySUlJeHl5ZSn39PTM9ghjEbtti+D7QZCeBAEl4c7ZUObKXYcJqWYGz9uE1YDe9UvTtXbJPA1VRERcT45X9q1VqxZff/11lvJ58+ZRvXr1XAlKCgFLBvz6km2/pPQkKHsbPLbyqkkMwOjvt3Es7jyRIb6MuV0/ayIicgNPZEaNGkWvXr3Yt28fbdq0AWD58uV89dVXORofI4VYYgwsGAiHVtmOb30G2o4G96v/OC7efJyFG4/hZoJJd9UlQFsQiIgIN5DIdO/enUWLFvHmm2+yYMECfH19qV27NsuWLaNly5aOiFEKkkOrYf4Dl0yt/hiq337NtxyLO89LC7cA8FSbSjSICsmDQEVEJD+4oenXXbt2pWvXrlnKt27dSs2aNW86KCmADAPWTIGloy5Mra4Gd32RZWr15SxWg6FfbyIxNYO6kcE806ZiHgUsIiL5QY7HyFwuMTGRadOm0bhxY+rUqZMbMUlBk5ZkW6X31xG2JKZmH3hk+XWTGIBpK/fzz4Gz+Hm5M+muuni43/SPrIiIFCA3/Fth5cqV9O/fn5IlS/Luu+/Spk0b1qxZk5uxSUFwajdMb2NbH8bNAzq/Db0/BS//6751y9F4Ji7dBcCY7jUoG3r994iISOGSo66lkydPMnv2bGbMmEFCQgJ9+/YlLS2NRYsWacaSZJVlavVnUKZJtt56Pt3Cs19vxGwx6FyzBHc2LO3YWEVEJF/K9hOZ7t27U6VKFf79918mTZrE8ePH+fDDDx0Zm+RXFvNVplZnL4kBeP3H7ew/lUx4oDdv9qyF6QqL44mIiGT7iczPP//MM888wxNPPHHTeyxJAZZ4EuYPhMN/246bPQttXrnm1OrLLdsew5x/DgMw4c66FPXPugCjiIgI5OCJzF9//UViYiINGjSgSZMmfPTRR5w+fdqRsUl+c+hv+KSFLYnxCrBt+Nj+1RwlMacS03jx238BeLh5OZpXCnVUtCIiUgBkO5G55ZZbmD59OidOnOCxxx5j3rx5REREYLVaWbp0KYmJiY6MU1yZYcDqyTC7GyTFQFh1ePQPqNY9h7cxeGHBZs4kp1O1RADPd6rimHhFRKTAyPGsJX
9/fx588EH++usvtmzZwnPPPcf48eMJCwvj9tuvvbCZFEBpibYF7n4dCYYFat0JDy+D0Jyv9/LFmkP8vusUXh5ufHB3Pbw93HM/XhERKVBualGOKlWq8Pbbb3P06FG++uqr3IpJ8otTu2xTq7cvAjdP6PwO9JqeranVl9sTk8gbP+4AYETnqlQOD8jlYEVEpCC6oZV9L+fu7k6PHj3o0aNHbtxO8oOt38Hipy9MrY6Avp9BZOMbulVahoVn520iLcNKi8rFeeDWsrkbq4iIFFi5kshIIWIxw9JXYM3HtuOyt0GfWVCk+A3fcuKS3Ww/kUCIvxfv9qmtqdYiIpJtSmQk+xJP2sbDHF5tO242GNqMytGspMv9ve800/7cD8D4XrUIC/S5+ThFRKTQUCIj2XNwFSwYaJuV5B0IPaZAtW43dcv4FDPPfbMZw4C7G0fSoUaJXApWREQKCyUycm2GAas/gqWjbbOSwqrb1ocpVuEmb2swctEWTsSnUi7Un1HdtMWFiIjknBIZubq0RNteSdu/tx3X6gvdJ93QrKTLfbfhGD/+ewIPNxOT7qqLn5d+FEVEJOf020OuLHYnfHM/nN5tm1rdaRw0ehhyYSDukbMpjF68DYDB7SpRJzL4pu8pIiKFkxIZyWrrt/D902BOvjC1+nOIbJQrt86wWBn89SaS0jJoVLYoT7TK+cJ5IiIiFymRkf9YzLBkFPwzxXZcrgX0nnlTU6sv9/Ef+4g+dI4Abw8m9q2Lu5umWouIyI1TIiM2CSdsU6uPrLEdNx8CrV++qanVl9tw+BzvL98DwGs9ahIZ4pdr9xYRkcJJiYzAwb9g/kBIjrVNre45Fap2zdUqktIyGPL1JixWg9vrRNCjXqlcvb+IiBROSmQKu20LYcFDF6ZW14C7vrjpqdVX8uoP2zh0JoVSwb681qNmrt9fREQKJyUyhdnJLbDwCVsSU7MP3P5BrkytvtwvW0/wzfqjmEwwoW8dgnw9c70OEREpnJTIFFYpZ2HePZBxHiq2g17TwM0916s5GZ/K8O+2APB4ywrcUr5YrtchIiKFl5uzAxAnsGTYBvbGHYai5aD3pw5JYqxWg2HzNxOXYqZmqUCGtKuc63WIiEjhpkSmMFo2Gg6sAE9/6DcXfIs6pJqZqw7w197T+Hi6Memuenh56MdNRERyl36zFDb/fmPbOwmg5xQId8weRztOJPD2L7sAeLlrdSqGFXFIPSIiUrgpkSlMjm+CxU/bvr5tGFS/wyHVpJotDJ63iXSLlXbVwri3SRmH1CMiIqJEprBIPg1f3wcZqVCpA7Qe6bCq3vplJ7tiEgkt4sX43rUx5cL+TCIiIleiRKYwsJhtg3vjj0BIBeg13SGDewFW7j7FrFUHAXinTx1Ci3g7pB4RERFQIlM4LBkFB/8EryIXBvcGO6Sas8npPDd/MwD9m0bRumqYQ+oRERG5SIlMQbfpq/82gez5CYRVdUg1hmEw/Nt/OZWYRsWwIozsUs0h9YiIiFxKiUxBdmwD/PCs7euWL0K1bg6r6ut1R1iyPQZPdxPv96uLj6djuq5EREQupUSmoEqKtQ3utaRB5c7QcrjDqjpwOpmxP2wH4PmOVagREeSwukRERC6lRKYguji4N+EYFKsEvT4BN8c0tdliZfC8jZw3W7i1QjEebl7eIfWIiIhciRKZgujXkXBoFXgF2Ab3+jjuCcn7y/aw+Wg8Qb6eTOhbBzc3TbUWEZG8o0SmoNn4JaydZvu693Qo7rj9jdYdPMvHf+wF4M2etSgZ5OuwukRERK5EiUxBcjQa/jfE9nWrkVCls8OqSkg1M3jeJqwG9K5fmq61SzqsLhERkatRIlNQJMZcGNybDlW7QYvnHVrd6O+3cSzuPJEhvoy53TH7NYmIiFyPEpmCICMdvukPicchtAr0nOqwwb0AizcfZ+HGY7iZYNJddQnw8XRYXSIiIteiRKYg+GU4HFkD3kG2wb3eAQ6r6ljceV5auAWAp9pUok
FUiMPqEhERuR4lMvld9GewfgZgsg3uDa3osKosVoOhX28iMTWDupHBPNPGcXWJiIhkhxKZ/OzIWvhpmO3rNi9B5Y4OrW7ayv38c+Asfl7uTLqrLh7u+vERERHn0m+i/CrhBHx9v21wb7Xb4bZhDq1u67F4Ji7dBcCY7jUoG+rv0PpERESyQ4lMfpSRZhvcm3QSileDHlPA5LiF6M6nW3hm3kbMFoNONUpwZ8PSDqtLREQkJ5TI5Ec/vwBH19pW7O03B7yLOLS6N37azv5TyYQHejOuVy1MDkyaREREckKJTH6zfiZEz8Y2uHcmFKvg0OqW74jhyzWHAXj3zjoU9fdyaH0iIiI5oUQmPzm8Bn56wfZ1u9FQqZ1DqzuVmMYLC/4F4OHm5bitUnGH1iciIpJTSmTyi4TjtsG9VjPU6AnNBju0OsMweGHBZs4kp1O1RADPd6ri0PpERERuhBKZ/MCcatt+IDkWwmrAHZMdOrgX4Ms1h/h91ym8PNz44O56eHu4O7Q+ERGRG6FExtUZBvz0HByLBp9g2+BeL8dOfd4bm8jrP+4AYETnqlQOd9xKwSIiIjdDiYyrW/cpbPwSTG5w5ywIKefQ6tIyLDzz1SbSMqy0qFycB24t69D6REREboYSGVd2cJVtHyWAdmOhQhuHVzlxyW62n0ggxN+Ld/vU1lRrERFxaU5NZMaNG0ejRo0ICAggLCyMHj16sGvXrkzXpKamMmjQIIoVK0aRIkXo3bs3MTExToo4D8UfhfkDwJoBNfvArU87vMq/951m2p/7ARjfqxZhgT4Or1NERORmODWRWbFiBYMGDWLNmjUsXboUs9lMhw4dSE5Otl8zZMgQfvjhB+bPn8+KFSs4fvw4vXr1cmLUecA+uPcUlKgFt3/o8MG98SlmnvtmM4YBdzeOpEONEg6tT0REJDd4OLPyX375JdPx7NmzCQsLIzo6mhYtWhAfH8+MGTOYO3cubdrYulVmzZpFtWrVWLNmDbfccoszwnYsw4D/DYHjG8E3BO6aA15+Dq7SYOSiLZyIT6VcqD+julV3aH0iIiK5xamJzOXi4+MBCAkJASA6Ohqz2Uy7dv8t/Fa1alXKlCnD6tWrr5jIpKWlkZaWZj9OSEgAwGw2Yzabcy3Wi/fKzXsCuK2bjvvmuRgmNyw9P8UoEgG5XMflFm48zo//nsDDzcS7vWviaTJy/XPlBUe1idwYtYdrUXu4FrXH9WX3e+MyiYzVamXw4ME0a9aMmjVrAnDy5Em8vLwIDg7OdG14eDgnT5684n3GjRvH2LFjs5QvWbIEP7/cf7KxdOnSXLtXscQd3Lr3LQC2RvRj/44k2PFTrt3/Ss6kwlv/ugMmOpbK4Oi/qzj6r0OrdLjcbBO5eWoP16L2cC1qj6tLSUnJ1nUuk8gMGjSIrVu38tdff93UfUaMGMHQoUPtxwkJCURGRtKhQwcCAwNvNkw7s9nM0qVLad++PZ6enjd/w/ijeMwcggkr1pp9qHr7+1R18LgYi9XgvpnrSLPE0TAqmAkPNsLdLf/OUsr1NpGbovZwLWoP16L2uL6LPSrX4xKJzFNPPcX//vc/Vq5cSenSpe3lJUqUID09nbi4uExPZWJiYihR4sqDUb29vfH29s5S7unp6ZAflly5r/k8fDsAUs5AyTq43fERbp6O35xxxop9rD8Uh7+XO+/dVQ8f74KxIaSj2lpujNrDtag9XIva4+qy+31x6qwlwzB46qmnWLhwIb/99hvlymVe7K1BgwZ4enqyfPlye9muXbs4fPgwTZs2zetwHcMw4Idn4cRm8CtmG9zr6evwanecSGDikt0AjO5eg8gQxw4oFhERcQSnPpEZNGgQc+fO5fvvvycgIMA+7iUoKAhfX1+CgoJ46KGHGDp0KCEhIQQGBvL000/TtGnTgjNjac0U+PdrMLnDnZ9BcKTDq0zLsDDk602kW6y0qxbOnQ1LX/9NIiIiLsipicyUKVMAaNWqVabyWbNm8cADDw
Dw3nvv4ebmRu/evUlLS6Njx458/PHHeRypg+xfAUtetn3d8U0od1ueVDtx6W52nkykmL8X43vX0uq9IiKSbzk1kTEM47rX+Pj4MHnyZCZPnpwHEeWhc4dg/gNgWKDOPdDksTypdu2Bs0xbaVu9d1yvWoQWyTqeSEREJL/QXkvOkJ4CX98L589CRD3o9p7DV+4FSErL4Ln5mzAMuLNBaa3eKyIi+Z4SmbxmGLD4aTi5BfyLw11fgmfe7Gn02g/bOXL2PKWL+vJKd63eKyIi+Z8Smbz294ewdQG4edgG9wblzUDbZdtj+Hr9EUwmmHBnHQJ8NN1PRETyPyUyeWnfb7BstO3rTuOhbLM8qfZMUhrDv7Mt1/vIbeVpUr5YntQrIiLiaEpk8srZAzB/IBhWqHcfNHo4T6o1DIORC7dwOimdKuEBDG1fOU/qFRERyQtKZPJCejLMuxdS46BUA+gyIU8G9wJ8u+EYv26LwdPdxMS76uDj6Z4n9YqIiOQFJTKOZhjw/SCI3Qb+YXk6uPfouRTGLN4GwOB2lakREZQn9YqIiOQVJTKOtmoSbFsIbp5w1xcQGJEn1VqtBsPmbyYpLYMGUUV5vGWFPKlXREQkLymRcaQ9y2DZWNvXnd+CMnm3rcLMVQdYs/8sfl7uTOxbJ1/vai0iInI1SmQc5cw++PZBwID6A6Dhg3lW9e6YRN7+dRcAL3etTlQx/zyrW0REJC8pkXGEtKQLg3vjoXRj6PJOng3uTc+wMnjeJtIzrLSpGsbdjR2/CaWIiIizKJHJbYYBi56AUzugSAno+zl45N1+Ru8v3832EwkU9fPUhpAiIlLgKZHJbX9OgB2LLxncWzLPqo4+dJYpf+wD4M2etQgLyJvZUSIiIs6iRCY37V4Cv71u+7rrBIhsnGdVJ6dlMPSbzVgN6FWvFJ1r5V0CJSIi4ixKZHLLmX3w7cOAYRvY22BAnlb/xk87OHQmhYggH8bcUSNP6xYREXEWJTK5IS0Rvrob0uIh8hbo9FaeVv/7rljm/nMYgHfvrEOgNoQUEZFCQonMzTKssPBxOL0LAiIuDO71yrPqzyWn88IC24aQA5uV5daKoXlWt4iIiLMpkblJbn9NhJ3/A3cv2+DegPA8q9swDF5etJVTiWlUDCvCi52q5lndIiIirsDD2QHkZ+HxG3Hf+J7toNt7ULphntb//abj/LjlBB5uJt7rW1cbQoqISKGjJzI36sweGhycavu60SNQ7748rf543HlGfb8VgGfaVqJWaW0IKSIihY+eyNwIw8D9f8/iZj2PtUxT3DqNy9PqrVaD5xdsJjE1g7qRwTzZShtCiohI4aQnMjfCZMJy+2RiAmtj6TUT3PN2ltBnqw+yau8ZfDzdmNi3Dh7uakYRESmc9BvwRhUtx5oKw8C/eJ5Wuzc2kfE/7wTgpS7VKF+8SJ7WLyIi4kqUyOQjZouVIV9vJi3DSovKxbnvlihnhyQiIuJUSmTykQ9/28uWY/EE+XryTp/a2hBSREQKPSUy+cSmI3FM/n0vAK/3qEl4oDaEFBERUSKTD5xPtzD0601YrAa314mge50IZ4ckIiLiEpTI5APjf97B/tPJhAd689odNZ0djoiIiMtQIuPi/txzis9WHwLgnT51CPLThpAiIiIXKZFxYfEpZp6fb9sQsn/TKFpUztup3iIiIq5OiYwLG/X9Vk4mpFI+1J8Rnas5OxwRERGXo0TGRf2w+TiLNx/H3c3ExLvq4uulDSFFREQup0TGBZ2MT+XlRbYNIQe1rkjdyGDnBiQiIuKilMi4GMMweOHbf4k/b6Z26SCeblPR2SGJiIi4LCUyLubLNYdYufsU3h5uTOxbF09tCCkiInJV+i3pQvafSuKNn3YAMLxzVSqGaUNIERGRa1Ei4yIyLFaGfLOZVLOVZhWLMaBpWWeHJCIi4vKUyLiIj//Yx+YjcQT4ePBOnzq4uWlDSBERketRIuMCthyN54PlewB47Y6aRAT7OjkiER
GR/EGJjJOlmi0M/nojGVaDLrVKcEddbQgpIiKSXUpknOztX3ax71QyxQO8eaNHLUwmdSmJiIhklxIZJ/p772lmrjoAwNt9alPU38vJEYmIiOQvSmScJP68mWHzNwNwT5MytK4S5uSIRERE8h8lMk4ydvE2jsenElXMj5e6aENIERGRG6FExgl+3nKC7zYew80EE/vWxd/bw9khiYiI5EtKZPJYbEIqIxduAeCJVhVoEFXUyRGJiIjkX0pk8pBhGLz47b+cSzFTIyKQZ9tWdnZIIiIi+ZoSmTz01doj/L7rFF4ebrx3V128PPTtFxERuRn6TZpHDp1J5vUftwPwQscqVA4PcHJEIiIi+Z8SmTxgsRoM/WYzKekWmpQL4cFm5ZwdkoiISIGgRCYPTF2xj+hD5yji7cGEvtoQUkREJLcokXGwbcfjmbRsNwCju1endFE/J0ckIiJScCiRcaBUs4WhX2/GbDHoUD2cPg1KOzskERGRAkWJjANNXLqbXTGJhBbxYlwvbQgpIiKS25TIOMia/WeY/ud+AMb3qk2xIt5OjkhERKTgUSLjAImpZp77ZjOGAXc1jKRd9XBnhyQiIlIgKZFxgFd/2M6xuPNEhvgyqnt1Z4cjIiJSYDk1kVm5ciXdu3cnIiICk8nEokWLMp03DINXXnmFkiVL4uvrS7t27dizZ49zgs2mJdtOMj/6KCYTTLizLkW0IaSIiIjDODWRSU5Opk6dOkyePPmK599++20++OADpk6dyj///IO/vz8dO3YkNTU1jyPNntNJaYz4zrYh5KMtytO4XIiTIxIRESnYnPq4oHPnznTu3PmK5wzDYNKkSbz88svccccdAHz++eeEh4ezaNEi+vXrl5ehXpdhGAz/dgtnktOpWiKAoe21IaSIiIijuWy/x4EDBzh58iTt2rWzlwUFBdGkSRNWr1591UQmLS2NtLQ0+3FCQgIAZrMZs9mca/FdvNfFP+dHH2PZjhg83U2807smboYVs9maa/XJ9V3eJuJcag/XovZwLWqP68vu98ZlE5mTJ08CEB6eecZPeHi4/dyVjBs3jrFjx2YpX7JkCX5+ub+q7tKlSzmTCm9tdgdMdCqVwf4Nf7I/12uS7Fq6dKmzQ5BLqD1ci9rDtag9ri4lJSVb17lsInOjRowYwdChQ+3HCQkJREZG0qFDBwIDA3OtHrPZzNKlS2nTth0Dv9hEmjWOhlHBvPNgI9y1l5JTXGyT9u3b4+np6exwCj21h2tRe7gWtcf1XexRuR6XTWRKlCgBQExMDCVLlrSXx8TEULdu3au+z9vbG2/vrIvPeXp6OuSH5Yt1x1h/KA5/L3cm9q2Hj7dXrtchOeOotpYbo/ZwLWoP16L2uLrsfl9cdh2ZcuXKUaJECZYvX24vS0hI4J9//qFp06ZOjOw/x5PhvWV7ARjVrTplimlDSBERkbzk1CcySUlJ7N2713584MABNm3aREhICGXKlGHw4MG8/vrrVKpUiXLlyjFq1CgiIiLo0aOH84K+IC3Dypd73TFbDNpVC+OuRpHODklERKTQcWois379elq3bm0/vji2ZcCAAcyePZsXXniB5ORkHn30UeLi4mjevDm//PILPj4+zgrZ7sPf9nEsxURRP0/G9aqtDSFFREScwKmJTKtWrTAM46rnTSYTr776Kq+++moeRpU9biYwYfD6HdUpHqANIUVERJzBZQf7urqh7SsRkrCHDtoQUkRExGlcdrBvfhDm6+wIRERECjclMiIiIpJvKZERERGRfEuJjIiIiORbSmREREQk31IiIyIiIvmWEhkRERHJt5TIiIiISL6lREZERETyLSUyIiIikm8pkREREZF8S4mMiIiI5FtKZERERCTfUiIjIiIi+ZaHswNwNMMwAEhISMjV+5rNZlJSUkhISMDT0zNX7y03Rm3iWtQerkXt4VrUHtd38ff2xd/jV1PgE5nExEQAIiMjnRyJiIiI5FRiYiJBQUFXPW8yrpfq5HNWq5
Xjx48TEBCAyWTKtfsmJCQQGRnJkSNHCAwMzLX7yo1Tm7gWtYdrUXu4FrXH9RmGQWJiIhEREbi5XX0kTIF/IuPm5kbp0qUddv/AwED9ELoYtYlrUXu4FrWHa1F7XNu1nsRcpMG+IiIikm8pkREREZF8S4nMDfL29mb06NF4e3s7OxS5QG3iWtQerkXt4VrUHrmnwA/2FRERkYJLT2REREQk31IiIyIiIvmWEhkRERHJt5TIiIiISL6lROYGTZ48mbJly+Lj40OTJk1Yu3ats0MqlMaNG0ejRo0ICAggLCyMHj16sGvXLmeHJReMHz8ek8nE4MGDnR1KoXbs2DHuu+8+ihUrhq+vL7Vq1WL9+vXODqtQslgsjBo1inLlyuHr60uFChV47bXXrrufkFydEpkb8PXXXzN06FBGjx7Nhg0bqFOnDh07diQ2NtbZoRU6K1asYNCgQaxZs4alS5diNpvp0KEDycnJzg6t0Fu3bh2ffPIJtWvXdnYohdq5c+do1qwZnp6e/Pzzz2zfvp0JEyZQtGhRZ4dWKL311ltMmTKFjz76iB07dvDWW2/x9ttv8+GHHzo7tHxL069vQJMmTWjUqBEfffQRYNvPKTIykqeffprhw4c7ObrC7dSpU4SFhbFixQpatGjh7HAKraSkJOrXr8/HH3/M66+/Tt26dZk0aZKzwyqUhg8fzqpVq/jzzz+dHYoA3bp1Izw8nBkzZtjLevfuja+vL19++aUTI8u/9EQmh9LT04mOjqZdu3b2Mjc3N9q1a8fq1audGJkAxMfHAxASEuLkSAq3QYMG0bVr10x/T8Q5Fi9eTMOGDbnzzjsJCwujXr16TJ8+3dlhFVq33nory5cvZ/fu3QBs3ryZv/76i86dOzs5svyrwG8amdtOnz6NxWIhPDw8U3l4eDg7d+50UlQCtidjgwcPplmzZtSsWdPZ4RRa8+bNY8OGDaxbt87ZoQiwf/9+pkyZwtChQxk5ciTr1q3jmWeewcvLiwEDBjg7vEJn+PDhJCQkULVqVdzd3bFYLLzxxhvce++9zg4t31IiIwXGoEGD2Lp1K3/99ZezQym0jhw5wrPPPsvSpUvx8fFxdjiCLcFv2LAhb775JgD16tVj69atTJ06VYmME3zzzTfMmTOHuXPnUqNGDTZt2sTgwYOJiIhQe9wgJTI5FBoairu7OzExMZnKY2JiKFGihJOikqeeeor//e9/rFy5ktKlSzs7nEIrOjqa2NhY6tevby+zWCysXLmSjz76iLS0NNzd3Z0YYeFTsmRJqlevnqmsWrVqfPvtt06KqHB7/vnnGT58OP369QOgVq1aHDp0iHHjximRuUEaI5NDXl5eNGjQgOXLl9vLrFYry5cvp2nTpk6MrHAyDIOnnnqKhQsX8ttvv1GuXDlnh1SotW3bli1btrBp0yb7q2HDhtx7771s2rRJSYwTNGvWLMuSBLt37yYqKspJERVuKSkpuLll/tXr7u6O1Wp1UkT5n57I3IChQ4cyYMAAGjZsSOPGjZk0aRLJyckMHDjQ2aEVOoMGDWLu3Ll8//33BAQEcPLkSQCCgoLw9fV1cnSFT0BAQJbxSf7+/hQrVkzjlpxkyJAh3Hrrrbz55pv07duXtWvXMm3aNKZNm+bs0Aql7t2788Ybb1CmTBlq1KjBxo0bmThxIg8++KCzQ8u/DLkhH374oVGmTBnDy8vLaNy4sbFmzRpnh1QoAVd8zZo1y9mhyQUtW7Y0nn32WWeHUaj98MMPRs2aNQ1vb2+jatWqxrRp05wdUqGVkJBgPPvss0aZMmUMHx8fo3z58sZLL71kpKWlOTu0fEvryIiIiEi+pTEyIiIikm8pkREREZF8S4mMiIiI5FtKZERERCTfUiIjIiIi+ZYSGREREcm3lMiIiIhIvqVERkQKHZPJxKJFi5wdhojkAiUyIpKnHnjgAUwmU5ZXp06dnB2aiORD2mtJRPJcp06dmDVrVqYyb29vJ0UjIvmZnsiISJ7z9v
amRIkSmV5FixYFbN0+U6ZMoXPnzvj6+lK+fHkWLFiQ6f1btmyhTZs2+Pr6UqxYMR599FGSkpIyXTNz5kxq1KiBt7c3JUuW5Kmnnsp0/vTp0/Ts2RM/Pz8qVarE4sWLHfuhRcQhlMiIiMsZNWoUvXv3ZvPmzdx7773069ePHTt2AJCcnEzHjh0pWrQo69atY/78+SxbtixTojJlyhQGDRrEo48+ypYtW1i8eDEVK1bMVMfYsWPp27cv//77L126dOHee+/l7Nmzefo5RSQXOHvXShEpXAYMGGC4u7sb/v7+mV5vvPGGYRi2Hc0ff/zxTO9p0qSJ8cQTTxiGYRjTpk0zihYtaiQlJdnP//jjj4abm5tx8uRJwzAMIyIiwnjppZeuGgNgvPzyy/bjpKQkAzB+/vnnXPucIpI3NEZGRPJc69atmTJlSqaykJAQ+9dNmzbNdK5p06Zs2rQJgB07dlCnTh38/f3t55s1a4bVamXXrl2YTCaOHz9O27ZtrxlD7dq17V/7+/sTGBhIbGzsjX4kEXESJTIikuf8/f2zdPXkFl9f32xd5+npmenYZDJhtVodEZKIOJDGyIiIy1mzZk2W42rVqgFQrVo1Nm/eTHJysv38qlWrcHNzo0qVKgQEBFC2bFmWL1+epzGLiHPoiYyI5Lm0tDROnjyZqczDw4PQ0FAA5s+fT8OGDWnevDlz5sxh7dq1zJgxA4B7772X0aNHM2DAAMaMGcOpU6d4+umnuf/++wkPDwdgzJgxPP7444SFhdG5c2cSExNZtWoVTz/9dN5+UBFxOCUyIpLnfvnlF0qWLJmprEqVKuzcuROwzSiaN28eTz75JCVLluSrr76ievXqAPj5+fHrr7/y7LPP0qhRI/z8/OjduzcTJ06032vAgAGkpqby3nvvMWzYMEJDQ+nTp0/efUARyTMmwzAMZwchInKRyWRi4cKF9OjRw9mhiEg+oDEyIiIikm8pkREREZF8S2NkRMSlqLdbRHJCT2REREQk31IiIyIiIvmWEhkRERHJt5TIiIiISL6lREZERETyLSUyIiIikm8pkREREZF8S4mMiIiI5FtKZERERCTf+j+acCTRzlY1bAAAAABJRU5ErkJggg==",
            "text/plain": [
              "<Figure size 640x480 with 1 Axes>"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        }
      ],
      "source": [
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "import torchvision\n",
        "import torchvision.transforms as transforms\n",
        "from torch.utils.data import DataLoader\n",
        "import matplotlib.pyplot as plt\n",
        "import os\n",
        "\n",
        "\n",
        "class BasicBlock(nn.Module):\n",
        "    \"\"\"Two-conv residual block (ResNet-18/34 style).\n",
        "\n",
        "    forward computes relu(bn2(conv2(relu(bn1(conv1(x))))) + identity),\n",
        "    where identity is x itself, or downsample(x) when the spatial size\n",
        "    or channel count changes.\n",
        "    \"\"\"\n",
        "\n",
        "    # Output width multiplier; BasicBlock keeps `planes` output channels.\n",
        "    expansion = 1\n",
        "\n",
        "    def __init__(self, in_planes, planes, stride=1, downsample=None, norm_layer=None):\n",
        "        \"\"\"stride is applied only in conv1; downsample (optional nn.Module)\n",
        "        projects the shortcut; norm_layer defaults to nn.BatchNorm2d.\"\"\"\n",
        "        super(BasicBlock, self).__init__()\n",
        "        if norm_layer is None:\n",
        "            norm_layer = nn.BatchNorm2d\n",
        "\n",
        "        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)\n",
        "        self.bn1 = norm_layer(planes)\n",
        "        self.relu = nn.ReLU(inplace=True)\n",
        "        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)\n",
        "        self.bn2 = norm_layer(planes)\n",
        "\n",
        "        self.downsample = downsample\n",
        "        self.stride = stride\n",
        "\n",
        "    def forward(self, x):\n",
        "        \"\"\"Residual forward pass; returns a tensor with `planes` channels.\"\"\"\n",
        "        identity = x\n",
        "\n",
        "        out = self.relu(self.bn1(self.conv1(x)))\n",
        "        out = self.bn2(self.conv2(out))\n",
        "\n",
        "        if self.downsample is not None:\n",
        "            identity = self.downsample(x)\n",
        "\n",
        "        # In-place add is safe: `out` was freshly produced by bn2 above.\n",
        "        out += identity\n",
        "        out = self.relu(out)\n",
        "        return out\n",
        "\n",
        "def conv1x1(in_planes, out_planes, stride=1):\n",
        "    \"\"\"1x1 convolution without bias, used to project residual shortcuts.\"\"\"\n",
        "    projection = nn.Conv2d(in_planes, out_planes, stride=stride, kernel_size=1, bias=False)\n",
        "    return projection\n",
        "\n",
        "class ResNet(nn.Module):\n",
        "    \"\"\"ResNet adapted for 32x32 CIFAR inputs.\n",
        "\n",
        "    Differences from the ImageNet variant, visible below: the stem is a\n",
        "    3x3 stride-1 conv (instead of 7x7 stride-2) and the stem max-pool is\n",
        "    replaced by nn.Identity(), so stage 1 runs at full 32x32 resolution.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(self, block, layers, num_classes=100, norm_layer=None):\n",
        "        \"\"\"block: residual block class (e.g. BasicBlock); layers: number of\n",
        "        blocks per stage (4 entries); num_classes: size of the linear head;\n",
        "        norm_layer: defaults to nn.BatchNorm2d.\"\"\"\n",
        "        super(ResNet, self).__init__()\n",
        "        if norm_layer is None:\n",
        "            norm_layer = nn.BatchNorm2d\n",
        "        self._norm_layer = norm_layer\n",
        "\n",
        "        # Channel count fed into the first stage; mutated by _make_layer.\n",
        "        self.inplanes = 64\n",
        "\n",
        "\n",
        "        # CIFAR stem: 3x3 stride-1 conv keeps the 32x32 resolution.\n",
        "        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)\n",
        "        self.bn1 = norm_layer(64)\n",
        "        self.relu = nn.ReLU(inplace=True)\n",
        "        self.maxpool = nn.Identity()\n",
        "\n",
        "        # Four stages; stages 2-4 halve the spatial size via stride=2.\n",
        "        self.layer1 = self._make_layer(block, 64, layers[0])\n",
        "        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)\n",
        "        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)\n",
        "        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)\n",
        "\n",
        "        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))\n",
        "        self.fc = nn.Linear(512 * block.expansion, num_classes)\n",
        "\n",
        "        self._initialize_weights()\n",
        "\n",
        "    def _make_layer(self, block, planes, blocks, stride=1):\n",
        "        \"\"\"Stack `blocks` residual blocks; only the first one may downsample.\"\"\"\n",
        "        norm_layer = self._norm_layer\n",
        "        downsample = None\n",
        "\n",
        "        # Project the shortcut when resolution or channel width changes.\n",
        "        if stride != 1 or self.inplanes != planes * block.expansion:\n",
        "            downsample = nn.Sequential(\n",
        "                conv1x1(self.inplanes, planes * block.expansion, stride),\n",
        "                norm_layer(planes * block.expansion),\n",
        "            )\n",
        "\n",
        "        layers = []\n",
        "        layers.append(block(self.inplanes, planes, stride, downsample, norm_layer))\n",
        "        # Subsequent blocks in the stage consume the widened channel count.\n",
        "        self.inplanes = planes * block.expansion\n",
        "        for _ in range(1, blocks):\n",
        "            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))\n",
        "\n",
        "        return nn.Sequential(*layers)\n",
        "\n",
        "    def _initialize_weights(self):\n",
        "        \"\"\"Kaiming-normal init for convs; unit weight / zero bias for norms.\"\"\"\n",
        "        for m in self.modules():\n",
        "            if isinstance(m, nn.Conv2d):\n",
        "                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n",
        "            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):\n",
        "                nn.init.constant_(m.weight, 1)\n",
        "                nn.init.constant_(m.bias, 0)\n",
        "\n",
        "    def forward(self, x):\n",
        "        \"\"\"Stem -> 4 residual stages -> global average pool -> linear head.\"\"\"\n",
        "        x = self.relu(self.bn1(self.conv1(x)))\n",
        "        x = self.maxpool(x)\n",
        "\n",
        "        x = self.layer1(x)\n",
        "        x = self.layer2(x)\n",
        "        x = self.layer3(x)\n",
        "        x = self.layer4(x)\n",
        "\n",
        "        x = self.avgpool(x)\n",
        "        x = torch.flatten(x, 1)\n",
        "        x = self.fc(x)\n",
        "        return x\n",
        "\n",
        "def resnet18_cifar100():\n",
        "    \"\"\"Factory for a CIFAR-adapted ResNet-18 (two BasicBlocks per stage)\n",
        "    with a 100-way classification head.\"\"\"\n",
        "    net = ResNet(BasicBlock, [2, 2, 2, 2], num_classes=100)\n",
        "    return net\n",
        "\n",
        "\n",
        "# CIFAR-100 per-channel mean / std used for input normalization\n",
        "# (shared by the train and test pipelines).\n",
        "cifar100_mean = (0.5071, 0.4865, 0.4409)\n",
        "cifar100_std = (0.2673, 0.2564, 0.2762)\n",
        "\n",
        "# Training pipeline: light augmentation (flip + padded random crop),\n",
        "# then tensor conversion and normalization.\n",
        "transform_train = transforms.Compose([\n",
        "    transforms.RandomHorizontalFlip(),\n",
        "    transforms.RandomCrop(32, padding=4),\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize(cifar100_mean, cifar100_std),\n",
        "])\n",
        "\n",
        "# Test pipeline: deterministic, normalization only.\n",
        "transform_test = transforms.Compose([\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize(cifar100_mean, cifar100_std),\n",
        "])\n",
        "\n",
        "trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform_train)\n",
        "testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform_test)\n",
        "trainloader = DataLoader(trainset, batch_size=128, shuffle=True, num_workers=2)\n",
        "testloader = DataLoader(testset, batch_size=100, shuffle=False, num_workers=2)\n",
        "\n",
        "\n",
        "# Select GPU when available; fall back to CPU otherwise.\n",
        "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
        "model = resnet18_cifar100().to(device)\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "# Standard CIFAR recipe: SGD with momentum and weight decay.\n",
        "optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9, weight_decay=5e-4)\n",
        "# NOTE(review): step_size=30 but the loop below runs only 10 epochs, so the\n",
        "# learning rate never actually decays in this run -- confirm this is intended.\n",
        "scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)\n",
        "\n",
        "\n",
        "# Per-epoch accuracy history, consumed by the plot at the bottom.\n",
        "train_accs, test_accs = [], []\n",
        "\n",
        "for epoch in range(1, 11):\n",
        "    # --- Training pass: train mode enables batch-norm updates / dropout. ---\n",
        "    model.train()\n",
        "    correct, total = 0, 0\n",
        "    for inputs, targets in trainloader:\n",
        "        inputs, targets = inputs.to(device), targets.to(device)\n",
        "\n",
        "        outputs = model(inputs)\n",
        "        loss = criterion(outputs, targets)\n",
        "\n",
        "        # Clear stale gradients before backprop, then apply the update.\n",
        "        optimizer.zero_grad()\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "\n",
        "        # Accumulate running top-1 accuracy over the epoch.\n",
        "        _, predicted = outputs.max(1)\n",
        "        total += targets.size(0)\n",
        "        correct += predicted.eq(targets).sum().item()\n",
        "    train_acc = 100. * correct / total\n",
        "    train_accs.append(train_acc)\n",
        "\n",
        "    # --- Evaluation pass: eval mode freezes norm statistics; no_grad\n",
        "    # disables autograd bookkeeping to save memory. ---\n",
        "    model.eval()\n",
        "    correct, total = 0, 0\n",
        "    with torch.no_grad():\n",
        "        for inputs, targets in testloader:\n",
        "            inputs, targets = inputs.to(device), targets.to(device)\n",
        "            outputs = model(inputs)\n",
        "            _, predicted = outputs.max(1)\n",
        "            total += targets.size(0)\n",
        "            correct += predicted.eq(targets).sum().item()\n",
        "    test_acc = 100. * correct / total\n",
        "    test_accs.append(test_acc)\n",
        "\n",
        "    print(f\"Epoch {epoch:03d}: Train Acc: {train_acc:.2f}% | Test Acc: {test_acc:.2f}%\")\n",
        "    # Advance the LR schedule once per epoch.\n",
        "    scheduler.step()\n",
        "\n",
        "\n",
        "# Plot accuracy curves against 1-based epoch numbers so the x-axis\n",
        "# matches the printed log. Plotting the lists directly would label the\n",
        "# first epoch as 0 (off-by-one vs. the `Epoch 001: ...` output above).\n",
        "epochs = range(1, len(train_accs) + 1)\n",
        "plt.plot(epochs, train_accs, label='Train Acc')\n",
        "plt.plot(epochs, test_accs, label='Test Acc')\n",
        "plt.xlabel(\"Epoch\")\n",
        "plt.ylabel(\"Accuracy (%)\")\n",
        "plt.title(\"ResNet18 on CIFAR-100\")\n",
        "plt.legend()\n",
        "plt.grid(True)\n",
        "plt.show()\n"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
