{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 查看FashionMNIST原始数据格式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.363026Z",
     "start_time": "2025-06-26T01:43:29.447990Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(<PIL.Image.Image image mode=L size=28x28 at 0x1735E804530>, 9)\n"
     ]
    },
    {
     "data": {
      "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/wAALCAAcABwBAREA/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/9oACAEBAAA/APn+tbw1oNx4m8QWmkWx2yXD4LkZCADJJ+gFbviL4a63oc7COE3MW4hdn38duD976jNc9daDqllIsc9lKrMu4YGeMkdR7gj8KzcV7H8BtEvV16+1iWCeG1Wz8mOV02pIzupwCeuAp6Z98cZ90aIzLIlw0c0ZJ4KgjHoeOa+evjS9n/wnMcNxBPCYLKONFhA2FNzMpGenDcgd816V4K03wefC+m3NlpVhP+5QSXBiR5fMx825iMg5zwce3FdbOzTwgW90lu6uCm8eYrL02soIyCPQgggEdMGQ3cluiPNK0rJwrRQBNueuMkt+teNfGKxsdY8WWdxNqcNo66eieXMwVsb5DnH415Hp2rajpE5n02/urOUjBe3laMkehIPIrVm8eeLrhNknibVivoLtx/I1UPinxC3XXtUP1vJP8ay5JZJpGkldnduSzHJP41//2Q==",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAAAAABXZoBIAAACS0lEQVR4AWKgA2BkYOD1ZGBgZAHZxcjIAKZBbBBm+quS8v3rj1N/GBiZGP8wMKNIMv91cnnCzuU65+X/vww8/76hSP5iMFVgZtpp2HXm8nUz02PHGUHGQTHjf9cugd//GE7f+cUo8ft0yDSEJCMDw/8TCgyMf34x/Ph3/vYfT0VphLH/GRgY3kt+Z2fl+cH5z8aSSWwHqmsZuJiZvn18p/CPkYnr7z9ZBiaofQwMjMwMPFI/frH++sr/j537K9sldhOE5H9mhnBJJg4Gbtlf7L//cQhvusaCkGT5xXDlBxsXl6rSD2Yunr9PoraeYAGZx8T4+x/DHwaGbV+/s/1/zczxm+H3P2a9jwxMDMz///z6+Y+BwW7ime9v//z78/XrXw6GbwxsX4NAYc3AICSlJhmk/oPpN+czVjbhX1zHeOz+fWR9qcnIYNkkKvCX+cMfrl+M36+HneEVVGC4x/v5GycPHxcj83GpP3+/MTB/Z2DgF0lwy3z24/49VeFfrLxsf+UBY0xqv8vDw87Ayv/4mSiTRACHIrexMdMvJjYGRlYLlpeP+X485mHje/eQ5/uPP+svKwj9+vD77y/Wf4xsaixP/z/mFvnw5jULOysHL9Mbza+P37O/+f3nN6fERwOWC+sTn937wcPGwcb88+//by/+/WX5wfPrw4fffxRfMjIweBWLv/7wl5mNhZnxPysrGysjA+NLBrZ/EpfCGJn+MTA4tYnxMzGz/GV8+f/pvy/MDP9/f2Paff0YJBAYGBg0RN/LPPx1Fx5HFDIAaCTYdiCc4RIAAAAASUVORK5CYII=",
      "text/plain": [
       "<PIL.Image.Image image mode=L size=28x28>"
      ]
     },
     "execution_count": 1,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import torch\n",
    "import torchvision\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from torchvision import datasets, transforms\n",
    "# Project-local training/evaluation helpers (not part of torchvision)\n",
    "from deeplearning_func import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
    "from deeplearning_func import evaluate_classification_model as evaluate_model\n",
    "# Load the Fashion MNIST dataset. With an empty Compose the samples stay as\n",
    "# raw PIL images (no tensor conversion), so we can inspect the native format.\n",
    "transform = transforms.Compose([])\n",
    "train_dataset = datasets.FashionMNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.FashionMNIST(root='./data', train=False, download=True, transform=transform)\n",
    "# Each sample is a (PIL.Image, label) tuple\n",
    "print(train_dataset[0])\n",
    "# Bare last expression -> rich (image) display of the first sample\n",
    "train_dataset[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 加载数据并处理为tensor"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.407799Z",
     "start_time": "2025-06-26T01:43:32.363026Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集形状: (60000, 28, 28)\n",
      "训练集标签数量: 60000\n",
      "测试集形状: (10000, 28, 28)\n",
      "测试集标签数量: 10000\n"
     ]
    }
   ],
   "source": [
    "# Load Fashion MNIST as normalized tensors (a tensor behaves much like a numpy array)\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.286,), (0.353,))  # FashionMNIST channel mean/std (~0.2860/0.3530)\n",
    "])\n",
    "train_dataset = datasets.FashionMNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.FashionMNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# Extract images and labels.\n",
    "# Note: the transform is applied lazily per sample; .data / .targets below are\n",
    "# the UNTRANSFORMED uint8 images and integer labels stored on the dataset,\n",
    "# which is what we want for plain display.\n",
    "train_images = train_dataset.data.numpy()\n",
    "train_labels = train_dataset.targets.numpy()\n",
    "test_images = test_dataset.data.numpy()\n",
    "test_labels = test_dataset.targets.numpy()\n",
    "\n",
    "# Class names, index == label id (first entry English, rest Chinese)\n",
    "class_names = ['T-shirt/top', '裤子', '套头衫', '连衣裙', '外套',\n",
    "               '凉鞋', '衬衫', '运动鞋', '包', '短靴']\n",
    "\n",
    "# Basic dataset shapes and sizes\n",
    "print(f\"训练集形状: {train_images.shape}\")\n",
    "print(f\"训练集标签数量: {len(train_labels)}\")\n",
    "print(f\"测试集形状: {test_images.shape}\")\n",
    "print(f\"测试集标签数量: {len(test_labels)}\")\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 把数据集划分为训练集55000和验证集5000，并给DataLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.144223Z",
     "start_time": "2025-06-26T01:43:33.135368Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集大小: 55000\n",
      "验证集大小: 5000\n",
      "测试集大小: 10000\n",
      "批次大小: 64\n",
      "训练批次数: 860\n"
     ]
    }
   ],
   "source": [
    "# Split a validation set off the training set\n",
    "train_size = 55000\n",
    "val_size = 5000\n",
    "# Fixed seed so the random split is reproducible across runs\n",
    "generator = torch.Generator().manual_seed(42)\n",
    "train_subset, val_subset = torch.utils.data.random_split(\n",
    "    train_dataset, \n",
    "    [train_size, val_size],\n",
    "    generator=generator # seeded generator -> identical split every run\n",
    ")\n",
    "\n",
    "# Build the data loaders\n",
    "batch_size = 64\n",
    "train_loader = torch.utils.data.DataLoader(\n",
    "    train_subset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=True # reshuffle the training data at every epoch\n",
    ")\n",
    "\n",
    "val_loader = torch.utils.data.DataLoader(\n",
    "    val_subset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n",
    "\n",
    "test_loader = torch.utils.data.DataLoader(\n",
    "    test_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n",
    "\n",
    "# Report dataset / loader sizes\n",
    "print(f\"训练集大小: {len(train_subset)}\")\n",
    "print(f\"验证集大小: {len(val_subset)}\")\n",
    "print(f\"测试集大小: {len(test_dataset)}\")\n",
    "print(f\"批次大小: {batch_size}\")\n",
    "print(f\"训练批次数: {len(train_loader)}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.148120Z",
     "start_time": "2025-06-26T01:43:33.145230Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "55040"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "64*860  # sanity check: 860 batches * 64 = 55040 >= 55000, so the last batch is partial (24 samples)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 搭建模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([20, 100])\n"
     ]
    }
   ],
   "source": [
    "# Understand each API with a minimal standalone example:\n",
    "# BatchNorm1d(100) normalizes over the batch dimension and preserves the (N, 100) shape\n",
    "import torch.nn as nn\n",
    "m=nn.BatchNorm1d(100)\n",
    "x=torch.randn(20,100)\n",
    "print(m(x).shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.152657Z",
     "start_time": "2025-06-26T01:43:33.148120Z"
    }
   },
   "outputs": [],
   "source": [
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "class NeuralNetwork(nn.Module):\n",
    "    \"\"\"MLP classifier for FashionMNIST.\n",
    "\n",
    "    Architecture: Flatten -> Linear(784, 100) + ReLU, then (layers_num - 1)\n",
    "    blocks of Linear(100, 100) + BatchNorm1d(100) + ReLU, then Linear(100, 10).\n",
    "    \"\"\"\n",
    "    def __init__(self, layers_num=2):\n",
    "        super().__init__()\n",
    "        self.flatten = nn.Flatten()\n",
    "        # First hidden layer\n",
    "        self.linear_relu_stack = nn.Sequential(\n",
    "            nn.Linear(28 * 28, 100),\n",
    "            nn.ReLU(),\n",
    "        )\n",
    "        # Extra hidden blocks: (layers_num - 1) of them\n",
    "        for i in range(1, layers_num):\n",
    "            self.linear_relu_stack.add_module(f\"Linear_{i}\", nn.Linear(100, 100))\n",
    "            self.linear_relu_stack.add_module(f\"BatchNorm_{i}\", nn.BatchNorm1d(100))\n",
    "            # FIX: the activation name must be unique per iteration. The old\n",
    "            # code reused the single name \"relu\"; add_module() with an existing\n",
    "            # name replaces that module in place, so every hidden block after\n",
    "            # the first had NO activation between its Linear/BatchNorm layers.\n",
    "            # ReLU has no parameters, so state_dict keys are unaffected.\n",
    "            self.linear_relu_stack.add_module(f\"relu_{i}\", nn.ReLU())\n",
    "        # Output layer (name kept as-is for state_dict key compatibility,\n",
    "        # even though a name containing a space is unidiomatic)\n",
    "        self.linear_relu_stack.add_module(\"Output Layer\", nn.Linear(100, 10))\n",
    "        \n",
    "        # Deterministically initialize all Linear layers\n",
    "        self.init_weights()\n",
    "        \n",
    "    def init_weights(self):\n",
    "        \"\"\"Initialize every Linear layer: Xavier-uniform weights, zero biases.\"\"\"\n",
    "        for m in self.modules():\n",
    "            if isinstance(m, nn.Linear):  # only fully-connected layers\n",
    "                # https://pytorch.org/docs/stable/nn.init.html\n",
    "                nn.init.xavier_uniform_(m.weight)  # Xavier uniform for W\n",
    "                nn.init.zeros_(m.bias)  # zero-init bias\n",
    "\n",
    "    def forward(self, x):\n",
    "        # x.shape [batch size, 1, 28, 28]\n",
    "        x = self.flatten(x)  \n",
    "        # flattened: x.shape [batch size, 28 * 28]\n",
    "        logits = self.linear_relu_stack(x)\n",
    "        # logits.shape [batch size, 10]\n",
    "        return logits\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.185031Z",
     "start_time": "2025-06-26T01:43:33.152657Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "批次图像形状: torch.Size([64, 1, 28, 28])\n",
      "批次标签形状: torch.Size([64])\n",
      "----------------------------------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "# Instantiate the model (first layer + 18 extra hidden blocks)\n",
    "model = NeuralNetwork(layers_num=19)\n",
    "\n",
    "# Pull the first batch from train_loader\n",
    "dataiter = iter(train_loader)\n",
    "images, labels = next(dataiter)\n",
    "\n",
    "# Inspect the batch shapes\n",
    "print(\"批次图像形状:\", images.shape)\n",
    "print(\"批次标签形状:\", labels.shape)\n",
    "\n",
    "\n",
    "print('-'*100)\n",
    "# Forward pass\n",
    "with torch.no_grad():  # no gradients needed for a shape check\n",
    "    outputs = model(images)\n",
    "    \n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.203053Z",
     "start_time": "2025-06-26T01:43:33.199532Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "需要求梯度的参数总量: 264910\n",
      "----------------------------------------------------------------------------------------------------\n",
      "模型总参数量: 264910\n",
      "\n",
      "各层参数量明细:\n",
      "linear_relu_stack.0.weight: 78400 参数\n",
      "linear_relu_stack.0.bias: 100 参数\n",
      "linear_relu_stack.Linear_1.weight: 10000 参数\n",
      "linear_relu_stack.Linear_1.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_1.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_1.bias: 100 参数\n",
      "linear_relu_stack.Linear_2.weight: 10000 参数\n",
      "linear_relu_stack.Linear_2.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_2.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_2.bias: 100 参数\n",
      "linear_relu_stack.Linear_3.weight: 10000 参数\n",
      "linear_relu_stack.Linear_3.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_3.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_3.bias: 100 参数\n",
      "linear_relu_stack.Linear_4.weight: 10000 参数\n",
      "linear_relu_stack.Linear_4.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_4.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_4.bias: 100 参数\n",
      "linear_relu_stack.Linear_5.weight: 10000 参数\n",
      "linear_relu_stack.Linear_5.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_5.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_5.bias: 100 参数\n",
      "linear_relu_stack.Linear_6.weight: 10000 参数\n",
      "linear_relu_stack.Linear_6.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_6.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_6.bias: 100 参数\n",
      "linear_relu_stack.Linear_7.weight: 10000 参数\n",
      "linear_relu_stack.Linear_7.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_7.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_7.bias: 100 参数\n",
      "linear_relu_stack.Linear_8.weight: 10000 参数\n",
      "linear_relu_stack.Linear_8.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_8.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_8.bias: 100 参数\n",
      "linear_relu_stack.Linear_9.weight: 10000 参数\n",
      "linear_relu_stack.Linear_9.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_9.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_9.bias: 100 参数\n",
      "linear_relu_stack.Linear_10.weight: 10000 参数\n",
      "linear_relu_stack.Linear_10.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_10.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_10.bias: 100 参数\n",
      "linear_relu_stack.Linear_11.weight: 10000 参数\n",
      "linear_relu_stack.Linear_11.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_11.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_11.bias: 100 参数\n",
      "linear_relu_stack.Linear_12.weight: 10000 参数\n",
      "linear_relu_stack.Linear_12.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_12.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_12.bias: 100 参数\n",
      "linear_relu_stack.Linear_13.weight: 10000 参数\n",
      "linear_relu_stack.Linear_13.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_13.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_13.bias: 100 参数\n",
      "linear_relu_stack.Linear_14.weight: 10000 参数\n",
      "linear_relu_stack.Linear_14.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_14.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_14.bias: 100 参数\n",
      "linear_relu_stack.Linear_15.weight: 10000 参数\n",
      "linear_relu_stack.Linear_15.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_15.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_15.bias: 100 参数\n",
      "linear_relu_stack.Linear_16.weight: 10000 参数\n",
      "linear_relu_stack.Linear_16.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_16.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_16.bias: 100 参数\n",
      "linear_relu_stack.Linear_17.weight: 10000 参数\n",
      "linear_relu_stack.Linear_17.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_17.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_17.bias: 100 参数\n",
      "linear_relu_stack.Linear_18.weight: 10000 参数\n",
      "linear_relu_stack.Linear_18.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_18.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_18.bias: 100 参数\n",
      "linear_relu_stack.Output Layer.weight: 1000 参数\n",
      "linear_relu_stack.Output Layer.bias: 10 参数\n"
     ]
    }
   ],
   "source": [
    "# Count parameters that require gradients\n",
    "trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "print(f\"需要求梯度的参数总量: {trainable_params}\")\n",
    "\n",
    "print('-'*100)\n",
    "\n",
    "# Total parameter count (would differ from the above only if some params were frozen)\n",
    "total_params = sum(p.numel() for p in model.parameters())\n",
    "print(f\"模型总参数量: {total_params}\")\n",
    "\n",
    "# Per-parameter breakdown by name\n",
    "print(\"\\n各层参数量明细:\")\n",
    "for name, param in model.named_parameters():\n",
    "    print(f\"{name}: {param.numel()} 参数\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.217395Z",
     "start_time": "2025-06-26T01:43:33.203561Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "OrderedDict([('linear_relu_stack.0.weight',\n",
       "              tensor([[-0.0364, -0.0258, -0.0469,  ...,  0.0750,  0.0509, -0.0021],\n",
       "                      [-0.0367,  0.0283, -0.0686,  ...,  0.0546,  0.0063,  0.0817],\n",
       "                      [ 0.0709,  0.0398, -0.0108,  ..., -0.0028,  0.0208, -0.0694],\n",
       "                      ...,\n",
       "                      [-0.0815, -0.0483,  0.0022,  ...,  0.0766,  0.0699,  0.0205],\n",
       "                      [ 0.0362,  0.0676,  0.0319,  ...,  0.0732, -0.0631,  0.0556],\n",
       "                      [-0.0304, -0.0332,  0.0401,  ...,  0.0791,  0.0579,  0.0618]])),\n",
       "             ('linear_relu_stack.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.Linear_1.weight',\n",
       "              tensor([[-0.0322, -0.0702, -0.0475,  ...,  0.1165,  0.0105,  0.1347],\n",
       "                      [ 0.1020,  0.0696,  0.1115,  ...,  0.0012, -0.0703, -0.0720],\n",
       "                      [ 0.1416, -0.1360, -0.0250,  ..., -0.0160,  0.0495, -0.0225],\n",
       "                      ...,\n",
       "                      [-0.0103,  0.0179, -0.1234,  ...,  0.0670,  0.1713, -0.1509],\n",
       "                      [ 0.1546, -0.1190, -0.0791,  ..., -0.0177, -0.1285,  0.1028],\n",
       "                      [ 0.1239, -0.1703, -0.0247,  ..., -0.1423,  0.1216,  0.1417]])),\n",
       "             ('linear_relu_stack.Linear_1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.running_mean',\n",
       "              tensor([-0.0110,  0.0061, -0.0863,  0.0119, -0.0355, -0.0513,  0.0705,  0.0862,\n",
       "                      -0.0798,  0.1359,  0.0358,  0.0453,  0.0267, -0.0154, -0.0283, -0.0390,\n",
       "                      -0.0422,  0.1114, -0.0017,  0.0243,  0.0161,  0.0897, -0.0498, -0.0412,\n",
       "                       0.0316,  0.0391,  0.0361,  0.0446, -0.0055, -0.1831, -0.0272, -0.0181,\n",
       "                       0.0184, -0.0324,  0.1036, -0.0824, -0.0224,  0.1017,  0.0412, -0.0070,\n",
       "                       0.0670, -0.0746, -0.0690, -0.0199, -0.0128, -0.0117,  0.0029,  0.0755,\n",
       "                      -0.0153, -0.0101, -0.0305, -0.0076,  0.1579, -0.0386, -0.0525,  0.0918,\n",
       "                      -0.0037, -0.0529, -0.0405,  0.0030,  0.1041,  0.0284, -0.0142, -0.0521,\n",
       "                       0.0546, -0.0869, -0.0384, -0.0157,  0.0213,  0.0375,  0.0548,  0.0533,\n",
       "                      -0.0182,  0.0055,  0.0351, -0.0043, -0.0065,  0.0795,  0.0010,  0.0665,\n",
       "                      -0.0256,  0.0146,  0.0917, -0.0459, -0.0712, -0.0064, -0.0891,  0.0583,\n",
       "                       0.0651,  0.1284,  0.0284,  0.0333,  0.0784, -0.0961,  0.0799,  0.0911,\n",
       "                       0.0541,  0.0371, -0.0297, -0.0540])),\n",
       "             ('linear_relu_stack.BatchNorm_1.running_var',\n",
       "              tensor([0.9659, 0.9294, 0.9295, 0.9509, 0.9455, 0.9496, 0.9394, 0.9675, 0.9475,\n",
       "                      0.9544, 0.9257, 0.9491, 0.9239, 0.9517, 0.9243, 0.9306, 0.9423, 0.9665,\n",
       "                      0.9256, 0.9363, 0.9326, 0.9880, 0.9223, 0.9312, 0.9199, 0.9379, 0.9482,\n",
       "                      0.9142, 0.9219, 0.9975, 1.0063, 0.9377, 0.9177, 0.9324, 0.9829, 0.9343,\n",
       "                      0.9310, 0.9340, 0.9300, 0.9333, 0.9232, 0.9489, 0.9361, 0.9727, 0.9273,\n",
       "                      0.9500, 0.9503, 0.9481, 0.9484, 0.9354, 0.9473, 0.9301, 0.9954, 0.9497,\n",
       "                      0.9182, 0.9359, 0.9245, 0.9610, 0.9370, 0.9293, 0.9387, 0.9348, 0.9214,\n",
       "                      0.9599, 0.9391, 0.9449, 0.9388, 0.9469, 0.9271, 0.9347, 0.9173, 0.9321,\n",
       "                      0.9380, 0.9301, 0.9431, 0.9281, 0.9195, 0.9267, 0.9404, 0.9341, 0.9471,\n",
       "                      0.9327, 0.9526, 0.9347, 0.9335, 0.9166, 0.9282, 0.9243, 0.9411, 0.9417,\n",
       "                      0.9531, 0.9540, 0.9255, 0.9578, 0.9468, 0.9461, 0.9413, 0.9336, 0.9504,\n",
       "                      0.9404])),\n",
       "             ('linear_relu_stack.BatchNorm_1.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_2.weight',\n",
       "              tensor([[-0.0360, -0.1513,  0.0771,  ..., -0.0178,  0.0012, -0.0223],\n",
       "                      [ 0.1458, -0.0323, -0.1252,  ..., -0.0235,  0.0899,  0.0563],\n",
       "                      [ 0.1148, -0.0521, -0.1082,  ..., -0.0818, -0.0415, -0.1254],\n",
       "                      ...,\n",
       "                      [-0.0912, -0.0436,  0.0632,  ...,  0.0867,  0.0070, -0.0439],\n",
       "                      [-0.1559, -0.0573, -0.0892,  ..., -0.1000,  0.1194, -0.0540],\n",
       "                      [-0.0722,  0.0542, -0.1082,  ..., -0.0133,  0.0341,  0.0860]])),\n",
       "             ('linear_relu_stack.Linear_2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.running_mean',\n",
       "              tensor([ 0.0108,  0.0055,  0.0904, -0.0259, -0.0173, -0.0329, -0.0069,  0.0038,\n",
       "                      -0.0509, -0.0305,  0.0380,  0.0236, -0.0131, -0.0400, -0.0467, -0.0380,\n",
       "                       0.0439, -0.0304, -0.0360,  0.0651, -0.0384,  0.0384, -0.0552,  0.0313,\n",
       "                       0.0215, -0.0410, -0.0142,  0.0430, -0.0103, -0.0142, -0.0406, -0.0589,\n",
       "                       0.0328, -0.0676,  0.0470, -0.0646,  0.0126,  0.0363,  0.0174, -0.0152,\n",
       "                       0.0073, -0.0155, -0.0168,  0.0048, -0.0193, -0.0151, -0.0553,  0.0758,\n",
       "                      -0.0181, -0.0048,  0.0598,  0.0237,  0.0146,  0.0202,  0.0363, -0.0254,\n",
       "                      -0.0572,  0.0087,  0.0080,  0.0703, -0.0841,  0.0138, -0.0035,  0.0646,\n",
       "                       0.0146,  0.0421,  0.0366,  0.0132, -0.0315,  0.0316, -0.0549,  0.0133,\n",
       "                       0.0437,  0.0336,  0.0396,  0.0529,  0.0017, -0.0145,  0.0405, -0.0122,\n",
       "                      -0.0206,  0.0311, -0.0568, -0.0009,  0.0374,  0.0288,  0.0157,  0.0138,\n",
       "                       0.0530,  0.0523,  0.0248, -0.0051,  0.0748,  0.0257, -0.0435,  0.0108,\n",
       "                      -0.0136, -0.0141, -0.0339, -0.0632])),\n",
       "             ('linear_relu_stack.BatchNorm_2.running_var',\n",
       "              tensor([0.9346, 0.9148, 0.9427, 0.9356, 0.9340, 0.9264, 0.9352, 0.9238, 0.9230,\n",
       "                      0.9185, 0.9241, 0.9336, 0.9697, 0.9499, 0.9274, 0.9280, 0.9360, 0.9319,\n",
       "                      0.9298, 0.9510, 0.9458, 0.9291, 0.9247, 0.9352, 0.9527, 0.9219, 0.9348,\n",
       "                      0.9430, 0.9247, 0.9405, 0.9199, 0.9285, 0.9238, 0.9446, 0.9321, 0.9399,\n",
       "                      0.9487, 0.9336, 0.9619, 0.9348, 0.9290, 0.9266, 0.9210, 0.9333, 0.9212,\n",
       "                      0.9363, 0.9428, 0.9255, 0.9224, 0.9263, 0.9234, 0.9690, 0.9228, 0.9371,\n",
       "                      0.9301, 0.9396, 0.9252, 0.9278, 0.9183, 0.9536, 0.9427, 0.9394, 0.9309,\n",
       "                      0.9419, 0.9623, 0.9340, 0.9162, 0.9166, 0.9263, 0.9260, 0.9328, 0.9157,\n",
       "                      0.9316, 0.9327, 0.9504, 0.9690, 0.9274, 0.9278, 0.9516, 0.9280, 0.9190,\n",
       "                      0.9283, 0.9383, 0.9226, 0.9381, 0.9291, 0.9522, 0.9397, 0.9246, 0.9284,\n",
       "                      0.9329, 0.9305, 0.9691, 0.9309, 0.9342, 0.9363, 0.9253, 0.9327, 0.9693,\n",
       "                      0.9289])),\n",
       "             ('linear_relu_stack.BatchNorm_2.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_3.weight',\n",
       "              tensor([[-0.1055, -0.0873, -0.0265,  ..., -0.0555, -0.0195, -0.0135],\n",
       "                      [ 0.1027,  0.1454, -0.0646,  ...,  0.1180, -0.0306,  0.0354],\n",
       "                      [-0.1319, -0.0298, -0.0044,  ..., -0.0708, -0.1457, -0.0478],\n",
       "                      ...,\n",
       "                      [-0.1681, -0.0411,  0.0684,  ..., -0.0430,  0.0461, -0.0599],\n",
       "                      [ 0.1724,  0.1532, -0.0363,  ..., -0.1305, -0.1709, -0.0402],\n",
       "                      [-0.0532, -0.0639, -0.1088,  ..., -0.0065,  0.0133,  0.0444]])),\n",
       "             ('linear_relu_stack.Linear_3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.running_mean',\n",
       "              tensor([ 8.8476e-10, -4.0047e-09,  6.9849e-10, -6.5193e-10, -3.4925e-09,\n",
       "                       7.1013e-10, -1.3039e-09,  3.3528e-09,  1.7695e-09,  3.3528e-09,\n",
       "                       6.4028e-10, -3.4925e-09,  4.7265e-09,  1.9558e-09, -7.3574e-09,\n",
       "                      -2.5611e-09, -7.1246e-09, -7.1304e-10, -3.3178e-09,  3.9116e-09,\n",
       "                       2.5611e-09, -3.0966e-09, -2.6776e-10, -4.3772e-09,  1.3039e-09,\n",
       "                       4.0978e-09, -5.8208e-09,  5.0291e-09,  2.3283e-09, -1.3970e-09,\n",
       "                      -2.2119e-09,  1.4668e-09, -3.1665e-09,  4.8894e-09,  3.9581e-09,\n",
       "                       3.2131e-09,  5.3085e-09,  6.5193e-09,  7.4506e-10,  4.4005e-09,\n",
       "                       3.3993e-09, -3.7719e-09, -5.7276e-09,  9.3132e-10, -1.0245e-09,\n",
       "                       4.6566e-10,  4.4936e-09, -1.0245e-09,  3.9581e-09, -5.1223e-09,\n",
       "                       1.2107e-09,  8.9873e-09, -4.1910e-10,  2.9569e-09,  4.1444e-09,\n",
       "                       9.3132e-11, -1.6298e-09, -2.3283e-10,  1.6764e-09, -4.0978e-09,\n",
       "                       1.0710e-09,  4.9360e-09,  2.7008e-09, -1.6531e-09,  1.5832e-09,\n",
       "                       1.2107e-09, -1.1176e-09,  0.0000e+00,  2.0489e-09, -6.9849e-10,\n",
       "                      -6.1234e-09, -8.0094e-09, -3.8184e-09,  8.0094e-09, -4.7497e-09,\n",
       "                       4.5868e-09, -5.2620e-09,  1.8161e-09, -5.8673e-09, -9.3132e-11,\n",
       "                       1.8626e-09, -5.5879e-10, -1.6298e-09,  9.1502e-09, -2.7474e-09,\n",
       "                       3.2596e-09,  7.8231e-09,  5.4482e-09,  2.3516e-09, -4.9826e-09,\n",
       "                       7.9162e-10, -7.9162e-10,  1.6298e-09, -5.6811e-09, -8.8010e-09,\n",
       "                       4.0280e-09, -3.7253e-10, -2.8871e-09, -3.2596e-10, -3.7253e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_3.running_var',\n",
       "              tensor([0.9884, 0.9893, 0.9945, 1.0030, 0.9540, 1.0183, 1.1118, 0.9789, 0.9879,\n",
       "                      0.9841, 1.0114, 0.9746, 1.0085, 0.9665, 0.9976, 1.1130, 0.9556, 1.0295,\n",
       "                      0.9935, 1.0032, 0.9604, 0.9660, 0.9938, 1.0062, 1.0566, 0.9644, 0.9760,\n",
       "                      0.9950, 0.9769, 1.0073, 1.0180, 1.0172, 1.0443, 0.9751, 0.9848, 0.9668,\n",
       "                      1.0139, 0.9808, 0.9988, 0.9956, 0.9904, 0.9666, 1.0054, 1.0172, 0.9827,\n",
       "                      0.9908, 0.9465, 0.9717, 1.0182, 1.0460, 0.9871, 0.9563, 1.0539, 0.9989,\n",
       "                      0.9451, 0.9596, 1.0050, 1.0010, 0.9830, 0.9818, 1.0142, 0.9605, 1.0139,\n",
       "                      0.9944, 0.9981, 0.9763, 0.9839, 0.9950, 1.0074, 0.9534, 0.9668, 1.0016,\n",
       "                      0.9907, 1.0078, 0.9703, 0.9702, 0.9618, 1.0274, 1.0713, 0.9620, 0.9888,\n",
       "                      1.0217, 0.9600, 1.0452, 1.0263, 0.9931, 0.9744, 1.0068, 0.9953, 1.0026,\n",
       "                      0.9985, 0.9790, 0.9960, 0.9387, 1.0684, 0.9308, 0.9549, 1.1175, 0.9946,\n",
       "                      0.9930])),\n",
       "             ('linear_relu_stack.BatchNorm_3.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_4.weight',\n",
       "              tensor([[-0.0597,  0.0125,  0.0754,  ...,  0.0718,  0.1574,  0.1361],\n",
       "                      [ 0.0525, -0.0035, -0.1189,  ..., -0.0130,  0.0756, -0.1711],\n",
       "                      [-0.0481,  0.0244, -0.0473,  ...,  0.0713, -0.0202, -0.1342],\n",
       "                      ...,\n",
       "                      [ 0.1494, -0.1509, -0.0767,  ..., -0.1357, -0.1183,  0.1151],\n",
       "                      [-0.0235,  0.1442,  0.0189,  ..., -0.0987, -0.1506, -0.1207],\n",
       "                      [ 0.0756, -0.1558, -0.0043,  ..., -0.0638,  0.1568,  0.1054]])),\n",
       "             ('linear_relu_stack.Linear_4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.running_mean',\n",
       "              tensor([-4.2608e-09,  0.0000e+00,  2.2119e-09, -2.5611e-09, -4.0978e-09,\n",
       "                      -4.1910e-09,  2.2585e-09, -2.7940e-10,  1.2398e-09, -5.7626e-10,\n",
       "                      -4.0047e-09, -8.5565e-10,  3.7253e-09,  6.5193e-10, -3.8184e-09,\n",
       "                       3.1432e-09,  3.3062e-09, -1.8626e-10, -2.7008e-09, -1.0710e-09,\n",
       "                       4.1910e-10,  1.4901e-09,  3.0966e-09,  3.4925e-09, -4.6566e-10,\n",
       "                       7.4506e-10,  9.0804e-10,  7.9162e-10, -5.1223e-10,  8.7311e-10,\n",
       "                      -1.9558e-09,  5.1223e-09, -1.3737e-09,  3.3993e-09,  6.3446e-10,\n",
       "                      -1.5832e-09,  2.1420e-09, -1.1642e-09, -2.0722e-09,  3.2596e-09,\n",
       "                      -1.6298e-09, -1.8626e-10, -2.0955e-09,  4.8312e-09, -6.7987e-09,\n",
       "                      -2.2352e-09,  1.4901e-09,  1.0245e-09,  2.4680e-09, -2.1653e-09,\n",
       "                      -6.2864e-10,  3.4459e-09,  2.7940e-10, -2.5611e-10,  5.9255e-09,\n",
       "                      -5.1223e-10,  2.4331e-09,  3.2596e-10,  1.3039e-09, -4.6566e-10,\n",
       "                       7.9162e-10, -2.9802e-09,  3.0268e-10,  2.5146e-09, -2.0955e-10,\n",
       "                       2.7940e-10, -1.8859e-09,  3.9116e-09,  1.1176e-09, -3.2131e-09,\n",
       "                      -2.3283e-10,  3.6438e-09, -9.3132e-10, -5.1688e-09,  1.3039e-09,\n",
       "                      -4.6566e-09,  1.6589e-09, -1.8161e-09, -1.4901e-09,  2.7940e-10,\n",
       "                       1.4668e-09, -3.7253e-10,  4.6566e-11,  1.0710e-09, -2.3283e-10,\n",
       "                      -2.7940e-10, -1.9092e-09, -1.7695e-09, -8.3819e-10, -3.3062e-09,\n",
       "                      -2.1770e-09,  6.9849e-10,  2.3283e-10, -1.8626e-10,  1.3970e-09,\n",
       "                      -2.3982e-09,  4.6566e-10, -5.5879e-10,  3.3062e-09, -9.3132e-11])),\n",
       "             ('linear_relu_stack.BatchNorm_4.running_var',\n",
       "              tensor([0.9602, 1.0232, 0.9880, 1.0687, 1.0420, 1.0292, 1.0218, 0.9998, 0.9644,\n",
       "                      0.9638, 1.0306, 0.9792, 1.0889, 1.0591, 1.0242, 1.0249, 0.9995, 0.9750,\n",
       "                      0.9388, 0.9852, 1.0391, 1.0586, 0.9556, 1.0276, 0.9860, 1.0159, 0.9831,\n",
       "                      0.9761, 1.0269, 1.0240, 1.0227, 0.9688, 0.9602, 1.0329, 0.9411, 0.9876,\n",
       "                      1.0028, 0.9665, 0.9917, 0.9977, 0.9683, 0.9777, 0.9918, 1.0026, 1.0090,\n",
       "                      1.0409, 0.9832, 0.9889, 1.0243, 1.0572, 1.0391, 0.9926, 0.9960, 0.9762,\n",
       "                      0.9993, 0.9570, 0.9840, 0.9794, 1.0308, 1.0497, 0.9605, 0.9802, 0.9789,\n",
       "                      0.9949, 0.9912, 0.9765, 0.9962, 1.0129, 1.0028, 0.9653, 1.0100, 1.0089,\n",
       "                      0.9582, 0.9959, 0.9920, 1.0263, 0.9825, 0.9845, 1.0184, 0.9801, 0.9340,\n",
       "                      0.9856, 1.0100, 0.9774, 0.9986, 1.0050, 0.9879, 1.0110, 1.0160, 0.9614,\n",
       "                      0.9584, 0.9676, 1.0182, 0.9555, 0.9891, 0.9849, 0.9989, 0.9821, 0.9683,\n",
       "                      1.0402])),\n",
       "             ('linear_relu_stack.BatchNorm_4.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_5.weight',\n",
       "              tensor([[-0.0434, -0.0920,  0.1401,  ...,  0.1507,  0.0331, -0.0021],\n",
       "                      [ 0.0931,  0.1430, -0.0555,  ...,  0.0156, -0.1621,  0.0534],\n",
       "                      [-0.1173,  0.1539,  0.0979,  ...,  0.0605, -0.1551, -0.0953],\n",
       "                      ...,\n",
       "                      [ 0.0832, -0.0787, -0.0706,  ...,  0.1532, -0.1503, -0.0260],\n",
       "                      [-0.0116,  0.1212,  0.1160,  ..., -0.1553, -0.1138,  0.1071],\n",
       "                      [-0.0141,  0.1595,  0.0031,  ..., -0.1264,  0.0457,  0.0332]])),\n",
       "             ('linear_relu_stack.Linear_5.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.running_mean',\n",
       "              tensor([-1.3039e-09, -2.2352e-09,  4.1910e-10, -1.9092e-09,  3.5390e-09,\n",
       "                      -5.4715e-10,  1.1176e-09,  1.7229e-09,  3.3062e-09,  1.8626e-10,\n",
       "                      -2.7008e-09, -5.5879e-10,  9.3132e-11,  1.7695e-09,  1.2107e-09,\n",
       "                       2.4680e-09,  5.0291e-09,  3.7253e-09, -6.5193e-10, -2.2817e-09,\n",
       "                      -3.3528e-09, -1.4901e-09,  3.8650e-09,  1.7229e-09, -1.3039e-09,\n",
       "                      -8.3819e-10, -1.0943e-09, -3.1665e-09, -2.1886e-09,  5.5879e-10,\n",
       "                       2.1886e-09, -2.0023e-09,  6.5775e-10,  5.2154e-09,  4.1910e-09,\n",
       "                      -2.7008e-09,  1.6764e-09, -1.8626e-10,  2.3749e-09,  1.6065e-09,\n",
       "                       1.4436e-09, -2.4156e-09, -1.0477e-09, -6.6124e-09,  5.3085e-09,\n",
       "                      -2.1653e-09,  4.3074e-10,  2.5146e-09,  1.5832e-09, -1.8161e-09,\n",
       "                      -5.3551e-10, -9.3132e-11, -3.7253e-10,  9.3132e-10, -1.8626e-10,\n",
       "                       1.7695e-09,  1.5832e-09,  1.7695e-09,  2.5611e-09, -9.3132e-10,\n",
       "                      -2.1420e-09, -3.1665e-09,  9.7789e-10, -3.2596e-10, -2.0955e-09,\n",
       "                      -7.2178e-10, -2.3283e-10,  1.7695e-09, -1.3970e-09,  8.3819e-10,\n",
       "                       0.0000e+00, -2.7008e-09,  5.5879e-09,  8.3819e-10, -1.7695e-09,\n",
       "                       4.1910e-09,  4.1910e-10, -1.8859e-09, -2.1420e-09, -2.0955e-10,\n",
       "                       5.7044e-10,  3.5390e-09,  5.0990e-09,  4.6566e-11, -7.4506e-10,\n",
       "                       3.6322e-09,  2.4214e-09, -2.7940e-10, -3.9581e-10, -1.5832e-09,\n",
       "                      -3.0268e-09, -3.5856e-09, -4.6566e-10,  7.4506e-10, -2.1420e-09,\n",
       "                      -1.3970e-10, -1.7695e-09, -2.1420e-09, -1.3504e-09,  2.7940e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_5.running_var',\n",
       "              tensor([1.0244, 0.9709, 0.9448, 0.9396, 1.0048, 1.0083, 0.9727, 0.9583, 1.1107,\n",
       "                      0.9717, 1.0625, 1.0333, 1.0614, 0.9731, 0.9405, 0.9809, 1.0597, 0.9840,\n",
       "                      0.9922, 0.9629, 1.0463, 1.0117, 0.9693, 0.9924, 1.0320, 0.9606, 1.0470,\n",
       "                      1.0264, 0.9820, 0.9889, 0.9737, 0.9822, 0.9623, 1.0322, 1.0498, 1.0404,\n",
       "                      0.9791, 0.9947, 0.9742, 0.9419, 1.0565, 1.0508, 1.0213, 1.0756, 1.0194,\n",
       "                      0.9443, 1.0301, 1.0183, 1.0568, 1.0577, 0.9837, 0.9712, 1.0235, 1.0032,\n",
       "                      1.0068, 0.9726, 0.9950, 0.9742, 0.9753, 1.0017, 1.0141, 1.0343, 1.0655,\n",
       "                      0.9519, 0.9973, 1.0542, 0.9553, 1.0532, 1.0603, 1.0053, 1.0383, 1.0125,\n",
       "                      1.0000, 0.9743, 1.0875, 0.9950, 0.9943, 0.9465, 1.0206, 0.9599, 1.0041,\n",
       "                      0.9893, 0.9772, 1.0056, 0.9614, 0.9760, 1.0848, 0.9759, 1.0221, 1.0231,\n",
       "                      1.0386, 1.0251, 0.9970, 0.9948, 1.0275, 1.0152, 1.0007, 1.0662, 0.9851,\n",
       "                      1.0496])),\n",
       "             ('linear_relu_stack.BatchNorm_5.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_6.weight',\n",
       "              tensor([[-0.0494,  0.1533, -0.0131,  ..., -0.0521,  0.1081, -0.1514],\n",
       "                      [-0.1403,  0.0521, -0.0485,  ...,  0.0050,  0.0642,  0.0018],\n",
       "                      [ 0.0286,  0.0099, -0.0103,  ..., -0.1315, -0.0854,  0.0106],\n",
       "                      ...,\n",
       "                      [-0.0467,  0.0719,  0.0656,  ...,  0.0878, -0.1328, -0.1605],\n",
       "                      [ 0.1060, -0.1577,  0.1340,  ...,  0.0844,  0.1663,  0.1253],\n",
       "                      [-0.1474, -0.0572,  0.1387,  ..., -0.0441, -0.0869,  0.1413]])),\n",
       "             ('linear_relu_stack.Linear_6.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.running_mean',\n",
       "              tensor([ 2.0722e-09,  3.3993e-09,  4.3307e-09, -2.6543e-09, -1.3504e-09,\n",
       "                      -1.2806e-09,  2.9337e-09, -1.5832e-09,  4.6566e-11,  3.7020e-09,\n",
       "                       1.8626e-10,  3.1432e-09, -6.1933e-09, -5.7276e-09, -1.2107e-09,\n",
       "                      -1.3039e-09,  1.2806e-09, -8.3819e-10, -1.3504e-09,  1.0477e-09,\n",
       "                       2.2119e-09, -3.7253e-09,  1.8626e-09,  1.5832e-09,  1.7695e-09,\n",
       "                      -1.6764e-09, -2.7940e-10,  1.7229e-09, -3.9116e-09, -1.1176e-09,\n",
       "                       8.6147e-10,  5.7742e-09,  3.7253e-10,  0.0000e+00, -1.1642e-10,\n",
       "                      -2.7008e-09,  1.9092e-09,  3.2596e-09, -3.8650e-09, -2.7008e-09,\n",
       "                       3.4808e-09,  3.5390e-09, -5.5879e-10,  1.0477e-09,  2.1420e-09,\n",
       "                       6.9849e-10,  9.3132e-11,  2.6776e-10, -1.4203e-09, -6.5193e-10,\n",
       "                      -2.2352e-09, -1.3039e-09, -1.0245e-09, -2.7940e-09, -5.8440e-09,\n",
       "                      -2.8173e-09, -8.3819e-10,  1.6298e-09, -2.8871e-09, -1.6298e-09,\n",
       "                       1.3039e-09,  6.0536e-10, -1.5832e-09, -4.2841e-09, -1.5367e-09,\n",
       "                       1.4901e-09,  1.2107e-09,  5.1223e-09,  2.6077e-09, -1.1409e-09,\n",
       "                       2.4680e-09,  2.3283e-09,  3.0734e-09, -4.1910e-10,  9.3132e-11,\n",
       "                       6.0536e-09, -1.7695e-09,  5.2154e-09, -5.6811e-09,  4.6566e-11,\n",
       "                       1.8161e-09, -3.1199e-09, -3.2596e-09, -1.1874e-09, -2.7474e-09,\n",
       "                       5.6345e-09,  9.3132e-10,  7.2178e-10, -4.2375e-09,  3.5914e-09,\n",
       "                       1.6764e-09,  3.5390e-09,  2.0023e-09, -1.9092e-09,  1.3504e-09,\n",
       "                       1.5832e-09, -3.7253e-10,  8.3819e-10, -1.1642e-09,  9.3132e-11])),\n",
       "             ('linear_relu_stack.BatchNorm_6.running_var',\n",
       "              tensor([0.9573, 1.0473, 0.9806, 1.0055, 0.9644, 0.9994, 1.0508, 1.0437, 1.0794,\n",
       "                      0.9577, 1.0407, 0.9534, 0.9838, 1.1073, 1.0452, 1.0029, 0.9472, 0.9868,\n",
       "                      1.0153, 0.9733, 0.9564, 1.1078, 1.0311, 0.9865, 1.1042, 1.0805, 1.0185,\n",
       "                      1.0531, 1.0033, 0.9835, 0.9630, 0.9990, 0.9717, 1.0211, 0.9808, 1.0591,\n",
       "                      0.9344, 0.9850, 1.0778, 0.9940, 0.9570, 0.9975, 0.9773, 0.9645, 0.9845,\n",
       "                      0.9644, 0.9770, 1.0087, 0.9844, 0.9804, 1.0264, 1.0787, 0.9968, 1.0467,\n",
       "                      0.9665, 1.0308, 0.9659, 1.0458, 0.9981, 1.0490, 1.0701, 0.9961, 0.9646,\n",
       "                      0.9934, 1.0980, 1.0491, 1.0045, 1.0951, 0.9840, 1.0098, 1.0467, 1.0150,\n",
       "                      1.0325, 0.9426, 1.0959, 1.0582, 0.9747, 1.0601, 0.9450, 0.9643, 0.9650,\n",
       "                      1.0298, 1.0352, 1.0526, 1.0423, 0.9998, 1.0223, 1.0163, 1.0467, 0.9755,\n",
       "                      1.0105, 0.9814, 0.9915, 0.9742, 0.9841, 1.0354, 1.0407, 0.9874, 1.0941,\n",
       "                      0.9727])),\n",
       "             ('linear_relu_stack.BatchNorm_6.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_7.weight',\n",
       "              tensor([[-0.0958, -0.0155,  0.1017,  ..., -0.1671, -0.1406,  0.1322],\n",
       "                      [ 0.0233, -0.0540,  0.0946,  ..., -0.0535,  0.0719, -0.0075],\n",
       "                      [ 0.0531, -0.0414,  0.0476,  ..., -0.0158, -0.1586, -0.1116],\n",
       "                      ...,\n",
       "                      [-0.0293, -0.1107,  0.1292,  ...,  0.1121, -0.0341,  0.0982],\n",
       "                      [-0.0130,  0.1431, -0.1127,  ..., -0.0205, -0.1519, -0.1632],\n",
       "                      [-0.1229, -0.1613, -0.0431,  ..., -0.1332,  0.0919,  0.1361]])),\n",
       "             ('linear_relu_stack.Linear_7.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.running_mean',\n",
       "              tensor([-2.3749e-09,  9.3132e-10, -8.3819e-10,  6.7521e-10, -1.6531e-09,\n",
       "                       2.9569e-09,  1.8626e-10,  1.2515e-09,  1.5600e-09, -1.0245e-09,\n",
       "                       3.0734e-09,  4.6100e-09,  6.0536e-10,  1.3970e-10, -1.1758e-09,\n",
       "                       8.8476e-10,  1.1176e-09,  3.4692e-09, -2.2352e-09, -6.0536e-10,\n",
       "                       2.4214e-09, -3.6554e-09,  4.1910e-10,  1.4901e-09, -3.5390e-09,\n",
       "                      -2.1071e-09,  2.6077e-09, -4.6566e-11,  6.1700e-10,  2.5146e-09,\n",
       "                       5.5879e-10,  3.0268e-09,  2.8405e-09, -1.8626e-10,  1.1118e-09,\n",
       "                       1.8626e-10,  2.0023e-09,  1.8626e-09,  4.9011e-09, -3.8184e-09,\n",
       "                      -6.4261e-09,  1.8626e-09,  5.5879e-10, -2.7008e-09, -5.5879e-10,\n",
       "                       2.6077e-09,  0.0000e+00,  1.1176e-09,  1.4436e-09,  3.4459e-09,\n",
       "                       2.7474e-09,  6.8452e-09,  9.7789e-10, -1.7462e-09,  1.5832e-09,\n",
       "                      -1.3039e-09, -7.2178e-10, -9.3132e-10,  3.1898e-09, -2.5844e-09,\n",
       "                      -2.4680e-09, -1.7695e-09, -4.5169e-09, -1.0245e-09, -2.0489e-09,\n",
       "                      -1.1176e-09, -4.3307e-09, -1.6531e-09,  3.3062e-09,  1.5134e-09,\n",
       "                       2.9802e-09, -3.1199e-09, -9.3132e-10, -2.0489e-09, -1.2107e-09,\n",
       "                      -3.5856e-09, -3.2596e-09,  8.3819e-10, -1.1409e-09,  1.5832e-09,\n",
       "                      -7.4506e-10,  1.2107e-09,  7.4506e-10,  2.0489e-09,  1.0943e-09,\n",
       "                       7.9162e-10, -1.7229e-09,  1.4901e-09,  3.2596e-10, -2.3283e-09,\n",
       "                      -4.4238e-10,  1.5832e-09, -2.3283e-10, -1.8626e-10, -9.7789e-10,\n",
       "                      -5.2969e-09,  2.5146e-09,  9.3132e-10,  2.4214e-09, -8.3819e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_7.running_var',\n",
       "              tensor([0.9831, 1.0053, 0.9564, 1.0410, 1.0746, 1.0256, 0.9613, 0.9898, 1.0022,\n",
       "                      0.9884, 0.9856, 0.9542, 0.9944, 0.9871, 0.9786, 1.0113, 0.9425, 1.0043,\n",
       "                      1.0609, 0.9933, 0.9843, 1.0067, 1.0231, 0.9763, 0.9738, 0.9579, 1.0585,\n",
       "                      0.9898, 0.9395, 1.0153, 0.9819, 1.0240, 0.9591, 0.9936, 0.9732, 1.0559,\n",
       "                      0.9758, 1.0554, 0.9832, 1.0095, 1.0217, 0.9821, 0.9892, 0.9897, 1.0300,\n",
       "                      0.9679, 1.0029, 1.0285, 0.9624, 0.9982, 0.9880, 0.9510, 0.9507, 0.9855,\n",
       "                      0.9932, 0.9743, 0.9570, 1.0489, 0.9637, 0.9640, 0.9789, 1.0739, 1.0605,\n",
       "                      0.9696, 1.0366, 1.0260, 0.9906, 0.9833, 0.9568, 1.0614, 0.9585, 1.0260,\n",
       "                      1.0097, 0.9899, 1.0090, 0.9963, 0.9878, 1.0480, 0.9636, 1.0192, 0.9727,\n",
       "                      1.0197, 0.9813, 0.9809, 0.9809, 0.9901, 0.9966, 0.9923, 0.9687, 1.0143,\n",
       "                      0.9815, 1.0397, 1.1326, 1.0550, 0.9558, 0.9721, 1.0800, 0.9999, 0.9804,\n",
       "                      1.0174])),\n",
       "             ('linear_relu_stack.BatchNorm_7.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_8.weight',\n",
       "              tensor([[ 0.0614, -0.0695, -0.1442,  ...,  0.1379,  0.0928,  0.0498],\n",
       "                      [-0.1473, -0.0090, -0.0458,  ...,  0.1626, -0.1393,  0.0828],\n",
       "                      [-0.1166, -0.0721, -0.1713,  ..., -0.1700,  0.0920, -0.1208],\n",
       "                      ...,\n",
       "                      [-0.1395, -0.0055, -0.0868,  ...,  0.0378, -0.1123,  0.1151],\n",
       "                      [-0.0681, -0.0794,  0.1562,  ...,  0.0743, -0.1718,  0.1452],\n",
       "                      [ 0.1192,  0.1730,  0.1135,  ..., -0.1598,  0.0736,  0.1588]])),\n",
       "             ('linear_relu_stack.Linear_8.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.running_mean',\n",
       "              tensor([-1.9325e-09, -3.3295e-09,  5.5879e-10, -1.0245e-09, -2.3749e-09,\n",
       "                       3.5681e-09, -7.4506e-10, -8.3819e-10,  3.2596e-10,  1.7462e-09,\n",
       "                      -1.6764e-09,  5.1223e-09, -1.3388e-09, -1.6997e-09,  4.6566e-11,\n",
       "                       4.7497e-09, -3.5390e-09,  3.0734e-09,  5.2154e-09,  2.8813e-09,\n",
       "                       0.0000e+00,  2.2352e-09, -5.6927e-09,  2.7008e-09, -2.4680e-09,\n",
       "                       3.8650e-09,  5.1223e-10, -4.5635e-09, -9.3132e-11,  2.3283e-11,\n",
       "                       3.1665e-09, -3.7253e-10, -2.7940e-09,  6.7987e-09,  3.1665e-09,\n",
       "                       8.8476e-10,  2.4680e-09,  2.1886e-09, -1.6298e-09,  1.0245e-09,\n",
       "                      -6.0536e-10, -2.9337e-09, -6.0536e-10,  1.2107e-09,  6.5193e-10,\n",
       "                       8.8476e-10,  4.1444e-09, -4.1677e-09, -7.6834e-10, -3.3528e-09,\n",
       "                      -6.9849e-10,  1.3970e-10, -2.4214e-09, -6.5193e-10,  1.8161e-09,\n",
       "                       1.4901e-09,  2.7940e-10,  1.9558e-09, -8.8476e-10, -8.7311e-11,\n",
       "                       6.5193e-10,  4.3772e-09, -3.2596e-10,  1.6298e-10, -1.3970e-09,\n",
       "                      -2.9802e-09,  5.8208e-10,  5.1688e-09, -3.2596e-09,  2.7474e-09,\n",
       "                      -6.5193e-10,  6.5193e-10,  5.9605e-09,  2.0023e-09,  2.7940e-10,\n",
       "                       2.3283e-09, -1.8161e-09, -1.2573e-09,  1.7928e-09, -4.2841e-09,\n",
       "                      -1.0710e-09, -1.0710e-09,  1.9558e-09, -1.9558e-09, -9.3132e-11,\n",
       "                      -1.9558e-09, -6.9849e-10,  2.5844e-09,  2.0489e-09,  2.7241e-09,\n",
       "                       1.2282e-09,  2.0489e-09,  1.0245e-09, -2.9802e-09, -2.1886e-09,\n",
       "                       1.1176e-09,  3.4925e-09,  9.3132e-11, -1.9092e-09,  1.8626e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_8.running_var',\n",
       "              tensor([1.0060, 0.9723, 1.0029, 0.9859, 0.9811, 0.9604, 1.0663, 0.9591, 1.0034,\n",
       "                      0.9939, 0.9846, 0.9588, 0.9853, 1.0438, 0.9841, 1.0517, 0.9666, 0.9916,\n",
       "                      1.0962, 0.9752, 1.0316, 0.9850, 0.9544, 1.0052, 0.9554, 1.0602, 1.0328,\n",
       "                      0.9519, 1.0052, 0.9446, 1.0556, 1.0036, 0.9866, 0.9601, 0.9888, 0.9692,\n",
       "                      1.0795, 1.1108, 0.9794, 1.0315, 1.0049, 1.0045, 1.0388, 1.0608, 1.0087,\n",
       "                      0.9786, 0.9926, 0.9822, 0.9685, 1.1330, 0.9724, 1.0051, 1.0750, 1.0413,\n",
       "                      0.9740, 1.0016, 0.9684, 1.0084, 0.9684, 0.9505, 1.0694, 0.9917, 1.0343,\n",
       "                      1.0037, 1.0030, 1.0130, 0.9781, 0.9621, 1.0040, 1.0262, 1.0002, 0.9762,\n",
       "                      1.0113, 0.9848, 1.1296, 1.0199, 0.9704, 0.9925, 0.9623, 0.9815, 0.9795,\n",
       "                      1.0250, 1.0399, 0.9733, 1.0542, 0.9762, 0.9405, 1.0021, 0.9775, 0.9927,\n",
       "                      0.9846, 1.0038, 1.0280, 1.0584, 1.0293, 1.0042, 0.9816, 1.0275, 0.9934,\n",
       "                      0.9938])),\n",
       "             ('linear_relu_stack.BatchNorm_8.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_9.weight',\n",
       "              tensor([[ 0.0066, -0.0424,  0.0174,  ..., -0.1369, -0.0651,  0.1433],\n",
       "                      [ 0.0195, -0.1613, -0.0583,  ..., -0.1406, -0.1554, -0.0184],\n",
       "                      [-0.0604,  0.1715, -0.0365,  ..., -0.0605,  0.0146,  0.0912],\n",
       "                      ...,\n",
       "                      [-0.1577,  0.0477,  0.0898,  ..., -0.1457,  0.0627,  0.0392],\n",
       "                      [ 0.0902,  0.1411,  0.1489,  ...,  0.0036, -0.0678, -0.0470],\n",
       "                      [ 0.1121,  0.1153, -0.1176,  ..., -0.1444,  0.1269, -0.1604]])),\n",
       "             ('linear_relu_stack.Linear_9.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.running_mean',\n",
       "              tensor([-1.2107e-09,  4.6566e-11, -7.9162e-10, -1.8626e-10, -6.0536e-10,\n",
       "                      -4.7963e-09,  1.7462e-10, -6.5193e-10,  1.7695e-09, -6.6124e-09,\n",
       "                       1.6764e-09,  2.4447e-09, -1.3039e-09, -2.6077e-09,  2.4913e-09,\n",
       "                       4.3772e-09, -5.3311e-09,  1.3039e-09,  1.9092e-09,  1.6298e-09,\n",
       "                       5.2154e-09,  2.3283e-09,  5.1223e-10,  1.8626e-09,  3.0734e-09,\n",
       "                      -6.5193e-10,  5.5879e-10,  2.0489e-09,  7.4506e-10,  4.1910e-10,\n",
       "                       1.1642e-10,  6.9849e-10, -1.0710e-09,  3.3528e-09, -3.7020e-09,\n",
       "                      -1.3970e-09, -6.0536e-10, -5.2154e-09, -9.7789e-10,  1.2573e-09,\n",
       "                       5.4482e-09, -2.2352e-09,  3.1665e-09, -5.5879e-10,  3.7253e-09,\n",
       "                       7.9162e-09,  5.8208e-10, -4.6100e-09,  4.1910e-10, -2.9802e-09,\n",
       "                       4.8894e-09,  1.4901e-09, -1.2107e-09,  3.4925e-09, -1.5134e-09,\n",
       "                       1.5134e-09, -6.5193e-10,  3.5390e-09,  1.0710e-09,  9.0804e-10,\n",
       "                       1.0012e-09,  1.2107e-09,  1.3039e-09,  0.0000e+00,  2.6077e-09,\n",
       "                       2.7940e-10,  2.5429e-10, -1.1176e-09,  2.0489e-09,  2.7940e-10,\n",
       "                       2.7940e-09, -1.4901e-09, -3.6322e-09,  1.1176e-09,  4.3772e-09,\n",
       "                      -3.4692e-09, -2.7940e-10, -9.7789e-10, -3.5157e-09,  3.4925e-10,\n",
       "                      -3.5856e-09, -2.6543e-09, -2.3283e-11, -2.6077e-09, -9.3132e-11,\n",
       "                       6.0536e-10, -8.3819e-10, -2.4214e-09, -3.0268e-10,  4.1910e-09,\n",
       "                      -1.3504e-09, -1.1874e-09,  1.0710e-09,  6.8918e-09, -6.5193e-10,\n",
       "                      -9.3132e-10,  1.5600e-09, -4.6566e-10, -9.3132e-10,  4.4238e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_9.running_var',\n",
       "              tensor([0.9563, 0.9384, 1.0552, 0.9930, 0.9515, 0.9979, 0.9937, 0.9951, 0.9404,\n",
       "                      1.0506, 1.0130, 0.9620, 0.9625, 0.9931, 0.9478, 0.9888, 1.0036, 1.0209,\n",
       "                      1.0050, 0.9548, 1.0339, 0.9672, 0.9508, 0.9758, 1.0747, 0.9526, 0.9933,\n",
       "                      1.0361, 1.0938, 0.9878, 0.9632, 1.0443, 0.9861, 1.0488, 1.0305, 0.9590,\n",
       "                      0.9959, 1.0555, 0.9747, 0.9742, 1.0119, 0.9745, 0.9796, 1.0588, 0.9784,\n",
       "                      1.1016, 0.9590, 0.9721, 0.9742, 0.9883, 1.0390, 0.9718, 0.9989, 0.9864,\n",
       "                      0.9911, 0.9578, 0.9973, 0.9627, 1.0141, 1.0133, 1.0604, 1.0440, 1.0227,\n",
       "                      0.9655, 1.0287, 0.9864, 0.9656, 0.9841, 0.9846, 1.0377, 0.9806, 1.0557,\n",
       "                      0.9621, 0.9670, 1.0020, 0.9579, 1.0242, 1.0251, 0.9729, 0.9594, 1.0800,\n",
       "                      0.9896, 1.0463, 0.9578, 0.9878, 0.9744, 0.9586, 0.9683, 0.9778, 0.9966,\n",
       "                      1.0022, 0.9819, 0.9578, 1.1945, 1.0158, 0.9765, 0.9499, 1.0372, 0.9850,\n",
       "                      0.9657])),\n",
       "             ('linear_relu_stack.BatchNorm_9.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_10.weight',\n",
       "              tensor([[-0.0204,  0.0578, -0.0949,  ...,  0.1534,  0.1355, -0.0330],\n",
       "                      [ 0.1486, -0.1474,  0.1464,  ..., -0.1724, -0.1312,  0.1729],\n",
       "                      [ 0.0184, -0.0868, -0.1006,  ..., -0.0540, -0.1248,  0.0338],\n",
       "                      ...,\n",
       "                      [ 0.0999, -0.1474, -0.0398,  ..., -0.0091, -0.0082,  0.1597],\n",
       "                      [-0.0911, -0.0611,  0.1348,  ...,  0.1615,  0.1166,  0.1553],\n",
       "                      [-0.1633, -0.0902,  0.0815,  ...,  0.0144,  0.1167, -0.1514]])),\n",
       "             ('linear_relu_stack.Linear_10.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.running_mean',\n",
       "              tensor([ 1.3504e-09, -2.2817e-09, -4.0513e-09,  8.3819e-10, -1.3155e-09,\n",
       "                      -2.3283e-09, -1.5367e-09,  2.3283e-09,  1.0245e-09, -6.5193e-10,\n",
       "                       1.4436e-09, -9.3132e-10,  3.4925e-10, -7.0781e-09, -1.4436e-09,\n",
       "                       4.4238e-10, -1.4436e-09, -6.0536e-10, -9.4296e-10,  1.4901e-09,\n",
       "                       3.4226e-09,  1.7229e-09,  4.6566e-11,  2.8405e-09, -3.4925e-09,\n",
       "                      -3.5856e-09,  2.0489e-09,  1.7229e-09, -1.7812e-09,  1.8626e-10,\n",
       "                       1.2340e-09, -1.5745e-09, -3.8533e-09, -1.0245e-09,  7.7300e-09,\n",
       "                      -1.8626e-09,  6.5193e-10,  2.6310e-09, -2.7940e-10, -7.6834e-10,\n",
       "                       6.8685e-10,  3.5390e-09,  6.9849e-10,  5.6345e-09,  1.3970e-10,\n",
       "                       1.0245e-09,  9.1968e-10, -2.7008e-09, -1.7695e-09,  3.2596e-10,\n",
       "                       7.7300e-09,  3.7253e-09,  1.6065e-09, -4.1910e-10, -1.9092e-09,\n",
       "                       3.1665e-09,  4.8196e-09, -1.1176e-09,  6.0536e-10, -1.4901e-09,\n",
       "                      -3.0734e-09,  3.4925e-10, -3.6322e-09, -6.0216e-09,  1.7695e-09,\n",
       "                       8.8476e-10,  9.7789e-10,  3.3295e-09,  1.0245e-09, -3.0734e-09,\n",
       "                       1.7695e-09, -6.0536e-10, -2.2817e-09, -2.8871e-09,  3.1665e-09,\n",
       "                      -2.3283e-09, -3.2596e-10,  1.5832e-09,  1.9558e-09, -1.0245e-09,\n",
       "                      -2.2817e-09, -3.2596e-10, -4.6566e-10,  9.3132e-10, -2.4214e-09,\n",
       "                      -2.3167e-09, -2.9569e-09, -3.8184e-09,  5.6811e-09, -2.2352e-09,\n",
       "                       1.0245e-09, -1.3039e-09, -6.2399e-09, -6.5193e-10,  5.8673e-09,\n",
       "                      -9.7789e-10, -1.1176e-09,  6.0536e-10, -1.1642e-09, -8.8476e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_10.running_var',\n",
       "              tensor([0.9494, 1.0134, 1.0172, 0.9715, 1.0059, 1.0245, 0.9624, 1.0184, 0.9693,\n",
       "                      0.9872, 1.0025, 0.9662, 0.9467, 1.0942, 1.0168, 0.9914, 1.0063, 1.0514,\n",
       "                      0.9680, 1.0028, 0.9399, 0.9744, 0.9792, 0.9949, 0.9991, 1.0467, 0.9826,\n",
       "                      1.0054, 0.9562, 0.9679, 0.9862, 0.9953, 0.9860, 1.0266, 0.9991, 1.0456,\n",
       "                      1.0010, 0.9830, 1.1243, 0.9648, 0.9946, 1.0497, 1.0045, 1.0999, 1.0129,\n",
       "                      0.9581, 1.0556, 1.1931, 1.0066, 0.9635, 1.0618, 1.0147, 0.9578, 0.9937,\n",
       "                      1.0037, 0.9753, 0.9727, 1.0407, 1.0144, 0.9687, 1.0089, 0.9601, 0.9622,\n",
       "                      1.0739, 0.9736, 0.9629, 0.9663, 0.9869, 1.0108, 1.0633, 0.9343, 0.9526,\n",
       "                      0.9552, 0.9769, 0.9457, 0.9511, 0.9686, 0.9793, 0.9898, 1.0789, 0.9742,\n",
       "                      0.9575, 0.9700, 0.9430, 1.0332, 1.0584, 1.0180, 1.0138, 0.9632, 0.9815,\n",
       "                      1.0009, 0.9814, 1.0143, 0.9997, 1.0259, 0.9448, 0.9307, 1.0338, 1.0244,\n",
       "                      1.0367])),\n",
       "             ('linear_relu_stack.BatchNorm_10.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_11.weight',\n",
       "              tensor([[-0.1464,  0.0136,  0.0735,  ..., -0.0451, -0.0555, -0.0908],\n",
       "                      [-0.0161, -0.1478,  0.0295,  ..., -0.1404,  0.0663, -0.1382],\n",
       "                      [-0.0390, -0.1072,  0.1003,  ..., -0.0572, -0.0052,  0.1153],\n",
       "                      ...,\n",
       "                      [ 0.1563,  0.1329,  0.0217,  ...,  0.0465,  0.1223,  0.1123],\n",
       "                      [ 0.1029,  0.0338, -0.1605,  ...,  0.0259, -0.0200,  0.0024],\n",
       "                      [ 0.0569, -0.0526,  0.0749,  ...,  0.0207, -0.1203,  0.1455]])),\n",
       "             ('linear_relu_stack.Linear_11.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.running_mean',\n",
       "              tensor([-5.1223e-10, -2.9337e-09, -3.6322e-09,  1.3970e-09,  5.0757e-09,\n",
       "                       1.0245e-09,  3.7020e-09,  2.6310e-09, -8.3819e-10,  2.0023e-09,\n",
       "                      -4.4238e-09,  5.5879e-10,  4.7032e-09,  4.8429e-09, -2.0489e-09,\n",
       "                      -6.0536e-10, -4.0047e-09,  1.5832e-09, -3.9116e-09, -5.1223e-10,\n",
       "                       2.6776e-09,  2.5611e-09, -2.8638e-09, -2.0023e-09, -4.6566e-10,\n",
       "                      -1.8161e-09, -4.0978e-09, -6.5193e-10,  1.0710e-09,  6.1467e-09,\n",
       "                       8.3819e-10, -1.3039e-09,  1.3970e-10,  1.7695e-09, -2.7940e-09,\n",
       "                      -1.3039e-09, -2.1420e-09, -2.7707e-09, -1.9558e-09,  1.2340e-09,\n",
       "                       2.0955e-10, -2.1420e-09, -2.6077e-09, -2.0955e-09, -4.6566e-11,\n",
       "                       7.9162e-10, -2.1420e-09, -9.7789e-10, -2.8871e-09, -9.3132e-10,\n",
       "                      -1.3039e-09, -4.8429e-09, -4.2841e-09,  4.6566e-10,  1.3970e-09,\n",
       "                      -5.1223e-10, -3.5856e-09,  5.2154e-09, -9.3132e-11,  2.5262e-09,\n",
       "                      -5.8673e-09,  3.2596e-10, -4.2841e-09, -4.0047e-09,  5.8673e-09,\n",
       "                      -6.9849e-11, -1.9092e-09, -2.7940e-09,  2.7940e-09,  1.1409e-09,\n",
       "                       3.7253e-10,  2.1653e-09, -2.3749e-09,  3.0734e-09, -6.9849e-11,\n",
       "                      -2.7008e-09, -1.5367e-09, -1.3504e-09, -4.1618e-09, -1.5600e-09,\n",
       "                       4.3772e-09,  3.9581e-10, -7.4506e-09, -2.5146e-09, -2.7707e-09,\n",
       "                       1.4901e-09, -3.5623e-09, -2.9802e-09,  5.5879e-10, -1.9907e-09,\n",
       "                      -1.4901e-09,  4.0047e-09,  0.0000e+00,  3.4459e-09, -1.2573e-09,\n",
       "                      -3.4692e-09,  6.2864e-10,  2.8871e-09, -9.3132e-10,  1.7695e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_11.running_var',\n",
       "              tensor([1.0328, 1.0145, 0.9547, 0.9762, 0.9799, 0.9659, 0.9717, 1.0492, 1.0070,\n",
       "                      1.0322, 0.9491, 1.0056, 1.0230, 1.0789, 1.0663, 1.0084, 1.0007, 0.9728,\n",
       "                      0.9877, 0.9383, 0.9892, 1.0343, 0.9808, 1.0015, 1.0387, 0.9771, 1.0651,\n",
       "                      0.9938, 1.0005, 1.1036, 0.9852, 0.9590, 0.9720, 0.9844, 1.0004, 0.9705,\n",
       "                      0.9942, 0.9441, 0.9748, 1.0636, 1.0672, 0.9824, 0.9995, 0.9737, 1.1030,\n",
       "                      1.0687, 0.9934, 1.0157, 1.0146, 1.0219, 0.9960, 1.0868, 1.0204, 1.0769,\n",
       "                      0.9650, 0.9673, 0.9751, 1.1250, 0.9779, 0.9437, 1.0042, 0.9515, 1.0058,\n",
       "                      1.0176, 1.0241, 1.0074, 0.9874, 1.0702, 1.0076, 0.9474, 1.0341, 0.9581,\n",
       "                      1.0285, 1.0244, 1.0260, 1.0502, 1.0043, 0.9901, 1.0446, 0.9462, 0.9929,\n",
       "                      1.0787, 1.0294, 0.9546, 1.1070, 0.9443, 0.9696, 1.0031, 1.0591, 0.9672,\n",
       "                      0.9946, 1.0216, 0.9392, 1.0157, 0.9770, 1.0065, 1.0307, 0.9955, 0.9727,\n",
       "                      0.9802])),\n",
       "             ('linear_relu_stack.BatchNorm_11.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_12.weight',\n",
       "              tensor([[ 0.0591,  0.0965,  0.0279,  ..., -0.0960,  0.0068, -0.0846],\n",
       "                      [-0.0238, -0.1238, -0.0702,  ..., -0.0635, -0.0032,  0.0280],\n",
       "                      [ 0.0318,  0.0341,  0.1173,  ..., -0.1175,  0.1330,  0.1377],\n",
       "                      ...,\n",
       "                      [-0.1169,  0.0130, -0.1392,  ...,  0.0987,  0.1428,  0.1090],\n",
       "                      [-0.1321,  0.1414, -0.1577,  ...,  0.1056,  0.0985, -0.1556],\n",
       "                      [-0.0610, -0.1400, -0.1290,  ..., -0.0785,  0.0448, -0.1316]])),\n",
       "             ('linear_relu_stack.Linear_12.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.running_mean',\n",
       "              tensor([-2.2585e-09, -3.6671e-09, -5.2620e-09, -3.7253e-10, -1.3039e-09,\n",
       "                       3.1432e-09,  2.0955e-10,  9.8953e-10, -2.7940e-10,  4.1910e-10,\n",
       "                      -1.3970e-09, -2.5611e-10,  1.1176e-09, -4.6566e-11, -1.1874e-09,\n",
       "                      -2.8871e-09, -1.3504e-09, -1.2573e-09, -2.4214e-09, -2.7008e-09,\n",
       "                      -1.3039e-09,  2.4680e-09,  2.9802e-09,  5.3085e-09,  2.7474e-09,\n",
       "                      -1.1642e-09, -1.1176e-09, -1.9441e-09,  2.3283e-09,  5.5879e-10,\n",
       "                      -1.4785e-09,  2.7940e-09, -1.8626e-09,  1.1176e-09,  1.4901e-09,\n",
       "                       7.2643e-09,  9.3132e-11,  1.6415e-09,  1.3970e-09,  1.8161e-09,\n",
       "                      -1.9791e-09,  1.8626e-10,  0.0000e+00, -2.0955e-09, -1.3970e-09,\n",
       "                      -2.0489e-09,  3.7719e-09, -5.5879e-10,  2.6892e-09,  2.0489e-09,\n",
       "                       1.0710e-09,  2.1188e-09,  3.1665e-09,  1.5134e-10, -1.6764e-09,\n",
       "                       1.3039e-09, -3.4925e-09, -1.1991e-09,  2.5611e-10, -2.5146e-09,\n",
       "                       3.0268e-09, -3.7253e-10,  6.0536e-10, -9.3132e-11,  1.8626e-09,\n",
       "                       4.6566e-10, -4.5431e-09,  2.5611e-09, -1.9092e-09,  2.5611e-09,\n",
       "                       4.1910e-10, -1.8626e-09,  3.4925e-09,  3.2596e-10,  1.8626e-10,\n",
       "                       1.5832e-09,  1.3970e-09, -3.9116e-09, -6.5193e-10, -3.0966e-09,\n",
       "                      -2.5611e-09, -9.0804e-10, -1.3970e-10,  3.4925e-09, -2.2352e-09,\n",
       "                      -4.1910e-10, -5.4017e-09,  6.0536e-10, -6.9849e-10,  4.4238e-10,\n",
       "                      -5.3085e-09,  3.6787e-09, -1.5367e-09, -2.0955e-09, -4.6566e-11,\n",
       "                       2.0023e-09, -4.6566e-11,  0.0000e+00, -1.5367e-09,  1.4436e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_12.running_var',\n",
       "              tensor([0.9731, 1.0369, 1.0088, 1.0353, 1.0390, 0.9693, 0.9583, 1.0379, 0.9979,\n",
       "                      0.9841, 0.9786, 0.9479, 1.0170, 1.0009, 1.0191, 0.9566, 0.9612, 1.0894,\n",
       "                      0.9821, 0.9453, 1.0159, 1.0626, 0.9888, 0.9955, 1.0106, 0.9943, 0.9762,\n",
       "                      0.9665, 1.0321, 0.9621, 0.9812, 0.9632, 1.0289, 1.1380, 0.9425, 1.0337,\n",
       "                      0.9975, 0.9943, 1.1107, 1.0508, 0.9645, 1.0440, 1.0245, 1.0728, 0.9810,\n",
       "                      0.9540, 0.9869, 1.0113, 0.9759, 0.9592, 1.0314, 0.9772, 0.9570, 0.9823,\n",
       "                      1.0138, 1.0293, 1.0308, 0.9652, 0.9535, 0.9600, 1.0029, 0.9901, 0.9942,\n",
       "                      1.0285, 1.0261, 1.0133, 1.0119, 1.0360, 0.9854, 0.9641, 0.9952, 1.0644,\n",
       "                      1.0014, 0.9482, 0.9688, 1.0187, 1.0111, 1.0054, 1.0291, 0.9921, 0.9732,\n",
       "                      1.0310, 1.0707, 1.0555, 0.9769, 1.0089, 1.0484, 0.9425, 0.9738, 0.9704,\n",
       "                      0.9804, 0.9951, 0.9761, 1.0364, 1.0170, 1.0007, 0.9536, 1.1306, 0.9349,\n",
       "                      0.9522])),\n",
       "             ('linear_relu_stack.BatchNorm_12.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_13.weight',\n",
       "              tensor([[ 0.1455,  0.1133, -0.0341,  ..., -0.0362, -0.1638, -0.0244],\n",
       "                      [ 0.0671,  0.1259,  0.1649,  ...,  0.0841,  0.1165,  0.1363],\n",
       "                      [ 0.1457,  0.1726, -0.0366,  ..., -0.0994,  0.0444,  0.0889],\n",
       "                      ...,\n",
       "                      [ 0.0026, -0.0096, -0.1117,  ..., -0.0076,  0.1704,  0.1707],\n",
       "                      [ 0.1200,  0.1498, -0.0296,  ..., -0.1617,  0.0805, -0.0583],\n",
       "                      [ 0.0149,  0.0031, -0.1039,  ..., -0.0310,  0.0821,  0.0956]])),\n",
       "             ('linear_relu_stack.Linear_13.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.running_mean',\n",
       "              tensor([ 2.1886e-09, -1.4436e-09,  1.3039e-09, -4.7497e-09, -8.8476e-10,\n",
       "                       2.1653e-09, -5.9605e-09, -4.6566e-10, -3.0268e-09,  1.2107e-09,\n",
       "                      -2.1886e-09,  1.7695e-09, -2.2352e-09,  3.2014e-09,  7.2178e-10,\n",
       "                      -8.2655e-10, -5.5879e-09,  4.0513e-09, -1.2340e-09, -1.8161e-09,\n",
       "                       8.8476e-10, -1.7695e-09,  2.3283e-09,  1.4668e-09,  3.9581e-10,\n",
       "                      -3.4925e-09,  8.8476e-10,  2.7940e-09,  9.0804e-10,  1.6298e-09,\n",
       "                      -2.7940e-10, -1.3504e-09,  5.5879e-10,  9.3132e-10,  3.0035e-09,\n",
       "                      -4.1910e-10,  2.2352e-09,  1.2107e-09,  6.9849e-10,  3.1665e-09,\n",
       "                      -1.3970e-09,  2.3283e-11,  1.8626e-10,  1.8626e-10, -1.7229e-09,\n",
       "                      -1.6764e-09, -2.1886e-09,  2.9802e-09,  5.0291e-09, -2.7132e-09,\n",
       "                      -2.3749e-09, -6.3330e-09,  3.7253e-10, -9.3132e-11,  2.4214e-09,\n",
       "                       5.3085e-09,  8.3819e-10,  1.5134e-09, -4.2142e-09,  3.3528e-09,\n",
       "                       8.8476e-10, -2.9802e-09,  4.7963e-09,  2.4680e-09,  4.5635e-09,\n",
       "                       5.5879e-09,  1.8081e-09,  2.3283e-10, -1.5832e-09, -1.6298e-09,\n",
       "                      -1.7695e-09,  3.9116e-09, -6.5193e-10,  3.1199e-09,  2.7008e-09,\n",
       "                       9.3132e-10,  1.9558e-09, -3.4459e-09,  2.7940e-09,  1.3271e-09,\n",
       "                       3.9581e-09,  2.0489e-09,  3.3411e-09, -9.7789e-10, -2.7707e-09,\n",
       "                       1.4086e-09, -1.1409e-09,  2.6077e-09,  3.7253e-10,  7.4506e-10,\n",
       "                       2.4447e-09, -1.5018e-09, -1.1409e-09,  5.8673e-09,  2.8871e-09,\n",
       "                       6.9849e-11,  5.8208e-10,  1.0710e-09, -1.3039e-09,  1.0710e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_13.running_var',\n",
       "              tensor([1.0133, 0.9904, 0.9563, 1.0736, 1.0067, 0.9688, 1.0413, 0.9799, 0.9798,\n",
       "                      0.9882, 0.9448, 0.9582, 1.0193, 0.9644, 0.9818, 0.9812, 0.9857, 0.9609,\n",
       "                      1.0008, 1.0510, 0.9510, 0.9672, 0.9873, 0.9486, 0.9539, 1.0024, 0.9659,\n",
       "                      0.9797, 1.0010, 0.9944, 0.9787, 0.9493, 0.9868, 0.9546, 0.9948, 1.0263,\n",
       "                      0.9599, 1.0074, 0.9740, 1.0976, 1.0317, 0.9486, 0.9992, 1.0182, 0.9570,\n",
       "                      0.9652, 1.0385, 0.9644, 0.9784, 0.9548, 0.9705, 0.9537, 1.0167, 1.0387,\n",
       "                      0.9611, 1.0016, 1.0077, 0.9575, 0.9813, 1.0270, 0.9934, 1.0161, 1.0544,\n",
       "                      0.9856, 1.0155, 0.9551, 0.9612, 1.0585, 0.9902, 0.9576, 0.9865, 0.9901,\n",
       "                      1.0151, 0.9883, 1.0223, 1.0440, 0.9625, 0.9838, 1.0126, 0.9822, 0.9950,\n",
       "                      0.9809, 0.9908, 0.9704, 0.9668, 0.9984, 0.9686, 0.9426, 0.9849, 0.9805,\n",
       "                      0.9554, 0.9975, 0.9785, 1.0483, 1.0339, 1.0095, 0.9862, 0.9738, 1.0006,\n",
       "                      0.9642])),\n",
       "             ('linear_relu_stack.BatchNorm_13.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_14.weight',\n",
       "              tensor([[-0.0640,  0.0452, -0.0984,  ..., -0.0147, -0.0391, -0.0109],\n",
       "                      [ 0.0994, -0.1645,  0.0572,  ..., -0.0801,  0.1208,  0.0603],\n",
       "                      [-0.0643, -0.0305,  0.0037,  ..., -0.0033,  0.0370, -0.0266],\n",
       "                      ...,\n",
       "                      [-0.0137, -0.1105,  0.1280,  ...,  0.0960,  0.0677, -0.0852],\n",
       "                      [ 0.0095,  0.1441,  0.1159,  ...,  0.0112,  0.1500,  0.0167],\n",
       "                      [ 0.0424,  0.0795,  0.1159,  ..., -0.0473,  0.1266, -0.0018]])),\n",
       "             ('linear_relu_stack.Linear_14.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.running_mean',\n",
       "              tensor([ 3.7253e-10,  4.9826e-09,  1.6531e-09, -6.0536e-10,  6.9849e-10,\n",
       "                       2.6077e-09,  3.0035e-09, -2.8173e-09, -5.5879e-10,  6.4727e-09,\n",
       "                       3.3528e-09,  4.6799e-09, -4.8429e-09, -2.5611e-09,  7.4506e-10,\n",
       "                       2.4214e-09, -6.5193e-10, -2.0489e-09, -9.7789e-10,  6.0536e-10,\n",
       "                      -3.0035e-09, -2.0023e-09,  4.2841e-09,  1.8626e-10, -2.1188e-09,\n",
       "                      -3.7719e-09, -5.9605e-09,  1.1758e-09,  1.3970e-10, -1.6764e-09,\n",
       "                      -1.8161e-09, -2.3283e-11,  3.7253e-09, -1.5832e-09, -1.7695e-09,\n",
       "                       4.6566e-10,  1.3970e-10, -1.8626e-10,  1.8626e-09, -2.7940e-09,\n",
       "                       4.0047e-09,  1.9791e-09, -3.8650e-09, -7.0781e-09, -8.8476e-10,\n",
       "                       3.1665e-09,  1.8394e-09, -1.9092e-09, -1.1176e-09,  6.0536e-10,\n",
       "                       4.1444e-09,  1.1176e-09, -1.4901e-09,  3.1199e-09,  2.9569e-09,\n",
       "                       5.1223e-10,  1.5367e-09,  8.6147e-10, -2.3283e-10, -4.1910e-10,\n",
       "                       2.0955e-09, -1.0245e-09,  4.4238e-10, -2.5611e-09, -1.3970e-10,\n",
       "                       1.1409e-09, -2.2817e-09, -2.7008e-09,  3.2596e-10,  3.2131e-09,\n",
       "                       3.1199e-09, -9.3132e-11, -1.6298e-10,  0.0000e+00, -2.9337e-09,\n",
       "                      -2.8405e-09, -2.2352e-09, -5.5879e-10,  3.5390e-09, -4.1910e-09,\n",
       "                      -3.4925e-10, -2.9802e-09, -1.1642e-10, -1.2689e-09,  2.4273e-09,\n",
       "                       5.4017e-09, -2.3283e-11,  5.5879e-10, -2.9337e-09,  3.2596e-09,\n",
       "                       6.1234e-09, -2.4913e-09,  2.4913e-09, -2.7008e-09,  9.8371e-10,\n",
       "                      -4.4587e-09,  1.8626e-10,  1.8161e-09,  4.5169e-09,  7.6834e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_14.running_var',\n",
       "              tensor([1.0222, 0.9846, 1.0483, 1.0135, 1.0207, 1.0202, 0.9703, 1.0247, 1.0173,\n",
       "                      1.0236, 0.9848, 1.0250, 1.1047, 0.9723, 1.0128, 0.9985, 0.9859, 1.0821,\n",
       "                      1.0094, 1.0055, 1.0032, 0.9584, 1.0070, 1.0621, 1.0540, 1.0040, 1.0513,\n",
       "                      0.9584, 0.9837, 0.9429, 1.0318, 0.9736, 1.0747, 0.9652, 1.0018, 1.0196,\n",
       "                      0.9751, 0.9516, 1.0851, 1.0716, 1.0446, 1.0125, 1.0009, 1.0751, 1.0260,\n",
       "                      0.9904, 0.9909, 0.9587, 0.9618, 0.9704, 0.9529, 1.0077, 0.9880, 1.0284,\n",
       "                      0.9913, 1.0501, 0.9858, 1.0031, 0.9794, 0.9542, 1.0035, 1.0001, 0.9994,\n",
       "                      1.0088, 0.9758, 0.9828, 1.0635, 0.9841, 0.9663, 0.9895, 0.9525, 1.0010,\n",
       "                      0.9329, 0.9616, 1.0105, 0.9937, 0.9865, 0.9565, 1.0118, 0.9574, 1.0058,\n",
       "                      1.0227, 0.9449, 0.9665, 0.9694, 1.0028, 0.9745, 1.0077, 0.9378, 0.9831,\n",
       "                      0.9903, 1.0067, 1.0013, 1.0326, 0.9592, 1.0280, 0.9462, 1.1142, 0.9867,\n",
       "                      1.0235])),\n",
       "             ('linear_relu_stack.BatchNorm_14.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_15.weight',\n",
       "              tensor([[-0.1043,  0.1553,  0.1155,  ...,  0.0089,  0.0495, -0.0233],\n",
       "                      [-0.0400, -0.0864, -0.0263,  ..., -0.0186, -0.1281, -0.1154],\n",
       "                      [-0.1413, -0.1611,  0.1388,  ..., -0.0793, -0.0455, -0.1612],\n",
       "                      ...,\n",
       "                      [ 0.0423, -0.1474,  0.0056,  ...,  0.1554, -0.1710,  0.0220],\n",
       "                      [-0.1473,  0.0949,  0.0469,  ...,  0.1074,  0.0720, -0.0107],\n",
       "                      [ 0.0366, -0.0237, -0.0104,  ...,  0.0401, -0.1600,  0.1183]])),\n",
       "             ('linear_relu_stack.Linear_15.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.running_mean',\n",
       "              tensor([ 3.1665e-09,  2.0955e-10,  1.7462e-10,  1.8626e-10, -2.8871e-09,\n",
       "                       2.7270e-09, -1.6764e-09,  2.4680e-09, -6.4669e-09, -4.1910e-09,\n",
       "                       2.4680e-09, -2.9337e-09, -1.6298e-09,  2.0023e-09,  4.4703e-09,\n",
       "                      -1.2107e-09, -1.2107e-09,  4.6566e-10,  3.7020e-09, -7.3342e-10,\n",
       "                      -1.1176e-09,  2.0489e-09, -1.7695e-09, -2.0489e-09, -3.2596e-10,\n",
       "                       1.3039e-09,  3.3528e-09,  3.4459e-09,  4.4238e-10, -1.1176e-09,\n",
       "                       1.6531e-09, -1.3039e-09, -6.5193e-10, -2.7474e-09, -2.5961e-09,\n",
       "                       4.3190e-09, -1.3970e-09, -1.5832e-09,  2.3283e-10, -5.1223e-10,\n",
       "                       8.3819e-10,  2.0955e-09, -2.1420e-09, -6.0536e-09,  3.5157e-09,\n",
       "                       9.3132e-11,  1.7229e-09, -2.2352e-09,  2.4214e-09, -3.3993e-09,\n",
       "                       3.1432e-10, -2.0722e-09,  3.1432e-10, -1.5832e-09,  5.0291e-09,\n",
       "                       1.6764e-09,  1.0245e-09, -4.4703e-09, -1.2573e-09, -2.4680e-09,\n",
       "                       3.5856e-09,  3.2596e-09, -1.6298e-09, -6.9849e-09, -5.8208e-10,\n",
       "                      -2.9686e-09,  4.6566e-10, -1.1642e-11,  6.5193e-10, -1.0245e-09,\n",
       "                      -1.7928e-09,  3.1316e-09,  1.3039e-09,  1.7695e-09, -4.5169e-09,\n",
       "                       2.4680e-09,  2.2352e-09, -8.3819e-10,  4.6566e-10, -1.3039e-09,\n",
       "                      -3.1665e-09,  2.1886e-09,  2.7008e-09, -5.8208e-09, -2.2701e-09,\n",
       "                      -2.7474e-09, -3.1665e-09, -2.5728e-09,  2.3283e-10, -1.6298e-09,\n",
       "                       3.3528e-09,  4.4238e-10,  3.7253e-09,  4.6566e-10,  1.7695e-09,\n",
       "                      -2.3283e-09, -4.1910e-10, -2.1653e-09,  8.8476e-10,  1.0710e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_15.running_var',\n",
       "              tensor([1.0735, 1.0513, 0.9874, 1.0597, 1.0182, 0.9386, 0.9914, 1.0038, 1.0182,\n",
       "                      0.9764, 0.9968, 1.0343, 1.0275, 0.9826, 1.0349, 0.9939, 1.0028, 0.9753,\n",
       "                      0.9824, 0.9808, 0.9423, 1.0984, 0.9842, 1.0126, 0.9914, 1.1241, 1.0182,\n",
       "                      1.0303, 0.9990, 0.9902, 0.9714, 1.0073, 1.0256, 1.0586, 0.9559, 0.9612,\n",
       "                      1.0029, 0.9910, 1.0238, 0.9949, 1.0305, 0.9633, 1.0298, 1.1932, 1.0076,\n",
       "                      0.9627, 0.9704, 1.0215, 1.0340, 1.0286, 1.0382, 1.0064, 1.0366, 1.0691,\n",
       "                      1.0324, 1.0774, 0.9335, 0.9845, 0.9876, 0.9775, 0.9814, 0.9819, 0.9616,\n",
       "                      0.9815, 0.9996, 0.9485, 1.0662, 0.9603, 1.0458, 1.0178, 1.0395, 0.9570,\n",
       "                      0.9822, 0.9383, 1.1033, 1.0100, 0.9933, 1.0005, 1.0342, 0.9752, 0.9946,\n",
       "                      1.0381, 0.9651, 1.1202, 1.0782, 1.0108, 1.0525, 1.0530, 0.9893, 1.0098,\n",
       "                      1.0286, 1.0117, 1.0521, 0.9871, 1.0126, 1.0059, 1.0120, 0.9740, 0.9313,\n",
       "                      0.9923])),\n",
       "             ('linear_relu_stack.BatchNorm_15.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_16.weight',\n",
       "              tensor([[ 0.1000,  0.1374, -0.1550,  ...,  0.1421, -0.0856, -0.0145],\n",
       "                      [ 0.0204, -0.1479,  0.0938,  ..., -0.0057, -0.0020,  0.0187],\n",
       "                      [-0.1615,  0.1339,  0.1265,  ..., -0.0815,  0.0353,  0.0765],\n",
       "                      ...,\n",
       "                      [ 0.1539,  0.1030, -0.1282,  ..., -0.0851, -0.1327, -0.0336],\n",
       "                      [-0.1199,  0.0749, -0.1051,  ...,  0.1428, -0.1102, -0.0760],\n",
       "                      [-0.0502, -0.0806,  0.0795,  ...,  0.1270,  0.0969, -0.0393]])),\n",
       "             ('linear_relu_stack.Linear_16.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.running_mean',\n",
       "              tensor([ 1.2573e-09, -1.2107e-09,  6.5193e-10, -1.6298e-09,  1.1176e-09,\n",
       "                       2.7940e-10, -4.6566e-11, -2.7940e-10,  1.7695e-09, -3.0268e-10,\n",
       "                       9.3132e-11, -1.6764e-09,  9.8953e-10, -4.1910e-10,  2.2352e-09,\n",
       "                       3.7253e-10, -1.0710e-09,  1.1642e-09,  4.6566e-10, -8.3819e-10,\n",
       "                      -5.1688e-09,  1.6764e-09,  4.0047e-09, -1.6298e-10, -7.4506e-10,\n",
       "                       2.7241e-09,  1.4436e-09, -6.5193e-10, -8.1491e-10,  1.6764e-09,\n",
       "                      -4.7497e-09, -1.3039e-09,  3.2596e-10, -6.8918e-09,  1.3504e-09,\n",
       "                       3.7253e-10,  1.9558e-09, -2.1886e-09, -3.5390e-09,  2.3749e-09,\n",
       "                       1.5367e-09, -3.9581e-10,  2.0023e-09, -6.5193e-10, -1.5367e-09,\n",
       "                       2.3749e-09,  2.0489e-09,  9.3132e-10,  1.3271e-09,  3.5157e-09,\n",
       "                      -1.8161e-09,  3.1199e-09,  9.3132e-11, -9.3132e-10, -7.4506e-10,\n",
       "                      -5.3551e-10,  4.7497e-09, -4.0513e-09,  2.4680e-09, -4.0513e-09,\n",
       "                       5.5879e-10,  1.8626e-10, -3.2829e-09, -3.2596e-09, -9.3132e-11,\n",
       "                       3.9581e-10, -2.8871e-09,  6.5193e-10, -1.1642e-11,  4.5635e-09,\n",
       "                      -2.7008e-09, -6.2864e-09,  1.2107e-09,  1.3970e-09, -1.9092e-09,\n",
       "                       4.9360e-09, -9.3132e-11, -3.7253e-09, -1.4901e-09,  3.2596e-10,\n",
       "                       3.5856e-09,  1.8044e-09,  2.0955e-10,  2.9802e-09, -5.7742e-09,\n",
       "                       4.8894e-09,  1.3737e-09,  2.6077e-09, -2.5611e-09,  3.7253e-10,\n",
       "                       3.6322e-09,  1.8626e-10,  3.3993e-09,  1.8626e-10, -4.6566e-10,\n",
       "                      -9.7789e-10, -1.3039e-09,  3.3528e-09, -1.0245e-09, -2.5611e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_16.running_var',\n",
       "              tensor([1.0097, 1.0409, 0.9995, 1.0126, 1.1270, 1.0103, 1.0032, 1.0352, 1.0363,\n",
       "                      0.9448, 0.9541, 0.9428, 1.0346, 1.0259, 0.9834, 0.9924, 0.9846, 0.9801,\n",
       "                      0.9544, 1.0417, 0.9346, 0.9646, 1.0507, 0.9542, 1.0025, 0.9619, 0.9571,\n",
       "                      0.9869, 0.9842, 1.0014, 1.0518, 0.9761, 0.9743, 1.0728, 0.9369, 1.0183,\n",
       "                      0.9700, 0.9876, 0.9666, 1.0571, 1.0152, 0.9856, 0.9672, 1.0298, 1.0442,\n",
       "                      1.0125, 1.0497, 0.9974, 0.9320, 1.0194, 0.9907, 0.9846, 1.0240, 0.9393,\n",
       "                      1.0280, 0.9483, 0.9903, 0.9500, 1.0017, 1.0124, 1.0493, 1.0012, 0.9545,\n",
       "                      0.9829, 1.0079, 0.9270, 1.0383, 0.9753, 1.0159, 1.0029, 1.0150, 0.9634,\n",
       "                      1.0227, 1.0574, 0.9809, 1.0337, 0.9671, 1.0195, 0.9902, 1.0101, 1.0149,\n",
       "                      0.9697, 0.9697, 0.9509, 1.0444, 0.9733, 0.9708, 1.0387, 1.0024, 1.0012,\n",
       "                      1.0197, 0.9936, 0.9843, 1.0995, 1.0282, 0.9673, 1.0064, 1.0045, 1.0004,\n",
       "                      0.9577])),\n",
       "             ('linear_relu_stack.BatchNorm_16.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_17.weight',\n",
       "              tensor([[ 0.1120, -0.0266,  0.1533,  ...,  0.1353, -0.0147,  0.1235],\n",
       "                      [ 0.0923,  0.1497, -0.1158,  ..., -0.0906, -0.1359,  0.0962],\n",
       "                      [-0.0650,  0.1206, -0.0866,  ...,  0.1334,  0.0943,  0.0676],\n",
       "                      ...,\n",
       "                      [ 0.0339, -0.0754,  0.1189,  ..., -0.1495,  0.0601, -0.0262],\n",
       "                      [ 0.0754, -0.0095, -0.1212,  ...,  0.0567,  0.0397,  0.1521],\n",
       "                      [-0.1464,  0.0989, -0.0030,  ...,  0.1001, -0.0015, -0.0846]])),\n",
       "             ('linear_relu_stack.Linear_17.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.running_mean',\n",
       "              tensor([ 2.2352e-09, -5.5617e-09, -2.5611e-10, -1.3271e-09,  1.2573e-09,\n",
       "                       1.0710e-09,  2.2352e-09, -2.0955e-10, -3.0501e-09, -4.3772e-09,\n",
       "                       1.4668e-09,  1.0594e-09, -1.0943e-09, -6.4727e-09, -1.6764e-09,\n",
       "                       1.3039e-09, -1.6764e-09,  1.9558e-09, -2.9337e-09, -5.5879e-10,\n",
       "                      -2.3283e-09,  3.8883e-09, -1.3737e-09,  1.0245e-09, -1.0477e-09,\n",
       "                      -4.3772e-09, -2.8347e-09,  1.6298e-09, -3.1898e-09, -1.3039e-09,\n",
       "                       1.0245e-09,  1.7695e-09, -1.3970e-10, -8.3819e-10,  3.5041e-09,\n",
       "                       1.3504e-09,  1.8161e-09,  8.3819e-10, -1.2107e-09,  1.1292e-09,\n",
       "                       2.7008e-09,  2.8173e-09,  1.3970e-09, -2.7940e-10,  2.4214e-09,\n",
       "                       4.1910e-10,  2.4447e-09, -1.5367e-09,  1.7695e-09,  1.7695e-09,\n",
       "                       1.7229e-09,  2.2817e-09, -5.6811e-09, -1.6298e-10, -1.3737e-09,\n",
       "                      -2.5146e-09,  1.1176e-09, -1.5832e-09, -1.6764e-09, -6.9849e-11,\n",
       "                       8.3819e-10, -4.6566e-11, -3.2596e-09, -2.2352e-09, -4.0047e-09,\n",
       "                       8.3819e-10,  0.0000e+00,  6.9849e-10,  7.4506e-10, -1.5832e-09,\n",
       "                       5.3551e-10, -6.2864e-10,  1.1176e-09, -6.9849e-10, -4.6566e-10,\n",
       "                       4.6566e-10,  2.2119e-10, -1.9092e-09,  3.0734e-09, -1.0245e-09,\n",
       "                      -3.0734e-09,  1.8626e-10, -9.3132e-10, -4.2142e-09, -5.3085e-09,\n",
       "                      -2.1420e-09,  7.4797e-10, -4.7963e-09, -2.7008e-09, -1.3504e-09,\n",
       "                      -1.6764e-09,  2.5146e-09, -2.6543e-09, -4.1910e-10,  1.7462e-09,\n",
       "                      -2.2352e-09,  7.9162e-10, -1.0245e-09, -1.8626e-10, -1.8626e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_17.running_var',\n",
       "              tensor([1.0188, 0.9509, 0.9568, 0.9794, 1.0143, 0.9949, 1.0134, 1.0228, 1.0041,\n",
       "                      1.0672, 0.9850, 1.0277, 0.9862, 1.0365, 1.0056, 1.0291, 0.9691, 0.9650,\n",
       "                      1.0344, 1.0046, 1.0594, 0.9571, 1.0137, 0.9814, 0.9615, 1.0363, 1.0068,\n",
       "                      0.9829, 1.0156, 1.0100, 0.9767, 0.9964, 0.9895, 1.0404, 1.0143, 0.9753,\n",
       "                      0.9853, 1.0509, 1.0427, 1.0017, 1.0220, 1.0084, 0.9789, 1.0042, 0.9837,\n",
       "                      0.9556, 0.9701, 0.9879, 0.9868, 0.9690, 1.0801, 0.9537, 0.9778, 1.0183,\n",
       "                      0.9578, 1.0770, 0.9901, 1.0272, 1.0028, 1.0643, 1.0170, 0.9813, 1.0060,\n",
       "                      0.9910, 0.9732, 0.9356, 1.0011, 0.9783, 1.0735, 1.0194, 1.0636, 0.9724,\n",
       "                      0.9734, 0.9706, 0.9977, 0.9880, 0.9992, 0.9936, 0.9697, 1.0052, 1.0165,\n",
       "                      1.0690, 1.0661, 1.0227, 1.0335, 1.0566, 1.0058, 0.9740, 0.9476, 1.0456,\n",
       "                      1.0052, 1.0113, 0.9449, 1.0344, 0.9854, 0.9837, 0.9677, 1.0410, 0.9852,\n",
       "                      0.9566])),\n",
       "             ('linear_relu_stack.BatchNorm_17.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_18.weight',\n",
       "              tensor([[ 0.1159, -0.0952, -0.1508,  ..., -0.0403, -0.1651,  0.1409],\n",
       "                      [ 0.0330,  0.1582, -0.1573,  ...,  0.0341, -0.0708,  0.1271],\n",
       "                      [-0.1721,  0.1008,  0.1100,  ..., -0.0338, -0.1207,  0.0439],\n",
       "                      ...,\n",
       "                      [-0.0128,  0.0792,  0.0856,  ..., -0.1124,  0.0360, -0.1038],\n",
       "                      [-0.0045,  0.0085,  0.0118,  ..., -0.0984, -0.0785,  0.1338],\n",
       "                      [-0.0234,  0.0217,  0.1653,  ..., -0.1672,  0.1259, -0.1131]])),\n",
       "             ('linear_relu_stack.Linear_18.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.running_mean',\n",
       "              tensor([ 2.3050e-09,  2.0489e-09,  1.1642e-09,  7.4506e-10,  1.1292e-09,\n",
       "                       1.1176e-09, -3.3993e-09, -3.3993e-09,  3.9116e-09, -2.3749e-09,\n",
       "                      -6.5193e-10, -1.2107e-09, -6.5193e-10,  3.0734e-09,  2.3283e-09,\n",
       "                      -1.5367e-09,  1.8161e-09, -2.8405e-09, -2.4680e-09, -2.3283e-09,\n",
       "                       2.3749e-09, -1.5832e-09, -2.5611e-09,  2.2817e-09, -3.2596e-10,\n",
       "                      -1.1642e-09,  1.9558e-09,  2.5611e-09, -5.5879e-10, -4.1677e-09,\n",
       "                      -3.7253e-10, -1.9209e-09, -1.9791e-09, -1.5832e-09,  1.6298e-09,\n",
       "                       1.0245e-09, -6.5193e-10, -3.6322e-09,  7.4506e-10,  1.2573e-09,\n",
       "                       8.6147e-10,  8.1491e-09,  1.4901e-09,  7.4506e-10,  4.6566e-09,\n",
       "                       2.9802e-09, -4.0978e-09,  1.1642e-09, -3.7253e-09, -2.5146e-09,\n",
       "                      -4.7497e-09, -3.1199e-09,  1.9558e-09,  2.2817e-09, -4.6566e-09,\n",
       "                       2.7940e-10,  4.0047e-09,  1.3970e-10,  5.5879e-10,  3.4692e-09,\n",
       "                      -6.9849e-10, -8.3819e-10,  2.7474e-09,  3.6787e-09, -3.2596e-10,\n",
       "                       4.9360e-09,  1.1176e-09,  1.3970e-09, -1.4436e-09,  1.3039e-09,\n",
       "                       9.3132e-11,  1.7462e-09, -1.0710e-09,  1.5832e-09, -4.6566e-10,\n",
       "                      -8.3819e-10,  3.7253e-10,  9.3132e-10, -3.4459e-09, -4.6566e-11,\n",
       "                      -1.0245e-09, -1.1176e-09,  1.4901e-09, -8.8476e-10, -3.1199e-09,\n",
       "                      -5.5879e-10,  1.3970e-10,  2.5611e-09, -1.2107e-09,  1.5832e-09,\n",
       "                      -1.1642e-09, -3.2596e-09,  2.0023e-09, -1.0827e-09, -9.3132e-10,\n",
       "                      -2.3283e-10,  3.5856e-09,  2.3283e-10,  2.7474e-09, -3.7253e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_18.running_var',\n",
       "              tensor([0.9910, 1.0471, 0.9563, 1.0256, 0.9791, 0.9602, 1.0041, 0.9755, 1.1004,\n",
       "                      0.9670, 1.0024, 1.0050, 0.9718, 1.0017, 1.0026, 1.0061, 0.9881, 0.9980,\n",
       "                      0.9630, 0.9499, 1.0075, 1.0889, 0.9792, 1.0260, 0.9601, 1.0358, 1.0072,\n",
       "                      1.0377, 0.9877, 1.0017, 1.0587, 0.9635, 0.9376, 0.9572, 0.9498, 0.9559,\n",
       "                      1.0129, 0.9422, 0.9645, 0.9609, 1.0119, 1.0588, 0.9737, 0.9939, 1.0319,\n",
       "                      0.9658, 1.0292, 0.9944, 0.9784, 0.9772, 1.0166, 1.0120, 1.0169, 0.9349,\n",
       "                      0.9748, 0.9578, 1.0049, 0.9380, 0.9857, 0.9736, 0.9610, 0.9916, 1.0601,\n",
       "                      1.0049, 1.0182, 0.9878, 1.0007, 0.9625, 1.0032, 1.0312, 1.0584, 1.1099,\n",
       "                      0.9878, 1.0212, 1.0142, 0.9721, 0.9780, 0.9755, 0.9515, 0.9967, 0.9774,\n",
       "                      0.9898, 1.0961, 1.0676, 0.9564, 0.9683, 0.9655, 0.9708, 0.9552, 0.9902,\n",
       "                      0.9679, 1.0213, 0.9891, 0.9745, 1.0249, 1.0293, 0.9795, 1.0164, 1.0038,\n",
       "                      1.0218])),\n",
       "             ('linear_relu_stack.BatchNorm_18.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Output Layer.weight',\n",
       "              tensor([[-3.9265e-02, -2.9089e-02,  1.6282e-01, -2.2510e-01, -2.3287e-02,\n",
       "                        3.3556e-02, -6.2674e-02,  3.9246e-02,  1.7173e-02,  2.0392e-01,\n",
       "                        1.5283e-01, -3.2743e-02,  1.4431e-01, -5.0592e-02,  1.7499e-01,\n",
       "                        5.6395e-02,  1.3043e-01, -2.2297e-01,  2.2899e-01, -1.8316e-01,\n",
       "                        1.5168e-01,  1.8364e-01,  7.3924e-02, -2.2438e-01, -1.7065e-01,\n",
       "                       -3.7873e-02, -1.3714e-01,  1.3223e-02,  1.8587e-01,  1.0914e-01,\n",
       "                        2.1683e-01, -1.5780e-01, -2.1643e-01, -9.8526e-05, -4.4371e-02,\n",
       "                       -1.6971e-01,  4.6321e-02, -1.3440e-01, -4.8971e-02,  1.8076e-01,\n",
       "                       -1.9713e-01, -1.7785e-01,  4.8871e-02, -3.9865e-02,  6.9212e-02,\n",
       "                       -1.3749e-01,  6.4384e-03,  1.2227e-01,  1.7911e-01,  2.2351e-01,\n",
       "                       -8.0653e-02,  4.5485e-02,  1.9220e-01,  6.4152e-02, -1.1294e-01,\n",
       "                        1.6317e-01,  2.0898e-01,  5.5753e-02, -7.3375e-03,  2.0109e-01,\n",
       "                       -1.6292e-01,  1.6330e-01, -2.0578e-01,  1.7762e-05,  1.5267e-01,\n",
       "                       -1.8094e-01, -1.4292e-01, -7.5499e-02,  2.1049e-01, -1.2866e-01,\n",
       "                       -1.3844e-01,  6.6965e-02,  2.1050e-01,  1.1187e-01, -2.6068e-04,\n",
       "                        2.4262e-02, -1.1713e-01,  8.3927e-02,  4.3493e-02, -7.9454e-02,\n",
       "                       -7.9694e-02, -8.1994e-02,  1.9503e-01, -9.3306e-02, -1.4373e-01,\n",
       "                        2.2052e-01,  8.2399e-03, -1.9281e-01, -1.9025e-01,  9.0733e-02,\n",
       "                        7.7456e-02,  1.8577e-01,  5.3667e-04, -1.7532e-01, -1.1165e-01,\n",
       "                       -1.0612e-01,  1.1398e-02,  1.8328e-01, -1.7583e-01, -6.6724e-02],\n",
       "                      [-1.0794e-01,  2.1208e-01,  1.2841e-01, -1.1077e-01,  9.1836e-02,\n",
       "                        1.4070e-01, -5.9867e-02, -6.1751e-02, -1.4191e-01,  1.6822e-01,\n",
       "                       -2.1264e-01, -8.0805e-02, -1.8279e-01, -2.2381e-01,  4.3044e-02,\n",
       "                       -1.2813e-01,  2.5788e-02,  1.7429e-01,  2.3217e-01,  2.2816e-01,\n",
       "                       -2.0718e-01, -2.2986e-01, -1.6503e-01, -8.2813e-02, -5.4220e-02,\n",
       "                       -8.3200e-02,  2.0687e-01,  1.4976e-01, -9.2058e-02,  1.9264e-01,\n",
       "                       -1.1164e-01,  2.2523e-01, -8.0367e-02,  2.2468e-01, -1.5493e-01,\n",
       "                        5.9679e-03, -5.6920e-02,  9.8328e-02, -8.6595e-02,  2.0308e-01,\n",
       "                       -2.0748e-01,  1.9813e-01,  2.0546e-01, -1.1885e-01, -1.5912e-01,\n",
       "                        5.8816e-02, -3.3097e-02, -2.1165e-01,  8.0579e-02,  1.1662e-01,\n",
       "                        1.1200e-01, -1.3921e-01,  5.4073e-02,  1.4056e-01, -7.0588e-02,\n",
       "                       -1.3749e-01,  1.5500e-01, -6.2979e-02,  1.6956e-02, -2.0880e-01,\n",
       "                       -2.2412e-01,  2.2643e-02,  7.5962e-03, -1.8556e-01, -1.9766e-01,\n",
       "                        8.8480e-02, -1.6044e-01,  1.6697e-01,  1.1656e-01,  6.5172e-02,\n",
       "                       -6.5141e-02,  2.2206e-01, -1.8267e-01, -8.5300e-02, -2.0803e-01,\n",
       "                       -1.7640e-01, -1.3157e-02, -1.7939e-02,  1.8551e-01,  1.4999e-01,\n",
       "                       -1.3499e-01, -2.2614e-01,  2.2270e-01, -2.2518e-01,  2.3104e-01,\n",
       "                        1.3268e-01, -9.4473e-02, -5.1570e-02, -7.2613e-02,  5.9427e-02,\n",
       "                       -5.2217e-02,  6.9430e-02, -1.9800e-01, -1.0632e-01,  6.2133e-02,\n",
       "                        1.8455e-01, -1.6539e-01, -2.2319e-01, -4.5085e-02,  1.6588e-01],\n",
       "                      [ 4.5084e-02,  1.4566e-01,  1.3231e-01, -1.1804e-02, -7.7835e-02,\n",
       "                        4.7739e-02, -6.4152e-02,  4.3586e-03,  5.6249e-02,  1.3933e-01,\n",
       "                       -1.2131e-01,  1.2185e-01,  6.0995e-02,  1.6447e-01, -1.4739e-02,\n",
       "                       -5.9721e-02, -1.4544e-01,  1.6870e-01,  4.2576e-02, -1.0566e-01,\n",
       "                       -8.1301e-02,  4.1285e-02, -1.9102e-02,  2.3033e-01,  1.2570e-01,\n",
       "                       -1.9404e-01, -1.7564e-01, -1.3910e-01,  2.6439e-02, -4.3995e-02,\n",
       "                       -3.9913e-02,  4.9065e-02, -1.1632e-01,  1.2694e-01, -1.6478e-01,\n",
       "                       -1.9672e-01,  1.7744e-01,  9.0101e-02,  2.2512e-01,  1.1976e-01,\n",
       "                        1.9972e-01,  5.1622e-02, -1.6037e-01, -7.5850e-02,  2.0696e-01,\n",
       "                       -1.6888e-01,  1.4299e-01, -1.3826e-01,  2.6360e-03, -7.1487e-02,\n",
       "                        5.8105e-02, -1.4291e-01,  1.4081e-01, -1.5085e-01, -1.3098e-01,\n",
       "                       -1.0524e-01, -5.5445e-02,  1.6264e-01, -1.0341e-01, -2.2272e-01,\n",
       "                        2.0362e-01,  1.7047e-01,  2.1378e-01, -1.5200e-01, -2.2073e-01,\n",
       "                        5.1432e-02, -7.2415e-03,  6.0043e-02,  2.0368e-01, -2.0094e-01,\n",
       "                        1.1645e-01, -2.1194e-01, -1.2630e-01,  3.6508e-02, -2.0751e-01,\n",
       "                        1.3856e-01, -2.3120e-01, -1.6801e-01,  6.9052e-02,  4.3675e-02,\n",
       "                        2.0288e-01,  4.2709e-02, -1.2910e-01,  8.6908e-02, -1.4238e-01,\n",
       "                        1.1874e-01,  4.3850e-02,  1.4565e-01, -3.0318e-02,  3.2498e-02,\n",
       "                       -2.1376e-01, -1.7085e-01,  6.5196e-02,  2.2919e-01,  9.5406e-02,\n",
       "                        2.0628e-01, -1.3805e-01,  1.7070e-01, -1.3111e-01, -7.0342e-02],\n",
       "                      [-1.1591e-01, -6.1975e-02, -1.8922e-01,  1.5222e-01,  1.7461e-01,\n",
       "                        2.4641e-02,  4.1733e-02,  5.3769e-02,  1.4206e-01, -1.6701e-01,\n",
       "                        2.2209e-01,  2.0195e-01, -2.2773e-01,  5.5265e-02,  2.1268e-01,\n",
       "                       -2.2822e-02, -9.8745e-02, -2.2849e-01, -8.0747e-02,  1.8014e-01,\n",
       "                       -2.1972e-01,  1.9922e-01,  4.2655e-02, -1.5886e-02, -1.4506e-01,\n",
       "                        1.6580e-02,  1.5089e-01, -6.1947e-02, -2.0713e-01, -9.5786e-02,\n",
       "                        1.4950e-01,  8.9812e-02, -1.6098e-01, -1.3350e-01, -7.7768e-02,\n",
       "                        7.4276e-02, -7.6166e-03, -8.7082e-02, -2.0758e-01, -2.1584e-01,\n",
       "                        8.3711e-02,  7.2684e-03,  2.0625e-03,  1.2023e-01, -2.0160e-01,\n",
       "                        1.9378e-01,  2.0824e-01,  3.7116e-02,  4.2378e-02,  2.2861e-01,\n",
       "                        1.4817e-01, -1.6197e-02,  1.5524e-01, -2.2242e-01, -1.4778e-01,\n",
       "                       -2.2235e-01, -3.7636e-02,  2.3306e-01, -1.9769e-01, -1.2933e-01,\n",
       "                        1.4427e-01, -1.8233e-01, -1.3566e-02,  1.0289e-01,  3.4872e-02,\n",
       "                       -2.3141e-01, -8.4578e-02,  1.3766e-01,  7.3266e-02,  3.1038e-02,\n",
       "                       -9.5188e-02, -1.7164e-01, -1.8584e-02,  1.2311e-01,  8.7591e-02,\n",
       "                       -1.5874e-01, -3.4183e-02, -6.8639e-02,  6.0453e-02,  9.8269e-02,\n",
       "                       -5.6883e-02,  3.9991e-02,  2.2156e-01,  1.8021e-01, -1.0289e-01,\n",
       "                        1.5293e-01,  1.8277e-01, -2.2142e-01, -1.9415e-01, -1.4727e-01,\n",
       "                       -7.1375e-02,  1.5407e-01, -2.1049e-01, -1.6054e-03,  1.0517e-01,\n",
       "                       -1.9302e-01,  2.0687e-01, -1.5049e-01,  2.2819e-01,  1.9598e-01],\n",
       "                      [ 1.4353e-01,  1.3247e-01,  7.5531e-02,  8.8655e-02,  1.6336e-01,\n",
       "                       -3.2218e-02, -2.0312e-01, -2.3305e-01, -2.2747e-01, -1.5052e-01,\n",
       "                        1.1797e-01, -1.5989e-01, -1.0708e-01, -1.0581e-02, -1.8427e-01,\n",
       "                       -1.3643e-01, -5.7987e-02, -7.2319e-03, -1.4219e-02,  2.2718e-01,\n",
       "                        8.0876e-02, -1.4743e-01,  1.2057e-01,  1.7947e-01, -1.7129e-01,\n",
       "                       -1.5146e-02,  2.0646e-01, -1.8255e-01, -1.4038e-01, -1.9408e-01,\n",
       "                       -1.6747e-02,  4.8430e-03,  1.7705e-01,  1.8502e-01, -1.2580e-01,\n",
       "                        2.2598e-01,  3.3472e-02, -1.0392e-01, -1.5759e-01,  1.0265e-02,\n",
       "                       -2.3167e-01,  2.2971e-01,  2.3412e-02, -2.3350e-01,  1.1250e-01,\n",
       "                        7.0962e-02,  1.6640e-01, -1.8841e-01, -1.0075e-01, -1.5617e-01,\n",
       "                        7.1956e-02,  2.3158e-01, -1.4334e-01, -1.0071e-01, -3.1479e-02,\n",
       "                        1.4079e-01, -6.3791e-02, -8.5237e-03,  5.5860e-02,  4.0117e-02,\n",
       "                       -4.0913e-02,  1.5932e-02, -2.0989e-01, -1.5827e-01,  2.8274e-02,\n",
       "                       -1.3525e-01, -2.2024e-02,  5.2222e-02,  3.7088e-02, -2.2591e-01,\n",
       "                       -1.0896e-01,  4.4013e-02, -1.6907e-02, -4.4271e-02, -1.3336e-01,\n",
       "                        2.1872e-01,  1.3492e-01,  6.2332e-02,  1.8381e-01,  1.6376e-01,\n",
       "                       -1.5199e-01,  1.2398e-01, -1.0133e-01, -1.4799e-01,  1.4184e-01,\n",
       "                       -1.7068e-01, -1.7837e-01,  6.5526e-02, -1.3616e-01,  1.7453e-01,\n",
       "                        2.1531e-01, -9.5400e-02, -1.6734e-01,  1.9262e-01,  1.1824e-01,\n",
       "                       -1.7309e-02, -1.3471e-01,  1.1237e-01, -1.8642e-01,  2.0127e-02],\n",
       "                      [-1.9624e-01,  1.5161e-01, -1.8728e-01, -8.4598e-03, -1.4198e-01,\n",
       "                       -9.6616e-03,  1.3913e-01, -1.2627e-01, -6.4197e-02,  7.0364e-02,\n",
       "                       -1.9044e-01,  2.7603e-02, -2.2949e-01, -2.2344e-01, -1.3172e-01,\n",
       "                       -8.8484e-02,  3.2545e-02, -1.8707e-01,  1.7948e-02, -9.6564e-02,\n",
       "                       -3.7201e-02,  1.2301e-01, -2.0461e-01,  2.1332e-01, -1.1148e-01,\n",
       "                        1.0466e-01,  1.3090e-01,  1.9625e-02, -3.6376e-02, -1.6570e-01,\n",
       "                       -3.3571e-03,  3.9924e-02, -9.3212e-02,  3.3512e-02, -1.5331e-01,\n",
       "                        2.0143e-02, -3.8261e-02,  4.8338e-04, -4.3825e-02, -2.5149e-02,\n",
       "                        3.8010e-02,  1.5432e-01, -1.0202e-01, -1.0194e-01,  3.5395e-02,\n",
       "                       -9.0697e-02, -1.5211e-01, -3.9663e-02, -6.7540e-03, -6.9443e-02,\n",
       "                        2.2809e-01, -2.2418e-01,  7.2303e-02,  1.0782e-01,  1.8613e-03,\n",
       "                       -1.7357e-01,  1.2238e-01,  1.3546e-01,  1.1958e-01, -6.2933e-02,\n",
       "                        8.2530e-03, -1.6033e-01,  9.5077e-02, -7.8268e-02,  1.6913e-01,\n",
       "                       -2.3160e-01, -1.7887e-01,  1.4669e-01, -1.4919e-01,  1.8832e-01,\n",
       "                       -1.7534e-01, -6.3822e-02, -1.7488e-02,  8.7603e-02, -1.7584e-01,\n",
       "                       -1.4462e-01, -8.5306e-02,  2.1159e-01,  8.7230e-02,  2.0439e-01,\n",
       "                        1.2092e-01,  2.0421e-01,  2.3230e-01, -1.3968e-01, -1.3759e-01,\n",
       "                        9.6449e-02, -3.2667e-02,  4.4161e-02,  5.9727e-02,  1.4320e-01,\n",
       "                        1.5264e-01, -1.0082e-01,  2.8802e-02,  3.3797e-02,  1.0109e-01,\n",
       "                        1.8575e-01, -1.0956e-01, -1.1970e-01, -2.1892e-02,  4.0929e-02],\n",
       "                      [ 8.4611e-02, -1.0325e-01,  6.1039e-02,  1.8910e-01,  6.4715e-02,\n",
       "                        2.3188e-01, -1.6807e-01,  1.1527e-01, -1.2141e-02, -1.1862e-01,\n",
       "                        1.6395e-01, -1.1977e-01, -1.3348e-01, -1.5612e-02, -1.0459e-01,\n",
       "                       -1.8906e-01, -6.2314e-03, -1.1028e-01,  6.1709e-02,  3.8406e-03,\n",
       "                        1.3053e-05,  1.9995e-01,  1.0879e-02, -1.7311e-01, -4.4890e-02,\n",
       "                        1.0956e-01, -3.1863e-03, -1.9094e-03, -1.0897e-02, -1.5639e-01,\n",
       "                       -4.1160e-02, -3.1898e-02,  3.4985e-02,  5.7801e-02, -1.1904e-01,\n",
       "                       -2.1978e-01,  1.1131e-01, -7.0503e-02, -9.7522e-02,  1.5660e-01,\n",
       "                       -7.1126e-02,  1.7174e-01,  1.5731e-01, -9.7398e-02, -1.9299e-01,\n",
       "                       -1.9837e-01, -8.7788e-02, -5.9420e-02,  1.0794e-01,  3.3105e-02,\n",
       "                       -5.2807e-02, -1.8341e-01,  2.7070e-02,  1.0308e-01, -1.8247e-01,\n",
       "                        8.5103e-02,  2.0152e-01, -5.2235e-02, -1.4297e-01, -1.8103e-01,\n",
       "                        1.4803e-01,  2.0857e-01, -1.5122e-01,  1.2652e-02,  7.9480e-02,\n",
       "                       -5.9984e-03, -1.8290e-01, -1.6767e-03, -1.1080e-02,  1.3188e-01,\n",
       "                        1.7090e-01,  1.5720e-01, -1.2351e-01, -1.9718e-01,  2.1137e-01,\n",
       "                        2.3453e-02,  2.2545e-01,  2.0252e-01,  2.2726e-01,  7.5889e-02,\n",
       "                        1.8772e-01, -1.7629e-01, -1.5619e-01, -1.7147e-01, -6.3084e-02,\n",
       "                        2.6058e-02,  2.4239e-02,  1.4313e-01,  3.8962e-03, -2.0782e-01,\n",
       "                       -2.0149e-01, -5.4164e-02,  4.4654e-02, -1.9477e-01,  1.5449e-01,\n",
       "                        7.9308e-02, -3.6068e-02, -1.5648e-01,  2.2759e-03,  1.7030e-01],\n",
       "                      [-2.9991e-02, -2.2705e-01,  1.7145e-01, -2.9422e-02,  1.4602e-01,\n",
       "                        1.1510e-01,  1.8990e-02, -5.2609e-02, -6.6298e-02, -4.5425e-02,\n",
       "                       -2.0003e-01, -1.2389e-01, -8.5538e-02,  1.6421e-01, -9.0985e-02,\n",
       "                        1.7211e-01,  6.5942e-02,  1.4468e-02, -1.5329e-01, -1.8709e-01,\n",
       "                       -7.3271e-02, -2.0425e-01, -2.8076e-02, -1.7562e-01,  1.9845e-01,\n",
       "                       -2.1367e-02,  1.7114e-01,  1.4816e-01,  1.4243e-01,  3.0386e-02,\n",
       "                        2.1341e-01, -4.5525e-02, -1.3781e-01,  4.9634e-02,  4.1241e-02,\n",
       "                       -2.2399e-01,  5.7363e-03,  2.2105e-01,  2.0825e-01,  1.7375e-01,\n",
       "                        1.1527e-01, -1.0097e-01, -8.7889e-02, -1.4827e-01, -1.6434e-01,\n",
       "                       -4.2110e-03,  1.7742e-01,  1.6537e-01,  1.3475e-02,  7.0818e-02,\n",
       "                       -2.1796e-01,  5.7811e-03,  1.2388e-01, -5.3341e-02,  1.6635e-01,\n",
       "                        4.4358e-02, -2.0234e-01,  2.2052e-01, -4.6368e-02, -2.1439e-01,\n",
       "                        2.4263e-02,  2.0012e-01,  1.3952e-01,  2.0522e-01, -1.6663e-01,\n",
       "                       -1.2809e-01, -1.6397e-01,  8.5605e-02,  1.7825e-01,  1.1019e-01,\n",
       "                       -8.8544e-02,  1.6262e-04,  1.8662e-01,  1.3521e-01, -1.9922e-01,\n",
       "                       -1.3878e-01, -9.1799e-02,  4.0166e-02, -5.1194e-02,  9.9907e-02,\n",
       "                        1.4578e-01, -1.2296e-01,  9.3981e-02, -1.9673e-01,  8.1969e-02,\n",
       "                       -2.1150e-01, -1.3068e-01, -3.1374e-02, -1.1627e-01,  1.1932e-01,\n",
       "                       -1.8725e-01,  5.0147e-02,  5.5646e-02,  3.8895e-02,  1.5157e-01,\n",
       "                        1.9588e-01, -8.6303e-02,  4.7362e-02,  2.1735e-01, -8.8787e-02],\n",
       "                      [-7.4263e-02,  7.6290e-02, -1.8038e-01, -1.7410e-01,  1.7118e-01,\n",
       "                       -2.1366e-01, -1.1210e-01,  4.9347e-02, -1.6331e-01,  5.6843e-02,\n",
       "                        2.0978e-01,  1.7506e-02,  2.1846e-01, -5.2232e-02,  1.3563e-01,\n",
       "                       -2.3245e-01, -6.9397e-02, -5.8145e-02, -8.4984e-02, -6.9660e-02,\n",
       "                        1.5358e-01,  7.7357e-02, -1.4974e-01, -1.1619e-01,  1.9652e-01,\n",
       "                       -5.7882e-02,  1.4613e-01, -6.3115e-02,  2.0687e-01,  2.3283e-01,\n",
       "                        2.0497e-02, -2.5554e-03,  1.7661e-01,  1.7899e-01, -1.1031e-01,\n",
       "                       -2.2701e-01, -6.9113e-02,  2.2158e-01,  8.5461e-02, -1.5047e-01,\n",
       "                        9.6826e-02,  1.9542e-01,  5.0789e-02, -1.9237e-01, -7.2443e-03,\n",
       "                        2.1985e-01,  1.5936e-01,  6.4650e-02, -3.9402e-02,  5.7860e-02,\n",
       "                       -1.3595e-01,  2.0288e-01, -1.7045e-01,  1.4096e-01,  6.6193e-02,\n",
       "                       -2.0719e-01,  1.5491e-01, -2.6570e-02,  1.1169e-01, -5.7096e-02,\n",
       "                       -2.9353e-02, -1.2944e-01,  6.7791e-02,  4.2900e-04,  2.0497e-01,\n",
       "                        1.0204e-01, -7.2713e-03, -1.9673e-01,  2.3106e-01, -2.0611e-01,\n",
       "                        1.9338e-02,  4.0214e-02,  1.9903e-01,  1.7263e-01, -9.4756e-02,\n",
       "                        9.2561e-02, -2.2890e-01, -1.6849e-01,  2.1686e-01, -3.0884e-04,\n",
       "                        2.1224e-01, -1.1394e-01,  1.4211e-01, -8.2183e-02, -1.2881e-01,\n",
       "                       -5.6105e-02,  1.5174e-01, -9.2037e-02,  6.9782e-02, -2.2286e-01,\n",
       "                       -2.9441e-02,  1.1537e-01,  9.8607e-02, -1.2886e-01,  2.1748e-01,\n",
       "                        1.0811e-01, -2.0807e-01,  1.3151e-01,  1.2300e-01,  2.3159e-01],\n",
       "                      [ 1.9210e-02,  1.1335e-02, -1.9350e-01, -1.1957e-02,  6.4381e-02,\n",
       "                       -2.0242e-02, -9.2263e-02, -7.5466e-02, -1.2704e-01, -1.4708e-01,\n",
       "                       -7.1039e-02, -7.4284e-02,  2.2408e-01,  2.6231e-02, -1.5638e-01,\n",
       "                       -1.6968e-01,  7.1786e-02,  3.8644e-03,  1.5956e-01, -6.3999e-02,\n",
       "                       -2.0153e-01,  8.4712e-02, -1.0420e-01, -1.6985e-01,  1.7007e-01,\n",
       "                       -1.0638e-01, -1.3774e-01, -8.8681e-02,  1.4759e-01, -2.3354e-01,\n",
       "                       -4.6697e-02,  9.0202e-02,  2.5946e-02,  1.6596e-02, -7.8663e-02,\n",
       "                        4.5020e-02,  4.1141e-02,  1.0706e-01,  1.3756e-01, -2.2362e-01,\n",
       "                       -1.8748e-01,  2.2157e-01, -9.4519e-02,  9.8063e-02, -8.4587e-02,\n",
       "                       -6.0335e-02, -3.1540e-02,  1.8626e-01,  1.6613e-01, -3.2851e-02,\n",
       "                       -1.8560e-03, -1.6469e-01, -3.3332e-02, -1.7372e-01,  2.2100e-01,\n",
       "                        1.0235e-01,  9.5474e-02,  1.2469e-01,  9.1852e-03, -1.9952e-01,\n",
       "                       -1.5851e-01, -1.6727e-01,  1.8337e-02, -1.4131e-01,  2.1368e-01,\n",
       "                        1.4792e-03,  6.9317e-02, -1.1324e-01, -1.9237e-01,  4.7051e-02,\n",
       "                       -7.1836e-02, -2.0590e-01, -5.2021e-03, -2.0975e-01, -1.7352e-01,\n",
       "                       -1.4237e-01, -1.5879e-01,  2.2735e-01,  2.4609e-02,  1.9735e-01,\n",
       "                        1.9354e-02,  2.3087e-01, -3.9759e-02, -1.5280e-01, -8.0957e-02,\n",
       "                        2.2464e-01,  5.3091e-03,  1.6281e-01, -1.8817e-01, -2.2428e-01,\n",
       "                        5.2132e-02, -1.4629e-01, -5.8597e-02,  5.4138e-02,  2.0672e-01,\n",
       "                       -1.2293e-01, -1.5538e-01, -4.4637e-02,  1.4567e-01, -1.9575e-01]])),\n",
       "             ('linear_relu_stack.Output Layer.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.state_dict()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.019027Z",
     "start_time": "2025-06-26T01:43:33.217395Z"
    }
   },
   "outputs": [],
   "source": [
    "from torch.utils.tensorboard import SummaryWriter\n",
    "class TensorboardLogger:\n",
    "    \"\"\"\n",
    "    Tensorboard日志记录类：记录训练过程中的损失和准确率\n",
    "    \n",
    "    参数:\n",
    "        log_dir: 日志保存目录,log_dir的父目录不要有中文\n",
    "    \"\"\"\n",
    "    def __init__(self, log_dir='tensorboard_logs'):\n",
    "\n",
    "        import os\n",
    "        \n",
    "        # 确保日志目录存在\n",
    "        if not os.path.exists(log_dir):\n",
    "            os.makedirs(log_dir)\n",
    "            \n",
    "        self.writer = SummaryWriter(log_dir) # 实例化SummaryWriter, log_dir是log存放路径（此处未传flush_secs，写入磁盘间隔使用默认值）\n",
    "        \n",
    "    def log_training(self, epoch, train_loss, train_acc):\n",
    "        \"\"\"\n",
    "        记录训练数据\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            train_loss: 训练损失\n",
    "            train_acc: 训练准确率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('训练/损失', train_loss, epoch)\n",
    "        self.writer.add_scalar('训练/准确率', train_acc, epoch)\n",
    "        \n",
    "    def log_validation(self, epoch, val_loss, val_acc):\n",
    "        \"\"\"\n",
    "        记录验证数据\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            val_loss: 验证损失\n",
    "            val_acc: 验证准确率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('验证/损失', val_loss, epoch)\n",
    "        self.writer.add_scalar('验证/准确率', val_acc, epoch)\n",
    "    \n",
    "    def log_lr(self, epoch, lr):\n",
    "        \"\"\"\n",
    "        记录学习率\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            lr: 学习率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('学习率', lr, epoch)\n",
    "        \n",
    "    def log_model_graph(self, model, images):\n",
    "        \"\"\"\n",
    "        记录模型结构图\n",
    "        \n",
    "        参数:\n",
    "            model: 模型\n",
    "            images: 输入图像样本\n",
    "        \"\"\"\n",
    "        self.writer.add_graph(model, images)\n",
    "        \n",
    "    def close(self):\n",
    "        \"\"\"\n",
    "        关闭Tensorboard写入器\n",
    "        \"\"\"\n",
    "        self.writer.close()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 设置交叉熵损失函数，SGD优化器"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.023837Z",
     "start_time": "2025-06-26T01:43:40.019952Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "损失函数: CrossEntropyLoss()\n",
      "优化器: SGD (\n",
      "Parameter Group 0\n",
      "    dampening: 0\n",
      "    differentiable: False\n",
      "    foreach: None\n",
      "    fused: None\n",
      "    lr: 0.01\n",
      "    maximize: False\n",
      "    momentum: 0.9\n",
      "    nesterov: False\n",
      "    weight_decay: 0\n",
      ")\n"
     ]
    }
   ],
   "source": [
    "model = NeuralNetwork()\n",
    "# 定义损失函数和优化器\n",
    "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，内部会做log-softmax，标签直接使用0-9的类别索引（等价于one-hot编码）\n",
    "# 用少量样本就能更新权重，训练更快，且更容易跳出局部最优\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)  # SGD优化器，学习率为0.01，动量为0.9\n",
    "\n",
    "print(\"损失函数:\", loss_fn)\n",
    "print(\"优化器:\", optimizer)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.035848Z",
     "start_time": "2025-06-26T01:43:40.032419Z"
    }
   },
   "outputs": [],
   "source": [
    "model = NeuralNetwork(layers_num=19)\n",
    "\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.001，动量为0.9"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.732814Z",
     "start_time": "2025-06-26T01:43:40.035848Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "使用设备: cuda:0\n",
      "训练开始，共43000步\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "33997aace42b41b3945726d8bf61562f",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "  0%|          | 0/43000 [00:00<?, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "早停触发! 最佳验证准确率: 88.6400\n",
      "早停: 在14000 步\n"
     ]
    }
   ],
   "source": [
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "print(f\"使用设备: {device}\")\n",
    "model = model.to(device) #将模型移动到GPU\n",
    "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
    "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
    "#tensorboard_logger=TensorboardLogger(log_dir='logs')\n",
    "\n",
    "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.737721Z",
     "start_time": "2025-06-26T01:45:37.732814Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 0.1920449286699295, 'acc': 92.1875, 'step': 13901},\n",
       " {'loss': 0.33454281091690063, 'acc': 87.5, 'step': 13902},\n",
       " {'loss': 0.28307265043258667, 'acc': 85.9375, 'step': 13903},\n",
       " {'loss': 0.2581883668899536, 'acc': 90.625, 'step': 13904},\n",
       " {'loss': 0.19168637692928314, 'acc': 90.625, 'step': 13905},\n",
       " {'loss': 0.2657345235347748, 'acc': 85.9375, 'step': 13906},\n",
       " {'loss': 0.4622153639793396, 'acc': 82.8125, 'step': 13907},\n",
       " {'loss': 0.23524995148181915, 'acc': 89.0625, 'step': 13908},\n",
       " {'loss': 0.24273128807544708, 'acc': 87.5, 'step': 13909},\n",
       " {'loss': 0.28135398030281067, 'acc': 89.0625, 'step': 13910},\n",
       " {'loss': 0.2109656035900116, 'acc': 90.625, 'step': 13911},\n",
       " {'loss': 0.2133648842573166, 'acc': 92.1875, 'step': 13912},\n",
       " {'loss': 0.17140793800354004, 'acc': 92.1875, 'step': 13913},\n",
       " {'loss': 0.22295674681663513, 'acc': 85.9375, 'step': 13914},\n",
       " {'loss': 0.29204127192497253, 'acc': 87.5, 'step': 13915},\n",
       " {'loss': 0.15159937739372253, 'acc': 93.75, 'step': 13916},\n",
       " {'loss': 0.17341791093349457, 'acc': 95.3125, 'step': 13917},\n",
       " {'loss': 0.11503103375434875, 'acc': 95.3125, 'step': 13918},\n",
       " {'loss': 0.2970981299877167, 'acc': 90.625, 'step': 13919},\n",
       " {'loss': 0.17302948236465454, 'acc': 92.1875, 'step': 13920},\n",
       " {'loss': 0.14247602224349976, 'acc': 95.3125, 'step': 13921},\n",
       " {'loss': 0.07167196273803711, 'acc': 98.4375, 'step': 13922},\n",
       " {'loss': 0.199247807264328, 'acc': 93.75, 'step': 13923},\n",
       " {'loss': 0.2275138795375824, 'acc': 90.625, 'step': 13924},\n",
       " {'loss': 0.1609216034412384, 'acc': 95.3125, 'step': 13925},\n",
       " {'loss': 0.33410054445266724, 'acc': 87.5, 'step': 13926},\n",
       " {'loss': 0.1891935169696808, 'acc': 92.1875, 'step': 13927},\n",
       " {'loss': 0.323361873626709, 'acc': 87.5, 'step': 13928},\n",
       " {'loss': 0.23144026100635529, 'acc': 90.625, 'step': 13929},\n",
       " {'loss': 0.1881040632724762, 'acc': 92.1875, 'step': 13930},\n",
       " {'loss': 0.1250060796737671, 'acc': 96.875, 'step': 13931},\n",
       " {'loss': 0.30219218134880066, 'acc': 93.75, 'step': 13932},\n",
       " {'loss': 0.34087732434272766, 'acc': 87.5, 'step': 13933},\n",
       " {'loss': 0.19860519468784332, 'acc': 92.1875, 'step': 13934},\n",
       " {'loss': 0.21868392825126648, 'acc': 93.75, 'step': 13935},\n",
       " {'loss': 0.3356245458126068, 'acc': 81.25, 'step': 13936},\n",
       " {'loss': 0.274794340133667, 'acc': 90.625, 'step': 13937},\n",
       " {'loss': 0.4412880837917328, 'acc': 85.9375, 'step': 13938},\n",
       " {'loss': 0.2979457974433899, 'acc': 85.9375, 'step': 13939},\n",
       " {'loss': 0.25694698095321655, 'acc': 92.1875, 'step': 13940},\n",
       " {'loss': 0.13487102091312408, 'acc': 95.3125, 'step': 13941},\n",
       " {'loss': 0.41155511140823364, 'acc': 85.9375, 'step': 13942},\n",
       " {'loss': 0.2948593199253082, 'acc': 90.625, 'step': 13943},\n",
       " {'loss': 0.20205891132354736, 'acc': 93.75, 'step': 13944},\n",
       " {'loss': 0.20172545313835144, 'acc': 92.1875, 'step': 13945},\n",
       " {'loss': 0.1850765496492386, 'acc': 93.75, 'step': 13946},\n",
       " {'loss': 0.1531810462474823, 'acc': 92.1875, 'step': 13947},\n",
       " {'loss': 0.2291867882013321, 'acc': 90.625, 'step': 13948},\n",
       " {'loss': 0.28142213821411133, 'acc': 90.625, 'step': 13949},\n",
       " {'loss': 0.27275997400283813, 'acc': 92.1875, 'step': 13950},\n",
       " {'loss': 0.13311077654361725, 'acc': 95.3125, 'step': 13951},\n",
       " {'loss': 0.15679563581943512, 'acc': 93.75, 'step': 13952},\n",
       " {'loss': 0.1431742012500763, 'acc': 95.3125, 'step': 13953},\n",
       " {'loss': 0.14335870742797852, 'acc': 96.875, 'step': 13954},\n",
       " {'loss': 0.22536493837833405, 'acc': 89.0625, 'step': 13955},\n",
       " {'loss': 0.3023344576358795, 'acc': 92.1875, 'step': 13956},\n",
       " {'loss': 0.15509217977523804, 'acc': 93.75, 'step': 13957},\n",
       " {'loss': 0.27479246258735657, 'acc': 90.625, 'step': 13958},\n",
       " {'loss': 0.18598772585391998, 'acc': 92.1875, 'step': 13959},\n",
       " {'loss': 0.12400900572538376, 'acc': 93.75, 'step': 13960},\n",
       " {'loss': 0.2675839066505432, 'acc': 89.0625, 'step': 13961},\n",
       " {'loss': 0.24239186942577362, 'acc': 90.625, 'step': 13962},\n",
       " {'loss': 0.16682319343090057, 'acc': 96.875, 'step': 13963},\n",
       " {'loss': 0.22095854580402374, 'acc': 90.625, 'step': 13964},\n",
       " {'loss': 0.304119735956192, 'acc': 87.5, 'step': 13965},\n",
       " {'loss': 0.18746735155582428, 'acc': 92.1875, 'step': 13966},\n",
       " {'loss': 0.15467260777950287, 'acc': 92.1875, 'step': 13967},\n",
       " {'loss': 0.13678903877735138, 'acc': 93.75, 'step': 13968},\n",
       " {'loss': 0.15788643062114716, 'acc': 96.875, 'step': 13969},\n",
       " {'loss': 0.22917549312114716, 'acc': 90.625, 'step': 13970},\n",
       " {'loss': 0.46108147501945496, 'acc': 82.8125, 'step': 13971},\n",
       " {'loss': 0.27329379320144653, 'acc': 89.0625, 'step': 13972},\n",
       " {'loss': 0.15019437670707703, 'acc': 92.1875, 'step': 13973},\n",
       " {'loss': 0.32665175199508667, 'acc': 85.9375, 'step': 13974},\n",
       " {'loss': 0.3228687644004822, 'acc': 82.8125, 'step': 13975},\n",
       " {'loss': 0.3042748272418976, 'acc': 87.5, 'step': 13976},\n",
       " {'loss': 0.19444748759269714, 'acc': 96.875, 'step': 13977},\n",
       " {'loss': 0.20738722383975983, 'acc': 92.1875, 'step': 13978},\n",
       " {'loss': 0.27506712079048157, 'acc': 89.0625, 'step': 13979},\n",
       " {'loss': 0.1260136067867279, 'acc': 96.875, 'step': 13980},\n",
       " {'loss': 0.2547965943813324, 'acc': 89.0625, 'step': 13981},\n",
       " {'loss': 0.37498730421066284, 'acc': 87.5, 'step': 13982},\n",
       " {'loss': 0.07643705606460571, 'acc': 98.4375, 'step': 13983},\n",
       " {'loss': 0.2946713864803314, 'acc': 90.625, 'step': 13984},\n",
       " {'loss': 0.3494671583175659, 'acc': 92.1875, 'step': 13985},\n",
       " {'loss': 0.16907908022403717, 'acc': 90.625, 'step': 13986},\n",
       " {'loss': 0.3107844293117523, 'acc': 89.0625, 'step': 13987},\n",
       " {'loss': 0.21454589068889618, 'acc': 90.625, 'step': 13988},\n",
       " {'loss': 0.47345536947250366, 'acc': 82.8125, 'step': 13989},\n",
       " {'loss': 0.14994101226329803, 'acc': 93.75, 'step': 13990},\n",
       " {'loss': 0.17124000191688538, 'acc': 95.3125, 'step': 13991},\n",
       " {'loss': 0.2530437111854553, 'acc': 92.1875, 'step': 13992},\n",
       " {'loss': 0.3102998733520508, 'acc': 87.5, 'step': 13993},\n",
       " {'loss': 0.2854682505130768, 'acc': 89.0625, 'step': 13994},\n",
       " {'loss': 0.24477991461753845, 'acc': 92.1875, 'step': 13995},\n",
       " {'loss': 0.22555792331695557, 'acc': 93.75, 'step': 13996},\n",
       " {'loss': 0.5213154554367065, 'acc': 79.6875, 'step': 13997},\n",
       " {'loss': 0.12342200428247452, 'acc': 95.3125, 'step': 13998},\n",
       " {'loss': 0.17078544199466705, 'acc': 93.75, 'step': 13999}]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history['train'][-100:-1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.741226Z",
     "start_time": "2025-06-26T01:45:37.737721Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 2.4819518405914307, 'acc': 12.66, 'step': 0},\n",
       " {'loss': 0.500704292345047, 'acc': 83.34, 'step': 500},\n",
       " {'loss': 0.45524956786632537, 'acc': 83.88, 'step': 1000},\n",
       " {'loss': 0.4207435910463333, 'acc': 85.24, 'step': 1500},\n",
       " {'loss': 0.40294141621589663, 'acc': 85.9, 'step': 2000},\n",
       " {'loss': 0.382917093706131, 'acc': 86.86, 'step': 2500},\n",
       " {'loss': 0.3821672885656357, 'acc': 86.82, 'step': 3000},\n",
       " {'loss': 0.36930070824623107, 'acc': 87.42, 'step': 3500},\n",
       " {'loss': 0.3622932946205139, 'acc': 87.44, 'step': 4000},\n",
       " {'loss': 0.35403671875, 'acc': 88.08, 'step': 4500},\n",
       " {'loss': 0.35214242371320725, 'acc': 87.72, 'step': 5000},\n",
       " {'loss': 0.3642540178775787, 'acc': 87.28, 'step': 5500},\n",
       " {'loss': 0.34922700033187865, 'acc': 87.76, 'step': 6000},\n",
       " {'loss': 0.34389526901245115, 'acc': 88.18, 'step': 6500},\n",
       " {'loss': 0.35518641440868376, 'acc': 87.36, 'step': 7000},\n",
       " {'loss': 0.34589322426319125, 'acc': 87.84, 'step': 7500},\n",
       " {'loss': 0.33901963777542116, 'acc': 88.16, 'step': 8000},\n",
       " {'loss': 0.3573957279205322, 'acc': 86.9, 'step': 8500},\n",
       " {'loss': 0.337330361533165, 'acc': 88.34, 'step': 9000},\n",
       " {'loss': 0.32784361191987993, 'acc': 88.2, 'step': 9500},\n",
       " {'loss': 0.3423274608373642, 'acc': 87.94, 'step': 10000},\n",
       " {'loss': 0.33546537046432495, 'acc': 88.22, 'step': 10500},\n",
       " {'loss': 0.3327456946134567, 'acc': 88.48, 'step': 11000},\n",
       " {'loss': 0.32960613915920256, 'acc': 88.64, 'step': 11500},\n",
       " {'loss': 0.3363837046265602, 'acc': 88.36, 'step': 12000},\n",
       " {'loss': 0.3314089200496674, 'acc': 88.04, 'step': 12500},\n",
       " {'loss': 0.32769620542526245, 'acc': 88.54, 'step': 13000},\n",
       " {'loss': 0.3279969143629074, 'acc': 88.42, 'step': 13500}]"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history['val'][-1000:-1]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 绘制损失曲线和准确率曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.744941Z",
     "start_time": "2025-06-26T01:45:37.741226Z"
    }
   },
   "outputs": [],
   "source": [
    "# 导入绘图库\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import font_manager\n",
    "def plot_learning_curves1(history):\n",
    "    # 设置中文字体支持\n",
    "    plt.rcParams['font.sans-serif'] = ['SimHei']  # 使用黑体\n",
    "    plt.rcParams['axes.unicode_minus'] = False    # 解决负号显示问题\n",
    "\n",
    "    # 创建一个图形，包含两个子图（损失和准确率）\n",
    "    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n",
    "\n",
    "    # 绘制损失曲线\n",
    "    epochs = range(1, len(history['train_loss']) + 1)\n",
    "    ax1.plot(epochs, history['train_loss'], 'b-', label='训练损失')\n",
    "    ax1.plot(epochs, history['val_loss'], 'r-', label='验证损失')\n",
    "    ax1.set_title('训练与验证损失')\n",
    "    ax1.set_xlabel('轮次')\n",
    "    ax1.set_ylabel('损失')\n",
    "    ax1.legend()\n",
    "    ax1.grid(True)\n",
    "\n",
    "    # 绘制准确率曲线\n",
    "    ax2.plot(epochs, history['train_acc'], 'b-', label='训练准确率')\n",
    "    ax2.plot(epochs, history['val_acc'], 'r-', label='验证准确率')\n",
    "    ax2.set_title('训练与验证准确率')\n",
    "    ax2.set_xlabel('轮次')\n",
    "    ax2.set_ylabel('准确率 (%)')\n",
    "    ax2.legend()\n",
    "    ax2.grid(True)\n",
    "\n",
    "    plt.tight_layout()\n",
    "    plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.816716Z",
     "start_time": "2025-06-26T01:45:37.744941Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHBCAYAAAChe85HAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAnC1JREFUeJzt3Qd4VGXWB/D/zKRMQhotBQi9914tSAdF7MpiQ9deF8uufuqKXdde0bVg74KuIIJU6V1675AQeur0+Z7z3pkhCUmYTCaZ9v89z2X6zE3ukHvPPec9r87pdDpBREREREQURvSBXgEiIiIiIiJ/Y6BDRERERERhh4EOERERERGFHQY6REREREQUdhjoEBERERFR2GGgQ0REREREYYeBDhERERERhR0GOkREREREFHYY6BARERERUdiJQghwOBw4dOgQEhMTodPpAr06REQRw+l0Ii8vDw0aNIBez3NjbtwvERGFwL7JGQL279/vlFXlwoULFy6BWeTvcLCYP3++86KLLnJmZGSodZsyZUqJxx0Oh/Pxxx93pqenO41Go3Pw4MHObdu2lXjOsWPHnH/729+ciYmJzuTkZOdNN93kzMvL83oduF/iwoULFwT9vikkMjpyxkzs378fSUlJlX691WrFzJkzMWzYMERHR1fDGlJN4HYMD9yOoSU3NxeZmZmev8PBoKCgAF26dMFNN92Eyy677IzHX3rpJbz55pv49NNP0axZMzz++OMYPnw4Nm3aBKPRqJ4zbtw4ZGVlYdasWeo7OX78eNx666346quvvFoH7pfIjdsyPHA7hue+KSQCHXdZgOxMfN2hxMfHq9fyyxu6uB3DA7djaAqm8qyRI0eqpbxyhtdffx2PPfYYxowZo+777LPPkJaWhqlTp+Kaa67B5s2bMWPGDKxYsQI9e/ZUz3nrrbcwatQovPzyy6oU4my4XyI3bsvwwO0YnvumkAh0iIiIvLF7925kZ2djyJAhnvuSk5PRp08fLFmyRAU6cpmSkuIJcoQ8X+q8ly1bhksvvfSM9zWbzWopfjbRfXAkS2W5X+PLaym4cFuGB27H0OLtdmKgQ0REYUOCHCEZnOLktvsxuUxNTS3xeFRUFOrUqeN5TmnPP/88Jk6ceMb9UuoiZ4F9JaVzFB64LcMDt2NoKCws9Op5DHSIiIjO4pFHHsGECRPOqA+Xen5fS9fkgGro0KEskwlx3JbhgdsxtLiz6mfDQIeIfCbjIWw2G+x2e6V2JnL23GQyVep1VD0MBoPaHsE0Bqcq0tPT1eXhw4eRkZHhuV9ud+3a1fOcnJycEq+T7/Hx48c9ry8tNjZWLaXJAVFVDoqq+noKHtyW4YHbMTR4u40Y6BCRTywWi+pa5W36uHhwJAeT0q0qXA6uQ52UXklQEBMTg1AnXdbk+zV79mxPYCNn/mTszR133KFu9+vXDydPnsSqVavQo0cPdd+cOXPU3DgyloeIiMIDAx0iqjQ5IJRB35INkA5VcoDsbdAir83Pz0dCQgInoAwwCTolYD1y5Ijanq1atQqJbSLfnx07dnhuy7qvXbtWjbFp3Lgx7r//fjzzzDPq53G3l5bv6SWXXKKe365dO4wYMQK33HILJk2apLKMd999t2pU4E3HNSIiCg0MdIio0uTgWAIWGaNQ2YHY8jp5vcxnEgoH1eEuLi5OlQDs3bvXs12C3cqVK3HBBRd4brvHztxwww2YPHkyHn74YTXXjsyLI5mbc845R7WTLv6zffnllyq4GTx4sPoeXn755WruHSIiCh8MdIjIZwxUwkOobceBAweqbFR5JLv41FNPqaU8kv3xdnJQIiIKTaG1dyMiIiIiIvICAx0iIiIiIgo7DHSIiHzUtGlTvP766355r3nz5qmSKxlTQkRERFXHMTpEFFFkfIe0HfZHgLJixQrUqlXLL+tFRERE/sVAh4ioGBnkLhOZyiSaZ1O/fv0aWSciIiKqvLAvXVu04yhG
v70Yn28P+x+VKKDBQaHF5vVSZLFX6vkVLRV13yrtxhtvxPz58/HGG2+oMjFZpB2xXP72229q8sjY2FgsXLgQO3fuxJgxY5CWlqbm/OnVqxf++OOPCkvX5H0+/PBDXHrpparttszj8ssvv/j8e/3xxx/RoUMHtU7yWa+88kqJx9999131GdI2Wdbziiuu8Dz2ww8/oFOnTqp9dN26dTFkyBDVcpmIAueXvw7hts9XIt9sC/SqEEWEsM/oyAHVlsP5aJLAGdiJqkuR1Y72T/wekM/e9NRwxMd496dMApxt27ahY8eOntbDGzduVJf/+te/8PLLL6N58+aoXbs29u/fj1GjRuHZZ59VgcZnn32G0aNHY+vWrWpSyvJMnDgRL730Ev7zn//grbfewrhx49QcNdLOuDJWrVqFq666Ck8++SSuvvpqLF68GHfeeacKWiRgk7lk7r33Xnz++efo378/jh8/jj///FO9NisrC2PHjlXrIUFXXl6eeqwyQSER+ZfV7sDEXzbiWIEFQzdk44oejQK9SkRhL+wDHWO0QV1a7IFeEyIKtOTkZMTExKhsS3p6urpvy5Yt6lICn6FDh3qeK4FJly5dPLeffvppTJkyRWVoZKLJ8kgQIkGGeO6559QklMuXL8eIESMqta6vvvqqmszy8ccfV7dbt26NTZs2qQBKPmPfvn1qfNBFF12ExMRENGnSBN26dfMEOjabDZdddpm6X0h2h4gCW2EiQY7YkZMf6NUhiggREOhoJWtWR6DXhCh8xUUbVGbFGw6HA3m5eUhMSvTLRJXy2f7Qs2fPErfz8/NVNmXatGmewKGoqEgFGBXp3Lmz57oEIklJScjJyan0+mzevFmVzhU3YMAAVSonY4gkKJMgRjJQEkTJ4i6ZkwBNgiQJboYPH45hw4apsjbJVBFRYPyy9pDnOgMdopqhj5SMDgMdouojY1OkfMzbJS7GUKnnV7TIZ/tD6e5pDz74oMrgSFZGyr7Wrl2rAgeLRTsjW57o6OgzfjcS3PmbZHFWr16Nr7/+GhkZGXjiiSdUgCPtqQ0GA2bNmqXGHbVv316V0LVp0wa7d+/2+3oQkXdl9L9vzPbc3nmEgQ5RTWCgQ0QRRUrXJCNyNosWLVIlYpIlkQBHSt327NmDmtKuXTu1DqXXSUrYJJAR0hlOmgzIWJx169ap9ZszZ44nwJIMkIwZWrNmjfq5JXAjopr3x+bDKLDYUTteOxGy91gBzDbW1BNVt4gpXbMw0CEiV6e0ZcuWqaBAuqmVl22RbmY//fSTakAgQYOMlamOzEx5HnjgAdXpTcYGSTOCJUuW4O2331ad1sSvv/6KXbt24bzzzlMladOnT1frJ5kb+flmz56tStZSU1PV7SNHjqjgiYhq3s+usrWxvRvj86V7kWeyYc/RQrRJTwz0qpEPpLHL6n0n0D4jWVUo+JPDob1350YpiIkKz3yEw+HElDUH0SI1AV0zU6r1s8LzN1hG/b7NKeUj7DhEFOmkJE0yIlLSJfPglDfmRpoBSAAhHc0k2JGxLt27d6+x9ZTP+u677/DNN9+oLnFSmiYNEyTLJFJSUlQgNmjQIBXATJo0SZWxSTtqGRe0YMEC1TVOMkCPPfaYak09cuTIGlt/ItKcLLRg/jZtnN4l3RqiZWqCus5xOqHdJvzy95bgti9W+b2b5XPTN+OKSUswaf5OhKOVe47jkncX4YHv/8KTv2ys9mPzCMjonI60zTYHYmMDujpEFGBy4C/ZkeLcwUPpzI+7DMztrrvuKnG7dClbWTs8GTPjjYEDB57x+ssvv1wtZTnnnHMwb968Mh+TwGfGjBlefS4RVa/fNmTDaneiXUYSWqclomX9BKzZd5KBTgj7buV+dblg2xH8vvEwRnTUunhW1bbDefhk8R5PueO9g1shXOw/XogXZmzBtHVZ6nZCbBSGd0iHw+mEHtU3BUxEBTommx1JAV0bIiIiiiQ/rz2oLsd0baAuPRkdNiQISYdzTVi8
85jn9jPTNmFgm/oljjd9ISe6Jv5vI+yuDMeGg6eQa7IiyViywU2oyTNZ8e68nfho4W5YbA7odcDVvTIxYWgb1E+s/uxD2JeuGfQ6RBu0SNHEjgREFCC33367GhNU1iKPEVH4yTpVhGW7j6vro7uUCnSY0QlJ//vrECT53qlhMjKSjThwoggfLNhV5feVrnyLdhxT43IkAJB4R8q8QpXd4cQ3y/fhgpfn4715O1WQM6BlXUy791w8f1nnGglyIiKjIyTKttptMFnZ4YSIAkPG18j4oLLImBoiCt+D4t5N66BhSlyJQGfXkXx1MCgnZCm0xueIK3s2Qu34GNzz9Rq8O28HLuveEI1qx/vcfvzpXzer67ef1xxH8s34evl+LNl5DIPapiHULN5xFE9P24zNWbnqdrN6tfDoqHYY0i7Vb1NCeCsyAp0oPfKY0SGiAJLuZ7IQUeR1W7vYVbYm5GBYztrLuOGDJ4rQuK5vB8dU8yQ4XXfglApOR3XKQN1aMfhi6V6VtXt++ha8M863hjXvL9iJgyeL0CDZiDsGtsTMTdkq0Fm6K7QyOruPFqhmCrM2HVa3k4xRuG9Ia1zXt0nAOsiFfemaiHXVTcoYHSIiIqLqtiMnDxsP5SLKdVDsJgfJzetpExTvOCKnYSnUsjnntKyHegmxKjvx5MUd1LiTaeuzsHjn0Uq/54EThaq0S/zfhe1Vu+q+zeuq2xsPncKpIitCwYd/7sKw1+arIEe+4zf0a4L5D12Am89pFtA22fpIyegIlq4RERFRTfjFlc05v3V91KkVU+IxmT9EcJxO6JBmAe5t6m4sIaSb3rV9m6jrE3/ZBJu9ctVDz07brLJ7fZvXwahOWve2tCSjCoZlnM4K1xivYO9C98y0zaq7oDRm+P3+czFxTEfULvW9D4SICHTckzmxdI2IiIhq4qD457/OLFtzkxbTgoFO6Fh/8BR2HS1QE9EP61CynfSEoa1ROz4aWw/nqVI2by3acVS1H5eMkGSGio9f6ePK6izddbrDWzCav+0IHv1pvbp+x8AWmDy+N1qmBs9EuBER6MQyo0NEREQ1ZO3+k9h7rFBNWj60/ZmDydl5LXTHWw1pl6bmgCkuJT4GDw5vo66/OmsbjuWbz/p+VrtDtZMWMoalbXrJpjT9WmiBzpIgDnQ2HDyFO79YBZvDiUu6NsDDrt9BMImIQMfd21xSg0REREQ1cVA8rEMa4mOiKgx0yppomIKLdMeTDnpiTNeGZT7nml6N0T4jCbkmG16eue2s7ymZn22H81UmSOaUKa1vszrqclNWLk4VBt84nYMni3DT5BUosNjRv0VdvHRFlxrvqOaNyAh0XBmdImZ0iKiKmjZtitdff92r58of/alTp1b7OhFR8JAxGr+6Zn8vPpajOGm3K+VKclAsrYQpuC3bdQw5eWYkx0WrMVdlkQH4E8d0UNe/WbEP6w+cKvf9juabVeZHPDS8LZLjz5wUNDXJiBb1a6n25Mt2B1dW51ShFTd+vFz9TtqkJWLSdT0C2nCgIpHRXtrddY1jdIiIiCKyLXC0QY/MOtXfyllKjeRAVs7Un9uqfrnHJbIuUt4mWZ3URGO1r1c4OF5gUWNWJMPiDUkw9GlWt8qTU7ozdNIsoKID+l5N66jgVp7/71824Mc7+peZ5Xj5963IM9nQoUESru6VWe77Sfe1nUcKVJvp0uOCfGGy2tVYoy6NUnwOTMw2O279fCW25+QjPcmIT8b3QpLxzEAtJAOd559/Hj/99BO2bNmCuLg49O/fHy+++CLatCm/Jm/y5MkYP358iftiY2NhMplQ46VrzOgQERFFFAk6LnproTqwk3a3cla+OrkPii/snKGCq/JIQwIJdHbm5KN/i3rVuk6hTg6uJy/ag7fn7ECe2Vap10qZ4PR7z/X5wF6Cg+kbsiosWyvukZHtVIvl1ftOYurag7i0W6MSj687cBLfrtyvrk+8uEOFE8ZKoPPlsn1+G6cz8X+b8PXyfSqj
+H+j2mFwJSfwdDicePD7dWreoMTYKBXkNHBNhBusKrXV58+fj7vuugtLly7FrFmzYLVaMWzYMBQUFFT4Opn1Oysry7Ps3et9Rwp/kA4ZoogZHaLqIbl1S4H3i7Wwcs+vaKlEffsHH3yABg0awOEo+bdgzJgxuOmmm7Bz5051PS0tDQkJCejVqxf++OMPv/2a1q9fj0GDBqkTRXXr1sWtt96K/PzTg5HnzZuH3r17o1atWkhJScGAAQM8fy//+usvXHDBBUhMTFR/U3v06IGVK1f6bd2IwtW0dVkotNhxstCK71ZoB5jVRQ6KZ2zI9uqgmA0Jzk7GL/22PgtDX12A53/booIcabssrZi9WRKNUer3++niPT6vw7ytR1T2RbIXvZtq42Yqkp5sxN2DWqrrMolofrHATAKFf/+yUe22Lu3WED3P8n7u+XS2ZOfiZKEFVVFksePntQc9E3v+/bOVuPajZdiclev1e7w4Y4saqyRzQ0m5mrTWDnaVyujMmDHjjGyNzPS9atUqnHfeeeW+TqLF9PSqp9yq2nVNzggQUTWQwOW5smvRS5P/jSn+/OxHDwEx2uR7Z3PllVfinnvuwdy5czF48GB13/Hjx9XftunTp6ugY9SoUXj22WdV5vmzzz7D6NGjsXXrVjRu3LhKqyknhIYPH45+/fphxYoVyMnJwd///nfcfffd6m+pzWbDJZdcgltuuQVff/01LBYLli9f7jnbNm7cOHTr1g3vvfceDAYD1q5di+jo4C0XIAoWclbdbfLiPRg/oCmiKsi0VMWcLTnqwLZhShx6NK5d4XM9c+kcYaBTXkevp37dhOWueWRSE2Px0PA2uLx7I+gryIIUJ4Htwz+uwxuzt2NMtwY+lQj+8tdBT5twbz9XJsn8dsV+lbF7a852leURU9YcxJp9JxEfY8C/RrY96/tIyZ0ExBKsSRZleBXK12ZtPqwC/sw6cbiocwN8tHA3Fu04hgvf/BNX92qMB4a1VpOgluezJXvw/oJd6vpLV3TGgJahkYWs0hidU6e0gVZ16lQckcrBQ5MmTdRZ1O7du+O5555Dhw7agK2ymM1mtbjl5mrRpmSQZKmsGIP2xSw023x6PQUH97bjNgw82QZypk3+T6vsiMMRsM4m7s/3RnJyMkaMGIEvv/xSZUfEd999h3r16uH888+HXq9Hp06dPM+fOHEipkyZgp9//llls93cP7u36yfLF198oUp2JaiRjE379u3x5ptvqgySlAVL0CJ/UyXQatasmXqtuyxYXr9v3z488MADaN26tbqvRYsWp3/+KpL3kJ9JtqsEUcXx/xuFsn3HCtWBpRyfJhqjVaeomZsOY1SnjGr5PPcZ89Fdzn5QzIxO2XJyTfjP71vxw+oDKvMhJ6tvO685bju/BWqVaut8Nlf0aIQvlu3FugOn8NKMrXj5yi6Ven2eyYo/Nueo6xd38e5knoiNMuCJi9rj5k9X4uOFu3F1z0wVtLwwY4t6/J5BrdSkoN7o17yu+o4s2XmsSoHOL67v5iVdG+KBYW3wt96N8cJvWzBtfZYqZ5NMjWSi5ESArH9xv2/MVpko8eCw1rise8lyvLAMdGTHeP/996vSio4dO5b7PNlRf/zxx+jcubPaib/88stqbM/GjRvRqFHZvyjZ6csBRmkzZ85EfHzlBxLuPyh/bAzYtXc/pk+v2bI58j8pm6TAioqKUllaOYkhmQe1N7prc2BWpsgGmLxPvV966aW477771N8Zydp8/vnn6j75WWSRcYfytyY7Oxt2ux1FRUXYvn2754SL/O2TgMV9+6yrV1Sknrtu3Tp1gkfe0/1aCark/VavXq3+lv7tb3/DyJEjMXDgQLVIhsedDb/zzjtVqdunn36qgjJ5zB0QVZVsQ1nPBQsWqMxScYWFhX75DKJAcJ+Nl7PP3TJT8OacHepMdnUEOrlFVszdcqTCbmtlBTqHc83INVmDekB3TZCyvw//3IV35+1UmQf37/HhEW1VhswX
EmzKOJhL312MH1YdwLg+jdHtLJm24n7feBgWm0N1P5PGAZUxuF0aLmhTH3O3HsHTv25C67REHMkzq/ExN53T1Ov3kfK1z5furdLEoScKLKoEr/h3U5phvDOuO27cc1ytnwSDEvh8uWyvykCN7JiuKgpW7zuBe79eo3bzY3s3xl0XaGV5YR/oyNnNDRs2YOHChRU+T8o0ZHGTIKddu3Z4//338fTTT5f5mkceeQQTJkzw3JaDgszMTDUeSGrTK0O3ewH6b38UF0TXxx+p/8aoUV0r9XoKHnJmWYKcoUOHsmQnwORAf//+/Woci9HoPiuV7NVrJXOQl5enxpoEouf+VVddpQKdP//8U43BWbJkCd544w31t+Wf//ynGpPz0ksvoWXLlmosjTxf1tP9t0eyPvIze/u3SN5DnhsTE6MCxOKvc8+fIRkeuV+CLvnb9/vvv+OXX35RJXRyvW/fvioTfuONN6oSu99++w0vvPACvvrqKxWk+WN7ynpKCfLp7anxNqAjCjby/2uqqzGAnI0/v019TJq/C6v2nlATenbN9GsRLX7flAOL3YHWaQlom372meElsJFyLGnRKw0JKnMAHm7b6X/rsvDib1tUxk10a5yCxy9qj+5++J3I71UyOxLoPPnLRky5c4DXJWg/F8uC+LK/kp9h4Y4FKtiZv00LNCTTUzpjUpE+zbWqqS3ZearrXJ1aMZVej982ZKtJPWWen5apiWd0ipt65wBVVvfS71uw/3gR7vxytRqPJAHZo1M2qHkoB7VNxdNjOgTlXDl+D3SkpvzXX39VZ//Ky8qURw5Qpc58x44d5T5HzrLKUtZrK32A67Sgfv4WtNBZMd3u5AFyGPDpe0B+JVkJ+WMnB/2yVIa71Mr9+pomWeHLLrtMjYPZtWuXyjr37NlTPbZ48WIVTFx++eXqtmR49uzZo7Irxde1Muvu/h1JqZpkYyRzIoGNkCBLHpOTP+73kyYDsjz66KPqJNE333yjThCJtm3bqkWCobFjx6r3c69rVchny89U1v8t/l+jULU5K0+V/Ei3reEd01VgISVlP64+oLI6b43t5tfPOz13jvcHxZLVkUBnR4QGOpLFGf/JCk9XsQbJRvxzZFsVmPrzgPrhEW1Uk4i/DpxSAc9VFbR0dsvJM2HRjqOe8Tm+aF4/ATed0wzvz98F6YgtwcIFbVMr9R4ybkaCZ5lcdPnuYxjRsfLZyJ9dAVt5mUYJ/C7v0QgjO6WrdX1/wU4s33NcLaJzo2T1/6W6xrZVJ31lo24JcqRmfc6cOT6VTcgBknQeysionvrYM0RrpW5xMHMeHSLyDOyfNm2aKquV626tWrVSLfRloL90OZNSMn+MgXF/pmRLbrjhBpUNl4YI0hjhuuuuU13edu/erbLZEvxIpzUpn5OSOQmCJDiSv73SlU0eW7RokWpoII8RUcUHd4PbpnrKwtwlQ9PXZyHrlJY98IdTFmCJa9B8ZcZytIzwhgQfLNilgpy4aAMeGNoasx8YWKlA0VvShOC+wa08ncNOFVm96tYnwYlk/prU9a7hTVlkPI6U3kkDAsnw+MLdfU3m06msQyeLVCMDIYF+ReJjovCPoa0x98GBqiucaFwnHh/d0KvS46NCMtCRcjUZUCvlElJ2IjXssshO2O36669XO2u3p556Su2w5cyp1KFfe+21akct3YZqMtCJh5ld14hIkRbP0kRFuqlJMOP26quvonbt2iqDIt3WpEuaNFDxVyZJytCky5uUzF1xxRWq89vbb7/teVzmKJMMjTQckPE48jf3tttuUw0Cjh07pv6+ymNSTidjecoay0hEWhvfX/46dMZZ7A4NklXbYZlw8tPF/huzu/qoTo1h6NGkdqUmJXUHOlK6FmmkTO3deTs8XbzuGdwKcTHel3RV1g39m6qxNscKLHjjj+1ez4fkzXiriiTERql5fOY9OFCNz/GFNCQQ0pCgsv7n+n/Qu1kdr+e8yUiOw2tXd8XCf16A6fedW+UJ
VwOpUuGZtDUVUsZR3CeffKLKPYR0Bipe0nHixAnVLlUCIjmAkJIMKQ+RMo4aEePK6OjMKOKEoUTkKtU6dEj7419c06ZNVba6uOLd1oSUsnnLPQbHTZoPlH5/N8nqSLa8LDK+R0rtiMg7K/YcR9Ypk5pHZWCbkqVCN5/TXJ0Zl05T9w5uqc5iV9Wqo3qfDopl0tBI7bz23LTNqtJGDsAv6lz9VT5Swvjv0R1w/cfL8emSPRjbOxOt0soeS7X3WIEaxyVDeWTi16pKjpeMou9lwH1cgc7Ww3k4lm9G3QraQJcXsF3ixWSnpTWqXfkGYCFfulbW4g5yhJRWSPtUt9dee01lcKRdtAQ7Ui4iY3RqTLGMDkvXiIiIwt/PrrPY0jnKGF0ySyClbE3qxqvypR9XHajyZ8nki/sLdGqG+8p2c3NndPYdL1TjVSLF4p1HVVtjCSSeHF1zA9zPa10fQ9unqYzek/+TiTvLnnD6F1dwIN36fJl7x9+kAYG7wYW7DM0b2w/nYVNWLqINOvV/IRKF3qiiynJNJBivM8NsKdk2lYjIVzIXj3SdK2upaJ4wIqpe0g5YxuAIGe9R1sDr8f21sTqfLNqjytyq4td12epyQIs6FU64WBYpCZKsk6zCnmMFiAQ2uwMTf9mkrl/btwnaV7Jtc1U9fmF7ld2RyTKlfXTZ3frcg/crnwWpLqfH6XhfvvaLK+A/v3V91PahW1s4CP9Ax5XREU6bKaCrQkTh4+KLL1ZNC8papAU0kT9tPHQKuyJ0wHpl/bn9CE4WWlUQ4T44LO3KnpkqwNh1tADztmkTQlalNbK42IcSJ8lkRNrEoV8s3atKsFLiozFhqDYBck1qXDdeTUAqnpm26YxM2qasPOw8UqB16+uQhmAhY8sqE+jId/Nnd3v1IArYalpotlCojOjTA690Nk58R0T+IQ1ZZCGqbjJ3xsVvL1IHLlf3ysSEoW1CenBwdXPPnTO6cwNVTlYW6SAlkx9K1y9pNT2orW8HtN+u2I/dxwoRrXdicLvKtQ0uPk5nzb6TERHoyPiSV2dtU9cfHNYGKfGByTLcMbCFKls8cKJItVO+b4jWkU24A9ch7VKRGESTuPZppgXt0mb6aL75rNnDNftPqpJI6fYmP0ukCv9AR2+AwxALvd0Mg9V/rSSJ6MzB9hSauB2Dm8znIWMKxNfL9+N/f2Wp2cnHD2h6xviTSFdgtmHWpmyvGgNc368JPvxzlyph2pKdi7bplSuhmrs1B/83dYO6PqSBQ3XX8kUkZXRenrkNuSabmrhSAs1AkQYUj17YDnd/tUZ1fru8R0OkJUSrEsJf12vfn4u7VEMWxG4DzLmA6ZTrMreMy1OAHK/G1wUS04HEDHVZOzED7dPiselwIZbtOn7WJgnucUbD2qf51nBDplY4uRewW7XGXjIUJLoWEBVaJXDhH+i4y9fsZsQ4Tao2NBQnPCIKJu5JJAsLCxEX5127Sgpesh0FJwcNTu5GOknGKDStVwvrDpxSc4F8tXwvHhnZTg0yDrXZyqvLrE2H1e9L2vjKJIdn6yg1smOGGhT/8cLdeOmKLl5/zoaDp3DXl6tVAHpptwY4P3afz+scKYGO/M6+WaH9niaO6VButq1C+TnAviVAlBGISQBiE1yXidqlVPF4+X/hwk4Z+LzZXjW4/7npm/HGVZ2xKxc4nGtWZY0XtK3v3TpJUCLrpZbDpRb3fTlA4THAWrXKol9hQE5sEmy/pQEbm7uCoAwgKQNIbgQkZwJJDWDTx+LXda722K75cCokAVbOJiB7PXB4I3B4A3B4E2AtY9yYPkoLeCTwkQAo2h0ExQPGZG09UhoDKU2AlExtnWQ7BUhkBDqyAUwntElDbQ4kMNAhqhKZ1yUlJQU5OTmeOWC8PdCSCTgtFgtMJlOJVvQUmEyOBDmyHWV7ynal4FNk0cYQSLna1DsHYMqag3jp9y3Yf7wId365Gr2b1lETEXY6y4F9JE0SKpN2evM3SWatl0BH
yt0eHtFWKweSg9JjO7QDSDloM5Q8AbD/eCFu/GQFCi12nNOyHp65uD3+mFn1QEfGC0ng5HUAINkBOeMu63p0G3BU5oZxAi0GAy0GAcYaGuQvB/qnDgKn9gOnDmiLJR9oNxpo3Nfzt+bfv0iXMy3T1qupNt7Ea0UngEVvAMverzhY0BnODIAkM5KQCiSkaRkS13VdQiomjmyGUe8dx/T12bimZ0NPm/BRHTMQ67QCJ/YDedlAXlY5l4e1DExlSVAQm6RtI7mU9XRfl2BBAraCoyU/qyAHeqcd6boT2u9j65Zy394RWwcfW5Jx1Fgf5+3sBhzLPB0IyWcd2aIFNNkS0GzQvkdlMcQAUXFawONwNfSSS/mZK/Nzx9VxBT+ZWgAk6yG36zQHUtuiOkVEoKNzt5iWuXQsdp/Ty0R0Wnq61qrSHex4S3Z4MsmwZIJ4Fjo4SJDj3p4UfEyuya5lMkXpGHZ5j0YY2SldjS14f8FOLN9zHKPfXojLuzfCwyPaIC0p8O1wAzX+Y8H2o+r6xV7OZ9M9MxmjMvKQmLMShz/7AvVsm4Dju04/QacHkhoBtZuopSghE5+tMCOzIBktU5vhvXHdEBNVtb9jklmSge/SLe7AiUI0qVtqUsnC41oQc2y761ICm+3aejqsZ77h6s8AfTTQdADQegTQerh2QFmVQObIVuDE7tOBjFoksDkIFGq/8zMseRvI7AP0vxdTizpj1d4TaryIZCG9Zs4Hlk0CFr15+sC6XhstEJBgSh63uBbhtFfqIFwOsTfHxeGQLQn539XFXVY9nog5hRbb8oBnT3q/noZYLZDyBFRpJW/LIgGXBDESaJQKnr3isOPU0YO49vWfkYoTeG1UOpKsR4HcQ9ri3i7WAsSYj6OzXtpQ7waWL/fu/RMbAOkdgbQOQJpcdgTqtgQMrmNmm0ULeCyFWrBpKXBdFp6+XwIw+V6c3KcFTyf3A6aTQNFxbclaW/IzG3QDbp2H6hQRR/zO6DjInyGV0YmgPvVE1UmClIyMDKSmpsJqLWNnWw557oIFC3DeeeexVCoIyDZgJie4mV37LWPU6e0kNff/GNoa1/TOxEsztqosz4+rD6i2yref3wK3nte8WmeZD0bys0tGpFPDZLRwTcR5BpsZOLRWK3/avwy6fUvxrhyAyZ8izzkbnXbGuSAHkG6tp/Zpy54/IYW6/ydPkXHguQBejYczpTF6W2pBP3c1kNYOqNdaW7ws15EMTvN6tbA/Owc5m/5Ek/hsrWxISolyNpcfSAgp4ZKDUVnqtdIONrfNAI7vBHbN05YZ/9KCAwl4JPCR4MN98Fqcww4c3w3kbCz2+a7Az3mWeQillEllDFyL/J43/qR+x/h2HLqjAcYaRqLp+TcjPdmLQFxev/IT4M+XgYIj2n2p7YFBjwNtRp5ZnibjSeRg2x34mPNOX0pmxFNCln36umRjbEWIdRShmb4IsB8+3YvY4t44sVpZmGucTIlLFdDIZaoWwFT3iTu9AcmpjWFP64rZWbmYn9gNo7uUCuidTpjyjmHcKz8ixXoYT5ybhCZRx4sFpwe1wEO+LxLIFA9s4s+SZZOxObLE1a7cest4pJOu4Kd0ECSfW80iItBRNYSuSUPNrjNjROQfcpBcmQNlea7NZoPRaGSgQ+SFInegU0bjgYzkOLx2dVfc0L8pnv51kzpr/tof2/Dtin346MZeaJdRs3OU1Ag5WDuwQiu9sVu0g3CnAyl/HcQTUUXoE5cCTJuqlXG5HlMH8ZIJObhajdktzhkVhzX25lhkbYWu/Yfj3AtGAfIecvAswc6JPXAc34PpC5agKGcXmkUdQZeEU4jOz1JntHVHtkANC1+8quR6SiaovgQ9bYpdttHKkyQ7UyyY+CJ/LeoZs4HZ5fzMSQ1dwYy8T6vTgY18RukS4BHPAUd3aAGPLBLUHd2qLYvfBIwpQKuhQPOBWrbIHdBI1qa8aTik9Eg+TwLA4gGNe5H3LH2gP3SiKjUrWvJfNLEf
wvPRH8G5+hcg6jag181lH1hLOd5fXwPzXgByXZO51m4GXPB/QMfL1MF+meR3IJkSWbwldXQSDOXnYNbytZj651oYYUHX9q1x3dB+WkAjB/VBVnkgLdNlElBpM31GoKPTYfYeK1aZG6FhSitkDr8AalbWQJIgMF2W6g9qIjfQkTMNqnTN5BnUSUREFArc+62KOqx1zUzBD7f3w6/rsvDCb1tw8GQRxn+yAj/d2R8NUkK4YYhkKKTcRQKbAyu1JU8bZF3aaPdRjRwfu46Ry1SrvpbVaNxPjSHRpXfG8kX78cpvW9B2WyJ+G5msqkDUwbM6e5+O59Yl4cNDiWqG+U+v643oFvW0rMOpA7Ad2YHNf/6C9mlRMKiysq1aFkIO1GXZOafUCsi7l+x0WM91eSqqHpKbdNayF3KmPdWVIXJNfu61ehII3Q30vxsoOgnsnA1s+x3YPlMrL1r/vbaUJuMxZMxEquuz09pr1yVrUdkD/sR07OryAC6b2xGXYS4eSp6NuIJDwNxngIWvAd2vA/reqZUFSlC5aSow91ktIFWvbwCc/zDQ7VrfSr3ORn4eV3A0aHhzvLsrDesOnMRNA/sDaZUcQ1SD+rWoi48X7caScubT8YxT69pAlbpGuggJdLSMjkTq7jNjREREodSMwBitP2s5qZzhPa91fVzx3mJsz8lXwc53t/dDclwAsqdyxlwOWmWMh3RqUovBtbhu60rdlrEGB1e6ApsV2mBpGXdR4gc1aEFAg65adkSnw+r9uVi6+wQapsRhTLdMbWxNiUUOnDO04EbGq5Q6aB/bqzHe+GM7tmTnYcnOY+jf0h16QHVk+3DhbnX95Su7oL8EOSIqFqjbAs6kxti11YS2I0fB4M5Sq3E127Qsiedyq1a2I0GOrLcEMyqY6IDF+am4c2YRmjXOxJTrBvh3O0h2quPl2iKZLfm9qkzPUq38yh1QyfrUblp+1sQHkmU8aTdiV5vrYbzuVS2YkfE2h9drY2+W/xdof7H2PZGOX2p96wDnPqBlfYrNhVidpHzw0xt7Ysq039E6LbjnR5PmI/L13XWkADm5JqQWG5N3qtCKeVu1Ur9LIniS0IguXeMYHSIiCsVmBN7OmSNBzeSbeuPSdxapGehv/3wVPr2ptxrwXiNytmjZgg0/qLKvKktIBzJ7AY16AQ17agFOqQzHo68vwBZbHl48vxPQq/LzsyTHR+OKHo3w+dK96my5O9D5bX0Wnp62SV3/54i2GOPtwaOUZUnHMVfXsRIZKhmzIJmiYsFWnexcnJz5p2oxLQ1bqq1RiwQxZa1XNZi9+TDmbj2ismBPXNQeOhnf0fkqoNOVwK65WsAjlxunaC+ISQT63wP0vaPmOsYVI2PaUkJgHl75rnZokIQNB3NVVqf4d3LGxixY7A60TU9Em/TgDthqSkQEOlJ/6+66xtI1IiIKJe79VlwlJgeVzMYn43vhqklL1MHQwz/8pcbyVNsB9Im9wIYftUXa1RYfLC/jN6QlrWRmJKMg192XpbM18vyMrkCjnq6llzY+pYL1lsk+JRMTY9BjRIeKJ1GsiEzAKoHO7C052H20QHVxu//btSoxdW3fxrj9/Cp0LnNTEy9qJ1+Lk3l/pMooz2TDkTxzibP0oUjGQz/16yZPC+/mxZtDyLaU9teyZK0D1nyujYXpc/vZB8ST0rdZXRXoLN11vESgM3XNoUp1HYwEERHouP+osOsaERGFGvd+62yla6V1aJCM967tgZsmr1DzxMhYHZkrxm+ke9XGqVrmRrpruUlr45ZDgE5XaB2yKhpfIlGEahYgwY9N63JVVkewCvzsmgF+YJv66my3r+RgfFDbVMzZkoNnp23Gyr3HYbY5MKRdGiZe3LFa2+HHRhnQuE489hwrVFmdUA90Plq4G3uPFSI1MRb3DGpV/hMzOgMZ/6nJVQsL0pBAyimXFRunk33KhKW7tdujOzPQiaxAxz2PDkwco0NERCEZ6FQmo+Mm43Wev6wTHvph
Hd6dt1MFO9f2beLzuhw8eACWTdPRLPs3rXWxp+2wDmh6jhbctLvY+zPzEjy4x+mons2V43A48Ysr0PG6rKwCN5/TTAU6f2w+rG53yUzBW2O7eT+JZxXIxKEq0DmSX2KMUKiRA+6352gNBR4Z1ZZzF1aDXs3qqAygTDJ7ONek5s76dd0hdd6gZ5PayKxzZtYwUkVUoBOns6CQgQ4REYVgoBPrQ6AjruyZiUMnTart9BM/b0B6khFD2qd592KZe2PvEph3LUTulvloaNIG5Xs06K4FNx0u0+YbqWGr951QHebkYHpwu9Qqv1//FnXV+AYphWtSNx4f3dCzxuYjapGagD8256iMTih7/rfNKLTY0b1xCgfEVxMZhycZ2/UHT6k20xLkuzObY1i2FsGBDsw4zjE6REQUQoq8aC99NvcObolDJ4vw7cr9uOfrNfj61r6qJfWZXdJ2AnsXaXOvyKXqEqblWuq7nrbFkYlpjr7oceHfMbBf9Q9qr4j74G54h/Qq/X7cpDztucs64Yule3Hf4Faol1Bzo9NbusaxhHKgs2LPcbVNJFFX3eV+kU7aTEugI10COzbUgp4ovQ4Xsmwt8gIdp6d0jWN0iIgockrX3OSA85lLOyI714T5247g5snaHDtNpDHTX98Au+cDexefnoXexQ49NjiaYoWjDQ4kdsXQEWPw+x47PluyFzHTTuDLjOPo1TQwA8itdgemrc/y+1ns7o1rq6WmSelaKAc6docT//55o7p+Ta9MdGqUHOhVCmt9m9fBBwt2qYyOlK6Jc1vVQ51aMYFetaASWc0IdGaO0SEioohoRlBatEGPd8Z1x9XvL8HuQzmY8f6/cIthGvRFxSYeNMSiKK0rZhW0xPdHMrHa0Qox8UmYMLQ1HuvdGFEGPfp2ciLrlAmzNh3G3z9diR/v6O85SK9JC7cfxfECC+olxKiSs1AnpWsiJ8+MXJMVScYAzH1UBV8v34dNWblIMkbhwWFtAr06Ya9nU22cjozr+nLZPr+NUws3EdeMgO2liYgo0jI6bgnOQnzbbhFsx99GiiVP3eeo3Qz6btciN603XtuUgM9WZKuz8zL/yQ3nNMU9g1uVmHBUBua/eU03/O3DpViz7yRu/GS5yg6lJtZspzD3DPAXdW6gArBQJ4FNWlIsDueaVVYnEFklX50stODlmVvVdQmK69ZgyV+kku9Lp4bJ+OvAKRzNN6sTIUO9HXsXQSIq0ImDxTPxGhERUShwn6Cr0hgUmaRy2fvAkneQYDqp7tqLDLxhGYOi2pehh64+3vx6O3JNWvAzrH0aHhnVTs3vUhYZoP/h9T1x+XuL1RllaWH97a39UKuGOmwVWmyYuelw2M0ZIpmxUAx0Xp21DScLrWiTllilrn5U+TbTEuiIoe3Ta+z/XyiJjN9IdJyndI1jdIiIKJS4S65jfSldKzoBLJ0ELH0PMGsHRKjXGjjvIRyudQF+/XglLJuO4rdNR9VD7TKS8PiF7bxqbyxn7SeP743L3lusJi+866vVKvip7uyKxebAW3N2qM5eMvdMt9JNFUKYNCRYtOMYdvowTke6nU13jVnyhjHKgHsHt8LoLlULFDcdylXNG8S/L24fFtm1UNG3RV28v2CXuj6mitsxXEVEoOOM1s5IsRkBERFFROla4XFg6btaFsecq91Xv60KcNDhUjVvTW85E391F9z3zVrUjo/Bg8Naq1bUlZkzpmm9WqoF89j/LsW8rUfw2NQNat6e6ui25XQ6VRbn+embVRZJjOvTOKw6e/nakGDxjqN4f752wFsZ93+7FonGKAxsk+rzNnnyfxvhcAIXdspA/xahO/9PKOrdtI6alFUyrDJnFkV4MwIt0OEYHSIiCsVmBBUEOgVHgay1QNZf2rJjDuAag4PU9sD5DwPtxgD6kmfbZXyLdE2Ten9f54vp1rg23hrbHbd9vhLfrNiPhilxalyPP208dArP/LoZS1wzwddPjMVDw9rgih6NEE7cDQlk0tDK
dJ+TYEPI70OCP298ungPpq49hLu+XI1vb+unWhRX1q/rsrB893E1PuTRC9tV+vVUNVKqNmvC+aqdd0wUM2mI9DE6sTorzBZLoNeGiIjIa+4TdJ6MTm7W6YBGLWuBXG1gfglpnbQAp+1FZwQ4JZ7mak1bFTIIeuLFHfD4zxvxyqxtSE82quxQVeXkmfDK79vw3ar9apofOZi75dxmuGNgSzVJaLhxZ3T2Hy9UAa4347KkbGzb4XzUjo/GYxe2Q0q8d+2FZcLJI/lmVSo3XtqN39EfmXW04yVvx0k9N32zun7nwJYqwKWaV7xRCJ0p/P5KVBDoCKdFS3cTERGFAjngHWf4A+nTPgaOrAfytUH4Z6jbEsjooi0NewJN+ssEOjW2ntf1a4qDJ02YNH8nHvlpvQqgfC2nkZ/5o4W78e7cHSiwaBktGUvyzxFt0Ki29wfjoaZ+Qqxqz5xrsmH30QI1Zqoix/LNqhGAeHB4G6+DHCFB43vX9sBVk5ZgS3aeCnZ+uL2f1+/x7tydqs14o9pxuPW85l5/LlFNioxAJ8oIJ3TQwQlYiwK9NkRERF6XJdV3HMWzxo+B3a47dXqgXhugQdfTgU1aR8BY8UFxTXh4eBscOlmEX/46hDtVSVRflTmozJgPKYd64bctOHhS2193yUzBExe1Q48mgZmYtCbJeCPJ6qzed1KN0zlboCMtnfNMNnRokIRrenlXslaclCx+Mr4XLn1nsfq8Wz9bhc9u7n3WTNLeYwVqskrx2IXtq9YRkKgaRUago9PBqotBjNMMvbUg0GtDRETkdWYjRaeN13AaU6Ab9wOQ1sEz9jTY6PU6/OfKzqrkbOmu4xj/yQo8MKw19F5klhxOJ75feQAr955QtzOSjfjniLa4uEsD9b6RonigU5F1B06qMVFCygYr00SiuIzkOEy+qReufG8Jlu85jge+/wtvXdOtwt/5M9M2w2J34NxW9TC8A+duoeAVGYEOAJs+FjF2M3RWlq4REVHotJaOg1m7EVcbyOyFYBcbZcD71/XElZMWq7Ej//xxfaVeL2ORbj+/hSqH8rVBQlh0XqugIYHD4cSTv2xU45Yu6doAPZtWLdvVNj0J71/XAzd8shzT1mWhQbIR/3dh+zKfO3/bEczadBhReh3+Pbp9WHW9o/ATUYEO7IDBztI1IiIKDWarA0ad1kRHV2y8aSgMkP70pt54bdY2HMlzBWpekHlxpNGANDOIVO5Ap6K5dKauPaiyPvExBvxrpH+6ncncSf+5ootqOf3fP3ejQUocxg9odsYcRhNdHd5u6N8ULVMT/fLZRNUlsgId2VFwjA4REYVQ6Zono+Oa/DpUSEnUS1d0CfRqhJyW9bXgYdfRAtgdzjNK0vJMVjz/2xZ1/Z5BrfwaFF7SraEaG/Wf37fiqV83qfLBER0zSrSk3nWkAPUSYnDfEP+2ECeqDhHTdNuh17qIRDmK1GBHIiKi0Chds4RkoEO+aVg7DrFRepU9kTbTpb09Z4fKkjWtG4+bzmnq98+/c2ALNRePHCrJZLKr9h5X98u4qzdmb1fXHx7RVjUyIAp2ERPo2F0ZnTinWQ2gIyIiCoU5dOJ07oxO6JSuke8kg9O8vmucTqnytZ1H8vHxIq393hOj26vxUP4mY26kucHgtqkw2xz4+6crsetIPl78bSvyzTbVBe+K7uE1USuFr4gJdBwGV6CjM3smXyMiIgr2jI6RGZ2IU1ZDAqlGeep/m2C1OzGobSoGta2+bmdRBj3e+ls3dGmUjBOFVoz971L8uPqAekyCoEjqgkehLeIyOvEwqZpnIiKi0Bqjw4xOpGhZRkZn9uYc1fEs2qDD4xeV3RHNn+JjovDRjb1Ug4jDudp38MoejdA1M6XaP5vIXyIu0JFaZwY6REQUOoEOMzoRm9FxBTryPZDmAOLmc5qjWb1aNbIe9RJiMXl8L9StFYM6tWLU2ByiUBJBgY7WjICla0RE4c1ut+Pxxx9Hs2bNEBcXhxYtWuDp
p58u0YhGrj/xxBPIyMhQzxkyZAi2b9cGWgddoONqL81AJzJbTMt39aOFu7HveCHSkmJxz6CWNbouMl5ozoMDMeeB81E/UTtpTBQqIrB0TQIdZnSIiMLViy++iPfeew9vv/02Nm/erG6/9NJLeOuttzzPkdtvvvkmJk2ahGXLlqFWrVoYPnw4TCYTgomcmDOydC3iNK0XDxkGk2e24a8Dp1SnNfHIyHaoFRsVkHmRUuK1E8ZEoSRy5tFxNyOASQ3uJCKi8LR48WKMGTMGF154obrdtGlTfP3111i+fLm6LWfIX3/9dTz22GPqeeKzzz5DWloapk6dimuuuQbBQvZXtVm6FnGkm1qTurWw+2gB7v9mjfoe9GhSG2O6Ngj0qhGFlKiIy+io0jUGOkRE4ap///744IMPsG3bNrRu3Rp//fUXFi5ciFdffVU9vnv3bmRnZ6tyNbfk5GT06dMHS5YsKTPQMZvNanHLzc1Vl1arVS2V5X7N2V5baLKigau9tN0QC4cPn0XVy9ttWVnN68WrQGfPsULodMDjo9rAZrP59TOo+rcjVQ9vt1PEBDq2Es0IOEaHiChc/etf/1KBSNu2bWEwGNSYnWeffRbjxo1Tj0uQIySDU5zcdj9W2vPPP4+JEyeecf/MmTMRH+97SdmsWbMqfHzjXj06uTI66zbvwL4j033+LKpeZ9uWlZar94ww6JfqwN61C7F3rX8/gmpgO1K1KCw8czLdiA50TnddM+OEjRkdIqJw9d133+HLL7/EV199hQ4dOmDt2rW4//770aBBA9xwww0+vecjjzyCCRMmeG5LIJWZmYlhw4YhKSnJp7ORckA1dOhQREeXP8P8ymlbYDyiZXQ6de+Njh1H+bT+VH283ZaVZVl7CLN/3IAkYxReG3+O6npGobcdqXq4s+pnE0GBToyndO2QhYEOEVG4euihh1RWx12C1qlTJ+zdu1dlZSTQSU9PV/cfPnxYdV1zk9tdu3Yt8z1jY2PVUpocEFXloOhsr5fJId1d16KMifICnz+LqldVvwulje7aCBsO5WF4h3SkpdRMO2ny/3ak6uHtNtJHWukau64REYV/SYNeX3L3JiVsDodWtixtpyXYmT17domzg9J9rV+/fggmRSUmDGUzgkhijDZg4piO6N+yXqBXhShkRVBGRwt0pE2nycYxOkRE4Wr06NFqTE7jxo1V6dqaNWtUI4KbbrpJPa7T6VQp2zPPPINWrVqpwEfm3ZHStksuuQTBRE7MGT1d19hemoioMiIu0JHStSKWrhERhS2ZL0cClzvvvBM5OTkqgLntttvUBKFuDz/8MAoKCnDrrbfi5MmTOOecczBjxgwYjUYEE2meIxNdK8zoEBFVSuQFOiqjw0CHiChcJSYmqnlyZCmPZHWeeuoptQQzrXSN8+gQEfki4sboSK2zme2liYgoBJg5RoeIyGf6SMvoROvssJpNgV4dIiKiszJZrDDqXBPjcYwOEVGlRFxGR9gtBQFdFyIiIm84rK5sjmBGh4ioUiIm0HHqo+DQaUOSnAx0iIgoFFiLzf4dxUCHiKgyIibQETaD1k3HaSkK9KoQERGdnU3bXzlk/1VqbiAiIqpYRP3VdBi0s2E6KzM6REQU/AxW14k5lq0REVVaRAU6dlfaX1e8FICIiCgI2R1OGBxa8xwnAx0iokqLqEDH6epYo3OfISMiIgpSpmKtpXUMdIiIKi0iAx2DnYEOERGFQKCj0yYL1cWwtTQRUWVFVKDjrnE22Fi6RkREwa2oREaHgQ4RUbUGOs8//zx69eqFxMREpKam4pJLLsHWrVvP+rrvv/8ebdu2hdFoRKdOnTB9+nQERHQtdcGMDhERBTuT1QEjtIwOmxEQEVVzoDN//nzcddddWLp0KWbNmgWr1Yphw4ahoKD8LmaLFy/G2LFjcfPNN2PNmjUqOJJlw4YNqGl6V+o/2q4N7iQiIgqF0jUwo0NEVGnaDJpemjFjRonbkydPVpmdVatW4bzz
zivzNW+88QZGjBiBhx56SN1++umnVZD09ttvY9KkSahJulgtoxPtMMHhcEKv19Xo5xMREfnSjIAZHSKiag50Sjt16pS6rFOnTrnPWbJkCSZMmFDivuHDh2Pq1KnlvsZsNqvFLTc3V11KBkmWyvK8JlqbMDReZ0Z+kRlxMYZKvxcFjns7+vIdoODB7RhauJ0CW7oWx9I1IqKaD3QcDgfuv/9+DBgwAB07diz3ednZ2UhLSytxn9yW+ysaCzRx4sQz7p85cybi431P3+8/lIM2gDpD9r/ffkdCtM9vRQEkGUEKfdyOoaGwkM1bAtmMwKhzZ3RYukZEVGOBjozVkXE2CxcuhL898sgjJbJAktHJzMxU44GSkpJ8OiMpB1XNWncADv8P8TCj38BByEjWMjwUGtzbcejQoYiOZpQaqrgdQ4s7o06BKl1jRoeIqEYDnbvvvhu//vorFixYgEaNGlX43PT0dBw+fLjEfXJb7i9PbGysWkqTg6KqHBgZjInqMl5ngs2p40FWiKrq94CCA7djaOA2CpYxOszoEBFVa9c1p9OpgpwpU6Zgzpw5aNas2Vlf069fP8yePbvEfXI2V+4P1ISh0q5Tap+JiIhCo+saMzpERNWa0ZFyta+++go///yzmkvHPc4mOTkZcXHaH+Hrr78eDRs2VONsxH333Yfzzz8fr7zyCi688EJ88803WLlyJT744APUOFd7aSldk9pnIiKiYCUn5Gozo0NEVDMZnffee091Whs4cCAyMjI8y7fffut5zr59+5CVleW53b9/fxUcSWDTpUsX/PDDD6rjWkUNDKpNVJyn65qZgQ4REQUxOSHHMTpERDWU0ZHStbOZN2/eGfddeeWVagm4GG0eHal5PmFjoENERMGL8+gQEdVgRifkuVL/8TBxjA4REQX/PDqeMTosXSMiqqyICnTczQhkx1FkYUaHiIiCfB4dZnSIiHwWUYGOuxmBlAKYWLpGRERBzFxijA4zOkRElRVZgY67GYEEOszoEBFRsDcj0DGjQ0Tkq8gKdFzNCPQ6J6zmwkCvDRER0VmaETCjQ0Tkq8gKdIrtKBzmgoCuChER0dmaEXCMDhGR7yIr0NEbYNPFqKt2BjpERBTELBYzYnSuMmsGOkRElRZZgQ4Aq0HbWTgtLF0jIqLg5bQWnb7B0jUiokqLuEDHbjCqS6eFGR0iIgpirhNyTp0eMGjVCERE5L2IC3RsnowOAx0iIgpiNi2j45ATdDpdoNeGiCjkRFyg43C1mNYVLwkgIiIKMjpXoOPk+BwiIp9EXqDjqnPW2ThGh4iIgpfnhJzrBB0REVVOxAU6zigt0NEz0CEioiDlcDgR5TBpN9iIgIjIJ5EX6LhKAPSukgAiIqJgY7adnkNHF8OMDhGRLyIu0NHF1FKXUQx0iIgoSJmsdhhhVdf1MczoEBH5IuICHbh2GFF2BjpERBScTDY74twZHZauERH5JOICHb07o2N31T4TEREFmSKLHXE6i3aDXdeIiHwSeYFOrBboRDuY0SEiouBksjo8GR02IyAi8k3EBTpRrkAnxt3NhoiIKAhL14xgRoeIqCoiLtAxGLVAxwgTbHZHoFeHiIjoDCZVusaMDhFRVURcoBNtTFCX8TDDZGOgQ0REwdqMgBkdIqKqiLhAJ8oV6MggTxnsSUREFGyKLByjQ0RUVRE7j47sQGSeAiIioqCcR4dd14iIqiTiAh33DkNK18w2BjpERBTc8+gw0CEi8k0EBjqujI5OMjoco0NEREE6j45njA5L14iIfBF5gU6MtsOQM2VFLF0jIqIgZLY5inVdY0aHiMgXkRfouM6Mqa5rDHSIiChIMzqn59FhRoeIyBeRF+i4mxHoLDBZbIFeGyIiojPIiTiO0SEiqprIC3SK7TAspvyArgoREdHZ59FhRoeIyBeRF+hEnQ50bAx0iIgoSOfRMXKMDhFRlUReoKPXw6wzqqsOU0Gg14aIiOgsGR0GOkREvoi8QEdK1vRaoGM3
FwZ6VYiIiM5gMtuKjdFh6RoRkS8iMtCx6rWzY3YzMzpERBR8bDYTDDqndoMZHSIin0RkoGMzaBkdp4WBDhERBR+npej0DQY6REQ+idBAx7XTYKBDRERByGnRSqsd+mjAEB3o1SEiCkkRGejYXZ3XnNZiZ8yIiIiChU3bPzlcFQhERFR5ERnoOFyBjs7KjA4REQUfnSuj42TZGhGRzyIy0HG6Otjobey6RkREwUdvN6lLZ7G534iIqHIiM9CJcgU6LF0jIqIgpHeVrrG1NBGR7yIz0HHtOAx2BjpERBRcnE4n9K79ky6aY3SIiHwVkYGOLsZdusZAh4iIgovF7oDRaSmxvyIiosqL0ECnlrqMYkaHiIiCjMniQJzOrK7rGegQEfksIgMdfawW6MQ4tMGeREREwcJks8MIZnSIiKoqMgMd144j2sGMDhERBReT1Y44aBkdHZsREBH5LCIDHYMxQV0yo0NERMGmSAU6WkYHnEeHiMhnERnoRBtdpWtO7YwZERFRsDBZZYyOO9BhRoeIyFcRGehEuTI6RqdJtfEkIiIKFkUWGaPjOhHHjA4Rkc8iMtCJjtMCHamBljaeREREwdiMgIEOEZHvIjLQiXFldOJ1ZtXGk4iIKFiYZYwOS9eIiKosIgOdKNcYnXiY1JkzIiKi4GpGwNI1IqKqiugJQ6U0QNp4EhERBVMzgtOla8zoEBH5KiIDHfeOI1Zng8ns2pkQEVHYOHjwIK699lrUrVsXcXFx6NSpE1auXOl5XBrRPPHEE8jIyFCPDxkyBNu3b0ewNCOI0zGjQ0RUVZEZ6LgyOsJclBfQVSEiIv86ceIEBgwYgOjoaPz222/YtGkTXnnlFdSuXdvznJdeeglvvvkmJk2ahGXLlqFWrVoYPnw4TKbAz68mJdWcR4eIqOqiEIkMMbBDDwMcsBblB3ptiIjIj1588UVkZmbik08+8dzXrFmzEtmc119/HY899hjGjBmj7vvss8+QlpaGqVOn4pprrkHA59HxjNFh6RoRka8iM9DR6WBGLOJRBJuJgQ4RUTj55ZdfVHbmyiuvxPz589GwYUPceeeduOWWW9Tju3fvRnZ2tipXc0tOTkafPn2wZMmSMgMds9msFrfc3Fx1abVa1VJZ7teU9doCkwVGV9c1qy5anlTp96eaU9G2pNDB7RhavN1OkRnoyE5Lb0S8QwKdwkCvChER+dGuXbvw3nvvYcKECXj00UexYsUK3HvvvYiJicENN9ygghwhGZzi5Lb7sdKef/55TJw48Yz7Z86cifh437Mus2bNOuO+bbv0nozOgiUrkG885PP7U80pa1tS6OF2DA2Fhd4dv0duoKMzqku7mRkdIqJw4nA40LNnTzz33HPqdrdu3bBhwwY1HkcCHV888sgjKnAqntGR8rhhw4YhKSnJp7ORckA1dOhQNZaouD+nbETcSS2jc97gEUBypk/rTDWjom1JoYPbMbS4s+p+D3QWLFiA//znP1i1ahWysrIwZcoUXHLJJeU+f968ebjgggvOuF9em56ejkCxGuIAO2Bn6RoRUViRTmrt27cvcV+7du3w448/quvufc/hw4fVc93kdteuXct8z9jYWLWUJgdEVTkoKuv1FmlG4Cpdi45Lkif5/P5Uc6r6XaDgwO0YGrzdRpXuulZQUIAuXbrgnXfeqdTrtm7dqoIb95KamopAsuq1jI7TUhDQ9SAiIv+Sjmuyzylu27ZtaNKkiacxgQQ7s2fPLnF2ULqv9evXD4FmtxSdvsGua0REPqt0RmfkyJFqqSwJbFJSUhAsbJLRUYEOx+gQEYWTf/zjH+jfv78qXbvqqquwfPlyfPDBB2oROp0O999/P5555hm0atVKBT6PP/44GjRoUGGFQk1xFN8vRTHQISIK+nl0pBxASgSk9nHRokUINLsr0CmxQyEiopDXq1cvVVb99ddfo2PHjnj66adVO+lx48Z5nvPwww/jnnvuwa233qqen5+fjxkzZsBo1LL9AWXV
9kt2fSygj8zp7oiI/KHamxFIcCMDQGVgqLTm/PDDDzFw4EBVItC9e/cyX1MTbTxPZ3QK2EowRLD1Y3jgdgwtobqdLrroIrWUR7I6Tz31lFqCjlUrXXNExcEQ6HUhIgph1R7otGnTRi1uUk6wc+dOvPbaa/j8888D1sazTqG28z515BCmT5/u83tSzWPrx/DA7RheLTzJf5yeQCcIsktERCEsIO2le/fujYULFwa0jeeG7GlAFlA3IQbnjhrl409CNYmtH8MDt2N4tvAk/9G7StecUb6f2CMiogAFOmvXri3R0jMQbTx1MbXUpcFu4sFWiGHrx/DA7RgauI1qns7m6rrGjmtERDUb6MiAzR07dnhu7969WwUuderUQePGjVU25uDBg/jss8/U4zIAVDradOjQASaTSY3RmTNnjipDC6gY7UyZ3r1DISIiCgJyAg46BjpERDUe6KxcubLEBKDuEjOZbXry5Mlqjpx9+/Z5HrdYLHjggQdU8CPjazp37ow//vijzElEa5QroxNlZ6BDRERBFuhESeUBS9eIiGo00JGOaU6ns9zHJdgpTlp4yhJs9K4dCAMdIiIKFla7AzFOc4n9FBER+SZiG/TrXRmdGAcDHSIiCg4mqx1xYKBDROQPERvoGIxaoBPtMAV6VYiIiJQiFehY1HV9DMfoEBFVReQGOrGujI6rRICIiCjQzFYHjDptv6SLZkaHiKgqIjfQMSaoy1hmdIiIKKhK17SMDruuERFVTcQGOtGu0jUjGOgQEVEwla65Kg2Y0SEiqpKIDXRi4hLVZRxL14iIKEiYrA7E6ZjRISLyhwgOdLTSNSPMcNgdgV4dIiIildGR/ZLCjA4RUZVEbKAT68roROkcMJtZvkZERIHHMTpERP4TsYGOMV7L6AhTYV5A14WIiKj0PDoMdIiIqiZiAx1DdAwsToO6bi5ioENEREES6HjG6LB0jYioKiI20BEmnVFdWovyA70qREREqhnB6TE6zOgQEVVFRAc6RYhVl1ZTQaBXhYiIyNVemhkdIiJ/iOhAx8yMDhERBVnpmpHtpYmI/IKBDgC7mYEOEREFyTw6bC9NROQXER3oWPXuQIela0REFCQZHbaXJiLyi4gOdCzuQIdjdIiIKAhYLGbE6mzaDQY6RERVEtGBjs2g7UQclsJArwoRERFs5qLTN1i6RkRUJQx0ADitzOgQEVHgOSza/sgJHRCldQYlIiLfMNARzOgQEVEQcFq1jI7dYAR0ukCvDhFRSIvoQMcRpQU6OisDHSIiCjyn68Sb3bV/IiIi30V0oONkoENEREFE58roONwVB0RE5LOIDnQc0bXUpd7GQIeIiAJPZ9MCHSc7rhERVVlEBzpOV0cbvWvHQkREFBSBDkvXiIiqLKIDHcRogY6BgQ4REQUBz4k3ZnSIiKosogMdvSvQibIz0CEiosDT203qUufaPxERke8iOtDRxWhjdKIZ6BARURCIcgc6nCyUiKjKIjrQ0cdqO5Joh7ZjISIiChS7w4kY1/7IXXFARES+i+hAxxCboC4Z6BARUaCZrHYYYVHXDa4TcURE5LuIDnSijFqgE+tkoENERIEPdOJ0ZnWdgQ4RUdVFdqATq43RYaBDRESBViSBjiujwzE6RERVF9GBTnScFujEwQw4HIFeHSIiimAmq0PbHwkGOkREVRbhgU7i6Rs2ZnWIiCjAY3R0WkaH8+gQEVVdRAc6xjhtjI5iLQzkqhARUYRTY3Q8GR1joFeHiCjkRXagExONImeMdsNSEOjVISIiRHrpmjujw9I1IqKqiuxAJ1qPImiBjtWUH+jVISKiSG9G4Oq6xtI1IqKqi/BAx4BCaOUBliIGOkREFBzz6DCjQ0RUdREd6MRG6VHkjFXXrSaWrhERUbCM0WFGh4ioqiI60NHpdDDpXIEOMzpERBTwQIcZHSIif4noQEeYdVrpmt3MQIeIiALbjMDIMTpERH4T8YGOxRXo2Fi6RkREgW5G4MnoMNAhIqoq
Bjp6ZnSIiCjwTBZbsTE6LF0jIqqqiA90rK5Ax2HmhKFERBQ4VosZBp1Tu8GMDhFRlTHQMWg7EwcnDCUiogCymYvth5jRISKqsogPdGyuQMdpYUaHiIgCyLUfcuiiAEN0oNeGiCjkRXyg44hylQdYmdEhIqLAsbsCHZtBK6kmIqKqYaDjCnR0VmZ0iIgogFz7ITsDHSIiv2CgE6XVQeusRYFeFSIiimBOq6lkpQEREVVJxAc6zuha6lJvY6BDRESBz+g4XGNHiYioahjoxGg7FIONpWtERBQ4OtcJNydbSxMR+UXEBzo6VwtPg50ZHSIiChxPZQEDHSIiv2CgE6OVrkUz0CEiogAy2EwlTsAREVHVRHygo4/VAp0ou7aDISIiCgSDwxXouEqqiYioaiI+0DG4Ap1o1w6GiIgoEAyu0jVmdIiI/IOBTmyCuox1sHSNiIgCw+FwItphVtf1sQx0iIj8gYGOa4cSDSvgsAd6dYiIKAKZbQ7E6cwlKg2IiKhqIj7QiY7TMjrF5zAgIiKqSSarHUZY1PWoGGZ0iIj8IeIDnZjYeDicOu2GhYEOERHVPJPNjjiwdI2IyJ8iPtAxxkShELHaDWtBoFeHiIgiUJHFjjidltEBmxEQEQUm0FmwYAFGjx6NBg0aQKfTYerUqWd9zbx589C9e3fExsaiZcuWmDx5MoKFMVqPInegw4wOEREFgMnq8GR0OGEoEVGAAp2CggJ06dIF77zzjlfP3717Ny688EJccMEFWLt2Le6//378/e9/x++//45gEBdtQJEzRrvBMTpERBSg0jX3GB1mdIiI/COqsi8YOXKkWrw1adIkNGvWDK+88oq63a5dOyxcuBCvvfYahg8fjkAzRhtQAKN2g4EOEREFgEmVrjGjQ0QU0ECnspYsWYIhQ4aUuE8CHMnslMdsNqvFLTc3V11arVa1VJb7NWW91gCHp3TNWpgrT6r0+1PNqGg7Uujgdgwt3E41l9FJ8mR0GOgQEYVEoJOdnY20tLQS98ltCV6KiooQF3fmH/Tnn38eEydOPOP+mTNnIj7e95T+rFmzzrivyAZkOrVAZ/Xyxcje6fT5/almlLUdKfRwO4aGwkJmumtCkaXYGJ0oBjpERCER6PjikUcewYQJEzy3JSjKzMzEsGHDkJSU5NMZSTmoGjp0KKKjo0s+Zndg0drX1fX2bVqge+9RfvgJqDpUtB0pdHA7hhZ3Rp1qYB4dT9c1BjpERCER6KSnp+Pw4cMl7pPbErCUlc0R0p1NltLkoKgqB0ZlvV5uml2la05rEQ+8QkBVvwcUHLgdQwO3Uc3Po8NmBEREITKPTr9+/TB79uwS98nZXLk/WJj1WjMCu5klGkREFKB5dDhGh4gosIFOfn6+ahMti7t9tFzft2+fp+zs+uuv9zz/9ttvx65du/Dwww9jy5YtePfdd/Hdd9/hH//4B4KF1RPo5Ad6VYiIyM9eeOEFNe9b8SY4JpMJd911F+rWrYuEhARcfvnlZ1Qf1CSz1cYJQ4mIAh3orFy5Et26dVOLkLE0cv2JJ55Qt7OysjxBj5DW0tOmTVNZHJl/R9pMf/jhh0HRWtrNqtfOnjmY0SEiCisrVqzA+++/j86dO5e4X062/e9//8P333+P+fPn49ChQ7jssssCtp5WU9HpG8zoEBEFZozOwIED4XSW35ls8uTJZb5mzZo1CFZWQxxgB5yWgkCvChER+YlUIIwbNw7//e9/8cwzz3juP3XqFD766CN89dVXGDRokLrvk08+UfO8LV26FH379q3xdbWbi+1/GOgQEYXGGJ1QYDe4JgxloENEFDakNO3CCy88Yy63VatWqe5/xe9v27YtGjdurOZ+CwSHVdv/2HQxgN4QkHUgIgo3QdleuqbZo1z10NZipQNERBSyvvnmG6xevVqVrpU1v1tMTAxSUlLOmONNHgvERNY2kyvQMRjh5CStIYWTIIcHbsfQ4u12YqAjbaVdk7PprByjQ0QU6vbv34/7
7rtPjQ01Gl0Z+yqq7omsjx7SxraaHAbMnz69CmtKgcJJkMMDt2N4TWbNQEdKBlwdbnQ2BjpERKFOStNycnLQvXt3z312ux0LFizA22+/jd9//x0WiwUnT54skdWRrmsy91sgJrLeeOgzwATojYkYNYoTV4cSToIcHrgdw3MyawY6xQIdAzM6REQhb/DgwVi/fn2J+8aPH6/G4fzzn/9UAYocyMgcb9JWWmzdulV1DC1vjrfqnshabzd5Kgx4kBWaOAlyeOB2DA3ebiMGOpLJcQU6ejvH6BARhbrExER07NixxH21atVSc+a477/55ptVhqZOnToqI3PPPfeoICcQHdeEzjVG1OEeM0pERFXGQEd2MDHajiXKdUaNiIjC22uvvQa9Xq8yOtJkQOZ2kwmtA0Vnc51oY2tpIiK/YaCjAp1a6jKKGR0iorA0b968ErelScE777yjlmCgZ6BDROR3nEdHfgmuQCeagQ4REQWAweaqKHCVUhMRUdUx0JFfglELdAywAzZLoFeHiIgijMGhBTr6WAY6RET+wkBHStZitUBHcc1OTUREVFOi3IEOMzpERH7DQAdATKwRVqdBu+HqfENERFQTnE6npxkOMzpERP7DQEcGpUYbUATX/AgWzqVDREQ1x2J3wAitbDqKgQ4Rkd8w0FGBjh6F7kCHpWtERFSDTBYH4mBW1w3FS6mJiKhKGOgAiIs2oNDJjA4REdU8k82OOJ2W0TEwo0NE5DcMdFylayZmdIiIKABMVjuMroyOjs0IiIj8hoHOGaVrbEZAREQ1p8hqR5xrjA4nDCUi8h8GOgBio1i6RkREgWGyOhCn0zI6nDCUiMh/GOjIGJ2YYl3XWLpGREQ1qMjCjA4RUXVgoOMao+MpXWNGh4iIargZgXuMDjM6RET+w0BHAp0oPYqcMeq6w8KMDhER1Ryz9XTXNWZ0iIj8h4GOp3TNqK7bzQx0iIioppsRMKNDRORvDHRURud06RoDHSIiqulmBEaO0SEi8jsGOvJL0Otg0TGjQ0RENc9sNiNWZ9NuMNAhIvIbBjouFoMW6DgZ6BARUQ2ymos1wWGgQ0TkNwx0XBwGbefiZDMCIiKqQfbigU6UdtKNiIiqjoGOi80d6FiLAr0qREQUQeyuaQ0seiOg0wV6dYiIwgYDHRd7lBbo6KycR4eIiGqOw1UybZNAh4iI/IaBjosjSmvpyUCHiIhqktOV0bG7xooSEZF/MNBxcbrmLtAz0CEiohrkLpm2u0qoiYjIPxjouLkDHRvH6BARUQ1ynWBzsBEBEZFfMdApFegY7Ax0iIioBrkyOg7XWFEiIvIPBjpuMVqgEyWBjtMZ6LUhIqII4a4kcDLQISLyKwY6LvqYBHWpgxOwmQK9OkREFCH07n2Oq7KAiIj8g4GOiyG22Jk0VwccIiKi6uYZGxrNjA4RkT8x0HGJjYmB2Rmt3WDnNSIiqiHusaE6Vwk1ERH5BwMdl9hoAwoRq91goENERDUkyq6VrukZ6BAR+RUDHZe44oGORZulmoiIqLpFORjoEBFVBwY6LsZoPYqczOgQEVHNinaY1aUhtlagV4WIKKww0HExlsjoMNAhIqLqZ7U7EAst0ImKZUaHiMifGOgUz+hwjA4REdUgk9WBOHegY2RGh4jInxjoFBujw9I1IiKqSSarHXGwqOvM6BAR+RcDnbK6rrEZARER1QCTzY44nZbRYXtpIiL/YqDjYowysHSNiIhqvHQt1pXRQTQDHSIif2KgU2yMTqG7dI3NCIiIqIZL1xAdF+jVISIKKwx0XOJimNEhIqLANSNgRoeIyL8Y6LiwdI2IiAKS0dExo0NEVB0Y6BSfR4ela0REVMMZHSNL14iIqgUDneJjdFwZHQe7rhERUQ0wWW0sXSMiqiYMdIpldNylaw4zAx0iIqp+FosZUTqHdoMZHSIiv2Kg4xIbpYfJk9Fh6RoREVU/m6nY/oYZHSIiv2Kg46LT6WDVG7UbLF0jIqIaYHVVENhh
AAzRgV4dIqKwwkCnGHuUdjbNya5rRERUA5xmbX/jOdFGRER+w0CnGEeUVh+tY6BDREQ1wO7a31gNDHSIiPyNgU4xDld9NAMdIiKqCe4xoXa9a3oDIiLyGwY6xTgNWkZHbysK9KoQEVEksGj7GzszOkREfsdApxhnTC11abCbAIer3ScREVE1cY8JtbtKp4mIKMCBzjvvvIOmTZvCaDSiT58+WL58ebnPnTx5supoVnyR1wUjXfHWntVQvnaqyIrjBa4ZsImIKOLprEUlKgqIiCiAgc63336LCRMm4N///jdWr16NLl26YPjw4cjJySn3NUlJScjKyvIse/fuRTDSx8TB4dRVS6BTaLFh1Bt/YuB/5mLvMbavJiIiQGfXAh0HJwslIgp8oPPqq6/illtuwfjx49G+fXtMmjQJ8fHx+Pjjj8t9jWRx0tPTPUtaWhqCkTEmCkWIqZa5dL5evh8HTxYh12TDA9/9BbvD6df3JyKi0KOzmbQrLF0jIgpsoGOxWLBq1SoMGTLk9Bvo9er2kiVLyn1dfn4+mjRpgszMTIwZMwYbN25EMDJGG1AEV+ebohN+e1+T1Y4PFuz03F659wTeL3abyO2p/21Cpyd/x+NTN2DPUWb+iMKdwd38hhkdIiK/i6rMk48ePQq73X5GRkZub9mypczXtGnTRmV7OnfujFOnTuHll19G//79VbDTqFGjMl9jNpvV4pabm6surVarWirL/ZqzvTbGoMMJZyLq6XLh/GwMHP3vh6PXLVXeAX27fD8O55qRnhSLuwa2wOO/bMJrs7ZhQPPaaJ+RVKX3jiTebsdQNW19Nj5etFtd/3zpXnyxbC+GtE3FTQOaoEfjFJUZDQfhvh3DDbdT9VLNbySzE1NsjCgREdV8oOOLfv36qcVNgpx27drh/fffx9NPP13ma55//nlMnDjxjPtnzpypyuR8NWvWrAofzz6gxz+tt+D1+I/R2LwPhrlPwbLwbWzOuBz76wwAdJXv3WB3AG+sNchuDP3rFCIxZx061dZj/Qk9bvtkCR7sbEc0e9/5dTuGouNm4KW/tO9J7/oO5FuBTSf1mLU5Ry1NEpy4IMOBznWdMIRHvBOW2zEcFRZyXrHqZHCN0WGgQ0QU4ECnXr16MBgMOHz4cIn75baMvfFGdHQ0unXrhh07dpT7nEceeUQ1PCie0ZGyt2HDhqnGBr6ckZSDqqFDh6rPL8+mmdvxfrYen3T6Av/XeAMM855DXO4BdN/3X3QzLYL9gsfhbDFEBh15/dk/rTmI48s2om6tGDx53bmIizGg70ALLnxrMbILLNgc1QL/GtGm0j9TJPJ2O4YaGa917ccrUGQ/iS6NkjH5770QbdBjR04+Plm8F1P/ysLefAcmbzeg0REjru/XBFf2aIiE2Go/T1EtwnU7hit3Rp2qR7RDq17QM9AhIvK7Sh0pxcTEoEePHpg9ezYuueQSdZ/D4VC37777bq/eQ0rf1q9fj1GjRpX7nNjYWLWUJgdFVTkwOtvraxm1x0wOPaK6jwM6XQ4s/wD48xXocjYh6tuxQNNzgaETgYY9zvp5cgD7/oI96vot5zVHUi2trXZ6SjRevLwz/v7ZSny8eC+GdshA3+Z1UVPmbsnBlDUH8diF7ZCaFJytvitS1e9BsHlv9nas3HtSBS5vje2OeKP23W/XsDZeurI2HhrRTitlW7oXB06a8NxvW/HW3J34W+/GuHFAU2Qkh2Ztf7htx3DFbVS9ohxa6RoDHSIi/6t00ZRkWv773//i008/xebNm3HHHXegoKBAdWET119/vcrIuD311FOq5GzXrl2qHfW1116r2kv//e9/R7AxumrIzFa7dke0ERhwL3DfWqD/vYAhFtjzJ/DfQcD344Hjuyp8v982ZGHX0QIkx0Xj2r5NSjw2pH0arumVCacTqgtbnqlm6uB/W5+lAqxf/jqEl37fWiOfSeVbtfcE3pi9XV1/+pIOaFz3zIOd+omxmDC0NRb/axCevbQj
mterhTyTDe8v2IVzX5yLTxdrwTQRhZ4YV6ATFatNWE1ERAEMdK6++mrVUOCJJ55A165dsXbtWsyYMcPToGDfvn1qrhy3EydOqHbUMi5HsjhSBrF48WLVmjoYu64Jk80V6LjF1QaGPQ3cswroMlaNo8DGn4C3ewPTHwZyT/+8bg6HE2/P0crzxg9oWmaZ0WMXtUdmnTjVdnri/zahus3YkIV7vl7jaW390+oD2M3OXgGTa7Livm+07TGmawNc2q3s5hzFv5/j+jTBHxPOx4fX90TvZnVgczjx0owtyDfbamy9icg/5E9xrFMrXYsyMtAhIvI3n4bBS5maZGWkM9qyZcvQp08fz2Pz5s3D5MmTPbdfe+01z3Ozs7Mxbdo0NUYnGBmjXIGO1VH2E1IygUsnAbf/CbQcAjiswPL3gdc6AN9dD+xeAJWiATB7Sw62ZOepAOfG/k3LfDt57NWruqohPz+sOoAZG7Kr7WeT9777qzXqwPjSbg1xfuv6aif7liubQDXviakbcOBEERrVjsPTl3T0+nV6vU5lBL+9tS+a16+FAotdlSISUWiRXY0RFnU9ysjSNSIif2O/r2KMMVqgU2QpldEpLb0TcO2PwPU/A437A047sOln4NPRwDu94Vz6Hj6ZvVY9VUrWUuJdk5CWoVfTOrjtvBbq+qNT1iMnzzV5nB/9vlGCnNUqyLmkawO8fGUXPDCstXps6tqD2Hkk3++fSRWbsuYApq49BINehzeu6YYk1/iwypB205LhEV8u3QunK8gmotAJdOJ0WkYnmqVrRER+x0CnGGOUvuzStfI0Hwjc9Btwx2Kg581ATAJwdBt0M/6FD49eh5diPsRtrc8eRPxjaCu0TU/E8QILHvlxvV8PWGduzMZdX2pBjpRHvXJVV3Vw3blRCoa0S1VZnTeZ1alR+44V4vGp2qS59w5qhR5Navv8Xpd3b4jYKL3KHq7e579Jbomo+lkk0HFldPQxDHSIiPyNgU5ZY3TKK10rT1oH4KJXgQe2ABe+gn1RTRCvM+Mq/RzU/nww8OEQYO3XgLXsbE1slAGvX9MVMQa9Knn7dsV+f/w4mLXpMO5yZXIu7tIAr1zZRQU5bvcP0bI60phgR06eXz6TKma1O3DvN2vUmJpeTWvjrgu0bJ6vJFs4uksDdf2Lpfv8tJZEVGMZHbgmx67ixNRERHQmBjplBDqermuVFZuI5fUuw3n5z2Gs7d8oanMJoI8GDqwApt4OvNoOmPYAsO474NhOz3ge0TY9CQ8O1wKPp3/dpM76V8Ufmw7jzi9XwWp3qgPhV6/qgihDyc3dsWEyhrVPU6vxxuzy5zUi/5Hs2dr9J5FojMJrV3c9Y5v4wt3Rb9q6LJUVpOqxcPtR/Of3LbDYKnkihKiiMTo61/9ZBjpERH7HQKeYOFegU+RroAPg7bkSMOjQtPtQxI39FJiwCRj0OJDUCCg6Dqz4EPjpFuCt7sBLzYAvLgfmPgdsm4mbuyerTloyuHzCd2s93dF8CXLucAU5F3XOwGtlBDmlszq/rjuEbYeZ1alOy3Ydc30/gOcv64RGtf0z+FgmGe3YMAkWuwPfr/RPNpBKknLSB7//C+/M3Ynv+DsmP5HhoKczOmxGQETkbwx0yphHx+RjoPPX/pNYsO2IKg+743xXSVJCKnDeg8B9fwFjvwH63AE06qXNyVN0AtjxBzD/ReCrK2F4uQW+KrgN78a+jc4HvsLU/00BbK6doJdmbz4d5FzYOQOvnyVr0L5BEkZ2TNeyOn9wrE51OVVoxT++Xat+z1f0aISLOmvlZv4gTQmudTUl+Gr5PtXanPxrU1YusnO10tOvlu1j4wfyC6tD5xmjw4wOEZH/nTm5SwTzeYyOyzuus/VjujQ4c+JHQxTQZqS2CJsFOLwBOLhKWw6sBI5tR1TuXozS7cWo6MXAms9hXRuDo8mdUJjRG4ZmA1C79QAkp9Qp8/PnbsnBHV+s1oKcThl4w8vSqPuGtMJvG7IxbX0W
7snOVWV05D9yUPzIlHU4dMqEpnXj8eTFHfz+GRd3bYBnp2/G3mOFWLjjKM5rXR/BrDDEpv2R/1vFg571B0+phh5EVSFZ2DhP6RozOkRE/sZAp5hYd0bHZlcHp3Km3FtbsnMxc9NhNSfOnd4MMI+KARp21xbcot1XdBI4tBrOA6uwbtlsNCzYiHrIRcbJVYAsm9+DbZoeG3TNsD22I7JSuqMgvTfqpWZAr9Ph2Wmb1Y5zVKd01dzA2/EfEthIYCSBzuuztmPSdT28/rnp7L5feQDT12cjytVKuqzJY6sqPiYKl3dvhMmL9+CLpXuDOtD5ZPFePLciCobMQ7iqt5aJCnZzXIFOkjEKuSabyuow0KEqs1tPX2dGh4jI7xjolDFGR6pSzDaHJ8PjjXfn7lSXUgbWMjXRxxVIAVoMgq7FILQdMAE/rzmIE/s3ISF7ORqcWoPW5g1oqMtBR+xER/NO4PDPwGFgu6MhVjjaYDzS0KphMi5tmgnDiuWAziB1TYBOry16w+nrUbFAneZA3VZAbILK6kzfkIUZG7Ox8dApdGiQ7NvPQB5H8sz475+78OniPer2hGGt0SWz+g6Ox/VprAKdPzYfRtapImQkB9+BkzRLeGOOlvn8cvn+kAh0ZJ3X7D+prsvErvd9s1Z1Kvy/C9sh0Yf5j4jcnPZizUMY6BAR+R0DnWKKBzZmq/eBzq4j+Wowv7jrgpZ+WRdpOX1Vr8aALBjhub/o6F6c2jwf9j2LUCt7OVIKdqGV/qBalGPSV7qSH5bYAK3rtcSnaXUw50gSZv2yHx2uGaU1UNBzGFdlHc41YdL8neqsvwTMQuYsck8MW11apSWqZhbLdx/H18v3Y8JQrdFEMHlv3g4UmLUxcH8dOIWDJ4vQMCW4D/Dmb8tRJz/aZSSpNu3SOW/nkQL8vPaQp+MdkU8cWqBj1UUjWk5EERGRXzHQKSbaoFeNBKTbmZSvJcO7s7XvzdupJt4c3Da12jMhcfWaIO7c6wFZRMExYN8SbSk8DjgdcprQdelaHHLbWfIxcz5wfCdQcATIO6SW8wCcJz9yNoDX/wlExQF1WwL1WgHJDYH4ekCt+kAtuax3+nZMaNeWL9l5TGVeCsw2nN+mPga3TUPrtIRKlS4KOWifNG8nvl2539OCuGtmCu4d3BIXtEmt9Pv5Qg68JdD5Zvk+3DOopfpOBwvJMn26ZK+6nhDlRL5Nh9/WZ+Hv5zZHMJuz5Yi6HNS2vtqGY3s3xjPTNqtAVrJoNbFdKTzpXRkdi87o5d6GiCrLbrfDai1WJloOeU5UVBRMJpN6DQVWdHQ0DIaqnwBioFOKMUqv2jt723ntwIlCTFmjZVPuGuSfbE6l1KoLtLtIW3whnd+O7gCOblPNENatXYG43N1opj+MKFsRcHi9tlREBtF6Ah9ZUoHENCAhvdhlOpCQBkQby3wLm92hxj6cLLTgVJEVJ4usqlOZuu66PFFgwvFsPZocykXXJnVPv1gFbbu0wO2Ya5HruYeAlMbahK5q6QiktgNcM5BLu+fX/tiGpbuOe95q2e7jeGnGVpVlGNwuFRe0TUW/5nUrzO7tP16Id+ftwA+rDqhGEEImA713cCuc07JejR4Ij+iQjnoJMcjJM6s24yM7ZSBYSCZEAsCeTVLQRHcMP+4xqCYYwRzoyPdy/lZtfM6gtqnqUsZCvfT7Vr82JZDPsTudKpNLkUNn17pq2gxl/10kIt/JWOvs7GycPHnS6+enp6dj//79PIEVJFJSUtQ2qcr2YKBTSlyMQQU6MzceVqUqtWtFo26tWHVZ1kHI+/N3weZwYkDLuujeuDZCTlxtILOXtsiZ9i75GPLqfOicdky7LhNto7KBo9uB/MNAwVGg8KiWBZJMklzKjtpaCJzcpy1nY0zRgp7EdGQ7UrAgy4AjJgPybXpYYIANUbCqxQCL031duy2XCSjCjA9+RU7CcXStdQz1LAegl3Urz6n9wN5Fxe7QwZTYBOusDbE4Lx3JzsZo
YWiCAT17oGV6kuqutWjnMZWd+WzJXrXI2K0BLeupwEcOdtOStIOS3UcLVKc9CXTdcx71b14HE/rXRo+kk9CdmAPM3w2c2AsYooHkRkBSAyCpobZIlswVdPlLTJQeV/XMxLvzduLrpbsxst4RbcJa6eonl+Y8oFFPoMkAoEk/IK2T1hGwmsnv6ruVB9T1B4e2wrY1R/HjHmDV3hNBO55IyPpJAF47PhpdM7X/37VrxWBUx3RMXXvIL00JrHYHrpy0BAdOFGH6vecg1fX9It89//zz+Omnn7BlyxbExcWhf//+ePHFF9GmTRvPc+Ss7QMPPIBvvvkGZrMZw4cPx7vvvou0tLQaW0+9QzvLbNNzmxP5mzvISU1NRXx8/FkPlh0OB/Lz85GQkAA9y/YDSoLOwsJC5ORoJxozMnw/actApxQZXHw036Ja9ZZWK8aAOgkxqBMfow526tSKwa/rstRjd1/QCuGgef0EXNKtIX5afRAvLrfik/HFWmKXJuVwlvySgY8EQvk5WmCUlwXkHQbys7VLCYpMJ7XlyBakA7hK3kf+9lS2bkNOhBabYshmrIOoei2Bui2AOi2Aus21YOLEHq2N9+GNsB5aj+iiIzDm7UFv7EHv4p+5qRaQ1QTXG1Nga5eEHIsRu/OjsPmEHofMscjdFo+5W+Mx1ZmA1NT6qJ2YgL27tiJTl4N/6Q+jW/IptDceRfyRA8APBd7/HMZkbSyUCoAaaMFQYkaxDFld7TI2UWssURH5vR9Ygduti3FuzBx0PrATeL+MeZi2/KotIiYRyOytBT0S/DToXm7WrSpenbVNBYMSKPZoUls2B3o0TsGqfScxY0M2xg9ohmA0x5XNOb91fVXW6iblaxLo+KMpgQRLa13NDibN34UnRrf3w5pHtvnz5+Ouu+5Cr169YLPZ8Oijj2LYsGHYtGkTatXSTi784x//wLRp0/D9998jOTkZd999Ny677DIsWlT8xEj10juY0SGqDlJ65g5y6tYtVgFylkDHYrHAaDQy0AkCcpJKSLAj29HXMjYGOqU8cVF7fL9qv+q0pC1WnCi0qIM0yfQUHC/C/uNFJV4jB259m5c9t00oundQKzXQeu7WI1iz7wS6lZepkgNvOQCXRTq4VUSCoqIT2Ll7Jz6YvhjWE4eQqjuJgQ2daFc/GrE6O2J0du0Mp7Rcldp1tZy+LR2KTuabYMzsgk3mVMzIisfy3NrY7UxHrikBHWslYWyDxmrAuOfAs3FfrN53Aq/t34Y/TxxFXZxCB8N+XN04FwNTclDrxBYVdMFaAORs8vynaOBaBsgdpY9hT7mWmGL3yVfC87XQaUFWnWZA7aZA7SbaOKncg8Cpg1pJnVw35wKmU9qSs7Hi359MMBtf93Tg4x4rJRkyKTuUbM1JbfyLzILUz/U32mSoBWOT3toktbLIttq/FNi7GNi3DDCfAnbO1hb1OdL2XDI+/YDG/QEJHhMbaO3QfbR59z5krZuDcYYDeCDeBsPnj2No1lacG1MbW6Nj4fyzLnCyzenSR/VzFiuFjKtTI1mniubPGdQuDbAWAZZC9TuUpg8t6teqclMCKdWU8km3L5ftxe3nN2dWp4pmzJhR4vbkyZPVjnLVqlU477zzcOrUKXz00Uf46quvMGjQIPWcTz75BO3atcPSpUvRt2/fGllPg2uMjt0QnBlNolDlHpMjmRwKXe7tJ9uTgY6fyJgMWYqTmebzTDYcL5TAx6wFPwUWHCuwoNBiw5U9MsOqnrNpvVq4tFtDNebk9T+249Obelf5PaWy65PVp/Dib0dhsbdEvYQOePnKzujbpuTvuiI2qxULpk/HqFGj0CM6Gt2dTjWm5uvl+/Db+mxsOJiL/5uyQc0nJMGOnIX/ZsV+zN+mDSaXeWyG9eyIOwdegsw6xf742W3AsR1aUwYJOmQ+I3cAopbTt22FJ2ErOA6dzQRnUkMY67dwBTSuoEauy7ggad99Nqbc00GPWg4Bpw4AedmuEkFXlkzG
Skk2zNU0onw6oH5bVZq2Oaot7l0YjaOxjbFk7LCSY4wkiDnnH1rwJakVaWQh5X17lwAFOcA+CYIWA3jl9PsmpJ4ut0vOPH1dslFyKeOvJCA9uhXI2ay9r1zmbEK73IP4wf3r0GJJyG8/3noc/WS1TABW/FnxzyUlfvI7jTIWW2K1lrxl3h+vPSbZKfd1aa6h7os/fb8831Kg/b4L5fd9+rLoVA7+c2IP6sbmouGvhcCUwtNrFBWH/+lr4VBMDGyzEuHc3hA6yc7FJgHGJCA2WcvWyXpLs46YBO3z5Hp0Lc/9b83aq8agtUlLRK1YA1bvO8msTjWQwEbUqaOdkJKAR3acQ4YM8Tynbdu2aNy4MZYsWVJzgY4ro+OQ7yER+V04HZtFIn9sPwY6XtDrdUiOj1ZLs3r+HVMRzFkdGXsiQYKMU5Csla9yck144Pu/8Of2o55Wyy9e3hl1E7wIBs7yH6Bv87pq+fdoC35afUAFPXKWXQIcWYSUHF3RvRHuHtSyZIDjJtmC1Lba4sV/GL/9p5EDYlnO9rlyIO4ZH3XMdem6LZ32JLCScTcNe2gH1wBaO5woXD8XJ04WqfLKK3o0OvN9pZ1tRmdt6XOblnWTRg4S5EjGR2WJ9mtBlpQiynJoddnrqI863eWvDFnOOkhu2hXxDTvCVq8tFm/JQv+eXfHmr8uQeywbl7aJRZfatjN/RmmWAVeJpCw1SM6xd3FXL5Ru2GMrQjyK0FIel74lO7f69BmPA/hnrAG6olpwmmNwKtYC/UrAsSkaevm5ZZHtosh116X7dy5BnWTh3JcVXb/4Te8C8DAj5Sj3338/BgwYgI4dO3pq92NiYtRA1+JkfI48VhYZxyOLW25urrqUgMmbjk6lqTOUTi2j4zDE+vQeFBzc247bMHjItpBxHvL/XxZvyPPdl96+hqqXbAfZHmVldLz9/8ZAh8rUuG68Cg6kVfLrf2zD5zf38el9Zm7Mxj9/XIcThVYYo/V47ML21dKSV8ZLSfeum89phhV7TqiAR7qqSROBewa1Uj9PyFJn/2tpJXBekuDub30a4z+/b8UXS/eWHeiUJttEStVk6e5qXy5/+CXDIU0dVNmdlN8dKFaG58pEOWza86WUTjrcpbaHM7UdHl/iwC+HknFRn3Z47tJO2ltarTixfzqczc5HUq/meHPaZmwqqoPvru935jpJtk2CHUseYJPGF0Xapc3kujx922ktwkfztiAvLw99G9dCv8w47XFrqcVzXyFgNWlZHnepXLHywI/W5GJxFnBhn0647Nwu2uOSlZGSQ1V2mIu3p6/EXzv3Y2jzOFzVKfl0FtD1uApS5XNUoFboul2gXbp+Z1KyCYt20Fzf/d+iZHWsf4x5G5FIxups2LABCxcurHKDg4kTJ55x/8yZM30uj4lyzaNzUialnT69SutHgTdrVmUnsaPqIm2ipVuXNBeQcTeVIfuQcNG5c2fccccdaglFsu2KioqwYMECNd6yOGlW4A0GOlQuyYD8uPqAysSs2HMcvZp6Pw5JSvrcc42I9hlJeHNsV7RMTazGNdayPDJ+QpZIJ93XJEiVge4bDp5Cx4Y+zPEkwY973qQG3cp+jpTASbZHp9dK2FxB7LwtOfji0ArERulVhrAs0v5avify/crJMyE10Xhmti2hvoQAZ11VaQP9zMkV6vrk7GgsGT8I8TG+/Ykrstjx0oyZMDsceLjPeUCdYt/b+DraIm3EL2iIl7cvxaJ9Boy8drDXTQkkU/r3jxcjyWDGTzd3QZNEncqcSXOGR6dsQLTBoEpG6yYYXb9PXalLuMauSZBncV2aT49tK32fXEoGKMJIg4Fff/1V7SQbNTod7MsBkOxAZbBy8azO4cOH1WNleeSRRzBhwoQSGZ3MzEzV5CApSUbGVY6cjfxq3TR1PbleuirJpdAk21KCnKFDh6q5PyjwpKuitImWDmrSXMAbkjmQICcxMTGgJW8ybrBLly547bXXqvxeK1asUA1YQnWs
kmxHaUogYytLb0d3Vv1sIm/PR16TMq8re2aq7Ii0v62fGIsGKXFomGJU88zIde22tqTER6s/DnJQfe83a7DriNZ97LbzmmPCsNacI6SGyfYa3iFdla59uWwfnr9My6j4nZTASce4UuPaZK4ZcWP/pkhPLntHI98bmVRVgrHfN2Tjun5NfV6NDxbs8lyXeZd+XHXA5/dbvPMozDaHWj+ZPLY8vjQlkDlznv51k2qXfmm/VmjS/HTL4+5pTiSsMqhy0Xc2xuCJ0Wcvp6SyD1juueceTJkyBfPmzUOzZiW7+vXo0UMdkM6ePRuXX365um/r1q3Yt28f+vUrI7MIIDY2Vi2lyfv4enDrzujoYmrxADkMVOW7QP7vuibHI9I9zdsOau5yNffrAqmidZC/b/LzSdbqbNJqsF1+dZDfgfwuyvq/5e3/NfbPowrdO7glGrgOUo/kmfHX/pOYvj4b//1zNyb+bxNu+3wVLnprIbo9PQvtn/gdg1+Zh0vfXaSCnLSkWHz59z54ZFQ7BjkB4j7w/nntQeSZaq5+/Nf1WdiclYvE2Cjcfn6LCp87qpN2Bl2+V76S4HrxzmOqZO/W87QOgB8v2qMCLl/McXVbu6Bt/QrP7Mlj0mpaSPbSXeNdka+W78OOnHw1N889g1ud8X73D2nl6cAm49vIt3K1L774QnVVk7OzMu5GFimBENJO+uabb1YZmrlz56rmBOPHj1dBTk01IhDRrjE6emlSQUQR78Ybb1Tt8d944w21P5BFukbK5W+//aZO0sgJFynF3blzJ8aMGaOCGclcSTv9P/74o8T7NW3aFK+//rrntrzPhx9+iEsvvVRleVq1aoVffvnFq3WT4Er+bsqJI8myyLxksp6lffzxx+jQoYNaT5n/RjLrbpJFv+2229Q6S4ZGxk1K1r06MaNDFZKJHBf9a5DqDCWTaMpySC5PFOHQKbltUteP5ptRZLWrM9tiRId0lUGQ+YYocPo0q4NWqQnYnpOvmktcX4WMSWUmwHx1ppbNkaDjbN+BkR0z8Nz0LVi2+5j6HtXzoUmFO5tzUecM3De4lcpCyiSlc7fmYLC0hq4ECVY8baVLdWAsy+XdG6ns1aasXKw7cApdMsufQPRUoVXNKSQmDGuD5Lgzz0id07Keav4hWZ335u/Ev0d3qNT6E/Dee++py4EDB5a4X1pIy4GEkLIQOVsoGZ3iE4bWpGjXZGA6BjpE1U7+tstxSkUZHSlbjrLY/J7RkYnHvSmHk8Bh27ZtKgB46qmn1H0bN2rTT/zrX//Cyy+/jObNm6N27dqqNE9KXp999lkVVHz22WcYPXq0yk5LB8nyTJw4ES+99BL+85//4K233sK4ceOwd+9eT1fKin4/UgIsc4/J3ESLFy/GrbfeqoKZq666yvO3V04gvfDCCxg5cqTqeOmem0xeL/dJeaCciGrRooWa28zXttHeYqBDZyX/OeVgVZbyxnmYrHZknTKpIEjGZMiBGts6Bp5sA2n+8OT/NqmmBNf1bVLt2+X7lQew51gh6taKwU3nNPOqRLJzo2QVJPy+MRvj+lRuTpoDJwoxbb02ce8t5zZHrdgolWWR4OejhbsrHehsPZyHQ6dM6nvcr3m9sz5f/l+M6piuJhCVAKuiQOeN2dvVSQMphxvbK7PM57izOtd9tFxlie44vwXn1akkbzJrcjbxnXfeUUugxEpGRydN8RjoEFU3CXKk8iQQNj013Ksxo5Jtlo6Qkm1xjxfcsmWLupTAR8aBuUlgImN53J5++mlVrisZmuJZlNLkZM/YsWPV9eeeew5vvvkmli9fjhEjRqAiUipWvCGLZHakHf93333nCXSeeeYZPPDAA7jvvvs8z5NMk5Bsk3zO5s2b0bp1a3WfBG3VjaVr5BcyT4u03pYuZz2b1mGQE0Qu69FInU3adjgfK/dKu+bqIwHvG7O1jMVdF7RUQYc3RnXKUJfTXQFLZUgwIxP6DmhZ1xOI39C/qSpjk3K2TYe8G7BY
umytf4u6iIvx7kzT31zB2S9/HSq3RHDnkXx8tmSPuv74Re0RZSj/z687qyPjhCSrQ+EpxqlldAyxDHSIqGI9e/YscVs6yj344INqomNpqiLlaxJEyFjDs3Vic5NGBdJMJSdH2++djZwYkvK5+vXrq8/74IMPPJ8n73Ho0CEMHjy4zNeuXbtWZYTcQU5NYUaHKMwlGaMxpmsDNa+QZHUq0z2vsuRA/nCuWQ3iH9e3/NR5aaM6ZuCF37Zgyc5jOJZv9nqOJSkF+9Y1X9Kt550eCySfP6JjOqaty8LHi3bj5StPn/U6m8qUrbn1alr7rE0Jnpu2GTaHE4PbpuLcVhV3kWNWJzKojI4KdCJjfjaiQJITfpJZKY+UVuXl5iExKbFaSteqSoKS4iTIkW5/Us7WsmVLNW7miiuuOGs77ehSg/hlf+PNvEHffPON+sxXXnlFjWeU8Y9S/rZs2TL1uHx+Rc72eHVhRocoArgPvH9bn43th6tnjoBckxXvztOyD/cNaVWpBhQyz1HHhkmQ3gEzNx32+nVfLNuLQosdbdMTcV6rkmVmMqeS+GXtIdVIwxsnCy1qbIy4oBKBztmaEizYdgSzt+QgSq/Doxe28+o9mdUJb/IdiYV2QBJlZKBDVN3k77SUj1W0SBb/bM/xZalMlYuUrsnA/7ORsS9ShiaNBTp16qRK3fbs0aoGqsOiRYvQv39/3HnnnejWrZsKrqQhgpsEPtL8QLpZlpdJOnDggBqDVJMY6BBFACnp6tu8Dix2B675YKnqiOZvHy7YpcafSGbjsm4NK/16aUpQmfI1s82OyYv3eMbmlN6RdG9cG90ap6ifWTJZ3s5vI8FWm7RENKpduXIiaUoQE6X3NCUo3U5aSDOIFvXLb1ddnPw8/xjS2hM8sQNbeJEANk6nBeDRDHSIyEWCBcmSSNBy9OjRcrMt0jHtp59+UiVhf/31F/72t795lZnxlXzeypUr8fvvv6tg5fHHH1fz9BT35JNPqoyPjPvZvn07Vq9erRoeiPPPP1/NhyMNYCQTtXv3btVJbsaMGahODHSIIsR743qgU8NkHCuwYOx/l2J9sYPxqpJuaR8u3K2uPzisTYXjT842TkfG1ZwoOPtM1j+v0TI16UlGjO5Sch4ft5sGaFkdCXRk/JD3baW9z+aUbkogpCmBm1zf7monLR3hKkPGHfVkVidsB0bHuTI6MQx0iMhFysOkE1n79u3VWJjyxty8+uqrqvuaZFmk25p0juzevXu1rddtt92Gyy67DFdffTX69OmDY8eOqexOcTfccINqZy0dLKXF9EUXXaQCHrcff/xRNSeQZgjy8z388MNeZa+qgmN0iCKEHIh/8fc+uPGT5Viz7yT+9uFSfHpTb5X5qKq35+xQJWTSPU3GxvhCmlm0y0hS2aaZm7Jxda/yx/jI/Dgf/Km1lL7pnKYqk1KWkR3T1TxQ0kVNStiuKqfTmZCGBpLRqez4nNJNCaT7mjQl+L8L20FOrnnaSQ9tjeT4yk0mqI3VaY1rP1rGsTphxmR1wOhqL80xOkTkJoP1pZtZce62+KUzP3PmzDljDrHi9pQqZSurI6XMbeMNaWEtLfplKe75558/IyCSpSzSKU7m2alJzOgQRRCZt+Xzm/ugd9M6yDPZcN2Hy7B893Gf308yL/d/s8ZTQvbQ8DZV6rh3oZeTh8r8ODLppkxI6h4bUxbJLEkHNiFNCSpqO7xm3wlVeie/o+6Ny28RfbamBC1TE1TQJ00J3pyzHScKrWouo4rWsyLM6oQnyTDG6VyZy+jADNIlIgp3DHSIIkxCbBQm39RLtU8usNhxw8fLsXjH0Uq/j8x5M/S1BSqDoddBlWXJAPqqcJevLdpxVDUGKM/7rglCx/ZpjERjxVmSa3o3RnyMAVuy87Box7Gzlq2d17q+T6V3pZsSvL9gJz5d7F07aW+yOuLLZftwmGN1wiajE+fK6CCa7aWJKLBuv/121TK6rEUeC1UMdIgikHSB+fjG
Xji/dX01VmD85BWYt9W7PvrHCyy49+s1uO3zVWpsjmQrptw5AP8Y2rrK8yc1r5+gOqhJG+ZZ5XRfW7v/pMpCSQez8QO0bE1FJENzZY9G6vpHC7UAqaJAZ1Dbils/n400YpBSuv3Hi9TPIWVwEjxVhTurY5GsjquzHYU2k80Oo2uMDjM6RBRoTz31lGpsUNYij4UqBjpEETzJ6wfX98CQdmmqLOrWz1bhj7O0dp6xIQvDXpuvxqDIhJx3XdACv957Drpk+lbq5cvkof91ZXMu7toAGcneHSCOH9AMEoPN3XpElbyVduhkkcr4yHPOb+3b+JyymhKodtKjvGsn7W1W56vlzOqEA5PZjFidTbvBjA4RBVhqaqpqGV3WIo+FKgY6RBFM5rp5d1x3jOqUrtow3/7FKvxWRoAhWZy7v1qN279YjaP5FrROkyxOfzw0vG2l5svxhqyLWLjjKE4VWUs8tu9YIX7bkOVpKe2tpvVqYXDbNHX9k0Vad7jSY35Et8wU1KkVg6q6fWALpCbGqiyXjNnxB2Z1wovVVHT6BjM6RETVgoEOUYSTMqs3r+mGMV0bqFKru79eg5/XHvQ8LoHP0Ffn49d1WZ4szv/uOQedG/kvi1Ncy9REFUhZ7c4zMkxSeibz3EgpmHRoqwz3BKI/rj5wxvifuZ6yNf+ctWqbnoTl/zcEd13QEv7CrE54sZmKZRaj2EmPiKg6MNAhIjVQ/tWruqqxLNJm+f5v16rMx11frcYdX65Wc+/IJJrVlcXxZvJQ6fD23coD6vpt53mfzXGTCVPbZySpQeASKBTvfiXZI1/nz6lJxbM6D/+wzqu5gSg42cyF6tKsi5UoNtCrQ0QUlhjoEJEi2ZoXL++McX0aQ7owT/zfJkxzZXHuGdQSv9wzoNqyOKVd2FkLdP7cfhS5Jq187fOle1XjBAlWpGOcLxkRd1bns8V7YbVrM0gv2XVMBT8y8ai8dzCTn+Gxi9rDGK1Xc/7c8cUqBjshymbRAh2LjtkcIqLqwkCHiDz0eh2euaSjp5uZdECbeucAPDCsTbVncYqTTm4t6tdS44Zmbz6sDubdrZpvO7+5z93dRndpgPqJscjONXmyRe6yNcnmVLVrXE3ompmiOuZJsCPNFRjshCaHK6Nj1ccGelWIiMIWAx0iKkEO9v89ugMWPHSBGovTqVFyQNbhQk/3tWz8tPqgKp9rmBLn6crm63ik6/s2Udc/WqhNIHq6rXRwl60V179FPQY7Ic7hyuhY9czoEJH/NG3aFK+//nqgVyNoMNAhojI1rhuPaB8nufSHUa7yNSnRksk3xU3nNKvyOv2tT2MV8Kw7cArfrNiPAyeK1G0Z/xJKGOyER6BjMzDQISKqLgx0iCgoSfOD5vVqqYH3e48VItEYhat7ZVb5fesmxKpJPcWTv2xUl32b11WTqIYaBjuhy+kKdOwGtpYmIqouDHSIKChJ+VrxMrVr+zZBQqx/ghHJDAmZKFUMalMfoYrBToiyae3BHczoEJHLBx98gAYNGsDh0PZNbmPGjMFNN92EnTt3qutpaWlISEhAr1698Mcff/j8ea+++io6deqEWrVqITMzE3feeSfy80tOqr1o0SIMHDgQ8fHxqF27NoYPH44TJ06ox2Q9X3rpJTWpaGxsLBo3boxnn30WwYSBDhEFLXegE2PQY3x/rUGCP7ROS8S5rep5bg9yTSYaqhjshJ5uGdrEtElJwd3pjyhsSDtRS0HFi7Xw7M/xZZHP9sKVV16JY8eOYe7cuZ77jh8/jhkzZmDcuHEqCBk1ahRmz56NNWvWYMSIERg9ejT27Ts9ZUJl6PV6vPnmm9i4cSM+/fRTzJkzBw8//LDn8bVr12Lw4MFo3749lixZgoULF6rPs9u1/csjjzyCF154AY8//jg2bdqEr776SgVhwST0ajWIKGK0b5CEN8d2Q+34aKQm+ffM923ntVDtqzs0SFLjkUKdO9i5afIKT7Dz3rU9YIyuuW555L3mydp5
xhQGOkQ1Q4KY5xqU+7D8j6y2CRQePQTE1Drr0yRjMnLkSBUwSIAhfvjhB9SrVw8XXHCBCky6dOnief7TTz+NKVOm4JdffsHdd99d6dW6//77SzQxeOaZZ3D77bfj3XffVfdJtqZnz56e26JDhw7qMi8vD2+88Qbefvtt3HDDDeq+Fi1a4JxzzkEwYUaHiILaxV0a4NxW/i8tO6dVPXx/ez/89/qeCBfM7IQQa5F2Gc0xOkR0mmRufvzxR5jNZnX7yy+/xDXXXKOCHMnoPPjgg2jXrh1SUlJU+drmzZt9zuj88ccfKqBq2LAhEhMTcd1116mMUmFhYYmMTlnkc2Udy3s8WDCjQ0QRq1fTOgg3zOyEVqDjjA79bCJRSJD/a5JZKYeMN8nNy0NSYqIKKvz+2V6S0jCZ+mDatGlqDM6ff/6J1157TT0mQc6sWbPw8ssvq3ExcXFxuOKKK2CxWCq9Snv27MFFF12EO+64Q42rqVOnjipNu/nmm9X7yZgcef/yVPRYMGFGh4gozDCzEyJlNCKKzQiIaoRMCC3lYxUtEpCc7Tm+LJWYjNpoNOKyyy5TmZyvv/4abdq0Qffu3T2NAW688UZceumlqolAenq6Clh8sWrVKhXcvfLKK+jbty9at26NQ4dKBoKdO3dW44HK0qpVKxXslPd4sGCgQ0QU5sHO8t3HsetIQaBXiYpj6RoRVVC+Jhmdjz/+WF0vHlz89NNPqqTsr7/+wt/+9rczOrR5q2XLlrBarXjrrbewa9cufP7555g0aVKJ50izgRUrVqhubOvWrcOWLVvw3nvv4ejRoyog++c//6maF3z22WeqI9zSpUvx0UcfIZgw0CEiCvNgZ/JNvVVjBwoejj53YHGLh+HocHmgV4WIgsygQYNUKdnWrVtVMFO8HbQ0LOjfv78qcZNWz+5sT2V16dJFvd+LL76Ijh07qgzS888/X+I5kuWZOXOmCqp69+6Nfv364eeff0ZUlDbyRbqtPfDAA3jiiSfUuKGrr74aOTk5CCYco0NEFObBDgWhui1xJKkjUKd5oNeEiIKMjBEqXUbm7owmLaCLu+uuu0rcrkwp2z/+8Q+1FCcNCYo7//zzVclceev5f//3f2oJVszoEBERERFR2GGgQ0REREQURqQULSEhoczFPRdOJGDpGhERERFRGLn44ovRp0+fMh+Ljo5GpGCgQ0REREQURmQC0MTEREQ6lq4REREREVHYYaBDRERERGHH6XQGehUowNuPgQ4RERERhQ33GJTCwsJArwpVgXv7VWVMEcfoEBEREVHYMBgMSElJ8UxeGR8fD51OV+FrHA4HLBYLTCaTmh+GApvJkSBHtp9sR9meNRrovPPOO/jPf/6D7OxsNbPqW2+9pWZMLc/333+vZk+VSYxatWqlZmEdNWqUzytNRERERFSe9PR0dekOdrw5uC4qKkJcXNxZgyKqGRLkuLdjjQU63377LSZMmIBJkyaptnWvv/46hg8fjq1btyI1NfWM5y9evBhjx47F888/j4suughfffUVLrnkEqxevRodO3as0soTEREREZUmwUpGRoY6NrVarWd9vjxnwYIFOO+88yKq/XKwkm1QlUyOz4HOq6++iltuuQXjx49XtyXgmTZtGj7++GP861//OuP5b7zxBkaMGIGHHnpI3X766acxa9YsvP322+q1RERERETVQQ6WvTlglufYbDYYjUYGOmGkUoGO1C6uWrUKjzzyiOc+qWMcMmQIlixZUuZr5H7JABUnGaCpU6eW+zlms1ktbrm5uZ5o25uovDT3a3x5LQUPbsfwwO0YWridiIgoIgKdo0ePwm63Iy0trcT9cnvLli1lvkbG8ZT1fLm/PFLmNnHixDPunzlzphpQ5ivJJFHo43YMD9yOoYFdi4iIKFQFZdc1yRgVzwJJRiczMxPDhg1DUlKST2ck5aBq6NChTEeGMG7H8MDtGFrcGXUiIqKwDnTq1aunahgPHz5c4n65XV5XBLm/Ms8X
sbGxaik9YZB0w/DlwEgOrOSspLxe6i8pNHE7hgdux9Ai20lw4r2S3L8PXwNB9/8DeT0D/tDGbRkeuB1Di/tv79n2TZUKdGJiYtCjRw/Mnj1bdU5z9x2X23fffXeZr+nXr596/P777/fcJ2dz5X5v5eXlqUvJ6hARUc2Tv8PJycmBXo2gwf0SEVHw75sqXbomJWU33HADevbsqebOkfbSBQUFni5s119/PRo2bKjG2Yj77rsP559/Pl555RVceOGF+Oabb7By5Up88MEHXn9mgwYNsH//fiQmJvrU29xd+ibv4UvpGwUHbsfwwO0YWuRsmexI5O8wncb9ErlxW4YHbsfw3DdVOtC5+uqrceTIETzxxBOqoUDXrl0xY8YMT8OBffv2lZhRtn///mrunMceewyPPvqomjBUOq5VZg4deb9GjRqhquSLyy9v6ON2DA/cjqGDmZwzcb9EpXFbhgdux/DaN+mcEVB4LVG6/DJOnTrFL28I43YMD9yORPx/EE64LcMDt2N4Op16ISIiIiIiChMREehIB7d///vfJTq5UejhdgwP3I5E/H8QTrgtwwO3Y3iKiNI1IiIiIiKKLBGR0SEiIiIiosjCQIeIiIiIiMIOAx0iIiIiIgo7ERXoyKRuMocPhTZux/C0Z88etW3Xrl0b6FUhqlH8mxb6uA3DF/dNoS3sAp133nkHTZs2hdFoRJ8+fbB8+fJArxJV0pNPPqn+qBRf2rZtG+jVorNYsGABRo8erWYpLmunL31PZKLhjIwMxMXFYciQIdi+fXvA1peoJnHfFNq4Xwpd3DdFtrAKdL799ltMmDBBtQdcvXo1unTpguHDhyMnJyfQq0aV1KFDB2RlZXmWhQsXBnqV6CwKCgrU/zk5oCvLSy+9hDfffBOTJk3CsmXLUKtWLfX/02Qy1fi6EtUk7pvCA/dLoYn7psgWVoHOq6++iltuuQXjx49H+/bt1Zc2Pj4eH3/8cZnPl52ORPDr1q2r8XWlikVFRSE9Pd2z1KtXr9zncjsGh5EjR+KZZ57BpZdeesZjcsbs9ddfx2OPPYYxY8agc+fO+Oyzz3Do0KFyyz3sdjtuuukmddZ03759NfATEFUP7pvCA/dLoYn7psgWNoGOxWLBqlWrVMrRTa/Xq9tLliw544t9zz33qC/zn3/+qb7YFFwkbSxp5ubNm2PcuHFl/jHhdgwdu3fvRnZ2don/n8nJyaqEp/T/T2E2m3HllVeqmmjZto0bN67hNSbyD+6bwgf3S+GH+6bwF4UwcfToURVlp6Wllbhfbm/ZssVz22az4dprr8WaNWtU2rlhw4YBWFuqiPyBmTx5Mtq0aaPKAyZOnIhzzz0XGzZsQGJionoOt2NokR2JKOv/p/sxt/z8fFx44YVqhzJ37ly10yEKVdw3hQful8IT903hL2wCHW/94x//QGxsLJYuXVph2pkCm2Z2k7NhsoNp0qQJvvvuO9x8883qfm7H8DV27Fg0atQIc+bMUQNDiSIB/6YFN+6XiPum0BQ2pWvyR8VgMODw4cMl7pfbUkvrNnToUBw8eBC///57ANaSfJGSkoLWrVtjx44dnvu4HUOL+//g2f5/ilGjRqm69rLKBohCDfdN4Yn7pfDAfVP4C5tAJyYmBj169MDs2bM99zkcDnW7X79+nvsuvvhifPXVV/j73/+Ob775JkBrS5Uh6eKdO3eqgZ1u3I6hpVmzZmqnUfz/Z25urupwU/z/p7jjjjvwwgsvqG08f/78AKwtkf9w3xSeuF8KD9w3RQBnGPnmm2+csbGxzsmTJzs3bdrkvPXWW50pKSnO7Oxs9bj8uFOmTFHXv//+e6fRaFSXFFweeOAB57x585y7d+92Llq0yDlkyBBnvXr1nDk5OepxbsfglJeX51yzZo1aZBu9+uqr6vrevXvV4y+88IL6//jzzz87161b5xwzZoyzWbNmzqKiIvW4bG95nbxGvPbaa86EhATnn3/+GdCfi6iquG8KfdwvhS7u
myJbWAU64q233nI2btzYGRMT4+zdu7dz6dKlnseK/yES3377rfpj9OOPPwZobaksV199tTMjI0Ntw4YNG6rbO3bs8DzO7Ric5s6dq7ZN6eWGG25QjzscDufjjz/uTEtLUwd9gwcPdm7dutXz+tI7E/HKK684ExMT1YEFUSjjvim0cb8Uurhvimw6+SfQWSUiIiIiIiJ/CpsxOkRERERERG4MdIiIiIiIKOww0CEiIiIiorDDQIeIiIiIiMIOAx0iIiIiIgo7DHSIiIiIiCjsMNAhIiIiIqKww0CHiIiIiIjCDgMdIj+58cYbcckllwR6NYiIiBTulyjSMdAhIiIiIqKww0CHqJJ++OEHdOrUCXFxcahbty6GDBmChx56CJ9++il+/vln6HQ6tcybN089f//+/bjqqquQkpKCOnXqYMyYMdizZ88ZZ9wmTpyI+vXrIykpCbfffjssFksAf0oiIgoV3C8RlS2qnPuJqAxZWVkYO3YsXnrpJVx66aXIy8vDn3/+ieuvvx779u1Dbm4uPvnkE/Vc2XlYrVYMHz4c/fr1U8+LiorCM888gxEjRmDdunWIiYlRz509ezaMRqPaCcnOZvz48Wpn9eyzzwb4JyYiomDG/RJR+RjoEFVyh2Kz2XDZZZehSZMm6j45iybkTJrZbEZ6errn+V988QUcDgc+/PBDdTZNyA5HzqLJzmPYsGHqPtmxfPzxx4iPj0eHDh3w1FNPqbNxTz/9NPR6Jl6JiKhs3C8RlY/fVKJK6NKlCwYPHqx2IldeeSX++9//4sSJE+U+/6+//sKOHTuQmJiIhIQEtcgZNZPJhJ07d5Z4X9mZuMmZtvz8fFVeQEREVB7ul4jKx4wOUSUYDAbMmjULixcvxsyZM/HWW2/h//7v/7Bs2bIyny87hR49euDLL7884zGpeyYiIqoK7peIysdAh6iSJNU/YMAAtTzxxBOqVGDKlCkqzW+320s8t3v37vj222+RmpqqBnNWdIatqKhIlRmIpUuXqrNsmZmZ1f7zEBFRaON+iahsLF0jqgQ5Q/bcc89h5cqVapDnTz/9hCNHjqBdu3Zo2rSpGsi5detWHD16VA34HDduHOrVq6c62sigz927d6sa6HvvvRcHDhzwvK90srn55puxadMmTJ8+Hf/+979x9913sw6aiIgqxP0SUfmY0SGqBDn7tWDBArz++uuqk42cNXvllVcwcuRI9OzZU+0s5FJKA+bOnYuBAweq5//zn/9UA0WlG07Dhg1VPXXxM2lyu1WrVjjvvPPUwFHpoPPkk08G9GclIqLgx/0SUfl0TqfTWcHjRFTNZL6CkydPYurUqYFeFSIiIu6XKGww/0hERERERGGHgQ4REREREYUdlq4REREREVHYYUaHiIiIiIjCDgMdIiIiIiIKOwx0iIiIiIgo7DDQISIiIiKisMNAh4iIiIiIwg4DHSIiIiIiCjsMdIiIiIiIKOww0CEiIiIiorDDQIeIiIiIiBBu/h9yMU5Zl4XCUgAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 1000x500 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot_learning_curves(history, sample_step=500)  # x-axis is training steps (not epochs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:48:40.300725Z",
     "start_time": "2025-06-26T01:48:39.548524Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(87.81, 0.33535973552465437)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Evaluate the trained model on the test set.\n",
    "# evaluate_model returns a (accuracy %, average loss) tuple — unpack it for clarity.\n",
    "test_accuracy, test_loss = evaluate_model(model, test_loader, device, loss_fn)\n",
    "test_accuracy, test_loss\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
