{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 查看FashionMNIST原始数据格式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['c:\\\\Program Files\\\\Python312\\\\python312.zip', 'c:\\\\Program Files\\\\Python312\\\\DLLs', 'c:\\\\Program Files\\\\Python312\\\\Lib', 'c:\\\\Program Files\\\\Python312', '', 'C:\\\\Users\\\\41507\\\\AppData\\\\Roaming\\\\Python\\\\Python312\\\\site-packages', 'C:\\\\Users\\\\41507\\\\AppData\\\\Roaming\\\\Python\\\\Python312\\\\site-packages\\\\win32', 'C:\\\\Users\\\\41507\\\\AppData\\\\Roaming\\\\Python\\\\Python312\\\\site-packages\\\\win32\\\\lib', 'C:\\\\Users\\\\41507\\\\AppData\\\\Roaming\\\\Python\\\\Python312\\\\site-packages\\\\Pythonwin', 'c:\\\\Program Files\\\\Python312\\\\Lib\\\\site-packages']\n"
     ]
    }
   ],
   "source": [
    "import sys\n",
    "print(sys.path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.363026Z",
     "start_time": "2025-06-26T01:43:29.447990Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz to ./data\\FashionMNIST\\raw\\train-images-idx3-ubyte.gz\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 26.4M/26.4M [00:02<00:00, 12.4MB/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./data\\FashionMNIST\\raw\\train-images-idx3-ubyte.gz to ./data\\FashionMNIST\\raw\n",
      "\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz to ./data\\FashionMNIST\\raw\\train-labels-idx1-ubyte.gz\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 29.5k/29.5k [00:00<00:00, 192kB/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./data\\FashionMNIST\\raw\\train-labels-idx1-ubyte.gz to ./data\\FashionMNIST\\raw\n",
      "\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz to ./data\\FashionMNIST\\raw\\t10k-images-idx3-ubyte.gz\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 4.42M/4.42M [00:01<00:00, 3.57MB/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./data\\FashionMNIST\\raw\\t10k-images-idx3-ubyte.gz to ./data\\FashionMNIST\\raw\n",
      "\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz\n",
      "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz to ./data\\FashionMNIST\\raw\\t10k-labels-idx1-ubyte.gz\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 5.15k/5.15k [00:00<?, ?B/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./data\\FashionMNIST\\raw\\t10k-labels-idx1-ubyte.gz to ./data\\FashionMNIST\\raw\n",
      "\n",
      "(<PIL.Image.Image image mode=L size=28x28 at 0x25E56E40500>, 9)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/wAALCAAcABwBAREA/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/9oACAEBAAA/APn+tbw1oNx4m8QWmkWx2yXD4LkZCADJJ+gFbviL4a63oc7COE3MW4hdn38duD976jNc9daDqllIsc9lKrMu4YGeMkdR7gj8KzcV7H8BtEvV16+1iWCeG1Wz8mOV02pIzupwCeuAp6Z98cZ90aIzLIlw0c0ZJ4KgjHoeOa+evjS9n/wnMcNxBPCYLKONFhA2FNzMpGenDcgd816V4K03wefC+m3NlpVhP+5QSXBiR5fMx825iMg5zwce3FdbOzTwgW90lu6uCm8eYrL02soIyCPQgggEdMGQ3cluiPNK0rJwrRQBNueuMkt+teNfGKxsdY8WWdxNqcNo66eieXMwVsb5DnH415Hp2rajpE5n02/urOUjBe3laMkehIPIrVm8eeLrhNknibVivoLtx/I1UPinxC3XXtUP1vJP8ay5JZJpGkldnduSzHJP41//2Q==",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAAAAABXZoBIAAACS0lEQVR4AWKgA2BkYOD1ZGBgZAHZxcjIAKZBbBBm+quS8v3rj1N/GBiZGP8wMKNIMv91cnnCzuU65+X/vww8/76hSP5iMFVgZtpp2HXm8nUz02PHGUHGQTHjf9cugd//GE7f+cUo8ft0yDSEJCMDw/8TCgyMf34x/Ph3/vYfT0VphLH/GRgY3kt+Z2fl+cH5z8aSSWwHqmsZuJiZvn18p/CPkYnr7z9ZBiaofQwMjMwMPFI/frH++sr/j537K9sldhOE5H9mhnBJJg4Gbtlf7L//cQhvusaCkGT5xXDlBxsXl6rSD2Yunr9PoraeYAGZx8T4+x/DHwaGbV+/s/1/zczxm+H3P2a9jwxMDMz///z6+Y+BwW7ime9v//z78/XrXw6GbwxsX4NAYc3AICSlJhmk/oPpN+czVjbhX1zHeOz+fWR9qcnIYNkkKvCX+cMfrl+M36+HneEVVGC4x/v5GycPHxcj83GpP3+/MTB/Z2DgF0lwy3z24/49VeFfrLxsf+UBY0xqv8vDw87Ayv/4mSiTRACHIrexMdMvJjYGRlYLlpeP+X485mHje/eQ5/uPP+svKwj9+vD77y/Wf4xsaixP/z/mFvnw5jULOysHL9Mbza+P37O/+f3nN6fERwOWC+sTn937wcPGwcb88+//by/+/WX5wfPrw4fffxRfMjIweBWLv/7wl5mNhZnxPysrGysjA+NLBrZ/EpfCGJn+MTA4tYnxMzGz/GV8+f/pvy/MDP9/f2Paff0YJBAYGBg0RN/LPPx1Fx5HFDIAaCTYdiCc4RIAAAAASUVORK5CYII=",
      "text/plain": [
       "<PIL.Image.Image image mode=L size=28x28>"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import torch\n",
    "import torchvision\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from torchvision import datasets, transforms\n",
    "from deeplearning_func import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
    "from deeplearning_func import evaluate_classification_model as evaluate_model\n",
    "# 加载Fashion MNIST数据集，张量就是和numpy数组一样\n",
    "transform = transforms.Compose([])\n",
    "train_dataset = datasets.FashionMNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.FashionMNIST(root='./data', train=False, download=True, transform=transform)\n",
    "print(train_dataset[0])\n",
    "train_dataset[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 加载数据并处理为tensor"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.407799Z",
     "start_time": "2025-06-26T01:43:32.363026Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集形状: (60000, 28, 28)\n",
      "训练集标签数量: 60000\n",
      "测试集形状: (10000, 28, 28)\n",
      "测试集标签数量: 10000\n"
     ]
    }
   ],
   "source": [
    "# 加载Fashion MNIST数据集，张量就是和numpy数组一样\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.286,), (0.353,))  \n",
    "])\n",
    "train_dataset = datasets.FashionMNIST(root='./data', train=True, download=True, transform=transform)\n",
    "test_dataset = datasets.FashionMNIST(root='./data', train=False, download=True, transform=transform)\n",
    "\n",
    "# 获取图像和标签\n",
    "# 注意：由于使用了transform，图像已经被转换为张量且标准化\n",
    "# 我们需要从dataset中提取原始图像用于显示\n",
    "train_images = train_dataset.data.numpy()\n",
    "train_labels = train_dataset.targets.numpy()\n",
    "test_images = test_dataset.data.numpy()\n",
    "test_labels = test_dataset.targets.numpy()\n",
    "\n",
    "# 定义类别名称\n",
    "class_names = ['T-shirt/top', '裤子', '套头衫', '连衣裙', '外套',\n",
    "               '凉鞋', '衬衫', '运动鞋', '包', '短靴']\n",
    "\n",
    "# 查看数据集基本信息\n",
    "print(f\"训练集形状: {train_images.shape}\")\n",
    "print(f\"训练集标签数量: {len(train_labels)}\")\n",
    "print(f\"测试集形状: {test_images.shape}\")\n",
    "print(f\"测试集标签数量: {len(test_labels)}\")\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 把数据集划分为训练集55000和验证集5000，并给DataLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.144223Z",
     "start_time": "2025-06-26T01:43:33.135368Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集大小: 55000\n",
      "验证集大小: 5000\n",
      "测试集大小: 10000\n",
      "批次大小: 64\n",
      "训练批次数: 860\n"
     ]
    }
   ],
   "source": [
    "# 从训练集中划分出验证集\n",
    "train_size = 55000\n",
    "val_size = 5000\n",
    "# 设置随机种子以确保每次得到相同的随机划分结果\n",
    "generator = torch.Generator().manual_seed(42)\n",
    "train_subset, val_subset = torch.utils.data.random_split(\n",
    "    train_dataset, \n",
    "    [train_size, val_size],\n",
    "    generator=generator #设置随机种子，确保每次得到相同的随机划分结果\n",
    ")\n",
    "\n",
    "# 创建数据加载器\n",
    "batch_size = 64\n",
    "train_loader = torch.utils.data.DataLoader(\n",
    "    train_subset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=True #打乱数据集，每次迭代时，数据集的顺序都会被打乱\n",
    ")\n",
    "\n",
    "val_loader = torch.utils.data.DataLoader(\n",
    "    val_subset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n",
    "\n",
    "test_loader = torch.utils.data.DataLoader(\n",
    "    test_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n",
    "\n",
    "# 打印数据集大小信息\n",
    "print(f\"训练集大小: {len(train_subset)}\")\n",
    "print(f\"验证集大小: {len(val_subset)}\")\n",
    "print(f\"测试集大小: {len(test_dataset)}\")\n",
    "print(f\"批次大小: {batch_size}\")\n",
    "print(f\"训练批次数: {len(train_loader)}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.148120Z",
     "start_time": "2025-06-26T01:43:33.145230Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "55040"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "64*860"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 搭建模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([20, 100])\n"
     ]
    }
   ],
   "source": [
    "#理解每个接口的方法，单独写例子\n",
    "import torch.nn as nn\n",
    "m=nn.BatchNorm1d(100)\n",
    "x=torch.randn(20,100)\n",
    "print(m(x).shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.152657Z",
     "start_time": "2025-06-26T01:43:33.148120Z"
    }
   },
   "outputs": [],
   "source": [
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "class NeuralNetwork(nn.Module):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        \n",
    "        # 第一组卷积层 - 32个卷积核\n",
    "        self.conv1 = nn.Conv2d(1, 32, kernel_size=3, padding=1) # 输入通道数，输出通道数代表的是卷积核的个数\n",
    "        self.conv2 = nn.Conv2d(32, 32, kernel_size=3, padding=1)\n",
    "        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\n",
    "        \n",
    "        # 第二组卷积层 - 64个卷积核\n",
    "        self.conv3 = nn.Conv2d(32, 64, kernel_size=3, padding=1)\n",
    "        self.conv4 = nn.Conv2d(64, 64, kernel_size=3, padding=1)\n",
    "\n",
    "        \n",
    "        # 第三组卷积层 - 128个卷积核\n",
    "        self.conv5 = nn.Conv2d(64, 128, kernel_size=3, padding=1)\n",
    "        self.conv6 = nn.Conv2d(128, 128, kernel_size=3, padding=1)\n",
    "\n",
    "        \n",
    "        # 计算全连接层的输入特征数\n",
    "        # 经过3次池化，图像尺寸从28x28变为3x3x128\n",
    "        self.fc1 = nn.Linear(128 * 3 * 3, 256)\n",
    "        self.fc2 = nn.Linear(256, 10)\n",
    "        \n",
    "        # 初始化权重\n",
    "        self.init_weights()\n",
    "        \n",
    "    def init_weights(self):\n",
    "        \"\"\"使用 xavier 均匀分布来初始化卷积层和全连接层的权重\"\"\"\n",
    "        for m in self.modules():\n",
    "            if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):\n",
    "                nn.init.xavier_uniform_(m.weight)\n",
    "                if m.bias is not None:\n",
    "                    nn.init.zeros_(m.bias)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        # x.shape [batch size, 1, 28, 28]\n",
    "        \n",
    "        # 第一组卷积层\n",
    "        x = F.relu(self.conv1(x))\n",
    "        print(f\"conv1后的形状: {x.shape}\")\n",
    "        x = F.relu(self.conv2(x))\n",
    "        print(f\"conv2后的形状: {x.shape}\")\n",
    "        x = self.pool(x)\n",
    "        print(f\"pool1后的形状: {x.shape}\")\n",
    "        \n",
    "        # 第二组卷积层\n",
    "        x = F.relu(self.conv3(x))\n",
    "        print(f\"conv3后的形状: {x.shape}\")\n",
    "        x = F.relu(self.conv4(x))\n",
    "        print(f\"conv4后的形状: {x.shape}\")\n",
    "        x = self.pool(x)\n",
    "        print(f\"pool2后的形状: {x.shape}\")\n",
    "        \n",
    "        # 第三组卷积层\n",
    "        x = F.relu(self.conv5(x))\n",
    "        print(f\"conv5后的形状: {x.shape}\")\n",
    "        x = F.relu(self.conv6(x))\n",
    "        print(f\"conv6后的形状: {x.shape}\")\n",
    "        x = self.pool(x)\n",
    "        print(f\"pool3后的形状: {x.shape}\")\n",
    "        \n",
    "        # 展平\n",
    "        x = x.view(x.size(0), -1)\n",
    "        print(f\"展平后的形状: {x.shape}\")\n",
    "        \n",
    "        # 全连接层\n",
    "        x = F.relu(self.fc1(x))\n",
    "        print(f\"fc1后的形状: {x.shape}\")\n",
    "        x = self.fc2(x)\n",
    "        print(f\"fc2后的形状: {x.shape}\")\n",
    "        \n",
    "        return x\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.185031Z",
     "start_time": "2025-06-26T01:43:33.152657Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "批次图像形状: torch.Size([64, 1, 28, 28])\n",
      "批次标签形状: torch.Size([64])\n",
      "----------------------------------------------------------------------------------------------------\n",
      "conv1后的形状: torch.Size([64, 32, 28, 28])\n",
      "conv2后的形状: torch.Size([64, 32, 28, 28])\n",
      "pool1后的形状: torch.Size([64, 32, 14, 14])\n",
      "conv3后的形状: torch.Size([64, 64, 14, 14])\n",
      "conv4后的形状: torch.Size([64, 64, 14, 14])\n",
      "pool2后的形状: torch.Size([64, 64, 7, 7])\n",
      "conv5后的形状: torch.Size([64, 128, 7, 7])\n",
      "conv6后的形状: torch.Size([64, 128, 7, 7])\n",
      "pool3后的形状: torch.Size([64, 128, 3, 3])\n",
      "展平后的形状: torch.Size([64, 1152])\n",
      "fc1后的形状: torch.Size([64, 256])\n",
      "fc2后的形状: torch.Size([64, 10])\n"
     ]
    }
   ],
   "source": [
    "# 实例化模型\n",
    "model = NeuralNetwork()\n",
    "\n",
    "# 从train_loader获取第一个批次的数据\n",
    "dataiter = iter(train_loader)\n",
    "images, labels = next(dataiter)\n",
    "\n",
    "# 查看批次数据的形状\n",
    "print(\"批次图像形状:\", images.shape)\n",
    "print(\"批次标签形状:\", labels.shape)\n",
    "\n",
    "\n",
    "print('-'*100)\n",
    "# 进行前向传播\n",
    "with torch.no_grad():  # 不需要计算梯度\n",
    "    outputs = model(images)\n",
    "    \n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "78400"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "next(model.parameters()).numel()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.203053Z",
     "start_time": "2025-06-26T01:43:33.199532Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "需要求梯度的参数总量: 264910\n",
      "模型总参数量: 264910\n",
      "\n",
      "各层参数量明细:\n",
      "linear_relu_stack.0.weight: 78400 参数\n",
      "linear_relu_stack.0.bias: 100 参数\n",
      "linear_relu_stack.Linear_1.weight: 10000 参数\n",
      "linear_relu_stack.Linear_1.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_1.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_1.bias: 100 参数\n",
      "linear_relu_stack.Linear_2.weight: 10000 参数\n",
      "linear_relu_stack.Linear_2.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_2.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_2.bias: 100 参数\n",
      "linear_relu_stack.Linear_3.weight: 10000 参数\n",
      "linear_relu_stack.Linear_3.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_3.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_3.bias: 100 参数\n",
      "linear_relu_stack.Linear_4.weight: 10000 参数\n",
      "linear_relu_stack.Linear_4.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_4.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_4.bias: 100 参数\n",
      "linear_relu_stack.Linear_5.weight: 10000 参数\n",
      "linear_relu_stack.Linear_5.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_5.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_5.bias: 100 参数\n",
      "linear_relu_stack.Linear_6.weight: 10000 参数\n",
      "linear_relu_stack.Linear_6.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_6.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_6.bias: 100 参数\n",
      "linear_relu_stack.Linear_7.weight: 10000 参数\n",
      "linear_relu_stack.Linear_7.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_7.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_7.bias: 100 参数\n",
      "linear_relu_stack.Linear_8.weight: 10000 参数\n",
      "linear_relu_stack.Linear_8.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_8.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_8.bias: 100 参数\n",
      "linear_relu_stack.Linear_9.weight: 10000 参数\n",
      "linear_relu_stack.Linear_9.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_9.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_9.bias: 100 参数\n",
      "linear_relu_stack.Linear_10.weight: 10000 参数\n",
      "linear_relu_stack.Linear_10.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_10.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_10.bias: 100 参数\n",
      "linear_relu_stack.Linear_11.weight: 10000 参数\n",
      "linear_relu_stack.Linear_11.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_11.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_11.bias: 100 参数\n",
      "linear_relu_stack.Linear_12.weight: 10000 参数\n",
      "linear_relu_stack.Linear_12.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_12.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_12.bias: 100 参数\n",
      "linear_relu_stack.Linear_13.weight: 10000 参数\n",
      "linear_relu_stack.Linear_13.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_13.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_13.bias: 100 参数\n",
      "linear_relu_stack.Linear_14.weight: 10000 参数\n",
      "linear_relu_stack.Linear_14.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_14.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_14.bias: 100 参数\n",
      "linear_relu_stack.Linear_15.weight: 10000 参数\n",
      "linear_relu_stack.Linear_15.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_15.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_15.bias: 100 参数\n",
      "linear_relu_stack.Linear_16.weight: 10000 参数\n",
      "linear_relu_stack.Linear_16.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_16.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_16.bias: 100 参数\n",
      "linear_relu_stack.Linear_17.weight: 10000 参数\n",
      "linear_relu_stack.Linear_17.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_17.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_17.bias: 100 参数\n",
      "linear_relu_stack.Linear_18.weight: 10000 参数\n",
      "linear_relu_stack.Linear_18.bias: 100 参数\n",
      "linear_relu_stack.BatchNorm_18.weight: 100 参数\n",
      "linear_relu_stack.BatchNorm_18.bias: 100 参数\n",
      "linear_relu_stack.Output Layer.weight: 1000 参数\n",
      "linear_relu_stack.Output Layer.bias: 10 参数\n"
     ]
    }
   ],
   "source": [
    "# 计算模型的总参数量\n",
    "# 统计需要求梯度的参数总量\n",
    "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "print(f\"需要求梯度的参数总量: {total_params}\")\n",
    "\n",
    "# 统计所有参数总量\n",
    "all_params = sum(p.numel() for p in model.parameters())\n",
    "print(f\"模型总参数量: {all_params}\")\n",
    "\n",
    "# 查看每层参数量明细\n",
    "print(\"\\n各层参数量明细:\")\n",
    "for name, param in model.named_parameters():\n",
    "    print(f\"{name}: {param.numel()} 参数\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.217395Z",
     "start_time": "2025-06-26T01:43:33.203561Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "OrderedDict([('linear_relu_stack.0.weight',\n",
       "              tensor([[-0.0254, -0.0611,  0.0578,  ..., -0.0313, -0.0333,  0.0806],\n",
       "                      [-0.0494,  0.0288, -0.0514,  ..., -0.0695, -0.0438, -0.0584],\n",
       "                      [ 0.0089,  0.0470, -0.0464,  ...,  0.0817,  0.0683,  0.0521],\n",
       "                      ...,\n",
       "                      [-0.0095,  0.0069, -0.0324,  ...,  0.0717, -0.0555, -0.0069],\n",
       "                      [ 0.0558, -0.0322, -0.0044,  ...,  0.0311,  0.0751,  0.0197],\n",
       "                      [-0.0319,  0.0327, -0.0644,  ...,  0.0459, -0.0054, -0.0516]])),\n",
       "             ('linear_relu_stack.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.Linear_1.weight',\n",
       "              tensor([[ 0.0554, -0.0808, -0.0937,  ..., -0.0881,  0.0850,  0.0209],\n",
       "                      [-0.1427,  0.0041,  0.0151,  ...,  0.0637,  0.0390,  0.0785],\n",
       "                      [ 0.1073,  0.0047,  0.0090,  ...,  0.0469, -0.0451,  0.0295],\n",
       "                      ...,\n",
       "                      [-0.0126,  0.0142,  0.0890,  ...,  0.1251,  0.1359,  0.0540],\n",
       "                      [ 0.1162, -0.0323, -0.1073,  ..., -0.0933, -0.0248, -0.0449],\n",
       "                      [-0.1095, -0.0203, -0.1056,  ...,  0.1502,  0.1230,  0.1496]])),\n",
       "             ('linear_relu_stack.Linear_1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_1.running_mean',\n",
       "              tensor([ 0.0289, -0.0412,  0.0186, -0.0031,  0.0754, -0.0030, -0.1043, -0.0252,\n",
       "                      -0.0541, -0.0326,  0.0049, -0.0344, -0.1260, -0.0711, -0.0377,  0.0329,\n",
       "                       0.1061, -0.0189,  0.0921, -0.0610,  0.0096, -0.0064,  0.0234,  0.0344,\n",
       "                       0.1196, -0.0036, -0.0241, -0.0497,  0.0841,  0.1127, -0.0022, -0.0308,\n",
       "                       0.0440,  0.1045,  0.0024, -0.0390, -0.0310,  0.0242, -0.0474,  0.0062,\n",
       "                       0.0719, -0.0058,  0.0340, -0.0144,  0.0424,  0.0596,  0.1065, -0.0039,\n",
       "                       0.0682,  0.0132,  0.0158, -0.0260, -0.0567,  0.0349, -0.0740,  0.0389,\n",
       "                      -0.0944,  0.0160, -0.0496, -0.0974,  0.0219,  0.1115,  0.0125,  0.0620,\n",
       "                      -0.0370,  0.0134,  0.0101, -0.0729, -0.0061,  0.0805, -0.0249, -0.0234,\n",
       "                       0.0054,  0.0234, -0.0257, -0.0852, -0.0390,  0.0007,  0.0532,  0.0578,\n",
       "                       0.0503, -0.0205, -0.0494, -0.0148,  0.0049,  0.0013, -0.0019, -0.0379,\n",
       "                      -0.0134, -0.0541,  0.0285,  0.0165,  0.0169, -0.1148, -0.0323, -0.0080,\n",
       "                       0.0203,  0.1324,  0.0107,  0.1173])),\n",
       "             ('linear_relu_stack.BatchNorm_1.running_var',\n",
       "              tensor([0.9533, 0.9397, 0.9211, 0.9328, 0.9274, 0.9445, 0.9392, 1.0147, 0.9285,\n",
       "                      0.9342, 0.9609, 0.9354, 0.9367, 0.9352, 0.9224, 0.9175, 0.9300, 0.9211,\n",
       "                      0.9590, 0.9435, 0.9472, 0.9572, 0.9228, 0.9693, 0.9314, 0.9136, 0.9203,\n",
       "                      0.9285, 0.9616, 0.9503, 0.9471, 0.9635, 0.9390, 0.9186, 0.9828, 0.9398,\n",
       "                      0.9365, 0.9185, 0.9279, 0.9210, 0.9291, 0.9353, 0.9183, 0.9385, 0.9259,\n",
       "                      0.9281, 0.9386, 0.9371, 0.9598, 0.9252, 0.9250, 0.9498, 0.9188, 0.9319,\n",
       "                      0.9264, 0.9395, 0.9216, 0.9354, 0.9511, 0.9349, 0.9289, 0.9682, 0.9266,\n",
       "                      0.9303, 0.9360, 0.9942, 0.9500, 0.9450, 0.9330, 0.9601, 0.9234, 0.9768,\n",
       "                      0.9280, 0.9381, 0.9375, 0.9405, 0.9290, 0.9458, 0.9289, 0.9666, 0.9494,\n",
       "                      0.9804, 0.9460, 0.9380, 0.9444, 0.9185, 0.9319, 0.9279, 0.9402, 0.9255,\n",
       "                      0.9727, 0.9316, 0.9381, 0.9339, 0.9252, 0.9410, 0.9425, 0.9501, 0.9385,\n",
       "                      0.9312])),\n",
       "             ('linear_relu_stack.BatchNorm_1.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_2.weight',\n",
       "              tensor([[-0.0968,  0.0040,  0.0049,  ...,  0.0940, -0.0220,  0.0103],\n",
       "                      [-0.0524,  0.0751,  0.0729,  ..., -0.0705, -0.0419, -0.1500],\n",
       "                      [-0.0698, -0.0053, -0.0791,  ..., -0.1550,  0.1028,  0.0639],\n",
       "                      ...,\n",
       "                      [-0.0335, -0.1335,  0.0018,  ..., -0.1166, -0.1719, -0.1340],\n",
       "                      [-0.1227, -0.1053, -0.0300,  ...,  0.0048,  0.1266,  0.1687],\n",
       "                      [ 0.0651,  0.0541,  0.0348,  ...,  0.1034,  0.0229, -0.1625]])),\n",
       "             ('linear_relu_stack.Linear_2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_2.running_mean',\n",
       "              tensor([-0.0218, -0.0468, -0.0747, -0.0226,  0.0182, -0.0362, -0.0020,  0.0121,\n",
       "                      -0.0120,  0.0477,  0.0137, -0.0014, -0.0546, -0.0415, -0.0320,  0.0149,\n",
       "                       0.0439,  0.0289,  0.0203,  0.0310,  0.0061,  0.0257, -0.0778, -0.0369,\n",
       "                       0.0466,  0.0017, -0.0479, -0.0225,  0.0021, -0.0422, -0.0228,  0.0075,\n",
       "                       0.0948,  0.0677, -0.0398, -0.0511, -0.0590, -0.0476, -0.0298, -0.0373,\n",
       "                       0.0102,  0.0120, -0.0213,  0.0025,  0.0564, -0.0028, -0.0241,  0.0416,\n",
       "                      -0.0341,  0.0793,  0.0168,  0.0571,  0.0150,  0.0106, -0.0352,  0.0546,\n",
       "                      -0.0022,  0.0065, -0.0152,  0.0159, -0.0406,  0.0704,  0.0179,  0.0485,\n",
       "                       0.0407,  0.0013,  0.0348,  0.0383, -0.0692, -0.0464,  0.0157, -0.0916,\n",
       "                      -0.0119,  0.0581,  0.0128, -0.0450, -0.0043,  0.0629, -0.0186, -0.0072,\n",
       "                      -0.0766,  0.0015,  0.0450, -0.0035,  0.0371,  0.0033, -0.0081,  0.0231,\n",
       "                      -0.0266,  0.0306,  0.0528,  0.0547, -0.0123,  0.0592,  0.0216,  0.0262,\n",
       "                      -0.0460,  0.0090, -0.0775,  0.0399])),\n",
       "             ('linear_relu_stack.BatchNorm_2.running_var',\n",
       "              tensor([0.9384, 0.9335, 0.9392, 0.9262, 0.9682, 0.9242, 0.9363, 0.9250, 0.9307,\n",
       "                      0.9298, 0.9420, 0.9348, 0.9374, 0.9335, 0.9521, 0.9278, 0.9220, 0.9171,\n",
       "                      0.9292, 0.9411, 0.9333, 0.9350, 0.9319, 0.9346, 0.9297, 0.9399, 0.9362,\n",
       "                      0.9286, 0.9361, 0.9376, 0.9331, 0.9281, 0.9416, 0.9314, 0.9364, 0.9365,\n",
       "                      0.9308, 0.9286, 0.9207, 0.9379, 0.9396, 0.9350, 0.9295, 0.9624, 0.9369,\n",
       "                      0.9321, 0.9378, 0.9354, 0.9313, 0.9586, 0.9347, 0.9312, 0.9262, 0.9180,\n",
       "                      0.9475, 0.9352, 0.9371, 0.9330, 0.9336, 0.9265, 0.9259, 0.9274, 0.9194,\n",
       "                      0.9395, 0.9502, 0.9622, 0.9540, 0.9221, 0.9539, 0.9342, 0.9269, 0.9571,\n",
       "                      0.9288, 0.9412, 0.9201, 0.9226, 0.9513, 0.9270, 0.9454, 0.9297, 0.9328,\n",
       "                      0.9251, 0.9217, 0.9454, 0.9341, 0.9504, 0.9197, 0.9266, 0.9173, 0.9382,\n",
       "                      0.9205, 0.9255, 0.9349, 0.9414, 0.9229, 0.9382, 0.9323, 0.9321, 0.9250,\n",
       "                      0.9492])),\n",
       "             ('linear_relu_stack.BatchNorm_2.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_3.weight',\n",
       "              tensor([[ 0.1688,  0.1581,  0.1731,  ..., -0.0188, -0.1316, -0.1596],\n",
       "                      [-0.1219, -0.1453,  0.1348,  ...,  0.1539,  0.1680,  0.0658],\n",
       "                      [-0.0066, -0.0574,  0.0675,  ..., -0.0392, -0.0953, -0.1172],\n",
       "                      ...,\n",
       "                      [ 0.0065, -0.0287, -0.0650,  ...,  0.1302,  0.1647,  0.0847],\n",
       "                      [ 0.1377,  0.0009, -0.0530,  ..., -0.0683,  0.0252,  0.0624],\n",
       "                      [ 0.0987, -0.0588,  0.0133,  ..., -0.0255,  0.1653,  0.1128]])),\n",
       "             ('linear_relu_stack.Linear_3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_3.running_mean',\n",
       "              tensor([-6.0536e-09,  1.0710e-09, -1.8626e-09, -4.5635e-09, -1.2107e-09,\n",
       "                       4.0978e-09,  4.5169e-09,  6.5193e-09, -5.5879e-10,  6.9849e-10,\n",
       "                      -1.6764e-09, -7.8231e-09, -2.7940e-10, -1.2340e-09,  4.1793e-09,\n",
       "                      -1.9558e-09,  2.0489e-09, -8.7544e-09,  6.4843e-09, -3.9116e-09,\n",
       "                       4.7265e-09, -1.1176e-09, -3.3062e-09, -2.8871e-09, -2.2352e-09,\n",
       "                       4.2142e-09,  1.1758e-09,  4.8662e-09, -6.0536e-10, -5.6811e-09,\n",
       "                      -2.0489e-09, -3.4925e-10,  1.7229e-09, -1.2107e-09,  3.7253e-10,\n",
       "                       0.0000e+00, -1.7695e-09, -5.2154e-09,  3.7719e-09,  1.1642e-10,\n",
       "                       4.2375e-09,  2.7474e-09,  4.7497e-09,  1.2456e-09,  2.3283e-09,\n",
       "                       3.0268e-09, -2.1013e-09,  2.8638e-09,  4.7730e-10, -4.1211e-09,\n",
       "                       1.3970e-09,  6.8452e-09, -6.4261e-09, -7.1945e-09, -2.0489e-09,\n",
       "                      -2.9802e-09,  2.5146e-09,  4.5635e-09,  3.4925e-10,  4.3074e-09,\n",
       "                      -1.4203e-09,  1.5832e-09,  5.0990e-09,  3.5623e-09, -1.1176e-09,\n",
       "                      -2.2352e-09,  1.3271e-09,  7.1246e-09, -5.5879e-10,  1.2107e-09,\n",
       "                       5.4482e-09, -3.5390e-09,  3.5390e-09, -7.4506e-10, -7.3109e-09,\n",
       "                      -7.3109e-09,  3.4226e-09,  4.7963e-09, -7.2178e-10,  2.2352e-09,\n",
       "                      -3.0734e-09, -6.3330e-09,  1.1642e-09,  1.8626e-09,  2.4680e-09,\n",
       "                      -3.9116e-09, -3.9116e-09, -4.6100e-09,  9.3132e-10,  1.0012e-09,\n",
       "                      -3.3993e-09,  1.1176e-09, -3.4925e-10,  4.9826e-09,  6.1700e-09,\n",
       "                       3.0268e-09,  6.5193e-10,  1.4668e-09,  4.6566e-10,  4.0978e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_3.running_var',\n",
       "              tensor([0.9811, 0.9805, 0.9694, 0.9844, 0.9839, 1.0431, 0.9766, 1.0470, 1.0235,\n",
       "                      1.0909, 1.0167, 1.0018, 0.9938, 1.0651, 0.9492, 1.0304, 0.9752, 1.0031,\n",
       "                      1.0017, 1.0392, 0.9982, 0.9836, 1.0153, 1.0235, 0.9539, 1.0637, 1.0120,\n",
       "                      1.0039, 0.9802, 1.1540, 0.9974, 1.0942, 1.0949, 0.9610, 0.9524, 1.0440,\n",
       "                      0.9883, 0.9733, 1.0373, 0.9843, 1.0423, 1.0134, 0.9733, 0.9677, 0.9917,\n",
       "                      1.0054, 0.9485, 1.0074, 0.9734, 0.9748, 0.9469, 0.9918, 1.0564, 0.9684,\n",
       "                      0.9916, 0.9827, 0.9890, 1.0193, 0.9742, 0.9966, 1.0284, 1.0437, 0.9944,\n",
       "                      1.0086, 0.9644, 0.9799, 0.9729, 0.9890, 0.9726, 0.9921, 0.9931, 1.0188,\n",
       "                      1.0109, 0.9772, 1.1564, 0.9773, 1.0276, 0.9905, 0.9975, 0.9898, 0.9694,\n",
       "                      1.0096, 0.9856, 1.0461, 1.0556, 0.9931, 0.9819, 0.9644, 1.0102, 1.0089,\n",
       "                      0.9948, 1.0038, 1.0082, 1.0093, 1.1292, 0.9825, 0.9812, 0.9524, 1.0087,\n",
       "                      1.0038])),\n",
       "             ('linear_relu_stack.BatchNorm_3.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_4.weight',\n",
       "              tensor([[ 0.1619, -0.0540, -0.1401,  ...,  0.1567, -0.0241,  0.0513],\n",
       "                      [ 0.0367, -0.0060,  0.0973,  ...,  0.1686,  0.1548,  0.1306],\n",
       "                      [ 0.0753,  0.0263, -0.0433,  ..., -0.0233,  0.1672,  0.1175],\n",
       "                      ...,\n",
       "                      [-0.0385,  0.0835,  0.1091,  ...,  0.1297, -0.0921, -0.0933],\n",
       "                      [-0.1679, -0.1543,  0.0773,  ..., -0.1572,  0.0840,  0.0417],\n",
       "                      [-0.0309,  0.0003,  0.0580,  ...,  0.1636,  0.0327,  0.0242]])),\n",
       "             ('linear_relu_stack.Linear_4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_4.running_mean',\n",
       "              tensor([ 3.0152e-09, -3.0734e-09,  9.3132e-11, -2.6484e-09,  8.3819e-10,\n",
       "                       3.2131e-09, -3.2596e-10, -2.8871e-09, -1.7695e-09, -1.7229e-09,\n",
       "                       2.5611e-09,  2.9337e-09, -4.6566e-10,  3.9057e-09, -4.2841e-09,\n",
       "                       2.8871e-09,  4.9593e-09,  8.3819e-10, -2.6077e-09, -2.3283e-10,\n",
       "                      -1.5832e-09, -6.7754e-09, -3.0501e-09, -6.9849e-10,  2.1886e-09,\n",
       "                      -2.6543e-09, -1.8626e-10, -6.2864e-10,  4.6566e-11, -1.4901e-09,\n",
       "                       6.5193e-10, -4.7963e-09, -2.3283e-09, -8.1491e-11,  1.4901e-09,\n",
       "                      -2.5611e-09,  1.4436e-09,  4.5402e-09,  2.4680e-09, -1.8161e-09,\n",
       "                      -2.6543e-09,  3.7719e-09,  1.0245e-09,  1.9558e-09,  1.3388e-09,\n",
       "                       1.8859e-09,  9.3132e-11,  4.8429e-09,  1.7229e-09,  1.9092e-09,\n",
       "                       1.1642e-09, -4.1910e-10,  2.3283e-10, -5.1223e-10, -3.4925e-09,\n",
       "                      -1.2107e-09,  3.3295e-09,  9.3132e-11,  1.1176e-09, -1.8626e-09,\n",
       "                       4.6566e-10,  1.1642e-09, -2.0955e-10,  4.0047e-09, -2.0489e-09,\n",
       "                      -9.3132e-11,  3.6671e-09,  1.6764e-09, -4.7963e-09, -4.6100e-09,\n",
       "                      -9.3132e-10, -8.3819e-10, -3.3295e-09, -2.0489e-09,  6.9849e-10,\n",
       "                      -1.3039e-09, -6.9849e-11, -7.4506e-10,  8.3819e-10, -1.3039e-09,\n",
       "                       1.6298e-09,  1.4901e-09, -1.5832e-09,  2.2352e-09,  2.2817e-09,\n",
       "                       3.7253e-10,  1.4901e-09,  1.0245e-09,  3.0268e-09,  2.0489e-09,\n",
       "                       6.5193e-10,  1.9092e-09, -3.2596e-10, -2.9802e-09, -2.7474e-09,\n",
       "                      -6.9849e-11, -4.1910e-10,  2.7940e-09, -6.3796e-09,  3.2596e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_4.running_var',\n",
       "              tensor([0.9679, 1.0089, 1.0154, 1.0008, 1.0053, 0.9961, 1.0084, 1.1382, 0.9525,\n",
       "                      0.9906, 0.9551, 0.9999, 1.0482, 0.9922, 1.0127, 0.9638, 0.9763, 0.9790,\n",
       "                      1.0223, 0.9903, 0.9791, 0.9466, 0.9815, 0.9881, 0.9696, 0.9501, 1.0012,\n",
       "                      0.9639, 0.9673, 0.9537, 1.0196, 0.9889, 1.0772, 0.9889, 1.0638, 0.9722,\n",
       "                      0.9712, 0.9759, 1.0319, 0.9578, 1.0404, 1.0311, 0.9855, 1.0148, 1.0047,\n",
       "                      0.9855, 1.0272, 0.9987, 0.9840, 1.0165, 0.9635, 1.0459, 1.0023, 1.0191,\n",
       "                      0.9542, 0.9617, 1.0439, 0.9857, 0.9644, 0.9804, 1.0391, 1.0423, 1.0096,\n",
       "                      0.9990, 0.9633, 0.9748, 0.9759, 0.9795, 0.9905, 1.0247, 0.9880, 0.9768,\n",
       "                      0.9393, 1.0598, 0.9715, 0.9798, 1.0250, 1.0151, 0.9780, 1.0419, 0.9793,\n",
       "                      0.9989, 0.9830, 0.9958, 0.9408, 0.9920, 1.0165, 1.0011, 0.9733, 0.9499,\n",
       "                      1.0636, 1.0177, 1.0852, 0.9880, 0.9681, 0.9727, 0.9916, 1.0257, 1.0533,\n",
       "                      1.0515])),\n",
       "             ('linear_relu_stack.BatchNorm_4.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_5.weight',\n",
       "              tensor([[ 0.0280,  0.0539,  0.1707,  ..., -0.0118, -0.1618,  0.0416],\n",
       "                      [-0.1003,  0.1207, -0.0114,  ..., -0.0966,  0.1355,  0.1548],\n",
       "                      [ 0.0762,  0.0926, -0.0561,  ..., -0.1533, -0.0030, -0.0854],\n",
       "                      ...,\n",
       "                      [-0.0201,  0.1521,  0.1396,  ..., -0.0494,  0.1021,  0.0830],\n",
       "                      [ 0.0120, -0.1212,  0.0376,  ...,  0.1035, -0.1143,  0.1596],\n",
       "                      [-0.0803, -0.0732, -0.1648,  ..., -0.1240, -0.1152,  0.0490]])),\n",
       "             ('linear_relu_stack.Linear_5.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_5.running_mean',\n",
       "              tensor([ 1.1234e-09, -1.0245e-09, -1.6298e-09, -3.5390e-09,  3.8650e-09,\n",
       "                      -1.4901e-09, -9.3132e-10, -2.8638e-09,  4.3772e-09,  2.7940e-10,\n",
       "                      -1.3039e-09, -6.5193e-10, -1.3970e-09,  1.1642e-09,  2.3283e-09,\n",
       "                       9.3132e-10,  2.4214e-09,  1.8626e-10,  1.0012e-09,  4.0047e-09,\n",
       "                      -4.0978e-09, -2.4622e-09,  3.1665e-09,  2.1886e-09, -2.1420e-09,\n",
       "                      -2.9686e-09,  1.6531e-09, -7.9162e-10,  3.4925e-09,  3.9116e-09,\n",
       "                       1.3970e-09, -2.2352e-09,  1.9558e-09, -1.2573e-09,  1.5134e-09,\n",
       "                       7.4506e-10, -4.5635e-09,  9.3132e-10, -2.2468e-09, -2.5146e-09,\n",
       "                      -6.0536e-10, -5.4250e-09, -2.1886e-09, -7.4506e-10,  2.0955e-10,\n",
       "                      -1.6764e-09, -3.0734e-09, -2.2352e-09, -3.2596e-09, -1.8161e-09,\n",
       "                      -6.5193e-10,  1.7695e-09,  3.6787e-09,  5.1223e-10, -5.5879e-10,\n",
       "                      -1.8626e-10,  1.8161e-09,  2.3283e-10, -4.4005e-09,  4.1910e-10,\n",
       "                       5.1223e-10,  1.5367e-09, -1.3970e-10, -1.4436e-09, -2.5611e-09,\n",
       "                       2.8871e-09, -2.7940e-09, -2.3283e-10, -3.5390e-09,  1.2107e-09,\n",
       "                       1.0245e-09,  2.5611e-09, -2.5379e-09, -1.0245e-09, -2.3283e-10,\n",
       "                       2.4447e-09, -4.6566e-11,  1.3504e-09,  6.0303e-09,  4.4936e-09,\n",
       "                       6.5193e-10, -1.0594e-09,  2.1420e-09,  4.5868e-09, -1.4668e-09,\n",
       "                      -3.5856e-09,  1.6764e-09, -3.1665e-09, -2.7940e-10,  1.3504e-09,\n",
       "                       2.9569e-09, -1.3970e-09,  3.2596e-09,  4.2841e-09, -4.2375e-09,\n",
       "                      -6.4727e-09, -4.4238e-10,  2.2352e-09,  3.8184e-09, -1.4901e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_5.running_var',\n",
       "              tensor([0.9873, 1.0394, 0.9786, 1.0504, 1.0013, 1.0393, 0.9718, 0.9868, 1.0457,\n",
       "                      0.9593, 1.0312, 0.9889, 0.9485, 0.9854, 1.0021, 0.9564, 0.9807, 0.9635,\n",
       "                      0.9806, 0.9645, 0.9858, 1.0142, 1.0371, 1.0254, 1.0328, 1.0077, 0.9977,\n",
       "                      0.9793, 1.0006, 0.9803, 1.0592, 0.9915, 0.9959, 0.9901, 1.0461, 0.9686,\n",
       "                      0.9751, 1.0133, 0.9536, 0.9862, 0.9767, 0.9968, 0.9852, 0.9828, 0.9684,\n",
       "                      1.0150, 1.0283, 1.0623, 0.9686, 1.0510, 0.9413, 0.9909, 1.0151, 0.9578,\n",
       "                      1.0177, 0.9756, 0.9525, 1.0031, 0.9919, 1.0101, 0.9912, 0.9915, 0.9925,\n",
       "                      0.9753, 1.0124, 0.9543, 1.0338, 1.0251, 1.0979, 0.9851, 1.0588, 1.0128,\n",
       "                      1.0302, 0.9852, 0.9763, 1.0227, 0.9493, 1.0175, 0.9913, 0.9424, 1.0773,\n",
       "                      0.9817, 0.9971, 0.9852, 0.9411, 0.9925, 1.0688, 0.9989, 0.9982, 1.0003,\n",
       "                      0.9620, 1.0642, 0.9847, 0.9727, 0.9794, 1.0104, 0.9693, 1.0065, 0.9972,\n",
       "                      0.9823])),\n",
       "             ('linear_relu_stack.BatchNorm_5.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_6.weight',\n",
       "              tensor([[ 0.0977,  0.0900, -0.0634,  ..., -0.1096,  0.1054,  0.0074],\n",
       "                      [-0.0719,  0.0935,  0.1673,  ..., -0.0324,  0.0973,  0.0245],\n",
       "                      [ 0.0864, -0.0739,  0.1674,  ...,  0.0005, -0.0270,  0.0055],\n",
       "                      ...,\n",
       "                      [ 0.0625,  0.1249, -0.0581,  ..., -0.1670,  0.0256, -0.1143],\n",
       "                      [-0.1662, -0.0235,  0.1523,  ..., -0.0256, -0.0600, -0.0760],\n",
       "                      [ 0.0181, -0.0829, -0.1174,  ...,  0.1354,  0.0889,  0.0146]])),\n",
       "             ('linear_relu_stack.Linear_6.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_6.running_mean',\n",
       "              tensor([-4.6566e-11, -1.5832e-09,  1.1642e-09,  9.3132e-11, -2.8405e-09,\n",
       "                      -1.0827e-09, -6.5193e-10, -2.7008e-09, -4.6566e-10,  5.8208e-09,\n",
       "                       1.2107e-09,  1.6182e-09,  2.0373e-09,  9.7789e-10, -4.6100e-09,\n",
       "                      -1.5134e-09,  3.0268e-09, -4.9826e-09,  2.0955e-09, -3.7253e-10,\n",
       "                      -4.5169e-09, -4.5868e-09,  2.8405e-09,  2.4098e-09, -3.6438e-09,\n",
       "                      -1.1991e-09, -1.0710e-09, -2.7940e-09, -7.4506e-10, -6.7521e-10,\n",
       "                      -2.6077e-09,  2.0489e-09, -1.1176e-09,  4.0513e-09, -3.5856e-09,\n",
       "                       3.7253e-10,  4.0513e-09, -2.8871e-09,  1.6764e-09, -2.4913e-09,\n",
       "                      -2.7008e-09,  2.0955e-09,  5.3551e-10, -8.1491e-10,  2.2817e-09,\n",
       "                      -8.8476e-10,  1.1176e-09,  1.2107e-09,  4.6566e-10,  4.6566e-11,\n",
       "                      -1.3970e-10,  1.9092e-09,  1.4668e-09, -7.4506e-10, -1.3970e-10,\n",
       "                      -9.0804e-10,  8.1491e-10,  3.3528e-09, -7.9162e-10,  2.4680e-09,\n",
       "                      -1.7928e-09,  4.7963e-09, -2.7008e-09,  8.1491e-10, -3.7719e-09,\n",
       "                       2.3283e-09,  4.6566e-11, -2.4796e-09,  2.6077e-09,  4.3772e-09,\n",
       "                       9.3132e-10, -5.0757e-09, -1.0710e-09,  2.2585e-09,  1.6298e-09,\n",
       "                      -2.3749e-09,  2.6077e-09, -2.0023e-09, -4.6566e-10,  1.7229e-09,\n",
       "                      -1.5832e-09,  1.1642e-09, -2.0489e-09,  6.5193e-10,  4.8894e-10,\n",
       "                       1.3504e-09,  2.3283e-10,  3.6322e-09, -2.7940e-10,  5.2154e-09,\n",
       "                      -3.6322e-09,  1.4901e-09,  5.2154e-09,  3.0268e-09,  2.3749e-09,\n",
       "                       1.5832e-09, -2.7940e-09,  4.9360e-09, -3.3062e-09, -3.4459e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_6.running_var',\n",
       "              tensor([0.9971, 0.9900, 0.9548, 1.0282, 0.9635, 0.9794, 0.9368, 0.9989, 1.0043,\n",
       "                      0.9634, 0.9774, 0.9862, 1.0506, 0.9324, 1.0273, 0.9950, 0.9854, 0.9651,\n",
       "                      0.9697, 0.9985, 0.9890, 0.9822, 1.0047, 1.0568, 0.9686, 0.9782, 0.9791,\n",
       "                      1.0086, 1.0411, 0.9808, 1.0193, 0.9770, 0.9689, 0.9858, 0.9738, 0.9855,\n",
       "                      1.0313, 0.9959, 1.0511, 0.9779, 0.9853, 0.9407, 0.9844, 1.0478, 0.9795,\n",
       "                      0.9781, 0.9798, 0.9520, 0.9887, 0.9531, 1.0487, 1.0025, 0.9348, 1.0605,\n",
       "                      0.9897, 0.9632, 0.9907, 0.9809, 0.9600, 1.0869, 1.0240, 0.9965, 1.0296,\n",
       "                      1.0334, 0.9940, 0.9708, 0.9658, 0.9516, 1.0049, 0.9607, 1.0534, 0.9729,\n",
       "                      0.9751, 0.9648, 1.0025, 0.9721, 1.0446, 0.9894, 0.9746, 1.0114, 0.9595,\n",
       "                      0.9843, 0.9865, 0.9856, 0.9737, 1.0031, 1.0217, 0.9912, 0.9611, 1.0143,\n",
       "                      0.9981, 1.0150, 1.0993, 1.0160, 0.9852, 1.0311, 1.0170, 0.9855, 0.9921,\n",
       "                      1.0281])),\n",
       "             ('linear_relu_stack.BatchNorm_6.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_7.weight',\n",
       "              tensor([[-0.0008,  0.1549,  0.0414,  ...,  0.0477,  0.0982, -0.0387],\n",
       "                      [-0.1106,  0.0923,  0.0787,  ..., -0.1061,  0.0099,  0.1246],\n",
       "                      [ 0.1200, -0.0708, -0.1233,  ..., -0.1680, -0.1218, -0.0846],\n",
       "                      ...,\n",
       "                      [ 0.1031,  0.0811, -0.1640,  ..., -0.1018, -0.0015, -0.0972],\n",
       "                      [ 0.0368,  0.0303,  0.0507,  ...,  0.1059, -0.0572, -0.1369],\n",
       "                      [-0.0838,  0.0417, -0.1650,  ...,  0.0590, -0.0497, -0.0349]])),\n",
       "             ('linear_relu_stack.Linear_7.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_7.running_mean',\n",
       "              tensor([ 1.3504e-09, -4.0513e-09, -1.2107e-09, -1.8626e-10,  3.3993e-09,\n",
       "                       3.0734e-09, -3.7253e-10,  5.2270e-09, -1.2456e-09, -1.2107e-09,\n",
       "                       2.7940e-10, -3.6322e-09, -1.3970e-09,  7.4506e-10, -3.0035e-09,\n",
       "                       7.4506e-10, -4.4703e-09, -2.3749e-09,  4.8894e-09,  2.9802e-09,\n",
       "                       1.0943e-09,  9.3132e-10,  1.0943e-09,  6.9849e-10,  2.7940e-10,\n",
       "                       6.0536e-10,  2.5961e-09, -1.2107e-09,  0.0000e+00,  2.9104e-09,\n",
       "                      -3.3528e-09,  2.6310e-09,  2.7794e-09, -1.0245e-09,  4.6566e-10,\n",
       "                       2.3283e-09,  1.9092e-09, -1.9325e-09, -4.5169e-09, -2.5146e-09,\n",
       "                       1.0477e-09, -3.6322e-09, -2.7940e-09, -2.7940e-09,  4.0978e-09,\n",
       "                      -1.5250e-09, -4.6566e-10,  2.0955e-09,  3.0268e-09, -9.3132e-10,\n",
       "                       2.7940e-09, -5.9954e-10, -5.1223e-10, -4.1910e-10,  2.7940e-10,\n",
       "                      -1.9558e-09,  2.8405e-09, -5.0757e-09, -6.9849e-10,  1.7229e-09,\n",
       "                      -2.3283e-09, -1.5134e-09,  9.3132e-11,  7.5670e-10,  6.2399e-09,\n",
       "                      -1.3970e-10, -8.8476e-10,  9.7789e-10, -9.7789e-10,  3.0734e-09,\n",
       "                       1.5600e-09,  8.8476e-10,  1.3970e-10, -2.6543e-09,  3.9581e-10,\n",
       "                      -2.7940e-10,  1.3039e-09,  6.0536e-10, -2.0955e-10,  3.3528e-09,\n",
       "                      -1.1642e-09, -2.0023e-09, -6.5193e-10, -1.0710e-09,  2.0489e-09,\n",
       "                      -6.7521e-10, -9.3132e-11, -2.8173e-09,  9.3132e-10,  3.3528e-09,\n",
       "                      -2.2817e-09,  3.0268e-09, -1.6298e-09, -3.9116e-09,  1.3271e-09,\n",
       "                       1.7229e-09,  4.5402e-10,  3.6322e-09, -2.8405e-09, -1.5832e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_7.running_var',\n",
       "              tensor([1.0519, 0.9783, 1.0883, 1.0066, 1.0319, 0.9702, 0.9801, 1.1416, 0.9702,\n",
       "                      1.0222, 1.0259, 0.9898, 0.9847, 1.0378, 1.0303, 1.0152, 1.0137, 0.9531,\n",
       "                      1.0335, 1.0348, 0.9727, 1.0173, 1.0047, 0.9577, 0.9752, 1.0055, 0.9799,\n",
       "                      1.0012, 0.9760, 0.9746, 0.9873, 0.9759, 0.9946, 0.9545, 1.0114, 0.9953,\n",
       "                      1.0213, 1.0621, 0.9792, 1.0263, 0.9513, 1.0302, 0.9708, 0.9454, 0.9658,\n",
       "                      0.9701, 1.0622, 1.1254, 0.9759, 0.9982, 0.9908, 0.9383, 1.0200, 0.9868,\n",
       "                      1.0107, 1.0152, 0.9682, 0.9941, 0.9465, 0.9613, 0.9612, 0.9410, 1.0085,\n",
       "                      0.9900, 0.9942, 1.0142, 0.9375, 0.9833, 0.9581, 1.0141, 1.0016, 0.9823,\n",
       "                      1.0116, 0.9797, 1.0655, 1.0419, 0.9710, 1.0241, 1.0195, 1.0839, 1.0061,\n",
       "                      1.0704, 0.9725, 1.0208, 1.0305, 1.0161, 0.9780, 0.9751, 0.9865, 1.0478,\n",
       "                      1.0021, 0.9523, 1.0361, 1.0530, 1.0066, 0.9866, 0.9891, 0.9857, 0.9731,\n",
       "                      0.9448])),\n",
       "             ('linear_relu_stack.BatchNorm_7.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_8.weight',\n",
       "              tensor([[ 0.1540, -0.1669,  0.0392,  ...,  0.1331,  0.1136,  0.0596],\n",
       "                      [ 0.1298,  0.0739,  0.1255,  ..., -0.1173,  0.0217,  0.0362],\n",
       "                      [ 0.0753,  0.0870, -0.0262,  ...,  0.1377,  0.0064,  0.1306],\n",
       "                      ...,\n",
       "                      [-0.1598, -0.0256, -0.1164,  ...,  0.0007, -0.1140,  0.0980],\n",
       "                      [-0.1630, -0.1710,  0.1087,  ..., -0.0596, -0.1517, -0.0284],\n",
       "                      [-0.0532, -0.1548, -0.1444,  ..., -0.0415, -0.1449, -0.1250]])),\n",
       "             ('linear_relu_stack.Linear_8.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_8.running_mean',\n",
       "              tensor([-3.5390e-09, -1.1176e-09,  5.3551e-10, -2.7008e-09,  2.0023e-09,\n",
       "                      -6.0536e-10, -4.6566e-11,  5.0291e-09,  2.3283e-10, -3.5390e-09,\n",
       "                      -3.6322e-09,  4.5169e-09,  4.8429e-09,  4.1910e-10, -4.8429e-09,\n",
       "                      -1.6298e-10, -4.6566e-10,  9.3132e-11,  1.3970e-09, -2.5146e-09,\n",
       "                       3.2131e-09,  8.1491e-10, -1.2340e-09,  3.7253e-10,  1.3970e-10,\n",
       "                       2.5611e-10, -2.5146e-09, -2.0489e-09,  4.5635e-09,  2.9337e-09,\n",
       "                      -3.9116e-09,  2.7940e-09,  2.6077e-09,  3.2131e-09, -3.3760e-09,\n",
       "                       6.5193e-09,  4.1910e-10,  2.8871e-09, -1.5367e-09,  0.0000e+00,\n",
       "                      -8.6147e-10, -4.1910e-09, -1.7695e-09, -3.0268e-10,  4.2841e-09,\n",
       "                      -3.3528e-09,  3.1665e-09, -7.9162e-10,  1.6298e-09, -1.8394e-09,\n",
       "                       4.8894e-10, -2.5611e-09, -2.1886e-09,  3.8883e-09,  5.5879e-09,\n",
       "                       3.7253e-10,  2.5146e-09,  6.7521e-10, -4.8429e-09, -2.6077e-09,\n",
       "                      -3.7253e-10, -6.9849e-10,  5.8673e-09, -1.3504e-09,  2.7125e-09,\n",
       "                       5.0291e-09,  7.4506e-10,  1.2107e-09,  1.7695e-09,  6.0536e-10,\n",
       "                       1.3504e-09,  9.3132e-10, -3.4925e-09, -1.1176e-09, -3.9581e-09,\n",
       "                      -1.8394e-09,  4.3772e-09, -3.0268e-10, -1.3970e-10, -4.1910e-10,\n",
       "                      -4.9360e-09, -2.4913e-09,  3.7253e-10, -3.6554e-09, -6.2864e-10,\n",
       "                       2.9337e-09, -4.6566e-10, -3.7719e-09, -1.3039e-09, -3.4692e-09,\n",
       "                      -6.3446e-10,  3.0734e-09,  1.1642e-09, -1.0012e-09, -4.4238e-09,\n",
       "                      -9.3132e-11, -1.3039e-09,  3.3062e-09,  2.6310e-09, -8.8476e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_8.running_var',\n",
       "              tensor([1.0477, 0.9407, 1.0008, 0.9655, 0.9622, 1.0074, 0.9464, 1.0898, 0.9819,\n",
       "                      1.0405, 0.9786, 0.9946, 0.9941, 1.0366, 0.9976, 1.0919, 1.1072, 1.0411,\n",
       "                      1.0065, 0.9962, 1.0176, 0.9839, 0.9779, 1.0222, 1.0324, 0.9874, 1.0057,\n",
       "                      0.9868, 0.9765, 1.0238, 1.0258, 1.0370, 0.9749, 1.0054, 1.0743, 0.9616,\n",
       "                      0.9964, 1.0390, 0.9756, 1.0218, 0.9410, 0.9713, 0.9668, 1.0083, 1.0064,\n",
       "                      1.0163, 0.9647, 0.9922, 1.0199, 0.9609, 0.9615, 0.9789, 1.0178, 0.9832,\n",
       "                      1.0801, 1.0133, 0.9946, 0.9602, 1.0143, 1.0239, 1.0232, 1.0597, 0.9849,\n",
       "                      1.0071, 0.9982, 1.0487, 0.9801, 1.0149, 0.9484, 1.0476, 0.9953, 1.0175,\n",
       "                      0.9541, 0.9742, 0.9992, 0.9933, 1.0026, 0.9781, 0.9573, 1.0352, 0.9945,\n",
       "                      0.9992, 1.0149, 1.0688, 0.9537, 0.9593, 1.0036, 1.0224, 0.9794, 0.9851,\n",
       "                      1.0333, 1.0012, 1.0306, 0.9787, 1.0025, 1.0051, 1.0108, 0.9777, 1.0403,\n",
       "                      1.0188])),\n",
       "             ('linear_relu_stack.BatchNorm_8.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_9.weight',\n",
       "              tensor([[ 0.0286,  0.0713,  0.1638,  ...,  0.0117,  0.0778,  0.1446],\n",
       "                      [-0.1452, -0.0463,  0.0003,  ..., -0.0129, -0.1133,  0.0519],\n",
       "                      [ 0.1339,  0.1396, -0.0496,  ..., -0.0722,  0.0767, -0.0877],\n",
       "                      ...,\n",
       "                      [-0.0574, -0.0805, -0.1541,  ..., -0.1254,  0.0657, -0.1549],\n",
       "                      [-0.0348, -0.1558, -0.0780,  ..., -0.0198,  0.1673,  0.0368],\n",
       "                      [ 0.1287, -0.1348, -0.0682,  ...,  0.1524,  0.0788, -0.1037]])),\n",
       "             ('linear_relu_stack.Linear_9.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_9.running_mean',\n",
       "              tensor([ 2.7474e-09, -1.6997e-09, -1.3970e-09, -2.7940e-10, -1.6415e-09,\n",
       "                      -1.8626e-10,  1.5949e-09,  2.2817e-09, -6.5193e-10, -4.0978e-09,\n",
       "                      -8.3819e-10, -1.0477e-09,  1.4203e-09,  6.5193e-10, -1.1176e-09,\n",
       "                       3.2596e-10, -3.3993e-09, -1.2573e-09,  2.3283e-09, -8.3819e-10,\n",
       "                      -1.7986e-09, -1.1176e-09,  1.5018e-09,  1.8626e-10,  6.0536e-10,\n",
       "                       4.8894e-09,  3.7253e-10,  1.3504e-09, -5.1223e-10,  1.0245e-09,\n",
       "                      -3.5390e-09, -1.8161e-09, -1.1176e-09,  3.4692e-09, -1.6764e-09,\n",
       "                       4.1444e-09,  1.9558e-09,  6.5193e-10, -1.7928e-09, -1.5832e-09,\n",
       "                       3.9116e-09,  2.0722e-09,  1.8626e-09, -3.6904e-09,  5.1223e-10,\n",
       "                       8.1025e-09, -1.3039e-09,  3.9581e-10,  2.0955e-09,  1.0710e-09,\n",
       "                      -4.5402e-10, -9.7789e-10,  1.2107e-09, -3.4925e-09,  1.8626e-09,\n",
       "                      -3.7253e-10, -2.5611e-09, -4.7497e-09,  1.6298e-10, -9.3132e-11,\n",
       "                      -2.9802e-09, -2.1420e-09,  3.4459e-09,  1.2107e-09,  1.5832e-09,\n",
       "                       4.6566e-10, -9.7789e-10, -3.3528e-09,  3.8650e-09,  1.5600e-09,\n",
       "                       1.6647e-09,  1.8859e-09,  1.8626e-10,  3.7253e-09,  1.4901e-09,\n",
       "                      -4.0978e-09,  2.0955e-09, -2.3283e-10,  1.5832e-09, -3.8650e-09,\n",
       "                       6.5193e-10,  5.3318e-09,  1.5367e-09,  1.4901e-09,  2.8405e-09,\n",
       "                      -5.0757e-09,  1.0245e-09, -4.2375e-09,  1.4203e-09,  2.6892e-09,\n",
       "                       2.7940e-10,  3.7253e-10,  1.1642e-09,  5.1223e-10,  4.7497e-09,\n",
       "                      -2.2410e-09,  5.1223e-10,  1.8626e-09,  0.0000e+00,  1.6764e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_9.running_var',\n",
       "              tensor([0.9693, 1.0801, 1.0012, 1.0365, 0.9812, 0.9644, 1.0082, 1.0429, 0.9716,\n",
       "                      0.9799, 0.9584, 0.9780, 0.9453, 0.9775, 0.9948, 0.9648, 1.0026, 1.0419,\n",
       "                      1.0033, 0.9376, 1.0676, 0.9852, 0.9671, 0.9522, 0.9974, 0.9652, 1.0055,\n",
       "                      0.9960, 0.9602, 0.9844, 0.9884, 0.9831, 1.0116, 0.9495, 1.0640, 1.0170,\n",
       "                      0.9908, 1.0313, 0.9704, 1.0134, 0.9917, 0.9983, 1.0015, 1.0133, 0.9968,\n",
       "                      1.0051, 0.9676, 0.9667, 0.9583, 1.0303, 1.1084, 0.9837, 1.0365, 0.9848,\n",
       "                      0.9654, 0.9996, 0.9487, 0.9954, 1.0445, 0.9828, 1.0165, 1.0224, 1.0561,\n",
       "                      1.0173, 0.9523, 0.9942, 1.0295, 0.9827, 1.0119, 1.0060, 0.9714, 0.9592,\n",
       "                      1.0229, 1.0348, 1.0263, 1.0763, 0.9892, 1.0157, 0.9709, 1.0201, 1.0704,\n",
       "                      1.0835, 0.9853, 1.0628, 0.9930, 0.9879, 0.9986, 0.9452, 0.9472, 0.9480,\n",
       "                      0.9899, 1.0282, 0.9750, 1.0057, 1.0299, 0.9846, 0.9555, 0.9526, 1.0398,\n",
       "                      1.0557])),\n",
       "             ('linear_relu_stack.BatchNorm_9.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_10.weight',\n",
       "              tensor([[-0.0258, -0.0682,  0.0375,  ..., -0.1010, -0.1264, -0.1364],\n",
       "                      [ 0.1097,  0.1021, -0.0309,  ..., -0.1625, -0.1714,  0.1302],\n",
       "                      [ 0.0801,  0.1565, -0.0191,  ...,  0.0241,  0.0722, -0.0187],\n",
       "                      ...,\n",
       "                      [-0.0840, -0.0540,  0.0659,  ..., -0.1380, -0.1310,  0.1699],\n",
       "                      [ 0.0952,  0.1315,  0.0934,  ..., -0.1003, -0.0492, -0.0972],\n",
       "                      [ 0.0065,  0.1454, -0.1436,  ..., -0.1331, -0.0969, -0.0748]])),\n",
       "             ('linear_relu_stack.Linear_10.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_10.running_mean',\n",
       "              tensor([-5.9605e-09, -1.8626e-10,  1.4436e-09,  3.8184e-09, -7.6834e-10,\n",
       "                      -1.8161e-09,  2.2352e-09,  1.8626e-09,  2.3283e-11, -1.2573e-09,\n",
       "                       0.0000e+00,  3.5274e-09,  1.4785e-09,  8.8476e-10,  1.8161e-09,\n",
       "                       4.1444e-09, -4.6566e-11,  1.8626e-09, -4.1444e-09, -3.7253e-10,\n",
       "                       4.6566e-10,  1.6298e-10, -3.3993e-09, -4.6566e-10, -5.6578e-09,\n",
       "                      -2.3283e-09, -2.7067e-09, -2.2817e-09,  1.3970e-09, -5.4948e-09,\n",
       "                       1.3970e-10,  2.1420e-09, -4.6566e-10,  2.7940e-09,  4.0745e-10,\n",
       "                       8.6147e-10,  3.1199e-09, -1.9325e-09, -1.1874e-09, -6.0070e-09,\n",
       "                       1.0245e-09, -9.3132e-11,  1.8626e-10, -1.3970e-10,  2.0023e-09,\n",
       "                      -3.5390e-09, -1.3970e-10, -1.3039e-09, -2.5611e-09, -3.5507e-10,\n",
       "                      -6.0536e-10,  2.7707e-09, -9.7789e-10,  1.7229e-09, -2.9569e-09,\n",
       "                       2.0955e-09,  1.4901e-09,  1.8626e-10, -3.5390e-09,  3.4459e-09,\n",
       "                      -9.3132e-10,  2.0489e-09,  2.4447e-09, -3.8650e-09, -4.1560e-09,\n",
       "                       3.9116e-09, -6.2864e-10, -1.3970e-10,  2.0489e-09, -2.7940e-09,\n",
       "                      -2.3982e-09,  1.4901e-09, -3.8417e-09,  2.3283e-10,  2.8405e-09,\n",
       "                       7.9162e-10, -4.6566e-11,  9.7789e-10, -2.6077e-09,  1.7229e-09,\n",
       "                      -8.3819e-10,  2.4680e-09,  9.3132e-10,  0.0000e+00, -4.1910e-10,\n",
       "                      -1.6764e-09, -1.1176e-09, -3.7253e-09, -1.4436e-09,  1.9209e-09,\n",
       "                      -9.3132e-11, -1.8161e-09,  3.0268e-09,  4.7497e-09, -5.5879e-10,\n",
       "                      -2.8871e-09,  1.2107e-09, -2.7008e-09, -3.4459e-09, -2.3283e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_10.running_var',\n",
       "              tensor([1.0087, 0.9705, 0.9583, 1.0783, 0.9802, 1.0150, 1.0475, 0.9696, 0.9741,\n",
       "                      0.9680, 0.9926, 0.9394, 1.0056, 0.9325, 0.9652, 1.1229, 0.9890, 0.9797,\n",
       "                      1.0027, 1.0513, 1.0092, 1.0687, 1.0103, 0.9616, 0.9735, 0.9541, 1.0358,\n",
       "                      1.0226, 0.9944, 1.0158, 0.9652, 1.0617, 1.0432, 1.0134, 1.0034, 0.9771,\n",
       "                      1.0242, 1.0747, 1.0977, 1.0503, 0.9573, 1.0237, 1.0372, 0.9963, 0.9903,\n",
       "                      0.9692, 1.0391, 1.0393, 1.0004, 1.0351, 0.9790, 1.0910, 0.9518, 1.0243,\n",
       "                      0.9873, 0.9576, 0.9842, 1.0089, 1.0012, 0.9641, 1.0509, 0.9981, 0.9973,\n",
       "                      0.9968, 0.9731, 1.0193, 0.9354, 1.0363, 0.9868, 1.0603, 1.0100, 1.0197,\n",
       "                      0.9916, 0.9875, 0.9376, 0.9849, 0.9989, 1.0208, 0.9990, 1.0262, 0.9465,\n",
       "                      1.0144, 1.0089, 1.0283, 0.9644, 0.9452, 1.0260, 1.0475, 0.9573, 0.9564,\n",
       "                      1.0812, 1.0371, 1.0035, 1.0429, 0.9994, 1.0263, 0.9648, 1.0579, 0.9962,\n",
       "                      0.9423])),\n",
       "             ('linear_relu_stack.BatchNorm_10.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_11.weight',\n",
       "              tensor([[ 0.0194,  0.0912, -0.0092,  ..., -0.0201,  0.1671,  0.0917],\n",
       "                      [-0.1129, -0.0796,  0.0013,  ..., -0.0350,  0.1023,  0.0752],\n",
       "                      [-0.1419, -0.0298, -0.1642,  ...,  0.1307,  0.1179,  0.1270],\n",
       "                      ...,\n",
       "                      [ 0.0678,  0.0925, -0.1616,  ...,  0.1357,  0.1232,  0.0174],\n",
       "                      [ 0.0710,  0.1367, -0.1270,  ..., -0.0247,  0.0688,  0.0086],\n",
       "                      [-0.0840,  0.1634, -0.1469,  ...,  0.1561,  0.0634, -0.1730]])),\n",
       "             ('linear_relu_stack.Linear_11.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_11.running_mean',\n",
       "              tensor([-2.7940e-09,  4.5635e-09,  1.1176e-09,  3.2596e-09, -1.4144e-09,\n",
       "                      -1.8626e-10,  1.2107e-09,  6.9849e-10,  5.1223e-10,  7.9162e-10,\n",
       "                       2.7940e-10, -1.3970e-09, -3.5390e-09,  3.7253e-10,  7.9162e-10,\n",
       "                       4.7032e-09, -2.5495e-09, -1.6764e-09, -3.6322e-09,  2.3283e-09,\n",
       "                       1.5367e-09,  1.4668e-09, -1.6531e-09, -3.7253e-09,  3.8650e-09,\n",
       "                       1.7695e-09, -1.6298e-09, -2.2817e-09, -2.7940e-09,  1.2806e-09,\n",
       "                      -3.2131e-09, -2.1420e-09,  9.3132e-11,  6.8918e-09, -4.8429e-09,\n",
       "                      -1.1874e-09, -2.4214e-09, -3.6089e-09,  1.8626e-09,  0.0000e+00,\n",
       "                      -2.9802e-09, -1.0245e-09,  9.3132e-10, -6.5193e-10,  3.3760e-09,\n",
       "                       2.3283e-09,  0.0000e+00,  2.9337e-09,  2.3283e-10, -4.0047e-09,\n",
       "                       2.0955e-10, -1.1176e-09, -4.0978e-09,  4.1910e-09,  9.3132e-10,\n",
       "                       3.3062e-09, -2.5611e-09, -1.3970e-09, -1.8626e-09,  2.5146e-09,\n",
       "                      -1.9092e-09, -1.3039e-09,  1.1176e-09, -1.4843e-09, -2.9802e-09,\n",
       "                       1.5600e-09,  2.5611e-09, -7.6834e-10,  6.0536e-10, -1.4901e-09,\n",
       "                      -2.0023e-09,  6.5193e-10,  2.6543e-09,  3.3528e-09, -1.7695e-09,\n",
       "                       1.7229e-09,  8.0327e-10,  4.1910e-10,  3.1549e-09, -2.0489e-09,\n",
       "                       6.0536e-10,  7.9162e-10,  5.3551e-10,  8.1491e-10, -3.3062e-09,\n",
       "                      -3.2596e-09, -1.1642e-09,  2.5611e-09, -1.0245e-09, -1.0012e-09,\n",
       "                       7.9162e-10,  2.7940e-09, -4.0513e-09, -3.7253e-09,  4.6566e-10,\n",
       "                      -3.3178e-09,  7.4506e-10, -1.8626e-09, -4.6566e-11,  1.1642e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_11.running_var',\n",
       "              tensor([0.9809, 1.1004, 1.0225, 1.0032, 0.9417, 0.9946, 0.9986, 0.9807, 1.0395,\n",
       "                      0.9508, 1.0071, 1.0922, 1.0548, 0.9614, 1.0108, 1.0415, 1.0383, 1.0386,\n",
       "                      0.9756, 0.9828, 0.9587, 1.0131, 0.9817, 1.0185, 1.0391, 0.9830, 1.0203,\n",
       "                      1.0078, 1.0243, 1.0282, 1.0733, 1.0580, 1.0238, 1.0595, 1.0364, 1.0186,\n",
       "                      0.9870, 0.9665, 1.0002, 0.9800, 1.0670, 0.9571, 1.0576, 0.9768, 0.9520,\n",
       "                      0.9852, 0.9808, 1.0251, 0.9687, 0.9930, 0.9834, 0.9557, 1.0137, 1.1094,\n",
       "                      0.9901, 0.9883, 0.9447, 1.0212, 0.9762, 1.0079, 0.9708, 1.0297, 1.0467,\n",
       "                      0.9859, 0.9898, 1.0246, 1.0330, 0.9552, 1.0187, 1.0279, 0.9705, 0.9505,\n",
       "                      0.9881, 0.9585, 1.0387, 0.9804, 1.0694, 0.9789, 0.9938, 1.0014, 0.9448,\n",
       "                      0.9907, 0.9660, 1.0024, 0.9407, 0.9884, 0.9916, 0.9745, 0.9788, 0.9401,\n",
       "                      1.0160, 0.9729, 1.0465, 0.9813, 1.0185, 0.9818, 0.9717, 0.9697, 1.0848,\n",
       "                      1.0280])),\n",
       "             ('linear_relu_stack.BatchNorm_11.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_12.weight',\n",
       "              tensor([[ 0.0286,  0.1031,  0.0442,  ...,  0.1589,  0.1585,  0.0998],\n",
       "                      [-0.1705,  0.0133, -0.0630,  ...,  0.0196, -0.0814,  0.0782],\n",
       "                      [ 0.0123,  0.0946,  0.1108,  ..., -0.0622,  0.0851,  0.0935],\n",
       "                      ...,\n",
       "                      [ 0.1079,  0.1539, -0.0844,  ...,  0.1385, -0.1386,  0.1671],\n",
       "                      [-0.0299,  0.1113,  0.0725,  ..., -0.1131,  0.1718, -0.0180],\n",
       "                      [ 0.1140,  0.0232,  0.0720,  ..., -0.1339, -0.0464, -0.1675]])),\n",
       "             ('linear_relu_stack.Linear_12.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_12.running_mean',\n",
       "              tensor([ 1.4086e-09, -2.8638e-09,  3.4925e-09, -2.3283e-10, -2.1886e-09,\n",
       "                       8.2655e-10,  3.7253e-09,  1.3853e-09,  2.7008e-09, -1.3970e-09,\n",
       "                      -5.3144e-09, -5.3085e-09, -2.0489e-09, -2.3283e-09, -1.5832e-09,\n",
       "                       3.1199e-09, -2.7008e-09, -3.4925e-09, -1.9558e-09, -2.0023e-09,\n",
       "                      -3.2596e-10, -1.5367e-09, -2.6310e-09, -2.5379e-09,  1.5832e-09,\n",
       "                       8.8476e-10, -1.5832e-09,  2.2352e-09,  1.0245e-09, -2.9453e-09,\n",
       "                      -4.6566e-11,  2.8871e-09,  3.2131e-09,  2.8638e-09,  4.8894e-10,\n",
       "                      -5.0757e-09,  4.4703e-09, -7.4506e-10,  7.6834e-10,  2.5844e-09,\n",
       "                       7.7998e-10, -3.6322e-09,  3.5390e-09, -4.1910e-10,  4.5402e-09,\n",
       "                      -1.0710e-09,  5.2154e-09, -5.1688e-09, -2.9337e-09, -3.4459e-09,\n",
       "                       1.3970e-09, -2.3516e-09, -3.5856e-09,  1.2282e-09, -1.4901e-09,\n",
       "                       1.5367e-09, -7.9162e-10,  6.0536e-10,  9.3132e-10,  1.6065e-09,\n",
       "                      -8.3819e-10,  1.5367e-09,  1.8626e-09,  5.8208e-10,  2.3283e-09,\n",
       "                       3.5856e-09, -6.9849e-10,  1.8626e-10, -1.7695e-09, -4.1095e-09,\n",
       "                      -1.5367e-09,  4.1444e-09, -1.3970e-09, -1.8626e-10, -3.2596e-10,\n",
       "                       2.0955e-09,  9.3132e-10, -9.5461e-10,  2.4331e-09,  1.8161e-09,\n",
       "                       1.3737e-09, -1.6997e-09,  7.0781e-09,  1.8626e-09,  8.6147e-10,\n",
       "                      -2.3283e-09, -1.8161e-09, -9.8953e-10, -3.2596e-10,  1.6764e-09,\n",
       "                       2.1420e-09,  3.6554e-09, -2.5146e-09,  4.4703e-09, -6.2864e-10,\n",
       "                       1.4436e-09,  4.0047e-09, -1.5716e-09,  2.7940e-10,  8.3819e-10])),\n",
       "             ('linear_relu_stack.BatchNorm_12.running_var',\n",
       "              tensor([0.9837, 0.9879, 0.9912, 0.9968, 0.9440, 0.9626, 1.0259, 1.0226, 1.1544,\n",
       "                      1.0556, 0.9506, 1.0521, 0.9676, 0.9606, 1.0081, 1.0483, 0.9755, 0.9450,\n",
       "                      1.0296, 0.9637, 0.9591, 1.0162, 0.9600, 0.9455, 1.0209, 0.9891, 1.0357,\n",
       "                      1.0309, 1.0589, 0.9606, 0.9471, 1.0096, 1.0177, 0.9431, 1.0360, 1.0035,\n",
       "                      1.0438, 1.0902, 1.0008, 0.9969, 1.0444, 1.0577, 0.9710, 1.0971, 0.9618,\n",
       "                      1.0033, 1.0091, 1.0884, 0.9613, 0.9694, 0.9708, 1.0024, 0.9888, 0.9477,\n",
       "                      0.9793, 0.9717, 1.0157, 1.0045, 1.0408, 1.0940, 1.0241, 0.9918, 1.1135,\n",
       "                      1.0156, 1.0440, 0.9495, 1.0255, 1.0205, 0.9609, 1.0182, 0.9931, 1.0093,\n",
       "                      0.9423, 1.0341, 0.9774, 0.9884, 0.9734, 0.9685, 0.9635, 1.0291, 0.9460,\n",
       "                      0.9464, 1.0823, 0.9836, 0.9415, 0.9771, 0.9797, 0.9776, 1.1340, 1.0469,\n",
       "                      0.9940, 1.0130, 0.9793, 1.0498, 1.0027, 1.0205, 1.0805, 1.0142, 1.0108,\n",
       "                      0.9856])),\n",
       "             ('linear_relu_stack.BatchNorm_12.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_13.weight',\n",
       "              tensor([[-0.0270,  0.0359,  0.0339,  ..., -0.1324, -0.1259,  0.0672],\n",
       "                      [-0.0999,  0.1207, -0.1155,  ..., -0.0579, -0.1445,  0.0314],\n",
       "                      [ 0.1633, -0.0047,  0.0966,  ..., -0.0808,  0.1287, -0.1651],\n",
       "                      ...,\n",
       "                      [ 0.0166,  0.1732, -0.1709,  ..., -0.0164, -0.0522, -0.0857],\n",
       "                      [-0.0268,  0.0899, -0.1276,  ..., -0.0945,  0.1424,  0.1003],\n",
       "                      [ 0.0289, -0.1468, -0.1545,  ..., -0.0261,  0.1094,  0.1551]])),\n",
       "             ('linear_relu_stack.Linear_13.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_13.running_mean',\n",
       "              tensor([ 1.4436e-09,  8.3819e-10,  3.9116e-09,  1.6764e-09, -1.6065e-09,\n",
       "                      -2.7940e-10,  1.3737e-09, -1.7695e-09, -2.7241e-09,  7.4506e-10,\n",
       "                       6.0536e-10,  5.1223e-10, -2.2352e-09,  1.2107e-09,  3.2596e-09,\n",
       "                       5.8673e-09,  1.2573e-09, -4.3307e-09, -2.8871e-09, -7.4506e-10,\n",
       "                       2.0955e-09,  3.7253e-10,  2.3283e-10,  5.7044e-09, -6.5193e-10,\n",
       "                      -2.3283e-10,  3.2363e-09, -1.9558e-09,  3.3528e-09,  5.8208e-09,\n",
       "                      -1.6764e-09,  3.1665e-09, -5.5879e-10, -2.4214e-09,  1.7229e-09,\n",
       "                       4.1910e-09,  1.4901e-09, -3.6322e-09,  2.7474e-09,  1.3039e-09,\n",
       "                      -1.3970e-09, -1.9558e-09, -1.3039e-09,  9.3132e-10, -3.2596e-10,\n",
       "                       2.0023e-09,  2.2352e-09,  6.5193e-10, -4.6566e-11,  1.3970e-09,\n",
       "                       1.8626e-10, -1.3970e-10, -4.9360e-09, -2.0489e-09,  2.4447e-09,\n",
       "                      -1.6531e-09,  4.1910e-09,  2.0955e-09, -1.3039e-09,  2.8638e-09,\n",
       "                       1.7695e-09,  4.1910e-10,  1.3039e-09,  1.3039e-09, -2.7008e-09,\n",
       "                      -9.5461e-10,  2.8871e-09, -3.9581e-10, -1.0245e-09, -8.8476e-10,\n",
       "                      -1.4436e-09,  1.9558e-09,  1.9325e-09, -1.2806e-09, -2.4214e-09,\n",
       "                       3.5390e-09, -1.2573e-09, -1.3039e-09,  3.2596e-10,  2.6077e-09,\n",
       "                      -4.6566e-11, -9.7789e-10, -3.2131e-09,  2.1886e-09,  9.3132e-10,\n",
       "                      -5.5879e-09,  2.4214e-09,  2.9802e-09,  1.6997e-09, -1.5832e-09,\n",
       "                      -4.1910e-09,  3.9581e-09, -5.6345e-09, -9.3132e-10,  1.0245e-09,\n",
       "                      -2.0489e-09, -3.4925e-10,  4.1910e-10, -3.9581e-10, -3.7951e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_13.running_var',\n",
       "              tensor([0.9555, 0.9663, 1.0135, 0.9600, 0.9620, 1.0259, 1.0703, 0.9699, 1.0004,\n",
       "                      1.0595, 1.0379, 1.0600, 0.9447, 0.9784, 1.0219, 1.0090, 1.0162, 1.0331,\n",
       "                      1.0607, 1.0910, 1.0612, 0.9970, 0.9543, 0.9449, 0.9747, 0.9423, 1.0343,\n",
       "                      1.0701, 0.9970, 0.9851, 1.0309, 1.0318, 1.0155, 1.0071, 1.0316, 1.0159,\n",
       "                      1.0181, 0.9933, 1.0189, 0.9302, 1.0278, 1.0654, 0.9754, 0.9603, 0.9687,\n",
       "                      1.0431, 1.1392, 0.9830, 1.1597, 0.9754, 0.9951, 0.9643, 1.0267, 1.0386,\n",
       "                      0.9660, 1.0034, 1.0486, 0.9429, 1.0457, 1.0051, 1.0213, 1.0100, 1.1977,\n",
       "                      1.0135, 1.0717, 0.9724, 1.0426, 0.9611, 0.9533, 0.9412, 0.9807, 0.9872,\n",
       "                      1.0207, 0.9789, 1.0221, 1.1026, 0.9841, 1.0033, 0.9449, 1.0211, 0.9905,\n",
       "                      0.9477, 1.0062, 0.9728, 1.0069, 0.9822, 1.0489, 1.0074, 0.9927, 1.0238,\n",
       "                      0.9786, 0.9764, 1.1344, 0.9505, 0.9985, 0.9751, 0.9576, 0.9669, 0.9901,\n",
       "                      0.9866])),\n",
       "             ('linear_relu_stack.BatchNorm_13.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_14.weight',\n",
       "              tensor([[ 0.0238, -0.0036,  0.0562,  ..., -0.1491,  0.1667,  0.1384],\n",
       "                      [ 0.1309,  0.0168,  0.0218,  ...,  0.1092,  0.1678,  0.0507],\n",
       "                      [ 0.0609,  0.0793, -0.1038,  ..., -0.0371,  0.0003,  0.0088],\n",
       "                      ...,\n",
       "                      [-0.1368,  0.0942,  0.0655,  ..., -0.0520,  0.0489, -0.1016],\n",
       "                      [-0.0113, -0.0059,  0.1132,  ...,  0.0422, -0.0456, -0.0166],\n",
       "                      [ 0.0296, -0.1320,  0.0477,  ..., -0.1023,  0.1081, -0.1461]])),\n",
       "             ('linear_relu_stack.Linear_14.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_14.running_mean',\n",
       "              tensor([-3.2596e-10,  3.0268e-09,  3.9348e-09, -1.1642e-09,  1.0710e-09,\n",
       "                      -1.3970e-09, -2.1304e-09, -4.1910e-10,  2.3283e-11, -9.3132e-11,\n",
       "                      -6.4494e-09, -1.2107e-09, -2.4680e-09,  7.9162e-10, -3.4459e-09,\n",
       "                       5.1223e-09,  7.4506e-10,  3.2596e-10, -7.3574e-09, -1.2107e-09,\n",
       "                       5.1223e-10, -1.1409e-09,  8.3819e-10, -2.0955e-09,  3.2596e-09,\n",
       "                       9.0804e-10,  2.8871e-09, -9.7789e-10, -9.7789e-09, -1.5134e-09,\n",
       "                       9.3132e-11,  2.3283e-11,  3.2131e-09,  4.6566e-11,  2.7940e-10,\n",
       "                       1.7462e-09, -3.0734e-09,  4.0047e-09,  3.7253e-10,  1.2107e-09,\n",
       "                       2.7940e-09,  1.3039e-09,  1.1642e-09,  0.0000e+00,  1.8626e-10,\n",
       "                       2.0955e-09, -3.7253e-10, -3.9116e-09,  3.2596e-09, -2.7940e-10,\n",
       "                       9.3132e-11,  2.0489e-09,  3.4692e-09,  1.1874e-09,  2.6776e-09,\n",
       "                       1.4203e-09, -2.7940e-10, -5.5879e-09,  4.1910e-09,  3.6787e-09,\n",
       "                      -1.1642e-09, -1.0361e-09, -1.5716e-09,  3.7253e-09, -9.3132e-10,\n",
       "                      -4.6566e-10,  3.9116e-09, -3.7951e-09,  7.9162e-10,  8.3819e-10,\n",
       "                       5.1223e-09,  5.3784e-09, -7.9162e-10, -3.3528e-09, -1.2573e-09,\n",
       "                       2.6776e-09, -2.3516e-09, -2.2119e-09, -1.3621e-09,  1.2573e-09,\n",
       "                      -1.1642e-09, -4.0454e-09, -5.5879e-10,  1.2107e-09,  6.5193e-10,\n",
       "                       2.9104e-09,  5.5879e-10,  1.5832e-09, -6.5193e-10,  3.6322e-09,\n",
       "                       9.3132e-11, -1.1642e-09, -2.1420e-09, -4.6566e-10,  5.1223e-10,\n",
       "                       8.8476e-10,  2.3283e-10, -5.8906e-09, -3.0268e-09, -5.1456e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_14.running_var',\n",
       "              tensor([1.0051, 1.0116, 0.9883, 0.9827, 0.9634, 0.9384, 0.9484, 0.9589, 1.0806,\n",
       "                      1.0198, 1.0376, 1.0464, 0.9716, 1.0367, 1.0323, 1.0719, 0.9866, 0.9563,\n",
       "                      1.0603, 1.0194, 0.9639, 0.9680, 1.0520, 0.9682, 0.9631, 0.9816, 1.0311,\n",
       "                      0.9951, 1.1977, 1.0334, 0.9997, 1.0208, 1.0906, 0.9506, 0.9676, 0.9711,\n",
       "                      1.0332, 1.0134, 1.0029, 1.0724, 0.9984, 0.9748, 0.9857, 1.0195, 1.0687,\n",
       "                      0.9717, 0.9848, 1.0516, 0.9947, 1.0387, 0.9465, 0.9855, 1.0484, 1.0435,\n",
       "                      0.9625, 0.9535, 0.9837, 0.9841, 1.0529, 0.9599, 0.9998, 0.9706, 0.9759,\n",
       "                      1.0569, 1.0313, 1.0075, 0.9529, 0.9893, 0.9561, 0.9918, 1.0295, 0.9769,\n",
       "                      0.9838, 0.9613, 0.9520, 0.9664, 0.9975, 0.9761, 0.9960, 0.9932, 1.0392,\n",
       "                      0.9519, 1.0350, 0.9443, 1.0211, 0.9684, 1.0624, 1.0384, 0.9464, 0.9684,\n",
       "                      0.9447, 0.9610, 1.0712, 1.0064, 0.9444, 0.9659, 0.9496, 1.0025, 0.9782,\n",
       "                      0.9954])),\n",
       "             ('linear_relu_stack.BatchNorm_14.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_15.weight',\n",
       "              tensor([[ 0.0431, -0.1299,  0.0231,  ..., -0.1522,  0.0892,  0.1610],\n",
       "                      [ 0.0545, -0.0622,  0.1650,  ..., -0.1088, -0.1711,  0.0519],\n",
       "                      [ 0.0717,  0.0651,  0.1044,  ...,  0.1726,  0.0112, -0.0429],\n",
       "                      ...,\n",
       "                      [ 0.0604, -0.0371,  0.1000,  ..., -0.0298,  0.1610, -0.0143],\n",
       "                      [-0.0167, -0.1359, -0.0573,  ..., -0.0110, -0.0441, -0.0225],\n",
       "                      [ 0.0597,  0.1105,  0.0196,  ...,  0.1493,  0.1341, -0.1458]])),\n",
       "             ('linear_relu_stack.Linear_15.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_15.running_mean',\n",
       "              tensor([ 4.4820e-09,  2.7008e-09, -4.8429e-09, -1.1642e-09, -7.4506e-10,\n",
       "                      -1.9092e-09,  2.7940e-09, -3.3528e-09, -2.0023e-09, -2.7241e-09,\n",
       "                       5.5879e-10, -4.1910e-10,  2.6659e-09, -3.1665e-09, -3.2596e-09,\n",
       "                      -1.0245e-09, -4.8138e-09,  1.5367e-09,  1.1059e-09,  4.6566e-10,\n",
       "                      -3.2596e-09,  1.8626e-09, -5.1223e-10,  8.3819e-10, -3.0268e-09,\n",
       "                      -8.3819e-10, -2.0023e-09, -2.7940e-09,  5.2154e-09, -1.6764e-09,\n",
       "                       2.3283e-09,  3.8184e-09, -1.0710e-09, -5.6811e-09, -2.6776e-09,\n",
       "                       1.3039e-09,  3.2131e-09,  2.6397e-09,  1.6298e-09,  1.3039e-09,\n",
       "                       0.0000e+00, -3.3528e-09,  6.9384e-09, -2.5611e-09,  4.1910e-10,\n",
       "                      -5.5414e-09,  5.7276e-09,  1.8626e-10, -3.4925e-09,  2.3982e-09,\n",
       "                      -2.2352e-09, -3.1956e-09,  2.0489e-09, -1.3039e-09, -4.4703e-09,\n",
       "                      -3.1665e-09, -1.5367e-09,  5.1223e-10,  1.4436e-09,  3.2596e-09,\n",
       "                       9.3132e-10, -1.5832e-09,  6.6124e-09,  3.1665e-09, -6.3330e-09,\n",
       "                      -6.7987e-09,  3.5390e-09,  0.0000e+00, -1.4668e-09,  3.6787e-09,\n",
       "                       3.6787e-09,  4.1910e-10,  8.1491e-10,  5.2969e-10, -4.6566e-10,\n",
       "                      -3.8301e-09,  9.7789e-10,  2.9337e-09, -5.0291e-09, -3.1199e-09,\n",
       "                       2.0023e-09,  5.8208e-09, -1.9092e-09,  4.6566e-09, -2.6543e-09,\n",
       "                      -1.2107e-09,  3.6787e-09,  9.3132e-10,  1.5832e-09, -6.4028e-11,\n",
       "                       1.2107e-09, -1.6298e-09, -4.9826e-09,  3.8184e-09, -4.0047e-09,\n",
       "                      -6.9849e-10, -5.5879e-10, -2.7940e-10, -1.4901e-09,  1.4436e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_15.running_var',\n",
       "              tensor([0.9848, 1.0763, 1.0052, 0.9779, 0.9430, 1.0755, 1.1771, 1.0451, 0.9517,\n",
       "                      0.9524, 1.0102, 1.0019, 0.9537, 1.0406, 0.9940, 1.0094, 1.0378, 1.0126,\n",
       "                      1.0095, 1.0002, 1.0265, 0.9921, 0.9724, 1.0079, 0.9587, 1.0198, 0.9682,\n",
       "                      0.9849, 1.0492, 1.0240, 1.0697, 0.9859, 0.9760, 1.0338, 0.9371, 1.0060,\n",
       "                      0.9713, 0.9998, 1.0182, 0.9846, 0.9734, 1.0555, 1.0268, 1.0428, 1.0032,\n",
       "                      1.0058, 1.0084, 0.9613, 0.9943, 1.0284, 0.9922, 0.9788, 0.9445, 0.9913,\n",
       "                      1.0636, 1.0765, 1.0354, 0.9847, 1.0182, 1.0219, 1.1194, 1.0811, 0.9778,\n",
       "                      1.0495, 1.0144, 1.0198, 0.9379, 0.9965, 0.9918, 1.0673, 1.0033, 0.9876,\n",
       "                      1.0738, 0.9438, 0.9598, 1.0376, 1.0070, 1.0908, 1.0127, 0.9651, 0.9865,\n",
       "                      1.0315, 1.0226, 0.9997, 0.9530, 0.9857, 1.0190, 1.0168, 1.0969, 0.9738,\n",
       "                      1.0558, 0.9551, 0.9691, 1.0447, 1.0156, 0.9810, 0.9922, 0.9486, 0.9981,\n",
       "                      0.9823])),\n",
       "             ('linear_relu_stack.BatchNorm_15.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_16.weight',\n",
       "              tensor([[ 0.1363,  0.0766,  0.0996,  ..., -0.1530,  0.1641,  0.0502],\n",
       "                      [-0.1078, -0.1445,  0.0083,  ...,  0.1722, -0.1502,  0.1556],\n",
       "                      [ 0.1235,  0.0710,  0.0529,  ...,  0.0055, -0.0710,  0.0475],\n",
       "                      ...,\n",
       "                      [ 0.1020, -0.1510,  0.0443,  ..., -0.0546, -0.1130, -0.1720],\n",
       "                      [-0.0316,  0.1316,  0.1501,  ..., -0.0151, -0.0102,  0.1186],\n",
       "                      [-0.1173, -0.0155, -0.1331,  ..., -0.1422, -0.1278,  0.0789]])),\n",
       "             ('linear_relu_stack.Linear_16.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_16.running_mean',\n",
       "              tensor([ 2.1886e-09, -1.8626e-10,  4.3772e-09,  5.1223e-10,  1.4901e-09,\n",
       "                      -2.4680e-09,  1.6764e-09, -2.1886e-09, -3.6089e-09, -1.4436e-09,\n",
       "                      -1.5832e-09,  2.7474e-09,  4.6566e-11,  4.8196e-09, -4.6566e-10,\n",
       "                      -1.3504e-09,  1.4436e-09,  1.1642e-09, -6.9849e-10, -7.9162e-10,\n",
       "                      -3.3062e-09, -6.5193e-10,  0.0000e+00,  3.2131e-09,  7.9162e-10,\n",
       "                      -1.5832e-09, -2.2352e-09, -1.2573e-09,  2.3283e-09, -7.6834e-10,\n",
       "                      -1.5600e-09, -8.9640e-10,  2.0489e-09, -1.5367e-09, -2.4331e-09,\n",
       "                      -1.2107e-09,  3.1665e-09,  4.9593e-09, -7.9162e-10,  4.0978e-09,\n",
       "                      -2.7940e-09,  8.8476e-10,  4.6566e-10,  3.5390e-09,  5.7044e-09,\n",
       "                      -7.4506e-10,  2.3283e-10,  3.2596e-09, -2.0606e-09,  1.8394e-09,\n",
       "                       3.3528e-09, -3.3528e-09, -4.0047e-09,  1.0477e-09, -2.8871e-09,\n",
       "                       4.0047e-09, -1.8161e-09,  7.4506e-10,  2.4680e-09, -1.5600e-09,\n",
       "                       1.9325e-09,  2.6543e-09,  1.0245e-09,  1.7695e-09, -1.3970e-10,\n",
       "                       4.7963e-09, -4.0978e-09, -5.1456e-09, -8.8476e-10, -1.3970e-09,\n",
       "                      -3.0734e-09, -1.9558e-09, -2.4796e-09, -3.4459e-09, -1.2689e-09,\n",
       "                      -1.6764e-09, -1.2107e-09, -2.6077e-09, -1.2107e-09,  1.1642e-09,\n",
       "                      -2.9104e-09,  3.8650e-09, -1.3039e-09,  4.8894e-09, -7.4506e-09,\n",
       "                       6.5193e-10, -1.8161e-09, -9.0804e-10,  1.6065e-09, -1.5367e-09,\n",
       "                       3.7719e-09, -2.8987e-09,  4.6566e-11,  1.5367e-09,  1.8626e-09,\n",
       "                       1.6298e-09,  1.3039e-09,  6.9849e-10,  6.0536e-10,  1.2107e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_16.running_var',\n",
       "              tensor([0.9544, 1.0637, 1.0004, 0.9835, 0.9970, 0.9853, 0.9869, 0.9749, 0.9627,\n",
       "                      1.0067, 0.9607, 0.9785, 0.9734, 1.0062, 0.9695, 0.9726, 1.0389, 0.9805,\n",
       "                      0.9940, 0.9956, 0.9699, 1.0392, 0.9833, 0.9586, 1.0107, 0.9873, 0.9427,\n",
       "                      0.9591, 1.0211, 0.9486, 1.0003, 0.9672, 0.9878, 0.9809, 0.9908, 1.0656,\n",
       "                      1.0145, 0.9893, 0.9887, 1.0217, 0.9782, 1.0141, 1.0137, 0.9816, 0.9936,\n",
       "                      0.9577, 0.9458, 1.0141, 0.9629, 0.9940, 1.0098, 0.9976, 0.9960, 0.9374,\n",
       "                      1.0007, 0.9994, 0.9569, 1.0925, 1.0060, 1.0833, 1.0160, 1.1177, 0.9837,\n",
       "                      1.0310, 0.9985, 1.0053, 1.1700, 0.9777, 0.9622, 1.1170, 1.0047, 1.0471,\n",
       "                      0.9422, 1.0735, 0.9858, 1.0243, 0.9696, 1.0342, 1.0205, 0.9840, 0.9494,\n",
       "                      0.9640, 1.1118, 0.9937, 1.0675, 0.9532, 0.9708, 1.0141, 0.9477, 1.0085,\n",
       "                      1.0438, 0.9762, 1.0111, 0.9692, 0.9342, 0.9698, 0.9964, 0.9822, 0.9655,\n",
       "                      1.0372])),\n",
       "             ('linear_relu_stack.BatchNorm_16.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_17.weight',\n",
       "              tensor([[-7.6886e-02, -1.2565e-01,  1.1198e-01,  ...,  8.3407e-02,\n",
       "                        9.9985e-02,  6.2679e-02],\n",
       "                      [-6.5078e-02, -9.0972e-02,  1.0812e-01,  ...,  1.2255e-02,\n",
       "                       -1.5618e-01,  8.1318e-02],\n",
       "                      [ 1.5858e-01,  5.4279e-02,  5.4885e-02,  ..., -1.6598e-01,\n",
       "                       -1.4472e-01, -1.3101e-01],\n",
       "                      ...,\n",
       "                      [ 9.7081e-05, -5.3473e-02,  1.6856e-01,  ...,  4.3978e-03,\n",
       "                        1.5009e-01,  1.4474e-01],\n",
       "                      [-6.7867e-02,  1.4519e-01, -1.3906e-01,  ..., -2.4427e-03,\n",
       "                        1.0049e-01, -3.5626e-03],\n",
       "                      [ 1.0802e-02,  4.4998e-03, -1.1678e-01,  ...,  4.5002e-02,\n",
       "                       -1.5046e-01,  1.2666e-01]])),\n",
       "             ('linear_relu_stack.Linear_17.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_17.running_mean',\n",
       "              tensor([ 1.3970e-10, -2.9337e-09,  4.5635e-09, -1.1176e-09,  2.2352e-09,\n",
       "                      -4.8429e-09, -4.4238e-09, -2.5611e-09,  4.4238e-10, -5.5879e-10,\n",
       "                      -1.2340e-09,  8.8476e-10, -2.7008e-09,  8.3819e-10, -3.6322e-09,\n",
       "                      -4.6566e-11,  1.7229e-09, -9.3132e-11, -7.4506e-10, -5.1223e-10,\n",
       "                       3.8184e-09, -1.8626e-10, -3.7369e-09, -3.7253e-09,  2.5379e-09,\n",
       "                       2.7940e-10, -3.7253e-10,  1.9092e-09, -9.3132e-10, -9.3132e-10,\n",
       "                       3.1665e-09, -5.2154e-09, -1.6298e-09, -2.0256e-09, -3.6787e-09,\n",
       "                      -7.4506e-10,  2.9802e-09, -5.0990e-09,  2.4447e-09, -1.4552e-09,\n",
       "                       2.3283e-11,  2.0489e-09,  5.8208e-10,  1.6764e-09,  3.4459e-09,\n",
       "                       3.1665e-09,  6.5193e-10,  9.0804e-10, -3.3993e-09,  2.0489e-09,\n",
       "                       1.6298e-09, -1.3970e-09, -2.7940e-10, -1.3271e-09,  2.3283e-10,\n",
       "                      -1.3970e-10, -7.4506e-10,  2.3283e-10, -1.5367e-09, -1.5832e-09,\n",
       "                       2.3283e-09, -1.1176e-09,  2.6776e-09, -6.0536e-10, -3.8883e-09,\n",
       "                       2.0489e-09,  1.1176e-09,  2.3283e-09, -1.7986e-09,  2.8638e-09,\n",
       "                       5.5879e-10, -3.0035e-09,  3.4692e-09, -2.6077e-09,  4.6566e-10,\n",
       "                       7.5670e-11,  1.8626e-10,  1.2107e-09, -1.6298e-09, -1.9791e-10,\n",
       "                      -1.8859e-09,  2.0489e-09, -1.6647e-09, -3.2596e-10,  2.1886e-09,\n",
       "                       2.0955e-09, -9.3132e-10,  2.0489e-09, -4.3772e-09, -2.3283e-10,\n",
       "                      -5.1688e-09, -1.0477e-09,  4.6566e-10,  3.4459e-09, -2.2817e-09,\n",
       "                       2.6543e-09, -8.8476e-10,  2.3283e-09, -4.6566e-10,  1.1176e-09])),\n",
       "             ('linear_relu_stack.BatchNorm_17.running_var',\n",
       "              tensor([0.9540, 1.0134, 0.9826, 1.0665, 0.9866, 1.0172, 1.0247, 1.0345, 1.0000,\n",
       "                      1.0042, 0.9606, 1.0563, 0.9422, 0.9887, 1.0356, 1.0115, 0.9854, 1.0311,\n",
       "                      1.0176, 1.0101, 0.9700, 0.9793, 1.0470, 1.0326, 0.9334, 0.9889, 0.9673,\n",
       "                      0.9613, 0.9809, 0.9847, 1.0314, 1.1520, 1.0639, 0.9269, 1.0148, 0.9351,\n",
       "                      0.9854, 0.9769, 1.0043, 1.0364, 0.9534, 0.9841, 1.0139, 0.9941, 1.0325,\n",
       "                      0.9630, 0.9740, 0.9462, 1.0561, 1.2116, 1.0126, 1.1578, 0.9413, 0.9767,\n",
       "                      1.0206, 0.9534, 0.9979, 0.9987, 0.9961, 1.0213, 0.9311, 1.0380, 1.0376,\n",
       "                      0.9776, 0.9631, 0.9975, 0.9471, 0.9932, 0.9356, 0.9638, 1.0196, 0.9364,\n",
       "                      1.0209, 0.9721, 0.9991, 0.9477, 1.0086, 1.1058, 0.9824, 0.9694, 0.9613,\n",
       "                      1.0228, 0.9421, 1.0201, 1.0062, 0.9997, 0.9665, 1.0009, 1.0225, 0.9689,\n",
       "                      1.0194, 1.0119, 0.9888, 1.0233, 0.9849, 0.9886, 0.9863, 1.0042, 0.9550,\n",
       "                      0.9786])),\n",
       "             ('linear_relu_stack.BatchNorm_17.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Linear_18.weight',\n",
       "              tensor([[-0.0433, -0.1137, -0.1334,  ...,  0.0076, -0.1022, -0.0376],\n",
       "                      [ 0.0692,  0.1348, -0.0371,  ...,  0.0652, -0.0069,  0.0349],\n",
       "                      [ 0.0840, -0.1640,  0.0412,  ...,  0.1608,  0.1665,  0.1497],\n",
       "                      ...,\n",
       "                      [-0.1213, -0.0101,  0.0729,  ..., -0.1286,  0.0665,  0.1694],\n",
       "                      [-0.1396,  0.0628,  0.0548,  ..., -0.1289, -0.0450,  0.0892],\n",
       "                      [-0.0233,  0.0594, -0.0765,  ..., -0.1053, -0.0889,  0.1138]])),\n",
       "             ('linear_relu_stack.Linear_18.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0.])),\n",
       "             ('linear_relu_stack.BatchNorm_18.running_mean',\n",
       "              tensor([-1.6298e-09, -3.7253e-09, -2.1420e-09,  4.2055e-09,  3.3528e-09,\n",
       "                      -6.9849e-10,  2.3749e-09, -1.0245e-09, -5.5879e-10, -3.1665e-09,\n",
       "                       1.9325e-09, -4.5693e-09,  3.2596e-10,  2.7008e-09, -5.0291e-09,\n",
       "                      -3.9116e-09, -5.1223e-10,  1.8626e-10,  3.7253e-10,  1.1176e-09,\n",
       "                       4.6566e-10,  2.8405e-09,  1.3970e-10, -2.1188e-09, -1.4901e-09,\n",
       "                      -6.7521e-10, -1.1642e-10,  7.1712e-09,  1.0245e-09,  1.4901e-09,\n",
       "                      -1.6997e-09, -5.3551e-10,  4.0047e-09,  1.2107e-09, -5.5879e-10,\n",
       "                       2.7940e-09,  5.2154e-09,  1.1176e-09,  3.1199e-09,  9.3132e-11,\n",
       "                      -5.9139e-09,  1.0536e-09,  3.6322e-09, -1.4901e-09,  1.5832e-09,\n",
       "                       2.7008e-09,  6.0536e-09, -3.7253e-10,  1.1176e-09, -2.2817e-09,\n",
       "                      -1.8626e-10, -3.2596e-10,  1.8626e-10,  3.5390e-09,  1.5367e-09,\n",
       "                      -1.3970e-09, -3.0268e-10,  2.3283e-11,  3.5390e-09, -1.3271e-09,\n",
       "                       1.3039e-09, -1.8161e-09, -2.0955e-10,  4.3307e-09, -4.6566e-11,\n",
       "                      -4.2841e-09, -3.9116e-09,  6.0536e-10, -3.2131e-09, -1.5832e-09,\n",
       "                       1.5832e-09,  6.3796e-09,  2.0489e-09,  6.5193e-10,  6.9849e-11,\n",
       "                      -2.2352e-09, -1.3970e-10, -1.8161e-09, -9.0804e-10, -3.3062e-09,\n",
       "                       1.7229e-09,  1.3970e-09,  1.0710e-09,  5.5879e-10,  3.8650e-09,\n",
       "                      -2.7940e-10,  1.5367e-09, -2.9802e-09,  3.2596e-09, -1.0710e-09,\n",
       "                      -5.8208e-09,  1.3970e-09, -8.7020e-10,  1.5367e-09, -5.7742e-09,\n",
       "                       1.3970e-10,  9.3132e-11,  8.3819e-10, -1.6298e-09,  0.0000e+00])),\n",
       "             ('linear_relu_stack.BatchNorm_18.running_var',\n",
       "              tensor([1.0148, 1.0103, 1.0424, 1.0261, 1.0756, 0.9839, 1.0392, 1.0313, 1.0544,\n",
       "                      0.9874, 0.9765, 1.0558, 1.0618, 0.9825, 1.0055, 1.0284, 0.9620, 0.9798,\n",
       "                      0.9759, 1.0263, 1.0318, 1.0900, 0.9851, 0.9399, 1.0024, 0.9479, 0.9322,\n",
       "                      1.0349, 1.0036, 0.9654, 1.0154, 0.9599, 0.9695, 0.9980, 0.9646, 1.0642,\n",
       "                      1.1530, 0.9800, 0.9747, 1.0178, 0.9646, 0.9555, 0.9778, 0.9949, 0.9895,\n",
       "                      1.0232, 1.0153, 0.9691, 0.9971, 1.0123, 1.0403, 1.0234, 1.0022, 1.0420,\n",
       "                      1.0411, 0.9992, 0.9894, 0.9461, 0.9870, 0.9998, 0.9325, 1.0082, 0.9447,\n",
       "                      1.0209, 0.9849, 0.9984, 1.0238, 0.9648, 1.0065, 0.9913, 1.0490, 1.0209,\n",
       "                      1.0918, 0.9598, 0.9812, 0.9676, 0.9890, 0.9380, 0.9785, 0.9568, 0.9429,\n",
       "                      0.9841, 1.0041, 1.0726, 1.0065, 1.0560, 0.9825, 1.0325, 1.0237, 0.9635,\n",
       "                      1.0329, 1.0260, 0.9811, 1.0269, 1.0854, 0.9681, 0.9650, 1.0651, 0.9740,\n",
       "                      1.0242])),\n",
       "             ('linear_relu_stack.BatchNorm_18.num_batches_tracked', tensor(1)),\n",
       "             ('linear_relu_stack.Output Layer.weight',\n",
       "              tensor([[-7.0464e-02,  1.8107e-01,  1.0263e-01,  1.2835e-01,  1.2394e-01,\n",
       "                        1.0855e-01,  1.7532e-01, -4.7168e-02,  1.3738e-02,  1.7734e-01,\n",
       "                        1.3643e-01, -3.4285e-02, -1.5049e-01,  1.2232e-01, -1.1632e-01,\n",
       "                       -1.7124e-01, -6.7331e-02, -7.9594e-02, -2.3086e-01,  7.5842e-03,\n",
       "                        1.8862e-02, -6.1181e-02, -2.7370e-02,  1.9786e-01,  2.2123e-01,\n",
       "                        5.3454e-02,  2.4386e-02, -1.3154e-03, -9.5637e-02,  2.9482e-03,\n",
       "                       -4.3452e-02, -1.7024e-01,  1.9103e-01, -1.2461e-01, -7.2623e-02,\n",
       "                       -1.3712e-01, -7.6445e-02,  1.4769e-01, -5.4121e-02,  5.0194e-03,\n",
       "                       -7.0794e-02,  1.6309e-01, -1.1059e-02, -1.3205e-02, -4.1052e-02,\n",
       "                       -1.2604e-01,  7.5440e-02,  2.0293e-01, -2.1878e-01,  2.2190e-02,\n",
       "                        1.3840e-01,  5.1123e-02,  4.3509e-03,  1.4125e-01,  2.1271e-01,\n",
       "                        2.1771e-01,  1.6054e-01, -9.9772e-02, -2.7618e-02, -1.7691e-01,\n",
       "                       -8.8252e-02,  1.7993e-01, -1.4398e-01, -2.0612e-01,  5.0896e-02,\n",
       "                        1.5003e-01,  3.3233e-02,  5.7697e-02,  2.2121e-01,  1.0238e-01,\n",
       "                       -1.1347e-01,  4.6756e-02,  1.1538e-01, -1.4747e-01, -1.2227e-01,\n",
       "                        4.2886e-03,  1.6159e-01, -1.1637e-01,  1.3795e-01,  1.9709e-01,\n",
       "                        2.1917e-02, -1.5058e-01, -1.4605e-01,  1.6027e-01, -7.8645e-02,\n",
       "                       -4.5465e-02, -1.8664e-01,  2.1257e-01, -7.8754e-02,  2.2551e-01,\n",
       "                       -1.0204e-01, -9.0863e-02, -8.8555e-02,  6.6714e-02,  8.5255e-02,\n",
       "                        3.1744e-02, -1.6355e-01,  1.7085e-01,  1.8079e-01,  9.5386e-02],\n",
       "                      [-6.8220e-03, -1.8572e-01,  1.4682e-01,  7.1791e-02, -2.6309e-02,\n",
       "                        1.4174e-01,  6.4174e-02,  2.9529e-02,  2.1505e-01,  1.9860e-02,\n",
       "                       -2.0671e-01,  1.1284e-01, -2.0349e-01,  1.8708e-01,  5.5397e-02,\n",
       "                       -7.1620e-02, -8.8150e-03, -1.7185e-01, -1.3091e-01,  1.5361e-01,\n",
       "                        8.9920e-03, -7.9605e-02, -9.0913e-02,  4.2845e-02,  1.8959e-01,\n",
       "                       -2.2753e-01, -1.6559e-01,  1.9836e-01, -6.0328e-02, -3.6501e-02,\n",
       "                        1.4263e-02,  1.2072e-01,  8.0830e-02,  1.7745e-01, -2.3337e-01,\n",
       "                        2.2933e-01, -1.5882e-01, -3.7249e-02, -1.0241e-02, -4.8027e-02,\n",
       "                       -2.3930e-02,  2.6159e-02,  9.9967e-02,  2.3927e-02,  2.2133e-01,\n",
       "                        1.8126e-01, -1.3585e-01,  1.3499e-01, -1.3756e-01,  7.3407e-02,\n",
       "                       -1.8186e-01,  1.9886e-01,  9.5445e-02, -7.8777e-02, -8.2271e-02,\n",
       "                        1.3798e-01, -1.8548e-03, -2.2711e-01,  1.8394e-01, -4.3676e-02,\n",
       "                        2.1233e-01,  9.6310e-02,  1.0292e-01, -6.0033e-02,  1.7325e-01,\n",
       "                        7.7857e-02, -1.8513e-01,  1.8074e-01,  1.3567e-02, -3.8683e-02,\n",
       "                        1.3324e-01,  9.5919e-02,  7.8169e-02,  2.7740e-02, -1.5353e-02,\n",
       "                       -6.3320e-02,  1.7695e-01, -1.4050e-01, -2.1840e-02, -4.6501e-04,\n",
       "                       -2.2042e-01,  1.2973e-01,  2.3022e-01, -1.4499e-02,  1.9908e-01,\n",
       "                       -1.6437e-01,  8.8741e-02, -7.6638e-02, -6.6491e-03, -2.1610e-01,\n",
       "                       -6.2380e-02,  1.3285e-01,  8.3274e-02,  1.0608e-01, -1.9123e-01,\n",
       "                        5.8163e-02,  8.1985e-03, -2.7372e-02,  2.8502e-02, -2.1984e-01],\n",
       "                      [-1.5868e-02,  8.1704e-02, -6.1981e-02, -1.8635e-02, -2.3122e-01,\n",
       "                        1.0845e-01, -1.6176e-01, -4.3035e-03, -1.3644e-01,  6.9048e-02,\n",
       "                        8.0027e-02, -1.9947e-01,  2.0508e-01, -4.4523e-02, -7.5552e-02,\n",
       "                        1.7888e-01,  2.3319e-01,  4.0893e-02, -7.0025e-02, -1.3721e-01,\n",
       "                       -1.5323e-01, -4.3916e-02, -7.7812e-02,  1.2856e-01, -1.3195e-01,\n",
       "                       -2.2906e-02,  3.4350e-02,  1.5315e-01, -1.3036e-01, -2.2015e-02,\n",
       "                        1.0353e-01,  1.4266e-01, -2.2229e-01, -5.8930e-03,  1.2146e-01,\n",
       "                       -7.3513e-02, -1.8068e-01,  1.4907e-01, -1.5696e-01, -1.1208e-01,\n",
       "                       -1.4285e-01,  1.7993e-01,  1.7582e-01, -1.1880e-01, -8.4184e-02,\n",
       "                        1.6878e-01, -2.5173e-02,  2.1955e-02, -2.1241e-01,  9.4357e-02,\n",
       "                        3.4050e-02, -1.3973e-01, -1.3101e-02, -1.9807e-01,  4.9015e-02,\n",
       "                       -7.5812e-02,  4.6933e-02, -1.9549e-02,  1.0416e-01, -1.0329e-01,\n",
       "                        2.1724e-01, -2.0398e-01, -1.7822e-01, -7.8001e-02, -1.0523e-01,\n",
       "                        1.4769e-01, -1.4181e-01, -1.0216e-01,  6.7919e-02, -9.2064e-02,\n",
       "                        1.0582e-01,  4.2633e-02, -1.5202e-02, -5.8083e-02, -2.1884e-01,\n",
       "                       -2.1871e-01,  4.1609e-03,  8.0636e-03,  1.1767e-02, -1.7948e-01,\n",
       "                        1.1053e-01, -1.4355e-01,  1.1917e-01,  4.4953e-02,  2.3059e-01,\n",
       "                       -8.9695e-02,  1.1805e-01,  1.3768e-01,  1.1343e-02, -1.2957e-01,\n",
       "                       -1.6396e-03, -4.2653e-02, -1.1632e-01, -1.2826e-01,  1.4834e-01,\n",
       "                        2.2271e-02, -2.1785e-01, -1.6160e-01,  6.8905e-02, -8.0099e-02],\n",
       "                      [ 7.5835e-02,  1.8150e-01,  1.2813e-01,  6.1593e-02, -1.8598e-01,\n",
       "                        1.8997e-03, -1.4021e-02, -1.4095e-01, -9.4502e-02,  1.8233e-01,\n",
       "                        1.6872e-01, -1.6781e-01, -1.5540e-01,  1.8875e-01, -1.0698e-01,\n",
       "                        2.2120e-01, -1.1243e-01, -1.1294e-01, -2.1215e-01, -1.3111e-01,\n",
       "                       -1.7700e-02,  9.5467e-02, -2.2693e-01,  2.0760e-01,  2.1525e-01,\n",
       "                       -2.3263e-01,  1.2054e-01, -1.2242e-01,  5.5131e-02, -1.8435e-02,\n",
       "                       -1.8062e-01,  2.0759e-01, -1.6742e-01, -1.7447e-01,  3.8626e-02,\n",
       "                        8.7440e-02, -2.0010e-01,  2.0732e-01,  1.8694e-01,  4.7021e-02,\n",
       "                       -2.2485e-01,  2.0243e-01,  3.4484e-02,  1.1298e-01, -1.1725e-01,\n",
       "                        1.7442e-01,  8.1063e-02, -3.4327e-02,  6.7302e-02, -1.4862e-01,\n",
       "                        5.0213e-02,  1.5893e-01, -1.8097e-01,  1.7898e-01,  9.2851e-02,\n",
       "                       -3.7874e-02, -3.8016e-02,  2.7065e-02, -1.8516e-01,  1.2289e-01,\n",
       "                       -1.0764e-01,  6.9722e-02,  2.2775e-02,  1.7369e-01, -9.8836e-02,\n",
       "                       -7.3014e-02, -2.0103e-01,  1.8557e-01, -7.2303e-02, -2.1121e-01,\n",
       "                        7.1846e-03, -2.2420e-02,  1.9733e-01, -1.0557e-01,  1.5861e-01,\n",
       "                       -1.3457e-01, -7.2146e-02,  2.2266e-01,  2.0613e-01,  2.1236e-01,\n",
       "                       -2.1018e-01,  1.7748e-01,  9.5490e-02, -1.9949e-01, -3.6390e-02,\n",
       "                       -1.3188e-02, -6.6906e-02, -7.2804e-02,  1.0955e-01,  2.0205e-01,\n",
       "                       -6.5830e-02, -1.9880e-01,  1.3946e-01,  1.2704e-01, -4.9750e-02,\n",
       "                        7.5978e-02, -6.2128e-02, -1.9736e-01, -8.3989e-02,  2.2944e-01],\n",
       "                      [ 1.7554e-01, -1.8733e-01, -9.1720e-02,  1.2899e-01,  8.0162e-02,\n",
       "                       -1.5600e-01,  2.2753e-01,  3.9851e-02,  1.6288e-01,  4.3873e-02,\n",
       "                        1.7851e-01,  1.9392e-01,  2.2973e-01, -2.1382e-02,  1.4151e-01,\n",
       "                        1.6928e-01, -2.2118e-01, -2.2804e-01,  1.6671e-01,  1.3540e-01,\n",
       "                        1.4753e-01, -1.3284e-01, -2.1293e-01,  1.8546e-01, -1.8243e-01,\n",
       "                        5.9344e-02,  1.4160e-01, -6.3284e-02,  1.5269e-01, -7.3856e-02,\n",
       "                       -2.2328e-01, -6.8924e-02,  5.8558e-02, -1.9027e-01, -1.5182e-01,\n",
       "                        1.8180e-01, -4.1839e-02,  1.1632e-01,  1.4015e-01, -2.0552e-01,\n",
       "                       -1.0753e-01, -2.7274e-02,  1.7155e-01, -7.7609e-02,  2.0551e-01,\n",
       "                       -2.0481e-02,  1.9376e-01, -2.2729e-01,  1.9307e-01, -8.2543e-02,\n",
       "                        1.3084e-01,  1.1561e-01, -2.2129e-01, -2.1702e-01, -2.2797e-01,\n",
       "                        1.2170e-02, -1.0653e-01,  1.2066e-01, -9.0216e-02,  1.4935e-01,\n",
       "                       -1.7108e-01,  7.3333e-02, -8.0782e-04, -2.8080e-03, -9.6700e-02,\n",
       "                       -1.6741e-01, -2.0430e-02, -1.9932e-01, -1.0697e-01,  9.3533e-02,\n",
       "                        3.1383e-02, -1.7703e-01, -8.9320e-02, -7.5846e-02,  6.8442e-02,\n",
       "                        2.0917e-01,  3.7710e-02, -3.4189e-02,  1.9385e-01,  1.2239e-02,\n",
       "                       -2.1949e-02, -1.3215e-01, -1.2956e-01,  1.6138e-01, -8.0027e-02,\n",
       "                       -9.8260e-02, -1.3573e-02, -5.9369e-02, -1.4442e-01, -2.2745e-01,\n",
       "                       -2.2566e-01, -1.4604e-01, -1.8716e-01,  3.6052e-02, -1.9860e-02,\n",
       "                       -2.1233e-01, -2.1480e-01,  1.9541e-01, -9.3927e-03,  1.1612e-01],\n",
       "                      [-1.9168e-01, -1.7151e-01, -1.8339e-02,  1.9420e-01,  1.1829e-01,\n",
       "                       -2.1083e-02,  1.0920e-02,  1.8701e-01,  1.1996e-01, -2.2708e-01,\n",
       "                        1.4374e-01,  9.8709e-02, -3.9926e-02, -1.8713e-02,  1.1629e-01,\n",
       "                       -1.7974e-02,  1.6622e-01, -2.1662e-01, -1.0067e-01,  9.4313e-02,\n",
       "                       -1.9623e-02, -1.6561e-01, -5.2555e-02, -6.0965e-02, -1.1641e-01,\n",
       "                       -1.9013e-01,  2.2544e-01,  8.3603e-02, -1.3416e-01,  5.0110e-02,\n",
       "                        1.3131e-01, -3.0865e-02, -4.2710e-02,  1.3041e-02, -1.9006e-01,\n",
       "                        2.0014e-02, -6.7893e-02,  2.1088e-01, -6.4727e-02, -2.0908e-01,\n",
       "                       -7.4438e-03, -2.3288e-01,  1.1429e-02, -6.2093e-02, -1.8397e-01,\n",
       "                       -1.4670e-01, -6.9654e-02, -9.7326e-02, -6.8638e-02, -1.8885e-01,\n",
       "                        8.4330e-02,  1.8685e-01, -7.1404e-03,  1.0355e-01, -1.1432e-01,\n",
       "                       -1.5000e-02,  1.4103e-01, -2.1088e-02, -1.8920e-01, -1.1947e-01,\n",
       "                        2.3206e-01, -1.5668e-01,  1.0364e-01, -1.8209e-01,  1.0223e-01,\n",
       "                       -2.2617e-02, -1.0468e-01,  9.1408e-03,  2.3158e-02, -8.1436e-02,\n",
       "                        1.4858e-01, -1.6191e-01,  1.8523e-01,  1.0846e-01,  1.0742e-01,\n",
       "                       -3.0544e-02,  2.0262e-01,  1.6176e-02, -1.7630e-01, -1.3137e-01,\n",
       "                       -9.7258e-02, -1.6453e-01, -8.9534e-02, -1.5129e-01,  3.9461e-02,\n",
       "                        2.5591e-02, -1.4894e-01,  2.0735e-01,  1.8073e-01, -6.0902e-02,\n",
       "                       -1.3861e-01,  1.9385e-01,  4.3368e-02,  1.0431e-01, -1.6059e-01,\n",
       "                       -1.5499e-01, -1.0405e-01, -1.2766e-01,  9.9627e-02,  1.0121e-01],\n",
       "                      [-2.2841e-01, -1.1947e-01,  1.4285e-01, -9.1144e-02, -1.5300e-01,\n",
       "                        2.7674e-02, -8.5998e-02, -1.2779e-01, -3.8071e-02, -2.3179e-01,\n",
       "                        2.3198e-01,  3.3885e-02,  1.3470e-01, -7.8390e-02, -2.0889e-01,\n",
       "                        1.0019e-01, -2.1892e-01, -1.1323e-01,  7.1397e-02,  1.1673e-01,\n",
       "                        1.2418e-01, -2.0930e-01,  1.8542e-01,  1.2841e-02, -1.3697e-01,\n",
       "                        7.0018e-02, -2.0748e-01, -6.0361e-02,  7.6648e-03,  4.3816e-02,\n",
       "                        7.4710e-02,  4.6951e-02, -1.4753e-01,  2.2342e-02,  2.4300e-02,\n",
       "                       -1.1676e-01, -5.7056e-02, -1.1477e-01,  1.9702e-01, -3.3937e-02,\n",
       "                       -1.1143e-01,  3.7323e-02,  1.3245e-01,  1.6045e-01, -2.5041e-02,\n",
       "                       -1.2900e-01,  1.4858e-01, -2.1000e-01,  2.3267e-02, -9.9841e-02,\n",
       "                        1.6638e-01,  2.1656e-01,  5.2234e-02,  2.0151e-01, -2.0293e-01,\n",
       "                       -4.7736e-02,  2.2696e-01, -1.7664e-01, -9.0424e-02, -2.1816e-01,\n",
       "                       -1.4048e-02, -1.6711e-01,  6.2148e-02,  1.2336e-01,  9.7766e-02,\n",
       "                        1.4298e-01,  1.3336e-01,  1.3573e-01,  1.4017e-01,  2.9598e-02,\n",
       "                       -1.4880e-01, -1.6005e-02, -2.0574e-01, -8.6224e-02, -1.2248e-01,\n",
       "                        9.7694e-02,  9.2574e-02, -9.3325e-02,  2.1160e-01,  2.1548e-01,\n",
       "                        2.0123e-01, -1.9403e-01,  1.4562e-01,  1.6690e-01, -1.1117e-01,\n",
       "                        5.4965e-02,  9.0740e-02, -9.1790e-02,  1.3278e-01, -1.1260e-01,\n",
       "                       -1.3194e-02,  1.1698e-01, -1.3113e-01, -1.5655e-01,  3.9935e-02,\n",
       "                       -9.7927e-02,  8.3749e-02, -3.9225e-02, -7.1345e-02, -5.5797e-02],\n",
       "                      [ 1.7686e-01,  2.1833e-01, -1.6771e-01,  3.7000e-03, -7.6594e-02,\n",
       "                        1.9154e-01,  2.1577e-01,  1.3218e-01,  1.0450e-01,  8.4788e-02,\n",
       "                       -4.0224e-03, -1.5071e-01,  1.9961e-01,  1.0967e-01,  1.5029e-01,\n",
       "                       -1.4807e-01, -1.5443e-01,  7.5261e-02,  9.8873e-02,  2.8783e-02,\n",
       "                        2.2454e-01,  1.3814e-01,  8.8316e-02,  2.8203e-03, -1.7190e-01,\n",
       "                        2.0125e-01,  7.1656e-02,  6.8366e-02,  7.3858e-02, -2.2607e-01,\n",
       "                        1.7921e-01,  9.0955e-02,  1.4544e-01, -1.8127e-01, -2.2687e-02,\n",
       "                       -2.0041e-01,  1.2295e-01,  1.0034e-02,  1.2087e-01, -2.2153e-01,\n",
       "                        1.1014e-01, -8.7195e-02,  7.0326e-02, -3.2119e-03,  2.1948e-01,\n",
       "                        1.9294e-01,  3.3004e-02,  1.5438e-01, -1.7679e-01, -1.3635e-02,\n",
       "                        1.3125e-01, -2.2801e-01,  1.6517e-01, -1.6846e-01,  1.5493e-01,\n",
       "                       -2.8883e-02,  9.8182e-03, -2.0158e-01, -7.8033e-03, -1.1260e-01,\n",
       "                        1.2613e-01, -6.4397e-02, -9.7285e-02,  1.9625e-01,  9.4960e-02,\n",
       "                        1.5268e-01, -1.5659e-02, -6.3148e-02,  6.4541e-03, -1.0012e-01,\n",
       "                        4.4375e-03,  5.4125e-02, -1.8905e-01, -4.1015e-03, -1.4156e-01,\n",
       "                       -1.5624e-01,  9.1533e-02,  6.7099e-02,  1.4163e-01,  9.7258e-02,\n",
       "                        6.4084e-02,  2.2190e-01,  1.9122e-01,  1.9768e-01, -4.8708e-02,\n",
       "                       -1.1301e-01,  3.0621e-02, -1.9774e-01,  8.2814e-02, -2.2105e-01,\n",
       "                       -3.1387e-02, -1.1479e-01,  1.4046e-02, -1.3273e-01, -1.4442e-01,\n",
       "                       -1.1510e-01,  1.5211e-01,  2.0116e-01,  3.1158e-02, -1.6147e-01],\n",
       "                      [ 1.7561e-01,  1.0597e-01,  2.1978e-01, -4.4892e-02, -1.1421e-01,\n",
       "                       -2.8792e-02,  1.0938e-01, -2.2872e-02,  2.1821e-01,  2.1807e-01,\n",
       "                        9.1236e-02, -2.0964e-01, -1.6857e-01, -1.1383e-01,  1.3374e-01,\n",
       "                       -2.1153e-01, -1.6848e-01, -1.7166e-01, -5.9587e-02, -2.8613e-02,\n",
       "                       -1.6824e-01, -1.5796e-01,  8.2721e-03,  3.1221e-02, -2.0763e-01,\n",
       "                        2.2850e-01,  1.1008e-01,  1.5154e-01, -1.6985e-01, -1.0395e-01,\n",
       "                        1.5373e-01,  1.2536e-01, -3.2959e-02, -8.5551e-02,  2.1336e-02,\n",
       "                        1.8207e-01,  1.0759e-01,  4.2154e-02,  1.4247e-01,  1.2644e-01,\n",
       "                       -1.8478e-01, -6.7939e-02,  1.1289e-01, -7.9225e-02,  1.8178e-01,\n",
       "                        5.2941e-02, -1.3721e-01,  1.4238e-01, -2.0958e-01, -1.6991e-01,\n",
       "                        3.3685e-02, -1.7237e-01, -1.8726e-01, -1.4867e-01, -6.6151e-02,\n",
       "                        2.2690e-01,  1.5055e-01,  2.8851e-02, -7.2252e-02, -1.3599e-01,\n",
       "                        1.8717e-01,  9.9707e-02,  1.9400e-01, -1.2418e-01, -1.6717e-01,\n",
       "                        1.1829e-01,  1.8692e-01,  1.8353e-01,  1.0280e-01,  1.6082e-01,\n",
       "                        9.7695e-02,  1.8616e-01, -1.1530e-01, -2.0202e-01,  1.6433e-01,\n",
       "                        6.3441e-02, -1.0714e-01,  2.1181e-01,  7.7508e-03, -1.3356e-01,\n",
       "                        2.0227e-01,  3.5432e-02, -7.0088e-03,  2.5635e-02, -1.6067e-01,\n",
       "                        9.4681e-02, -1.5117e-01, -2.2124e-01, -7.8775e-02,  2.3521e-02,\n",
       "                       -2.3002e-01, -1.7533e-01,  6.7817e-02, -1.2936e-04, -1.3991e-01,\n",
       "                        2.0700e-01, -2.3063e-01,  1.3703e-01, -2.1880e-01, -1.1147e-01],\n",
       "                      [-2.1985e-01,  7.5341e-02,  8.0804e-02, -8.5716e-02,  8.2630e-02,\n",
       "                       -1.6079e-02, -3.6307e-02,  2.2816e-01,  2.0237e-02, -1.4903e-01,\n",
       "                       -2.3240e-01,  8.7499e-02,  3.5262e-02,  8.0788e-02, -1.0113e-01,\n",
       "                       -2.2847e-01, -2.8766e-02, -6.3658e-02, -1.7997e-01,  2.0944e-02,\n",
       "                       -1.7142e-01,  7.9760e-02, -2.1618e-01,  3.4304e-02, -1.5873e-01,\n",
       "                        2.1040e-01,  1.3318e-01,  1.1419e-01,  1.0314e-01,  1.3001e-01,\n",
       "                        8.8382e-02,  1.1126e-01,  1.6710e-01, -1.8898e-01,  4.5594e-02,\n",
       "                        1.0890e-01,  2.0603e-01, -1.9153e-01,  3.8983e-02, -1.0955e-01,\n",
       "                       -1.1537e-03, -2.4473e-02, -1.2451e-01,  1.4237e-01,  7.4459e-02,\n",
       "                       -1.3649e-01, -2.0096e-01, -2.2171e-01,  1.3949e-01, -9.7564e-02,\n",
       "                        1.7156e-02, -1.6838e-01,  3.5189e-02, -9.9247e-02, -1.6306e-01,\n",
       "                       -2.1109e-01, -7.5804e-02,  1.5993e-01, -5.6152e-02,  1.9308e-01,\n",
       "                        1.7745e-02, -6.5028e-03, -2.0255e-01,  2.1671e-01, -2.9475e-02,\n",
       "                        1.8051e-01,  1.2321e-01, -1.1959e-01,  1.8724e-01, -2.1489e-01,\n",
       "                       -7.6431e-02,  9.2990e-03,  1.0978e-01,  1.0859e-01, -1.7582e-01,\n",
       "                        8.2467e-02, -9.2352e-02,  1.5706e-01,  1.1335e-01,  1.8228e-01,\n",
       "                        1.4532e-01, -1.1378e-03,  1.5601e-01,  2.0085e-01,  1.1547e-01,\n",
       "                       -2.0799e-01,  1.3895e-01,  2.6345e-02,  1.6149e-01,  2.1498e-01,\n",
       "                       -8.6075e-02, -1.6659e-01,  6.3533e-02, -1.6260e-01,  5.6839e-02,\n",
       "                       -5.2595e-02,  1.8788e-01,  6.5769e-02, -8.8045e-02, -2.2041e-01]])),\n",
       "             ('linear_relu_stack.Output Layer.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.state_dict()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.019027Z",
     "start_time": "2025-06-26T01:43:33.217395Z"
    }
   },
   "outputs": [],
   "source": [
    "from torch.utils.tensorboard import SummaryWriter\n",
    "class TensorboardLogger:\n",
    "    \"\"\"\n",
    "    Tensorboard日志记录类：记录训练过程中的损失和准确率\n",
    "    \n",
    "    参数:\n",
    "        log_dir: 日志保存目录,log_dir的父目录不要有中文\n",
    "    \"\"\"\n",
    "    def __init__(self, log_dir='tensorboard_logs'):\n",
    "\n",
    "        import os\n",
    "        \n",
    "        # 确保日志目录存在\n",
    "        if not os.path.exists(log_dir):\n",
    "            os.makedirs(log_dir)\n",
    "            \n",
    "        self.writer = SummaryWriter(log_dir) # 实例化SummaryWriter, log_dir是log存放路径，flush_secs是每隔多少秒写入磁盘\n",
    "        \n",
    "    def log_training(self, epoch, train_loss, train_acc):\n",
    "        \"\"\"\n",
    "        记录训练数据\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            train_loss: 训练损失\n",
    "            train_acc: 训练准确率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('训练/损失', train_loss, epoch)\n",
    "        self.writer.add_scalar('训练/准确率', train_acc, epoch)\n",
    "        \n",
    "    def log_validation(self, epoch, val_loss, val_acc):\n",
    "        \"\"\"\n",
    "        记录验证数据\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            val_loss: 验证损失\n",
    "            val_acc: 验证准确率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('验证/损失', val_loss, epoch)\n",
    "        self.writer.add_scalar('验证/准确率', val_acc, epoch)\n",
    "    \n",
    "    def log_lr(self, epoch, lr):\n",
    "        \"\"\"\n",
    "        记录学习率\n",
    "        \n",
    "        参数:\n",
    "            epoch: 当前训练轮数\n",
    "            lr: 学习率\n",
    "        \"\"\"\n",
    "        self.writer.add_scalar('学习率', lr, epoch)\n",
    "        \n",
    "    def log_model_graph(self, model, images):\n",
    "        \"\"\"\n",
    "        记录模型结构图\n",
    "        \n",
    "        参数:\n",
    "            model: 模型\n",
    "            images: 输入图像样本\n",
    "        \"\"\"\n",
    "        self.writer.add_graph(model, images)\n",
    "        \n",
    "    def close(self):\n",
    "        \"\"\"\n",
    "        关闭Tensorboard写入器\n",
    "        \"\"\"\n",
    "        self.writer.close()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 设置交叉熵损失函数，SGD优化器"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.023837Z",
     "start_time": "2025-06-26T01:43:40.019952Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "损失函数: CrossEntropyLoss()\n",
      "优化器: SGD (\n",
      "Parameter Group 0\n",
      "    dampening: 0\n",
      "    differentiable: False\n",
      "    foreach: None\n",
      "    fused: None\n",
      "    lr: 0.01\n",
      "    maximize: False\n",
      "    momentum: 0.9\n",
      "    nesterov: False\n",
      "    weight_decay: 0\n",
      ")\n"
     ]
    }
   ],
   "source": [
    "model = NeuralNetwork()\n",
    "# 定义损失函数和优化器\n",
    "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，里边会做softmax，还有会把0-9标签转换成one-hot编码\n",
    "# 用少量样本就能更新权重，训练更快，且更容易跳出局部最优\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)  # SGD优化器，学习率为0.01，动量为0.9\n",
    "\n",
    "print(\"损失函数:\", loss_fn)\n",
    "print(\"优化器:\", optimizer)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.035848Z",
     "start_time": "2025-06-26T01:43:40.032419Z"
    }
   },
   "outputs": [],
   "source": [
    "model = NeuralNetwork(layers_num=19)\n",
    "\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.01，动量为0.9"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.732814Z",
     "start_time": "2025-06-26T01:43:40.035848Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "使用设备: cpu\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "12656d4e492641549f3560cb36f550c7",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "  0%|          | 0/43000 [00:00<?, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "早停触发! 最佳验证准确率: 87.9400\n",
      "早停: 已有5轮验证损失没有改善！\n"
     ]
    }
   ],
   "source": [
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "print(f\"使用设备: {device}\")\n",
    "model = model.to(device) #将模型移动到GPU\n",
    "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
    "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
    "tensorboard_logger=TensorboardLogger(log_dir='logs')\n",
    "\n",
    "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=tensorboard_logger)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.737721Z",
     "start_time": "2025-06-26T01:45:37.732814Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 0.1547924280166626, 'acc': 93.75, 'step': 9401},\n",
       " {'loss': 0.31046798825263977, 'acc': 89.0625, 'step': 9402},\n",
       " {'loss': 0.11046310514211655, 'acc': 96.875, 'step': 9403},\n",
       " {'loss': 0.24028357863426208, 'acc': 90.625, 'step': 9404},\n",
       " {'loss': 0.26721835136413574, 'acc': 89.0625, 'step': 9405},\n",
       " {'loss': 0.2150113880634308, 'acc': 89.0625, 'step': 9406},\n",
       " {'loss': 0.4928027093410492, 'acc': 84.375, 'step': 9407},\n",
       " {'loss': 0.20720763504505157, 'acc': 95.3125, 'step': 9408},\n",
       " {'loss': 0.38967669010162354, 'acc': 85.9375, 'step': 9409},\n",
       " {'loss': 0.44880470633506775, 'acc': 84.375, 'step': 9410},\n",
       " {'loss': 0.31742972135543823, 'acc': 89.0625, 'step': 9411},\n",
       " {'loss': 0.1524515450000763, 'acc': 95.3125, 'step': 9412},\n",
       " {'loss': 0.2586272954940796, 'acc': 90.625, 'step': 9413},\n",
       " {'loss': 0.3900238275527954, 'acc': 78.125, 'step': 9414},\n",
       " {'loss': 0.27417251467704773, 'acc': 87.5, 'step': 9415},\n",
       " {'loss': 0.19726693630218506, 'acc': 92.1875, 'step': 9416},\n",
       " {'loss': 0.3505842089653015, 'acc': 90.625, 'step': 9417},\n",
       " {'loss': 0.26967161893844604, 'acc': 89.0625, 'step': 9418},\n",
       " {'loss': 0.33068859577178955, 'acc': 82.8125, 'step': 9419},\n",
       " {'loss': 0.24933630228042603, 'acc': 89.0625, 'step': 9420},\n",
       " {'loss': 0.2578052580356598, 'acc': 89.0625, 'step': 9421},\n",
       " {'loss': 0.5356611013412476, 'acc': 79.6875, 'step': 9422},\n",
       " {'loss': 0.2810977101325989, 'acc': 92.1875, 'step': 9423},\n",
       " {'loss': 0.31292831897735596, 'acc': 89.0625, 'step': 9424},\n",
       " {'loss': 0.22595667839050293, 'acc': 89.0625, 'step': 9425},\n",
       " {'loss': 0.2361590713262558, 'acc': 95.3125, 'step': 9426},\n",
       " {'loss': 0.33246225118637085, 'acc': 87.5, 'step': 9427},\n",
       " {'loss': 0.2456618994474411, 'acc': 89.0625, 'step': 9428},\n",
       " {'loss': 0.28381145000457764, 'acc': 90.625, 'step': 9429},\n",
       " {'loss': 0.21530845761299133, 'acc': 93.75, 'step': 9430},\n",
       " {'loss': 0.19616569578647614, 'acc': 92.1875, 'step': 9431},\n",
       " {'loss': 0.2537690997123718, 'acc': 92.1875, 'step': 9432},\n",
       " {'loss': 0.23877759277820587, 'acc': 92.1875, 'step': 9433},\n",
       " {'loss': 0.309629887342453, 'acc': 89.0625, 'step': 9434},\n",
       " {'loss': 0.5600196719169617, 'acc': 79.6875, 'step': 9435},\n",
       " {'loss': 0.2708642780780792, 'acc': 85.9375, 'step': 9436},\n",
       " {'loss': 0.5061455368995667, 'acc': 81.25, 'step': 9437},\n",
       " {'loss': 0.20905479788780212, 'acc': 92.1875, 'step': 9438},\n",
       " {'loss': 0.2350572943687439, 'acc': 92.1875, 'step': 9439},\n",
       " {'loss': 0.2390000969171524, 'acc': 92.1875, 'step': 9440},\n",
       " {'loss': 0.18417389690876007, 'acc': 96.875, 'step': 9441},\n",
       " {'loss': 0.47598397731781006, 'acc': 81.25, 'step': 9442},\n",
       " {'loss': 0.1994292438030243, 'acc': 90.625, 'step': 9443},\n",
       " {'loss': 0.3940974473953247, 'acc': 84.375, 'step': 9444},\n",
       " {'loss': 0.35674136877059937, 'acc': 87.5, 'step': 9445},\n",
       " {'loss': 0.4041430652141571, 'acc': 82.8125, 'step': 9446},\n",
       " {'loss': 0.1812359243631363, 'acc': 93.75, 'step': 9447},\n",
       " {'loss': 0.2626109719276428, 'acc': 93.75, 'step': 9448},\n",
       " {'loss': 0.3338093161582947, 'acc': 90.625, 'step': 9449},\n",
       " {'loss': 0.30305927991867065, 'acc': 89.0625, 'step': 9450},\n",
       " {'loss': 0.29659053683280945, 'acc': 89.0625, 'step': 9451},\n",
       " {'loss': 0.2332083284854889, 'acc': 90.625, 'step': 9452},\n",
       " {'loss': 0.3019041121006012, 'acc': 87.5, 'step': 9453},\n",
       " {'loss': 0.17690005898475647, 'acc': 92.1875, 'step': 9454},\n",
       " {'loss': 0.2881486117839813, 'acc': 89.0625, 'step': 9455},\n",
       " {'loss': 0.20606306195259094, 'acc': 89.0625, 'step': 9456},\n",
       " {'loss': 0.2716468274593353, 'acc': 93.75, 'step': 9457},\n",
       " {'loss': 0.28289008140563965, 'acc': 90.625, 'step': 9458},\n",
       " {'loss': 0.18102611601352692, 'acc': 91.66666865348816, 'step': 9459},\n",
       " {'loss': 0.09133979678153992, 'acc': 98.4375, 'step': 9460},\n",
       " {'loss': 0.30004262924194336, 'acc': 87.5, 'step': 9461},\n",
       " {'loss': 0.1758502572774887, 'acc': 95.3125, 'step': 9462},\n",
       " {'loss': 0.18347439169883728, 'acc': 93.75, 'step': 9463},\n",
       " {'loss': 0.23629344999790192, 'acc': 92.1875, 'step': 9464},\n",
       " {'loss': 0.32900503277778625, 'acc': 89.0625, 'step': 9465},\n",
       " {'loss': 0.24245496094226837, 'acc': 90.625, 'step': 9466},\n",
       " {'loss': 0.22118225693702698, 'acc': 92.1875, 'step': 9467},\n",
       " {'loss': 0.22320395708084106, 'acc': 93.75, 'step': 9468},\n",
       " {'loss': 0.24877586960792542, 'acc': 90.625, 'step': 9469},\n",
       " {'loss': 0.2334049940109253, 'acc': 92.1875, 'step': 9470},\n",
       " {'loss': 0.17531217634677887, 'acc': 93.75, 'step': 9471},\n",
       " {'loss': 0.3911239206790924, 'acc': 90.625, 'step': 9472},\n",
       " {'loss': 0.270070344209671, 'acc': 89.0625, 'step': 9473},\n",
       " {'loss': 0.1437525600194931, 'acc': 93.75, 'step': 9474},\n",
       " {'loss': 0.2733078598976135, 'acc': 89.0625, 'step': 9475},\n",
       " {'loss': 0.2057996243238449, 'acc': 92.1875, 'step': 9476},\n",
       " {'loss': 0.22281043231487274, 'acc': 90.625, 'step': 9477},\n",
       " {'loss': 0.1983882188796997, 'acc': 95.3125, 'step': 9478},\n",
       " {'loss': 0.28579258918762207, 'acc': 87.5, 'step': 9479},\n",
       " {'loss': 0.18966691195964813, 'acc': 96.875, 'step': 9480},\n",
       " {'loss': 0.27394798398017883, 'acc': 89.0625, 'step': 9481},\n",
       " {'loss': 0.3847825527191162, 'acc': 89.0625, 'step': 9482},\n",
       " {'loss': 0.257118821144104, 'acc': 92.1875, 'step': 9483},\n",
       " {'loss': 0.1479347050189972, 'acc': 96.875, 'step': 9484},\n",
       " {'loss': 0.2144969254732132, 'acc': 90.625, 'step': 9485},\n",
       " {'loss': 0.14333879947662354, 'acc': 96.875, 'step': 9486},\n",
       " {'loss': 0.24070967733860016, 'acc': 92.1875, 'step': 9487},\n",
       " {'loss': 0.19414955377578735, 'acc': 93.75, 'step': 9488},\n",
       " {'loss': 0.2361435443162918, 'acc': 92.1875, 'step': 9489},\n",
       " {'loss': 0.2901403605937958, 'acc': 90.625, 'step': 9490},\n",
       " {'loss': 0.24944210052490234, 'acc': 90.625, 'step': 9491},\n",
       " {'loss': 0.2799053192138672, 'acc': 93.75, 'step': 9492},\n",
       " {'loss': 0.2366626113653183, 'acc': 93.75, 'step': 9493},\n",
       " {'loss': 0.2303202897310257, 'acc': 90.625, 'step': 9494},\n",
       " {'loss': 0.23005527257919312, 'acc': 92.1875, 'step': 9495},\n",
       " {'loss': 0.49472302198410034, 'acc': 85.9375, 'step': 9496},\n",
       " {'loss': 0.36328786611557007, 'acc': 89.0625, 'step': 9497},\n",
       " {'loss': 0.5259832143783569, 'acc': 90.625, 'step': 9498},\n",
       " {'loss': 0.14169174432754517, 'acc': 93.75, 'step': 9499}]"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history['train'][-100:-1]  # last ~100 per-batch records; note the -1 stop excludes the most recent entry"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.741226Z",
     "start_time": "2025-06-26T01:45:37.737721Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 2.660419792175293, 'acc': 8.68, 'step': 0},\n",
       " {'loss': 0.5110557450771331, 'acc': 82.06, 'step': 500},\n",
       " {'loss': 0.45784917736053465, 'acc': 84.34, 'step': 1000},\n",
       " {'loss': 0.43342011790275575, 'acc': 84.74, 'step': 1500},\n",
       " {'loss': 0.410778385591507, 'acc': 85.36, 'step': 2000},\n",
       " {'loss': 0.40443380892276765, 'acc': 85.72, 'step': 2500},\n",
       " {'loss': 0.38391841192245485, 'acc': 86.4, 'step': 3000},\n",
       " {'loss': 0.3810647835969925, 'acc': 86.66, 'step': 3500},\n",
       " {'loss': 0.3711730754852295, 'acc': 86.78, 'step': 4000},\n",
       " {'loss': 0.3722163967847824, 'acc': 86.5, 'step': 4500},\n",
       " {'loss': 0.3689910971164703, 'acc': 86.62, 'step': 5000},\n",
       " {'loss': 0.35920627954006196, 'acc': 87.34, 'step': 5500},\n",
       " {'loss': 0.35336609568595884, 'acc': 87.2, 'step': 6000},\n",
       " {'loss': 0.35695234858989716, 'acc': 87.28, 'step': 6500},\n",
       " {'loss': 0.3472429292678833, 'acc': 87.94, 'step': 7000},\n",
       " {'loss': 0.3485064945936203, 'acc': 87.56, 'step': 7500},\n",
       " {'loss': 0.34187562642097474, 'acc': 87.54, 'step': 8000},\n",
       " {'loss': 0.348040713429451, 'acc': 87.28, 'step': 8500},\n",
       " {'loss': 0.34649943730831145, 'acc': 87.46, 'step': 9000}]"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history['val'][-1000:-1]  # validation records; note the -1 stop excludes the most recent entry"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 绘制损失曲线和准确率曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.744941Z",
     "start_time": "2025-06-26T01:45:37.741226Z"
    }
   },
   "outputs": [],
   "source": [
    "# 导入绘图库\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import font_manager\n",
    "def plot_learning_curves1(history):\n",
    "    # 设置中文字体支持\n",
    "    plt.rcParams['font.sans-serif'] = ['SimHei']  # 使用黑体\n",
    "    plt.rcParams['axes.unicode_minus'] = False    # 解决负号显示问题\n",
    "\n",
    "    # 创建一个图形，包含两个子图（损失和准确率）\n",
    "    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n",
    "\n",
    "    # 绘制损失曲线\n",
    "    epochs = range(1, len(history['train_loss']) + 1)\n",
    "    ax1.plot(epochs, history['train_loss'], 'b-', label='训练损失')\n",
    "    ax1.plot(epochs, history['val_loss'], 'r-', label='验证损失')\n",
    "    ax1.set_title('训练与验证损失')\n",
    "    ax1.set_xlabel('轮次')\n",
    "    ax1.set_ylabel('损失')\n",
    "    ax1.legend()\n",
    "    ax1.grid(True)\n",
    "\n",
    "    # 绘制准确率曲线\n",
    "    ax2.plot(epochs, history['train_acc'], 'b-', label='训练准确率')\n",
    "    ax2.plot(epochs, history['val_acc'], 'r-', label='验证准确率')\n",
    "    ax2.set_title('训练与验证准确率')\n",
    "    ax2.set_xlabel('轮次')\n",
    "    ax2.set_ylabel('准确率 (%)')\n",
    "    ax2.legend()\n",
    "    ax2.grid(True)\n",
    "\n",
    "    plt.tight_layout()\n",
    "    plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.816716Z",
     "start_time": "2025-06-26T01:45:37.744941Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHACAYAAABqJx3iAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAinVJREFUeJzt3Qd4U+X3B/Bv2qR70dJC2XvvIRtRliAI4hYUFdyoiOuPA8XFz70FJ4iKigNciCDK3nvvQhll0910JPk/571JaaErbdKs7+d5Lrm5zbhtQ+8995z3vDqLxWIBERERERGRF/Fz9Q4QERERERE5GgMdIiIiIiLyOgx0iIiIiIjI6zDQISIiIiIir8NAh4iIiIiIvA4DHSIiIiIi8joMdIiIiIiIyOsw0CEiIiIiIq+jhwcwm804fvw4wsPDodPpXL07REQ+Q+aUTktLQ40aNeDnx2tjNjwuERG5/7HJIwIdOZjUrl3b1btBROSzjhw5glq1arl6N9wGj0tERO5/bPKIQEeumNm+mYiICLufn5ubiwULFmDAgAEwGAxO2EOi8uPnk9xZamqqOqG3/R0mDY9L5O34GSVvODZ5RKBjKwuQg0l5DyghISHqufzPSu6Gn0/yBCzPKozHJfJ2/IySNxybWHBNREREREReh4EOERERERF5HQY6RERERETkdTxijA4RuW97x7y8PJhMJlfvCpWTv78/9Ho9x+AQEZHXYaBDROWSk5ODpKQkZGZmunpXqIJkwHF8fDwCAgJcvStEREQOw0CHiMo1WWJCQoLKBshkXXKCzIyAZ2bkJGA9ffq0+n02btyYk4ISEZHXYKBDRHaTk2MJdqSHvWQDyHMFBwer1rGHDx9Wv9egoCBX7xIREZFD8NIdEZUbr/57B/4eiYjIG/HoRkREREREXoeBDhEREREReR0GOkRE5VSvXj28++67DnmtxYsXq4YOycnJDnk9IiIiX8dmBETkU/r06YN27do5JEBZt24dQkNDHbJfRERE5FgMdIiILmq5LBOgyiSapYmNja2UfSIiIiL7eX3p2vJ9Z3DNR6vwzT6v/1aJXBocZObkuWSR9y6rO+64A0uWLMF7772nysRkmTFjhrr966+/0LFjRwQGBmL58uU4cOAAhg0bhmrVqiEsLAydO3fGP//8U2LpmrzO559/jmuvvVa13ZZ5aX777bdy/1x//vlntGzZUu2TvNdbb71V6Osff/yxeg9pCS37ef311+d/7aeffkLr1q1V++iYmBj069cPGRkZ5d4XIiKilMxcPPDtBny8eD88gddndLLzTNh1Ig11QjmZIZGzZOWa0GLS3y55750vDkRIQNn+lEmAs3fvXrRq1Qovvvii2rZjxw51+3//939488030aBBA1SpUgVHjhzB4MGD8corr6hAY+bMmRg6dCj27NmDOnXqFPsekydPxuuvv4433ngDH3zwAUaOHKnmqImOjrbr+9qwYQNuvPFGvPDCC7jpppuwcuVKPPDAAypokYBt/fr1ePjhh/H111+je/fuOHfuHJYtW6aem5SUhFtuuUXthwRdaWlp6mv2BIVEREQFGXNNuHvmeqw9dA7zt5/AkNY1UCfGvefS8/pAJzRQ+xaNJlfvCRG5WmRkJAICAlS2pXr16mrb7t271a0EPv37989/rAQmbdu2zb//0ksvYc6cOSpDM27cuGLfQ4IQCTLEq6++ivfffx9r167FVVddZde+vv322+jbty+ee+45db9JkybYuXOnCqDkPRITE9X4oCFDhiA8PBx169ZF+/bt8wOdvLw8jBgxQm0Xkt0hIiIqD5PZggmzN6sgR5gtwPSVCXh+aEu4M68PdMKsgU42Ax0ipwk2+KvMiqve2xE6depU6H56errKpvz555/5gUNWVpYKMErSpk2b/HUJRCIiInDq1Cm792fXrl2qdK6gHj16qFI5GUMkQZkEMZKBkiBKFlvJnARoEiRJcDNw4EAM
GDBAlbVJpoqIiMgeUg3w0h87MW/bCQT4++Ge3g3w4X/7MXvdETzavwkiggxwV36+EugYza7eEyLvJWNTpHzMFYu8tyNc3D3t8ccfVxkcycpI2dfmzZtV4JCTk1Pi6xgMhf/gy/6ZzY7/AyRZnI0bN+K7775DfHw8Jk2apAIcaU/t7++PhQsXqnFHLVq0UCV0TZs2RUJCgsP3g4iIvNunSw9ixspDav3tm9risQFN0KRaGDJyTPhh7RG4M+8PdIJsGR052WB9OpGvk9I1yYiUZsWKFapETLIkEuBIqduhQ9of+srQvHlztQ8X75OUsEkgI6QznDQZkLE4W7duVfv377//5gdYkgGSMUObNm1S37cEbkRERGU1d9MxTPlLK/F+9urmGNKmhjq+jO3ZQG2bviIBeSb3zSb4TOmayMw1ITDQpbtDRC4m3cvWrFmjggLpplZctkW6mf3yyy+qAYH8UZexMs7IzBTnscceU53eZGyQNCNYtWoVPvzwQ9VpTfzxxx84ePAgevfurUrS5s2bp/ZPMjfy/S1atEiVrMXFxan7p0+fVsETERFRWTsXP/HTFrU+tmd9jO2lBTfimnY18Prfu3E8xYi/tp/A0LY14I68PqMTqPeDv59W2pKenefq3SEiF5OSNMmISEmXzINT3JgbaQYgAYR0NJNgR8a6dOjQodL2U95r9uzZ+P7771WXOClNk4YJkmUSUVFRKhC78sorVQAzbdo0VcYm7ahlXNDSpUtV1zjJAD377LOqNfWgQYMqbf+JiMhz7Tiegvu+2YBckwVD2sTj6cGFL5QFGfwxqqvW7ObzZQfdtqun12d05EpsWKA/UrLykMGOBEQ+T078JTtSkC14uDjzYysDs3nwwQcL3b+4lK2oP/QyZqYs+vTpc8nzr7vuOrUUpWfPnli8eHGRX5PAZ/78+WV6XyIiooKOns/EndPXqQRB1wbReOvGtvCzJg0KkkDn48UHsOVoCjYcPo9O9eybRqEyeH1GR4Ra59jIYEaHiIiIiKhIyZk5GP3lWpxKy0az6uH45LZOCNQX3d20alggRrSvqdY/X+aezW58ItCxjdNh6RoRucp9992nxgQVtcjXiIiIXD0h6Niv1uPA6QzUiAzCjDsvQ2Rwya2j7+pZX93+vfMEDp/NgLvx+tI1ERqoRaIsXSMiV5HxNTI+qCgypoaIiMiVE4I+8v0mrD98HhFBesy46zJUjwwq9XlNqoXj8iaxWLL3NKavOIQXrnGvCUR9JNCxlq7lMKNDRK4h3c9kISL3lZSShWrhQUWOR6gs0qr3bEYOqkWUfpJJ5AgWiwUv/LYDf+84qSYE/ez2TiqAKauxveqrQGf2em0C0dKyQJWJpWtERETk02QM76M/bEa3Kf/ipk9XqYDHFQ6cTseQD5ajy6uL1Ez0OXnuOz8JeY+pSw7g69WHIfNvv3NTO3RpEGPX83s2qoqm1cKRmWPC92uL7mTqKj4R6LB0jYiIiIqy+0Qqhn64HHM2HVP31x06j8HvLcN/e05V+sSMQz9Yjt0n0tT9L5Yn4IZPVuHIucxK3Q/yLT9vOIrX5+9R65OGtMDVbeLL1eF4TC9trM6MlYeQ60YTiPpEoMOMDhEREV1criNXn4d9uAIHT2egekQQ3r+lPVrVjMD5zFzVXve1+budPut7Vo4JT/20FeN/2KyuiHdrEIO3bmiryn+2HEnG1e8vw987Tjh1H8g3Ld17Gk/9vFWt39u7Ae7soQUr5TGsXQ3VhS0pxYh525LgLnwi0Alle2kiIiK6qFTt/37Zhuw8M/o0jcW8R3rhmrY18PP93TG6mzYR4tTFB3Dzp6udVsq2/1Q6hn+0Aj+sP6LKhsb3a4xvxnbBdR1rqf1pXycKqcY83Pv1Brz4O0vZyHG2H0vB/d9sQJ7ZooKUp65qVqHXkxbUt1v/30g20l0mEPWp0rX0HJauERER+bJdSVqp2tzNx+Hvp1MneF+O7ozo0ID8E7bJw1rh45EdEB6o
V12onFHKNmfTUVzz4XLsOZmmroR/M6YLxvdrovZJ1IwKxux7u+Ge3g3U/S9XJOCGaStZykYVduRcJu6csQ4ZOSb0aBSDN64vekJQe43sUgeBej9sPZqiSkDdgW+VrhmZ0SGiiqlXrx7efffdMtctz5071+n7RESlkyvM361NVBkUKVWLjwzCD/d0xf19GhZ5kje4dTz+eLhnoVK2//21u8LjD2ylao/+sEWVqnVvGIN5j/REj0ZVL3mswd8PTw9uji9Gd0JUiEHNQD/4/WWYv52lbFQ+5zK0CUFPp2WjeXwEpo3qiAC9Y8KBGJlAtEMttf75soNwB2wvTURERF5Nxug+M2cbft18XN2/omks3rqxXX4Wpzh1Y0JVKduUebvVIOtpSw5g3aFz+OCW9qgRFWz3fuw/lYYHv92ksjiqVK1vE4y7slF+Fqc4fZtXw58P98JDszZiY2Iy7vtmA+7sUQ8TBzV32EmqO2Ydft54FH9tO4E0Y26FXkt+RqO61sWYnvXVBShflGrMxZ9bk/Dl8gQcPJOhMoYz7uyM8CDHtoIe07OeuqCwcNdJHDqTgXpVQ+FKPhHohAVYS9c4RoeIiMjnStUe/HajOrmTgOLJgU1xd68GZS7VkVI2mQSxS/1oPPnTVmyQUrb3l+HtG9viymbV7Opu9ezc7cjKNSE2PBDv3dwO3RtemsUpjpyY/nBvN7y5YA8+WXJQTc648fB5fHhrB9SODoG3jJ2Sgew/bTiKNQnnHPraL/+5C6sOnMWbN7RFlVICXG+aBHTlgTPq5ylZQBmPJiQ7+NVdnZ0yV1OjuHB1IeG/PafVxQFXTyDqG4FOkK0ZAcfoEDmFDDrMdVHduCFEasTK9NBPP/0UL7zwAo4ePQo/vwtXQYcNG4aYmBg888wzmDBhAlavXo2MjAw0b94cU6ZMQb9+/Ryyq9u2bcMjjzyCVatWISQkBNdddx3efvtthIWFqa8vXrwYTz75JHbs2AGDwYCWLVti1qxZqFu3LrZs2YLx48dj/fr16opk48aN8cknn6BTp04O2Tci7yxVO4LJv+9QJ3g1IoPwwa3t0bFudLleb1DreLSsEYkHZ23EtmMpuGvGetx7eQM8PqCpKjErqVTt+d+2Y/b6o+q+jIl496b2Ktixl7yPZHEk6Jowe0t+KZuMsbiqVXV4IrPZooIaORn/a3uSKucT8me9R8OquK5jTTSM1f5GlpcEhK/+tRuLdp9SXew+uLUDOtatAm8l8zFJYP3LxmM4kWrM3944LgzXd6ylysvK8/krq7G9GqhAR00g2q8JIkNcN4GoTwQ6oey6RuRcEuS8WsM17/30cSCgbKnxG264AQ899BD+++8/9O3bV207d+4c5s+fj3nz5iE9PR2DBw/GK6+8gsDAQMycORNDhw7Fnj17UKdOnQrtpgROAwcORLdu3bBu3TqcOnUKY8eOxbhx4zBjxgzk5eVh+PDhuPvuu/Hdd98hJycHa9euzS+zGDlyJNq3b4+pU6fC398fmzdvVsEQEV1KKjie/mUbftuilapd2SxOtWyu6JX8OjEh+On+bvmlbJJZWX/ofLGlbPtOpqnAaO/JdEgCSZoNPHhF6aVqpZFM0jwpZftuk8owSSnbHd3rYeLgZioD5QkSz2qlabIcPX+hq129mBB1Mn5th1oqi+UIbWpFoXP9aIybtQkJZzJw0yer8ORVTTG2Z9kze+4uJUsrTftpwxFV3mgTGWxQ3QTlZ9qmVmSllO7JuLNm1cPVnFDfrUvEfZc3hKv4RKATFmQrXWNGh8iXValSBYMGDVJZElug89NPP6Fq1aq44oorVJanbdu2+Y9/6aWXMGfOHPz2228qIKkIeU+j0aiCp9BQLTD78MMPVSD12muvqaAlJSUFQ4YMQcOG2kFBMko2iYmJeOKJJ9CsmdYCVDI6RHSpncdTVXAhJ7RaVzXHntDaStm6NojGEwVK2SSQkrE0jipVK40EVt/f0zW/lE0CL9mXj27toAIy
dw1AbaVpawuUpkl3uyFt49XJeIc6VZxyMi7ZuN/G9cDTc7bj9y3H8eq83Vhz8JxHl7JJadry/WfUZ03mWrKVpslHvU/TOPXz7Ns8rtKDX/n9SVbn8R+3YMaKQ2psVElZT2fyiUAn1JbRyclTqWxfHYhG5NTyMcmsuOq97SCZEcmafPzxxypr8+233+Lmm29WQY5kdKS07c8//0RSUpLKsmRlZakgo6J27dqlgihbkCN69OgBs9msMka9e/fGHXfcobI+/fv3V+VyN954I+LjtVmqpaROMkBff/21+ppkp2wBERFppWqz1iZisnW+mYqWqpXmqlbxaBEfiXHfbVTtdMd8tV5NuvjglY3UnDdyMi96NqqKd25q55RSIVspW9f6MZgwe7MqqZPSrNevb6NK7dylNG11wlmtNG3bCRX4CTkVk5+NnIwPaFEdwdbx1M4kA+/fv7mdmpT1hd93FChlc97nxFnzL0kmbM5FpWlNqmmlacPb1UScE8bf2GNo23jVpVD2T4LbYe1qwhX0vjSPjgwjkNpPWxc2InIQOWKVsXzM1SSDIidEEsx07twZy5YtwzvvvKO+9vjjj2PhwoV488030ahRIwQHB+P6669XZWSVYfr06Xj44YdVKd0PP/yAZ599Vu1P165dVQB26623qv3+66+/8Pzzz+P777/HtddeWyn7RuTOpOXzY7O35Jeq9W0WVylX6iVz8uN93dQJnTQH+GTpQXy16hCMuWZ1VV3GJzzggFK10lzRLE51ZXv4u01q3p/7v93o8lI2CTY/W3YQs9Yk4ljyhdK0BlVD1YSo17avWa7OdRUlF7tv7VIH7WpH5Wf+bvxktd1NKlxh2b7TeHvhXmwqUJomjQWGta2hfqata1ZOaVpZyOdOJt59a+Fe9TmQ8jlX7JtPnPEHG/yhgwUW6NQ4HQY6RL4rKCgII0aMUJmc/fv3o2nTpujQoYP62ooVK1RWxRY8SIbn0KFDDnlfKUOTsTgyVseW1ZH3k0yS7IONjMORZeLEiWo8j5S8SaAjmjRpopZHH30Ut9xyiwqMGOgQAb9sPKqCHL11AtCxvSqvjbCc0D0/VLqyxeCJn7YgzZinsjfv39we3RrGoLJI0PDdPV3VifDUxQdcWsomraHHfbcJW45oJ+ThQXoMaaONE+lQJ8otTsZb1IjA7w/1zB/LNeWv3Vh98GyZ2o67wqlUI+6euV4F0RI492kSq36eV7qgNK2sRnatiw//24/tx1JVqWKXBpX3/8HGO5uvX0T+Q1mH6bDFNBGp8jXJjHz55Zdq3UbGvfzyyy9qoL90OZMMipSWOeo9JcgaPXo0tm/frhoiSGOE2267DdWqVUNCQoIKbqQj2+HDh7FgwQLs27dPBUhSPidjhKQrm3xNAiRpaFBwDA+RL7N1NHtsQFPc3buBS06kpeuZNAiYNKQF/nqkV6UGOQVL2STQm35nZ1QJMeSXsknpUGWRsSLynhLkyED4N65vg3XP9MOUEa1VpzN3CHIKTigvY6dk32SuHekUJvu+/pBjW1s7wnuL9qkgp23tKKyaeCW+uKOzKk901yBHSMAomSbx+fIEuIJdgY50+2nTpg0iIiLUIlcbpYSiJD/++KMaPCsH+NatW6vORq5grV5joENEuPLKKxEdHa3GxkgwYyOtnqVhQffu3VWJm4yXsWV7KkraSf/999+qy5uUzElJnDREkIYEtq/v3r1btZyWrM0999yDBx98EPfee6/qsnb27Fncfvvt6msydkeaKkyePNkh+0bkyWS8gmQu5Cq3tCJ2JZnP5q6e9VE1zHmte8viiqZxmPdIL3SqWwVp2Xl44NuNeP7X7cjOMzm1VE1aed/79QakGvNUadifD/fEDZ1qI8jgvifjEnjdclkdzH2ghyqrS0ox4qZPV6vJYWV8kTuQ8rrv1x1R608Paoa4cNeOv7HHXT3qq9t/dp1U30dls6uGq1atWvjf//6nrnpKjftXX32l5p/YtGmTmu/hYitX
rlTlFTIPhXQSkhIMaZ+6ceNGtGrVCpWJGR0ispFysePHL22eUK9ePfz777+FtkmwUZA9pWzyd7Igudhz8evbSFZHOrwVJSAgQLWcJqJL/bhBOwGUSQo96QTQ2eIjta5sUsr28eID+GrVYWxI1ErZ6saEOr5UbdZGNa+PuLtXfTwxsJnKkngKKWX77aGeeGbONvy6+bgadyWlbG/bStlMeYC/a4Y+vLVgj+qwJp9xV5R/VUSjuDDV3v3f3acwfUUCXhxWuef/dv3G5ApnQTLXhGR5ZHK9ogKd9957D1dddZVqiWpr1SoDa+UK5rRp0+CKjA4nDSUiIvIOeSazmhRRSOaACtP7++HJq5rhsvrRePSHzWqsxJD3l+N/17XB1W0c05Vt/vYT+WOTpFRN2mz3a3GhzbYnCQvwx7uDYjE8fA+Wr16FegeO4uCbJxAWdBIBWaeBgHAgLBYIqwaEym0cEBqn3V68bnBMo4VtR1Pwx9Yk1fNHgkdPNLZnfRXo/Lj+KCb0b4KokMobA1Xu0NRkMqmyNBlYKyVsRZFac2mJWpCUgsydO7fE187OzlaLTWpqqrrNzc1Vi73kOUH+cmVVh5QMY7leg8hZbJ9HT/pcyr5KtkLGrzhqDIunkWYG999/f5Ffq1u3LrZt2wZPIb9D+X3K71XK5ArypM8l+Z4le0/jdFo2YkID1FVjKlofaymbdGVbd+i86ja2JqEunh7cvNxlZVKqNuWvXarbnGhfJwof3trBYZN8OlVeDnDuIHBmL3BmD3Bmn3V9H3Q56bhCMoS2H4ucPtqaxuWkAedkOVj6e6igyBYAWYOj4CqAn7ywTutWarsttC5ZsAvbtqw9gjv9M9C6ZhRaHDkKHA8AgiKtS1SB9QhA7+SSSYsFyM3SJgmXQK6M3VZlvFrz+AjsSkpVLeAf6NMIbhvoyMFbAhuZ+C4sLEyVWrRo0aLIx544cUKVYxQk92V7SaTUrajacxmcK3Xs5RFonahozcYtMBzfXK7XIHImyXZ6Cr1ej+rVq6uuZJXVetnd9OnTB0uXLi3252O7QOMJ5HcoDQ/k+5G5gwrKzMx02X4RlUauEAtpVeyqCQk9qZTtu7svlLLNlFI2a1e2elXtK2VLPJuZP3+QkPmDHh/YVPsdZKcBp3YDp3YCp3drJ8ZyUqwP0hZDUIF12R4I6IMLb89/nPXrFjNgzgNMuYA513pru59Xwtfkfh6QlwWcPWANaPYA5xIASzEVPjp/ILoBENsUOVUa4sdDIZh9KBjHLLF4qHssRrcJAdJPAhmngfRTQMYp7Vaty7aTgCmnQFB0oEK/t1Hyj0HargEoeVi89vMqGPgERRa9yPcowUpOOpCTaV3PsN7K/YxitsvxwFJ4HrvQqlogp5aC67b7cdCFxuLu7rUx4ecd+GrlITWBb2WVNdod6EgbVOlIJDN4y4zi0kFoyZIlxQY75SGdhwpmguSEoXbt2hgwYIBqgmAvuSL57f5Far1uo2YY3FsbGEXkDuTzKUGOTBJpMMhfM/cnFzqOHDmiLnZIoxFfJH+LatZ07cBnR/4+Zc4gmbT04t+nJwVs5FvOpmerAc6CZWv2l7JNmL0FO46nYsgHUsrWWrV/Lov525PwxE9bkW3MwmXBp/B8Fx1a6tcBP+wCTu4EUio+wXKlCAgDqjYGqjbVbmPltglQpT6g10qr5N9bLRbkrDykJqJ9aXUuOnVqhZatepSc9TCmXAh6CgZAWdJu26IFbmoMp6XwbYF1C8xYsucUUjJz0KBqCFrXkPNfC5CXDRhTtfewLdlawKkCunRZSk4oOIwEPsmJ2lKKEdIIKCgcp40RSJlaA7HVagLVWwO9H4dbBToyKFYm0hMdO3ZULU5lLM4nn3xyyWPliu/Jk9ofIRu5L9tLIrOVy3IxOQks74mgrRmBMc/iMSeT5Fsq8vmubFK6Kp1qZJGB/eTZbL/Loj6DnvKZJN8zZ9Mx5JktaFsrEk2rh8OtmU1AnlE7SZWr/Wo9
BzBla9vU9gLrckIbGKGVOgVHXShRUmVPDipls04wuvbQOYybtUkNvH/26haXlrLJvp85jNwT27F8xTLkHd2GObojqB90Av5ywr66iDcIjwfimgOxzbXMgny/ufI9WxfJ8qjv1Xpb8H7Bx8lSkGQi/A2AnywF1qVJgLot6r4e8A8AqtTVAhnbElHDWh5WMvnbKJOvrjl4DvN3nMDjP27Frw/2KD4jIa8pvzNZJIAqp/nbknD/2o0ICfDHkruuAMJLKEuT35Fk0goGPyUtFjMQEKJlZKT8TN3K/dCLbov7eoj2O5MALuOM9VaWUxfdt65nnlXvGYU0RPmlAWePAWfXadvdLdApqra74HiagqTEbdGiRRg/fnz+NrlyXdyYHmdie2kix7Gd/EpZk2QCyLPZytMY1JCnkDFlP2046txsjow/zLHj5FEtydrV9osDl+LKpOwVGAkEW8dmFAyCirqVMia1D0UHVNXzsvFd02ys8z+OrYdPI2B9LhbvBHrWj0CY3gT/nCxcfnQX9NvuUQGH/HWQsSsoGAdJ8BXXUgtq1NJCuw2Jdsz3KxkO2WcZtyIBi4surEmw89LwVliTcFaNM/nov/14tH8TpzbZeGPBHrU+tlcDNQFtiSToswVXlSUwTFuiy1AlJYFY1nmknDmOhz9fgHBTMh7rEY36des6fTf19paUydwNderUQVpammoXLRPYydwQQuZ4kFIOGWMjHnnkEVx++eV46623cPXVV+P777/H+vXr8emnn6Kyac0IGOgQOYIMWI+KisKpU6fy54Bxp0ngqOwnixLkyO9Rfp8XNyIgclcyEebuE2kI1PthaNuylVwVOyBdxpHIQHS5PbtfnZBpQYuUbTphHhU5afeXcSkB2liUQutyKye1uguBk5Q7yZgJkW0rU3JMeZj8j+8qi+1sUIZc7rkw0aLttDnLEoC9llo45FcHLdp1QeNWl2lBjWRunPm3X15bxuq4AQk2pDXyQ99tUoHOgJbV0LJGpFPe68cNR3HwdIZqay2tuj2en78arxMZWhW1O+rwzepEGM9Uw+dXd3KvQEcOhhLMJCUlITIyUk0eKkGOjC0QiYmJhcpYZNI9CYaeffZZPP3002r+Hem4Vtlz6BRuL81Ah8gRbCWotmCHPJcEOaWVFBO5k9nrtblzrmpVXbU0LpGU2EgAc3qPFsyoZa82SFwGrZdGApDiBnUXtUjJmRpQH2gNXAquB2qZCXuDAwnK8gOf81rwYwuCCt0W+Jp83+q9LwqiCq0H5gda6SY9/thxFgfP5yIbAWhbLxaJ5/Mw53wdHLHEoX3dGHxwS3vU8ISuak4ypE085m1Lwl/by1DCVk7GXBPe/WevWn/wikYID/KuTPtdPeqrQGfR7pM4eDodDWLD3CfQ+eKLL0r8umR3LnbDDTeoxdU4YSiRY0kGJz4+HnFxcWxB7MGkXI2ZHN+TaszFvK1J+HnjUTW/3Ce3dUTt6PJ1Na1sciL422Ztwt8bbWVrUuKUnVogoLEtu4Hzh4rPzKgB6U2A2GYXBqNLO+CCQYs7ZBQkWFHzt8Q67S3kdPP6AWa8t2gfPvtvPywFmoXdd3lDPDagic93tpPjnmR1ZDyTs0rYZqw8hJOp2apN96iudeBtGsSGoV/zOPyzSyYQPaRKAp3JNVO8ugADHSLnkJNknigTuT+ZWX3lgTNqbMvfO07AmHthDqzRX67FT/d312aAdxfSHviSgc2ncCQhAc/l7UPNkAx0+9cM/G59zMUD1wuScSu2YEbdWoObiJrOLb3ywK5sjw1ois71tAlGjdnZePeWDujfspzlgV7ImSVsKZm5+Pi//WpdJtYM1HvnsXVMzwYq0JGAUf4u+fs57/+gzwQ6LF0jIiJfJOUhkrn5ZeMxJKVcCAYaxYWp+WdmrUnEwTMZGPPVOswa2xXBAZVwciUD/VOPahmYM/u127SkwoGNlF8VQfpYNZazF4nTtMTOBTIzvQpmbAGN9Vbm9GBAU2a9m8Ri
yeO9MX/+fPRp4rwskjeUsD02ewt+G9fTISVsU5ccQKoxD02rhWN4e++YvqAoXRtEY/qdndGrUVWnBjk+FejkNyMwMtAhIiLvL037c2uSyt7IpJA2EUF6DGtXE9d3rIU2tSJVKc7AltVw3dRV2JSYrK5STxvVQV3ZrzApJ5P2sRLEFFoOaIt0/yqNtBPOn4SwKjINVfDdDiPOWiIw9qrLEB1Xs/BEhWWcqZ1KJ40eDL5dqVaGLmznVFOMD//brzIwFXEixYjpKxLU+hMDmzo9AHD1z++KpnGV8l4+E+iwvTQREXkzKQFZsf9CaVp2nlaaJudLlzeJxfUda6Nv87hL5klpFBeOz0d3wsjP16gJOCf9tgOvDG9Vtk6KtskRZcLASwKa/drXiiPznMgM9DGNgJiGQGQtLViRMTK2mdWl5KxAk6PP/tmHd/L2okejGET3kn5hRK5RNUxK2FqqOYik3GxAi2poVbP8JWwyNkr+z3aqW0X9PyXH8MkxOtJSla1wiYjIGxyQ0rQNWmnaidQLpWmN48JwQ6daGN6uJuIiSh5QL2My3r+5He7/dqMqZasZoceDnSO12dzTZHb3AkvaCW2293TrbUljY6RNcmRtLZBRAY1taQhE1bFrAkyz2YIfN2jd1m7o6KS5c4jsMKRNDVXCNm+bdGErfwmb/B+2dRJ8alAznqM6kM8FOma5+JRrrpwaZCIiIieRkrRX/tyJjYkXxrJIq+Vh7Wqo0rTWNbXStBIdXAIc/E8FM1eln8SGmCMwpZ1AzNI0YJkdc8iExAAxjS8EMbaARiYTNDimHfHqhLM4ej4L4UF61VaayB1oXdisJWz/7sOEAU3tfo23F+xVGdm+zeLURQdyHJ8JdAL8tHGIkmWXrA4DHSIi8mRS6iJBjtTya6VptVTJS5k6NSVtBRZO0oKcAtQpljU2yrP4wRQSi8CoeCC8ulZSFma9VferWZc4hwUzJflx/VF1KxOEXlx+R+QOJWwfLT6AAS2r21XCtvVoMv7clqTOUZ+4yv4giUrmM4GOfIBCA/QqyJFF2gMSERF5qnMZ2mD+j25tj6taxZftSTKW5t9XgK0/aHPLyDiZNjdpWRhrMGMJq4bnF53B19vSEWwx4PuRXdGmVhRcPu/PtqTCc+cQeUEJ22vzd6tb6YDYrHqEk/fU9/hMoCNCA/1VkMMW00RE5OlSsrSJemPDyzChZdZ5YNnbwJpPLnQ7a3U90Pc5oEq9Qg+VhM6zN5mRYFyHZfvO4K4Z6/DL/T1QJ8Z1E4r+sSVJDdRuUi0MbWs5Zs4SIleXsC3fdwYr9p9FgL8fHu3n2IlHSeNTjQNDA7S4Lo0tpomIyMOlZmnHssjgEq5Z5hqBlR8A77UDVr6vBTn1egF3/wdc/8UlQY6NXI2eOqojWtaIwJn0HIyevhZn08vQDtpJbAO1pQkBB2qTu5awvTSslVqXErbtx1JKba5hy+aM7FoHtaNddyHBm/lUoBNm7UjAjA4REXkyOUmSci4REWwo6gHA1tnAh52BBc9qk2/GNgdu/REY/TtQs0Op7xEWqFeT+tWqEoyEMxm466v1yMyp/OPnvpNp2HwkGXo/nVdPokie7+o28bi6dbxqLCAlbDnWFu9Fmbc9CduOpaj/Z+OuaFSp++lLfCvQsWZ0Mlzwh5qIiMhR0tRUCdp6RNBFgc7BxcCnlwO/3A2kJALh8cCwj4D7VwBNBmiDVssoLjwIX911GaJCDNhyJBkPzdqEPFPxJ2/O8OMGrQnBlc3iOL6W3J40JogJDVAlbB/8u6/Ix+SazHhrwV61fnevBogJ4+faWXwq0AkNZOkaERF5vlTr+ByZvT6/A9mJ7cA31wEzhwEntgIB4UDfScBDG4H2o+yas6aghrFh+GJ0Z/Vei3afwnO/blfz0VUGOSH8ZaMW6NzAJgTkASRoeWm4VsL28eID2HY0pchSTMmSSkA0pld9F+yl7/CpQCcskKVrRETkPY0IZN4cpBwF5j4ATOsJ
7P8H8NMDXe4DHtkM9HoMCKh47X/HulXw/i3t4acDvlt7BO8v2o/KsHjPaTVGSMY/9GkaWynvSVRRg1vHqzI2Wwlbdp4p/2tZOSa894+W6XnoykaqdI2cxyczOgx0iIjI0zM6ocjCo7pZwAcdgc3fau2iW14LPLgWGPQaEFrVoe85sGV11VlKvPPPXvywLhGV1YTgug41YfD3qVMW8nAvXqOVsO05mYYPClwY+HJFAk6lZaN2dDBu7VLXpfvoC3zqr4YtapbaZiIiIk8ljQheNEzHLTk/A3lGoG4PYOwi4IYZ2pw4TjKqa938gdNPz9mO/3afctp7nU7Lxr/W17+hUy2nvQ+Rs0vYpi45oCYGTc7MwbQlB9S2x/o3LfNcO1R+PvUTDg1g6RoREXlH6VpdnTXI6DcZuONPoFanSnnvxwY0wXUdaqmynAe+3aiaFDjD3E3H1Hu0rxOFRnHhTnkPosoqYXvix614b9E+NU68WfVwXNO2hqt3zyf4VqBjzejIpKFERESeHOiEI1O7U6OdXZ3UKkrmsfnfda3Ru0kssnJNakLRQ2cyHPoe0uyg4Nw5RN5QwjZ9xSG17amrmsFPBryR0/lk6Vp69oVBYURERJ44WWiYLku7ExhR6e8v42WmjuyA1jUjcTYjByM/X1PqBIn2kHlz9p1KR5DBD0PaxjvsdYlcUcL2srWETVxWP5qNNSqRTwU6oey6RkRE3pbRcUGgY6uS+PKOzqgXE4JjyVkY8fFKfL3qkENaT9vmzhncKv7SeYKIPMyg1vG4uXNtBBv88ezVzVVWlCqHb2Z0OI8OEZHXMplMeO6551C/fn0EBwejYcOGeOmllwqdgMv6pEmTEB8frx7Tr18/7NtX9OR+7ig1MxthMGp3glwT6AiZwHPugz3Qr3k15JjMeO7XHRg3a5NqllBe0n73983H1fr1bEJAXmLKiNbYPnkg2tSKcvWu+BSfCnQ4RoeIyPu99tprmDp1Kj788EPs2rVL3X/99dfxwQcf5D9G7r///vuYNm0a1qxZg9DQUAwcOBBGozV4cHPGzFT46ayBW6BrB+pHhQTgs9s7qivVej8d/tyWhKEfLC93Kdv8HUmqO6q03+1aP8bh+0vkCpLF8ee4nErnk13XGOgQEXmvlStXYtiwYbj66qtRr149XH/99RgwYADWrl2bn81599138eyzz6rHtWnTBjNnzsTx48cxd+5ceAJTlhZEmHUGQB/kFidxY3s1wI/3dUPNqGAcPpupStlmlqOU7cf1R/ObEHDANhFVhE9NxxoWdGHCUPnDyxpJIiLv0717d3z66afYu3cvmjRpgi1btmD58uV4++231dcTEhJw4sQJVa5mExkZiS5dumDVqlW4+eabL3nN7Oxstdikpqaq29zcXLXYy/ac8jxXmDO1QMcUEAZTnvtcvGsVH4ZfH+iK//tlO/7ZfRqTft2BlfvP4NXhLRBehrE2R85nYuWBs6qJ3LA21cr986GKq+hnlMiZyvq59KlAJzRA+3bzzBZk55kRZNAyPERE5D3+7//+TwUizZo1g7+/vxqz88orr2DkyJHq6xLkiGrVqhV6nty3fe1iU6ZMweTJky/ZvmDBAoSEhJR7XxcuXFiu5+WknwX8gUyzAYvnzYO7GRIFhNfT4dfDfpi/4yTW7T+BO5uYUDus5OfNOyKFJn5oEmHG5pX/YXNl7TA5/DNK5EyZmdZmLKXwsUDnQmAj5WsMdIiIvM/s2bPx7bffYtasWWjZsiU2b96M8ePHo0aNGhg9enS5XnPixImYMGFC/n0JpGrXrq1K4iIiIsp1NVJOIPv37w+Dwb6uYlKR8Ou6N9V6UGQcBg8eDHd0NYBbjyRj/OytOJZsxHs7DZh4VVOM6lK7yIoKs9mC195eJiOQcN+Athjchm2lXakin1EiZ7Nl1UvjU4GO1PpKsJORY1Lla1XDAl29S0RE5GBPPPGEyurYStBat26Nw4cPq6yMBDrVq1dX20+ePKm6rtnI
/Xbt2hX5moGBgWq5mJwAVuQksDzPl65kIWbtaqY+JBL+bnwS2rlBLOY93BuP/7QFC3eexIt/7sa6w8l47fo2l7SNXr7vDI6nGBERpMegNjVh4MVIt1DRzziRM5T1M+lTzQgKdl5LY4tpIiKvLWnw8yt8eJMSNrPZrNal7bQEO4sWLSp0dVC6r3Xr1g2eMIeObbJQv2DXtZYuq8gQAz69rSMmDWkBg78Of20/gSHvL8fWo8mFHjd7/RF1O6xdTVZcEJFD+FygU7AhAREReZ+hQ4eqMTl//vknDh06hDlz5qhGBNdee636upRNSSnbyy+/jN9++w3btm3D7bffrkrbhg8fDncnc9TYJgvVuWiyUHvJz/yunvXx033dUatKMBLPZeK6qSsxY0WCKsVLyczF/B3a+KgbO9V29e4SkZfwqdK1gpOGZuQw0CEi8kYyX45MGPrAAw/g1KlTKoC599571QShNk8++SQyMjJwzz33IDk5GT179sT8+fMRFOT6Vs1lyeiE66wDcT0k0LFpWzsKfz7cC0/+tAV/7ziJF37fidUHz6F1rUjk5JnRrHo4WtX0rO+JiNyX9wc6h1fBf+mbaJUqyavB+Z3XWLpGROSdwsPD1Tw5spSUYXjxxRfV4mkk+xGOLLeYLLQ8IoMNmDaqI2asPIRX5+1SmRxbNueGTkU3KiAiKg/vL10zpsDvwD+Izth3UemaycU7RkREVM7SNesYHQR5ZvZDgpk7e1woZRN6Px2Gt6vh6l0jIi/i/Rmd4Ch1Y8jLKFy6xjE6RETkgaR0LT4/o+OZgc7FpWyfLDmAptXDEcNuqETkQD4Q6FRRNwGmwoFOGgMdIiLy0ECnibUZgacHOrZStievaubq3SAiL+T9pWvWQMdgygQs5vz20szoEBGRJ0rNystvL+2ppWtERJXB+wOdIK10TQcLYExFWKDWmz+dzQiIiMgDqa5r+Rkdz2tGQERUWbw/0NEHwGII1daN5/NL19LZXpqIiDyQ1l7aO8boEBE5k/cHOgUaEuiyzrN0jYiIPFrBCUOZ0SEi8vVAJ6hKfqvpcGt7aZauERGRJ8rMzESQLle7wzE6RES+HehYgiO1lQIZnXRmdIiIyAPlZaVeuBPAjA4RkU8HOraMji4rmYEOERF5NmOKujEbQgB/758lgoiovHxqjA6MyQjnGB0iIvJQuSYz/HO1eeHYiICIqGQ+EehYrC2mpevahWYEJtfuFBERkZ1SVcc1rRGBjoEOEVGJfCLQsU0aKqVrYdZmBDkmM7LzGOwQEZHnSDXmIQxaa2kdGxEQEZXItzI60owg4EI9M7M6RETkSThZKBGRkwKdKVOmoHPnzggPD0dcXByGDx+OPXv2lPicGTNmQKfTFVqCgoLgqjE6/n46BBv81V22mCYiIk8LdMJsk4Uyo0NE5LhAZ8mSJXjwwQexevVqLFy4ELm5uRgwYAAyMqwDI4sRERGBpKSk/OXw4cNwSdc1Y7K6tZWvsfMaERF53BgdZnSIiMrErr6U8+fPvyRbI5mdDRs2oHfv3sU+T7I41atXh6tYbBmdrPPqJixQj9Np2cjIYaBDRESeldGJsGV0Aq1zxBERkePH6KSkaL38o6OjS3xceno66tati9q1a2PYsGHYsWMHKlV+1zVtf0MDWbpGREQeWrrGjA4RUZmUe6Yxs9mM8ePHo0ePHmjVqlWxj2vatCm+/PJLtGnTRgVGb775Jrp3766CnVq1ahX5nOzsbLXYpKZqs0BLqZws9srVh8EgmaU8I3IzUxEaoAU6KZnZ5Xo9IkeyfQb5WSR3xM+le0k15qIWx+gQETk30JGxOtu3b8fy5ctLfFy3bt3UYiNBTvPmzfHJJ5/gpZdeKrbpweTJky/ZvmDBAoSEhNi/sxYLhsIPfjDj3z9/RkZyVZXMWrV+E3RHLPa/HpETyLg3IneTmWnNHpDbjNGxtZfmhKFERE4IdMaNG4c//vgDS5cuLTYrUxyDwYD27dtj//79xT5m
4sSJmDBhQqGMjpS9SeMDaWxgL5UJ2h6KwLw0XNm9I/7MzcP280lo0KQ5BveoZ/frETmSfD4lyOnfv7/6/0HkTmwZdXKj9tLWCUNZukZE5MBAx2Kx4KGHHsKcOXOwePFi1K9fH/YymUzYtm0bBg8eXOxjAgMD1XIxOQks74lgtr8W6Bhy0xAerAVLWXkWnliS26jI55vIWfiZdC+pWXkIt2V0WLpGROS4QEfK1WbNmoVff/1VzaVz4sQJtT0yMhLBwcFq/fbbb0fNmjVV+Zl48cUX0bVrVzRq1AjJycl44403VHvpsWPHojLl+IdqK1nnERaoNU9gMwIiIvLcjA4DHSIihwU6U6dOVbd9+vQptH369Om444471HpiYiL8/C40czt//jzuvvtuFRRVqVIFHTt2xMqVK9GiRQtUphx9WIFAR/u22V6aiIg8r+sax+gQETmldK00UtJW0DvvvKMWV8u1ZXSMyQi1Bjrp2SbX7hQREZEdUrNyCgQ6HKNDROS0eXQ8Sa6+QOlakDXQMbJtKhEReQaz2YI8Yzr0OrO2gWN0iIhK5DOBTuExOtbSNWZ0iIjIQ6Tn5CHEomVzLDp/wFCO6RaIiHyIzwQ6+aVrWRdK19KyOUaHiIg8Zw4dWyMCnZSt6XSu3iUiIrfmM4FOkc0IGOgQEZEndVxja2kiojLzwYwOAx0iIvI8bC1NRGQfnwl0cmzNCIzJ+c0IWLpGRESeNFkoW0sTEZWdb2Z0ArRAJyfPrBYiIiJPGqPD1tJERKXzmUAnx986RseYglDDhe0sXyMiIk/AMTpERPbxmUAnV3+hDac+Nw1BBu1bT2egQ0REHiDVyIwOEZE9fCbQsej0sAQU0ZAgh4EOERF5WEaHY3SIiErlM4GOElTlkrl00o0MdIiIyDMCnfxmBCxdIyIqlW8FOsFVLsnosHSNiIg8rxkBAx0iotL4VKBjCYrUVowFMjoMdIiIyNMyOgx0iIhK5bMZnXBOGkpERB4W6ESwGQERUZn5VqATFKXdZp0vkNExuXafiIiIyiDVWGDCUI7RISIqlU8FOpZgW6DDZgREROSBXdeY0SEiKjMf7bp2HuFBbC9NRESewZhrQk6emWN0iIjs4KMZnfMIDWAzAiIi8pyOa/4wIVSXrW1goENEVCrfbEZgTEaYNaPD0jUiIvKojmuCY3SIiErlW4GOrb20mkfHX62y6xoREXnG+BxroKMPBvwNrt4lIiK351OBjqXAGB1bM4I0BjpEROTmUo25CAcbERAR2cNH59FJRlgAMzpEROSBpWssWyMiKhPfLF0zZSNCrwU4DHSIiMjdpWSytTQRkb18K9AJCAP8tJK1CKSrW3ZdIyIij5oslB3XiIjKxLcCHZ0uv3wtzJymbhnoEBGRJ5SuRTCjQ0RkF98KdESQNpdOqDXQMeaakWcyu3iniIiIyjpGx1qGTUREJfK9QMea0QnK0wIdkZFtcuEOERERlT5hKMfoEBHZx2cDHUNOMgL02refnsPyNSIicvN5dDhGh4jILj4Y6ERdaDFtnUsn3chAh4iI3Lx0zTZhKNtLExGViQ8GOhcmDc0PdNiQgIiI3FiaMY8ThhIR2cmnA51QBjpEROQppWu2jA5L14iIysR3Ax1jMsKtgQ4nDSUiInclnUHlgtyFjA4DHSKisvDZ9tJaRsdfrTKjQ0RE7ly2Ji60l2agQ0RUFr4X6BRVusZmBERE5MZlayIiv3SNY3SIiMrCpwOd8CCWrhERkScEOpYLXddYukZEVCY+GOjYStdSEBrAZgREROTeUo25CEQuDLAeq5jRISIqE9/N6GSnIDxQW2WgQ0RE7pzRibA1IoAOCAhz8R4REXkG321GACDaTysDYOkaERG5q0KThUrZmp/vHbqJiMrD9/5a+uuBAC3tH+WnXSFjRoeIiNxVahYnCyUiKg/fC3QKlK9FIV3dMtAhIiKPyOiwtTQRUZn5aKCjla9FWLRAJyPb5OIdIiIiKj7QYUaHiMh+
Pp3RCbMGOszoEBGRO3ddC2draSIiu/l0RifElKZuGegQEZG7SmVGh4ioXHw6oxNsSlW36UYGOkRE5MZjdMAxOkRE9vLpQCcoTwt0snJNMJktLt4pIiKiYjI6+aVrzOgQEZWVbwY61rl0DDkp+ZsycpjVISLyFseOHcOoUaMQExOD4OBgtG7dGuvXr8//usViwaRJkxAfH6++3q9fP+zbtw/u34wg0tW7Q0TknYHOlClT0LlzZ4SHhyMuLg7Dhw/Hnj17Sn3ejz/+iGbNmiEoKEgdbObNmwd3yOj4ZyfD4K9T6yxfIyLyDufPn0ePHj1gMBjw119/YefOnXjrrbdQpYr2t1+8/vrreP/99zFt2jSsWbMGoaGhGDhwIIxGI9yJBGSpxrwLGR2WrhEROSfQWbJkCR588EGsXr0aCxcuRG5uLgYMGICMjIxin7Ny5UrccsstGDNmDDZt2qSCI1m2b98OVwc6yDqPsEC9Ws1gQwIiIq/w2muvoXbt2pg+fTouu+wy1K9fXx2rGjZsmB88vPvuu3j22WcxbNgwtGnTBjNnzsTx48cxd+5cuBNpliOl1WxGQERkP+0sv4zmz59f6P6MGTNUZmfDhg3o3bt3kc957733cNVVV+GJJ55Q91966SUVJH344YfqSporu64hKxmhgXqcz8xFGgMdIiKv8Ntvv6nszA033KAu0NWsWRMPPPAA7r77bvX1hIQEnDhxQpWr2URGRqJLly5YtWoVbr755kteMzs7Wy02qanaGE+54CeLvWzPKe2559K0TI4to5OnD4WlHO9H5KzPKJErlPVzaVegc7GUFG2MS3R0dLGPkYPGhAkTCm2TA1BJV82cfkAxhMMgV/UkoxPgr30vGdn8z0wuwYMJuTNP/FwePHgQU6dOVceep59+GuvWrcPDDz+MgIAAjB49WgU5olq1aoWeJ/dtXyuqdHvy5MmXbF+wYAFCQkLKva9y4a8kx1TBhB4ROi2js2bzTpw5UO63I3L4Z5TIFTIzrVluZwU6ZrMZ48ePV3XQrVq1KvZxctCw52BSGQeUoJyzGCiBTuZZZPtLEOWHZavWInUvO6+R6/BgQp58MHEncnzq1KkTXn31VXW/ffv2qlxaqggk0CmPiRMnFrpoJxfgpDxOSuIiIiLKFUDK//n+/fursUTFWZNwDti6HhF+RsACXNa7HxDfrlzfA5EzPqNErmBLgjgt0JGxOnLgWL58ORzN6QeUnHRgx6Pws5jQsFo4EtIz0KRlGwzuUNPB3wlR6XgwIW84mLgT6aTWokWLQtuaN2+On3/+Wa1Xr15d3Z48eVI91kbut2tXdBARGBiolovJ/9mK/L8t7fkZudoFuDDrGB1DaLQ8qdzvR2Svin7GiZyhrJ/JcgU648aNwx9//IGlS5eiVq1aJT5WDihy8ChI7tsONC45oOijAD8DYM5FtQCtw44xz8L/yORSPJiQO/LEz6RUGlzcEXTv3r2oW7euWpfmBHIMWrRoUX5gIwGddF+7//774W6tpXUwI8Rim0eHXdeIiJzSdU061UiQM2fOHPz777/qYFGabt26qYNJQXL1Wra7jE6X33kt1i8zv7MNERF5vkcffVR1B5XStf3792PWrFn49NNPVSWC0Ol0qvT65ZdfVo0Ltm3bhttvvx01atRQXUHdbbLQMBRoec2ua0REZWZXRkcOEnLA+PXXX9VcOrZxNtKtRiZcE3KwkA43Ms5GPPLII7j88svVHAZXX301vv/+ezVpmxx0XEoCnYxTiPaXQCcQ6dkm1+4PERE5hMz3JhfkpAz6xRdfVBflpJ30yJEj8x/z5JNPqqkR7rnnHiQnJ6Nnz56qs6jM9+ZugU5+a2n/AMDgXvtHROQ1gY50sRF9+vQptF3mKrjjjjvUemJiIvz8LiSKunfvroIjma9Aut80btxYdVwrqYFBZbaYjvJLB1CF8+gQEXmRIUOGqKU4ktWRIEgWdyala2G2yUJZtkZE5LxAR0rXSrN48eJL
tslcBrK4FWvpWiQk0GHpGhERuWegw8lCiYgqYYyOV7EGOhEWNUkBAx0iInI7qca8/MlCEcSMDhGRPXw30AnSStdCzVrrVJauERGRe2d0GOgQEdnDz9czOqGmNHXLjA4REbkbjtEhIio/nw90gkxaRoeBDhERuZtCXdc4RoeIyC4+HOhopWuBudZAx8hAh4iI3DijwzE6RER28fP1jI4hN0XdcowOERG5E2OuCdl5ZoTDVrrGjA4RkT18PtDRZ1sDnRwTzObS22cTERFVhlRjrrqN0LEZARFRefh8oOOXnZy/KSOHWR0iInKf8Tmiir9R28DSNSIiu/j5entpXXYagvxMaj0jW7slIiJyh/E5IsrPGugwo0NEZBcfDnQi81erB2Sr2/Rs7aBCRETkaqlZWpVB/oShDHSIiOziu4GOvx4I1IKd6gHa1bJ0ZnSIiMjNMjrh+WN02IyAiMgevhvoiGAt0Klm0K6WsfMaERG5W6ATYmF7aSKi8vDxQEdrSBCr166WpXEuHSIicrNmBMHmDG0DMzpERHZhoAOgqjXQYUaHiIjcKaMTgFwYLDnaBo7RISKyi28HOtbOa9F+2tWydAY6RETkRoFOmG2yUMGMDhGRXXw70LFmdKJ0DHSIiMj9JgwNs3VcCwgD/PxdvUtERB6FgQ6ASKSrW5auERGRO2V0wmHruMayNSIie/l4oKOVroVZtECHGR0iInIXKVl5iGBraSKicvPxQEfL6ISZ09QtAx0iInKnrmv5Y3TYWpqIyG4MdGSOApM10GF7aSIicqNA50LpGjM6RET2YqAjF8rytEAnI4eBDhERuZ7JbEFadt6FZgQco0NEZDffDnSs7aUDclPUbXq2ycU7REREJBNYa5OFhttK15jRISKym28HOtaMjiFHAh0L0q0HFiIiIld3XBNV9EZtQ1Cka3eIiMgDMdCRH4IlDyHIRgYzOkRE5EaBToy/NdBhRoeIyG6+HegYggH/ALUahXTOo0NERG4hNUs7HkXlBzoco0NEZC/fDnR0uvysTpQuHek5ebBYLK7eKyIi8nG2jE6kH9tLExGVl28HOsIa6ETqMiAxTmYOy9eIiMg9Ah02IyAiKj8GOtbOa1V0GeqWk4YSEZGrpVqb44Tmz6PDjA4Rkb0Y6FgzOtUM2sGEgQ4REblLRifEol2EY6BDRGQ/BjrWQCfWXwt02JCAiIjcJdAJMlkzOhyjQ0RkNwY6wVrpWrS/VgedbmSgQ0RErpWqAh0LAky2jA7H6BAR2YuBjjWjE+3HMTpEROQ+GR2Z380PZm0DS9eIiOzGQKdAe2nBQIeIiNwhoxNua0Sg89fmfSMiIrsw0LEGOhHQMjoco0NERK6WasxDmK7AHDoy7xsREdmFgY61vXS4xZbR4Tw6RETk+tK1CLaWJiKqEAY61oxOqDlV3aZna51uiIiIXMFisahAJ1zHQIeIqCIY6Fi7rgWbtIxOBjM6RETkQpk5JpjMFoShQOkaERHZjYGONaMTaMqAHnlIY3tpIiJygzl0ovysgQ5bSxMRlQsDnaDI/NVIZLAZARERuUWgExuQo21g6RoRUbkw0PHzzw92pMV0Rg4DHSIicvVkoTKRtVHbwIwOEVG5MNAp0HlNMjosXSMiInfI6FTRZ2sbOEaHiKhcGOgUGKcTqWPpGhERuRbH6BAROQYDnQKBThTSkc5Ah4iIXDxZqAi3TRjKMTpEROXCQOeijA4DHSIicoeMTrhtwtACTXOIiKjsGOgUmEtHNSPIzlOTtREREbmyGUGoxTZhKEvXiIjKg4FOwYwOMmC2AFm5nDSUiIhcG+gE5Qc6LF0jIqqUQGfp0qUYOnQoatSoAZ1Oh7lz55b4+MWLF6vHXbycOHECbjdGR5ehblm+RkREri5dCzKlaxuY0SEiqpxAJyMjA23btsVHH31k1/P27NmDpKSk/CUuLg7u1l46xk8LdDKymdEhIiLXBjqGPO2YxPbSRETlo7f3CYMGDVKLvSSwiYrS
Agq3Y83oVLEGOumcS4eIiFwk1ZgLPfKgN7HrGhFRpQY65dWuXTtkZ2ejVatWeOGFF9CjR49iHyuPk8UmNTVV3ebm5qrFXrbnFPdcXUC4+kFIe2mRkmlEbm6I3e9DVB6lfT6JXImfS9dkdEJhvLCBpWtERO4Z6MTHx2PatGno1KmTCl4+//xz9OnTB2vWrEGHDh2KfM6UKVMwefLkS7YvWLAAISHlD0AWLlxY5PbwrCO4EkCYRQt0Fq9Yg7O72HmNKldxn08iV8rMtA6Ip0oNdKrqrD93fTDgb3D1LhEReSSnBzpNmzZVi0337t1x4MABvPPOO/j666+LfM7EiRMxYcKEQhmd2rVrY8CAAYiIiCjXFUk5iezfvz8MhiIOGKlJwO5nEKEyOhY0a90Og9vG2/0+ROVR6ueTyIVsGXWqHNl5JhhzzRcmC+X4HCIi9y9dK+iyyy7D8uXLi/16YGCgWi4mJ4EVOREs9vkRserGH2aEIQvGPAtPOKnSVfTzTeQM/ExWrtQsbYxohC2jw/E5RESeNY/O5s2bVUmb2zBIaUBg/lw6bC9NRESu7LgWF2Adp8rxOURElZfRSU9Px/79+/PvJyQkqMAlOjoaderUUWVnx44dw8yZM9XX3333XdSvXx8tW7aE0WhUY3T+/fdfNd7G7TqvpZ9Qc+lkMNAhIiIXdVwTVQ05gKyydI2IqPICnfXr1+OKK67Iv28bSzN69GjMmDFDzZGTmJiY//WcnBw89thjKviRRgJt2rTBP//8U+g13CnQidSlI43tpYmIyIUZnVhDthboMKNDRFR5gY50TLNYiu9IJsFOQU8++aRa3J51Lh0pXWNGh4iIXCHVGuhU0dtK1yJdu0NERB7MJWN03FKwNplplC4dGTkMdIiIyIWBjp9tslBmdIiIyouBzkUZnShksHSNiIhcWroW4WedMJRjdIiIyo2BzsWla2xGQERErg508ttLM6NDRFReDHRsgrTStUiks700ERG5dB6dUNhK15jRISIqLwY6RY3RyTa5em+IiMiHMzoh5gxtA0vXiIjKjYFOEWN0mNEhIiJXBjpBpnRtA0vXiIjKjYHORRkdGaMjgU5JLbSJiIicOWFogMma0WF7aSKicmOgc0kzgnSYzBZk55ldvUdEROSjGR1DHjM6REQVxUDnktI17eDC8jUiInJNoGOBf6410OEYHSKicmOgc1HXtVBdNgzIQzrn0iEiokpkNlvURbYg5EBnth6DmNEhIio3Bjo2QVIHrVOrkWxIQERElUwmq5bhoeG21tJyTAoIc/FeERF5LgY6Nn7+1mBHG6fDQIeIyDv873//g06nw/jx4/O3GY1GPPjgg4iJiUFYWBiuu+46nDx50i3G51Q1ZF+YQ0enXYAjIiL7MdApqiEBMpDBQIeIyOOtW7cOn3zyCdq0aVNo+6OPPorff/8dP/74I5YsWYLjx49jxIgRcIdAp3pgjraB43OIiCqEgU4xk4Yyo0NE5NnS09MxcuRIfPbZZ6hSRbuQJVJSUvDFF1/g7bffxpVXXomOHTti+vTpWLlyJVavXu3y1tJxgQUyOkREVG4MdIrpvMZAh4jIs0lp2tVXX41+/foV2r5hwwbk5uYW2t6sWTPUqVMHq1atgqszOrH5pWtsREBEVBH6Cj3ba+fSYekaEZEn+/7777Fx40ZVunaxEydOICAgAFFRWhbfplq1auprRcnOzlaLTWpqqrqVgEkWe9meU/C559KN6jbaX7s1B4TBVI7XJnKEoj6jRO6irJ9LBjpFtJiO0mWwvTQRkYc6cuQIHnnkESxcuBBBQUEOec0pU6Zg8uTJl2xfsGABQkJCyv26so82a49J4wF/6NK0pgjHz6Zjw7x55X5tIkco+BklcheZmZllehwDnSKbEaQjMdvk6r0hIqJykNK0U6dOoUOHDvnbTCYTli5dig8//BB///03cnJykJycXCirI13XqlevXuRrTpw4ERMmTCiU0alduzYGDBiAiIiI
cl2NlBPI/v37w2AwqG27Fu4DEhNQNzoQOAHE12+KwYMH2/3aRI5Q1GeUyF3YsuqlYaBT1BgdXTp2sXSNiMgj9e3bF9u2bSu07c4771TjcJ566ikVoMiJ26JFi1RbabFnzx4kJiaiW7duRb5mYGCgWi4mr1ORk8CCz0+zXmCL9NPm0fEPjoQ/TzDJxSr6GSdyhrJ+JhnoFNF1jROGEhF5rvDwcLRq1arQttDQUDVnjm37mDFjVIYmOjpaZWQeeughFeR07drVhV3XtONOmG3CULaXJiKqEAY6RWZ0GOgQEXmzd955B35+fiqjI00GBg4ciI8//til+2TruhZisdaes700EVGFMNApZowOAx0iIu+xePHiQvelScFHH32kFndhC3SCGegQETkE59EpiO2liYjIRdKsgU5gXrq2gaVrREQVwkCnqPbSSEeGMcfVe0NERD7EltEJMFkDHU4YSkRUIQx0imhG4K+zwJKd5uq9ISIiH2GxWPIDHX2uLdBhRoeIqCIY6BRkCIZFH6xW9Tmp6sBDRETkbFm5JuSZtWOOX451fghmdIiIKoSBzkUsQZHqNtSchuw8s6t3h4iIfKlszc8CXU6GttF6PCIiovJhoHMRXUh0/qShbEhARESVGejEBxU47jCjQ0RUIQx0LqKzzaUD6bymzVJNRETkTKlZWoBTPUgLeOAfCOgDXbtTREQejoFOMZ3XpMV0Wrb1gENERFQJGZ1qgdnaBmZziIgqjIHOxfIzOlK6xowOERFVXqATG2Cd2oBz6BARVRgDnWJaTEdyjA4REVWSVGugE+Nv1DawtTQRUYUx0Cku0IGUrjHQISKiysvoxOhtgQ5L14iIKoqBTnGlazppRsBAh4iIKi/QibJldNhamoiowhjoFBvopCPdyECHiIicL9WoBTqRuixtAzM6REQVxkCnmEBHStfSmdEhIqJKHKMTlh/ocIwOEVFFMdAptr00mxEQEVHllq6FIVPbwIwOEVGFMdC5WIEJQ5nRISKiypwwNNhsDXTYXpqIqMIY6BQT6ITosmHMsh5wiIiIKiGjE2TO0DYwo0NEVGEMdC4WGAELdGrVYkx29d4QEZEPBTqBJlugw4wOEVFFMdC5mJ8fcgO0A4yf8byr94aIiLxcTp4ZWbkmta7PTdM2sr00EVGFMdApgilAa0jgn53i6l0hIiIfaS0t/HOsgQ5L14iIKoyBThHM1s5rDHSIiKiyytbCg/TQZdsCHZauERFVFAOdogRrgU5gbqqr94SIiHxkDp2IIAOQbT3uMKNDRFRhDHSKoAuJVrdBeczoEBFR5WR0YoKkdjpH28j20kRElR/oLF26FEOHDkWNGjWg0+kwd+7cUp+zePFidOjQAYGBgWjUqBFmzJgBd+ZvDXRCLelqkCgREZGzA534QGuQIwLCXLdDRES+GuhkZGSgbdu2+Oijj8r0+ISEBFx99dW44oorsHnzZowfPx5jx47F33//DXdlCLVNGpqODE4aSkRETpRq1I4zsYHZ2oaAcMDP37U7RUTkBfT2PmHQoEFqKatp06ahfv36eOutt9T95s2bY/ny5XjnnXcwcOBAuCO/UC2jE6XLQHp2HqqEBrh6l4iIyMvH6MQarIEOx+cQEXnGGJ1Vq1ahX79+hbZJgCPb3VawltGJhBboEBEROX2Mjt4a6HB8DhGRazI69jpx4gSqVatWaJvcT01NRVZWFoKDgy95TnZ2tlps5LEiNzdXLfayPaesz9UZwtQPJkqXjpQMI3JzL91HIkex9/NJVJn4uay8jE4VP6O2ga2liYg8I9ApjylTpmDy5MmXbF+wYAFCQkLK/boLFy4s0+Oi0/egl8ropOOv5atxcoel3O9J5OjPJ1FlyszMdPUu+ExGJ9I/S9vA0jUiIs8IdKpXr46TJ08W2ib3IyIiiszmiIkTJ2LChAmFMjq1a9fGgAED1PPKc0VSTiL79+8Pg8FQ+hNONwT2vYJIXQZatGmPwa2r2/2eRE77fBJV
IltGnZwf6ETorIEOS9eIiDwj0OnWrRvmzZtXaJuc1Mn24kgbalkuJieBFTkRLPPzw6vmj9HJyjXx5JMqRUU/30TOwM+k86UatUAnDNbsGTM6RESuaUaQnp6u2kTLYmsfLeuJiYn52Zjbb789//H33XcfDh48iCeffBK7d+/Gxx9/jNmzZ+PRRx+F2wqKUjf+OgtyMjhpKBEROT+jE2Kxla4xo0NE5JJAZ/369Wjfvr1ahJSYyfqkSZPU/aSkpPygR0hr6T///FNlcWT+HWkz/fnnn7tta2nFEIQcP5miGjBlnnP13hARkRdLydQCnWBzuraBgQ4RkWtK1/r06QOLpfjB+TNmzCjyOZs2bYInMfqHI8BshDnzvKt3hYiIvJTZbEGadRqDQFOGtpFjdIiIPGMeHU+VY4jUVrKSXb0rRETkpWSuNtu1Q0OeLaPDMTpERI7AQKcYuQFaoONnZEaHiIicI8XaiCDI4Af/HJauERE5EgOdYpgCrQ0JstmMgIiInCM1SytbiwgyANnWVt4sXSMicggGOsWwWDuvGXIZ6BARkXNbS0cGGwCjNdBh6RoRkUMw0CmGJVgLdAJyGOgQEZFzpFgzOirQyU7TNgZax4gSEVGFMNAphn9otLoNMnFWcCIico40W0YnyP9C6RozOkREDsFApxj+IVXUbUie9QobERGRkzI6sYEmqSXQNnKMDhGRQzDQKYY+NEbdhloY6BARkXOkZmkZnbiAbG2Dnx7QaxNWExFRxTDQKUZQhBboRFjSkWsyu3p3iIjIC6UatYxOtN54obW0TufanSIi8hIMdIoRaA10InUZyLDOWk1ERORIKdaMzoVAh+NziIgchYFOMQzWZgRRSFczVxMRETmrvXSUn7V0jeNziIgchoFOcYK1ZgTBuhxkZFhnqyYiInJC6VqEX6a2ga2liYgchoFOcQLCYbL+eIxp51y9N0RE5MXNCCKQpW1g6RoRkcMw0CmOnx/SdWFqNSftrKv3hoiIvLi9dCisGR2WrhEROQwDnRJk+GlX1nLTmdEhIiLHslgujNEJsWRoG5nRISJyGAY6Jcjy1w445szzrt4VIiLyMrlmINekTRIaaLKN0WFGh4jIURjolCDboB1wzJnM6BARkWNlWht6+vvpYMizTk7NjA4RkcMw0ClBjkHrfqPLYkaHiIgcK8uk3UYGG6DLtgY6HKNDROQwDHRKkBugBTp+2Smu3hUiIvLSjE5EkB4wpmp3WLpGROQwDHRKYLLOZ+CfnezqXSEiIi+TlafLz+jAltFhoENE5DAMdEpgCdImDTXkMKNDRETOKV2LUIGONaPD0jUiIodhoFMCXbAW6ATmMdAhIiInla4VyuiwGQERkaMw0CmBX4gW6ATZuuEQERE5ONBRpWsco0NE5HAMdEqgD4tWtyEmBjpERORYWSZtjE6VQB2Ql6VtZEaHiMhhGOiUwBAWo27DzAx0iIjIsbKsGZ0YQ/aFjczoEBE5DAOdEgTYAh1kAGazq3eHiIi8sHQtRm/UVgwhgL/epftERORNGOiUICRSC3T8YAE4lw4RETmhvXS0vzWjw2wOEZFDMdApQWhICDIsgWo9L/0cPJ3FYsGkX7fj+qkrkZKZ6+rdISLyaZnW9tKRfpnaCsfnEBE5FAOdEoQG+iMZYWrdmHYWnm7BzpOYueow1h8+j8+WHXT17hAR+TTbGJ1wnbV0jXPoEBE5FAOdEgTq/ZGGUK8IdDKy8/DCbzvy789YeQjnM3Jcuk9ERL4sP9CBLaPDQIeIyJEY6JQiTaeVEuR6eKDz7j97kZRiRO3oYDSrHo707Dx8sTzB1btFROSTck1mZJu1MTohFpauERE5AwOdUmT4WwOdDM8do7PzeCq+XHFIrb94TSs82r+JWp++IoFZHSIiF0g1WtM5UrFmzrCuMKNDRORIDHRKYbQGOqaM8/BEZrMFz8zdBpPZgsGtq+OKZnEY0KIaWsRHICPHhM+Xc6wOEVFlSzNqDWHCAvXw
y07VNrJ0jYjIoRjolMKoj9RWsjwzo/P9uiPYlJiM0AB/TBrSUm3T6XQY36+xWp+xgmN1iIgqW4p1gE5EkB7Itk5KzUCHiMihGOiUIjfAFugkw9OcTsvG//7apdYfG9AU1SOD8r/Wv0U1tKyhZXXYgY2IqHKlZmkZnYhgA5Cf0eEYHSIiR2KgU4q8AO0Km1+25wU6r87bperAJaC5vVvdQl/TsjraWJ2vVh7COWZ1iIgqfYxOZHCBjA7H6BARORQDnVKYg6qoW312iqt3xS4r95/BnE3HoNMBr1zbGnr/S3/V/ZrHMatDRF5nypQp6Ny5M8LDwxEXF4fhw4djz549hR5jNBrx4IMPIiYmBmFhYbjuuutw8uTJStvHFFtGJ8gAGJnRISJyBgY6pTAHRanbgFzPCXSy80x4du52tX5b17poV1v7Hi7GrA4ReaMlS5aoIGb16tVYuHAhcnNzMWDAAGRkWLubAXj00Ufx+++/48cff1SPP378OEaMGOGC0jWO0SEicha9017ZWwRHq5vAPOsVNw/wyZKDOHgmA7HhgXh8YNMSHytZnVY1I7D9WKrK6jx1VbNK208iImeYP39+ofszZsxQmZ0NGzagd+/eSElJwRdffIFZs2bhyiuvVI+ZPn06mjdvroKjrl27On0fU2yla5LRsVUMBFnHhBIRkUMw0CmFPkQrXQvykEDn0JkMfPjffrX+3JAWWllECVRWp28TjJ25XmV1xvasj5iwwEraWyIi55PARkRHaxeuJOCRLE+/fv3yH9OsWTPUqVMHq1atKjLQyc7OVotNaqp2TJDXkcXufcrUXis0wA+W7DTI1KG5/sHyguX4Dokcz/a5Ls/nm8jZyvq5ZKBTCv9Q7cAYYMkBcrMAQzDclcViwXO/bkdOnhm9GlfF0DbxZXpe3+ZxaF0zEtuOpeCzZQn4v0HM6hCRdzCbzRg/fjx69OiBVq1aqW0nTpxAQEAAoqIKl/VWq1ZNfa24cT+TJ0++ZPuCBQsQEhJi9341NQP3N9ch8vxeWLJSVKDz74p1MBq0C1VE7kLKP4ncTWZmZpkex0CnFIGhEciz+EGvM2stpt040PljaxKW7TuDAL0fXhzWSmVrysI2r86Yr9Zj5qpDuLsXszpE5B1krM727duxfPnyCr3OxIkTMWHChEIZndq1a6uxPxEREeW6GiknkP0v7wm/PWa17cpBw4CAsArtJ5Gj5H9G+/eHwVBydQhRZbNl1UvDQKcU4cEGpCAUMUgDss4DEWXLklQ26eDz4h871fqDfRqhftVQu55/ZbM4tKkVia1HU/DpsoOYOKi5k/aUiKhyjBs3Dn/88QeWLl2KWrVq5W+vXr06cnJykJycXCirI13X5GtFCQwMVMvF5ASwIieBBnOWtqLzgyEkSq48lfu1iJyhop9xImco62eSXddKERqgR7LFeoVNAh039daCPWqC0AZVQ3FfnwZ2P9+W1REzVx7G2fQLtehERJ5EynglyJkzZw7+/fdf1K9fv9DXO3bsqA6SixYtyt8m7acTExPRrVu3yt3Z/I5r4QxyiIgcjIFOKcKC9Cqj486BzpYjyfh69WG1/vLwVgjU+5frda5oqmV1snJN+HQp59UhIs8tV/vmm29UVzWZS0fG3ciSlaVlTyIjIzFmzBhVivbff/+p5gR33nmnCnIqo+NaQbr8QIcd14iIHI2BTinCAvVIsVgDHWMy3E2eyYyn52yDxQJc274mujeqWu7XKpTVWXUYZ5jVISIPNHXqVNVprU+fPoiPj89ffvjhh/zHvPPOOxgyZIiaKFRaTkvJ2i+//FL5O5vNyUKJiNwq0Pnoo49Qr149BAUFoUuXLli7dm2xj5X5C+QEuuAiz/MUoYF6JEMrXTNnnoO7kUzOjuOpiAjS4+nBFR9XI1mdttaszmfM6hCRh5auFbXccccd+Y+R45Acy86dO6cmEpUgp7jxOU6VY83oBHGyUCIilwc6ckVM0v3PP/88Nm7ciLZt22Lg
wIE4depUsc+RjjRJSUn5y+HDWpmVp2R0bGN0ctPdK9A5kWLEWwv2qvWnBjVTE4RWlJbVaaLWmdUhInIyoy2jw0CHiMjlgc7bb7+Nu+++W9Uzt2jRAtOmTVNzCHz55ZclnjzLlTLbInMVeIpAvR/SdVrpmu7gv8CB/wCzCe7gpT92Ij07D+3rROGWznUc9rp9msaibe0ojtUhInIynS2jw9I1IiKHs6u9tLTjlEGbMp+AjZ+fn5pdWmaTLk56ejrq1q2rJm7r0KEDXn31VbRs2bLYxzt6BuqKzu6b4K91MQs4uQX4ejgsYdVgbjkC5lY3ANVau6RTzpK9p/HntiT4++kweUhzmEx5MDkw/nqoT32M/XqTmlfnrm61Oa+OE3H2aXJn/FxWUkaHpWtETmEymfh3zANJZ0x///I11yp3oHPmzBn1gbk4IyP3d+/eXeRzmjZtqrI9bdq0UYND33zzTXTv3h07duwoNK+BM2egrujsvost7TEi5wW8ELsUzTLWICD9JPzXTFVLWlANHKnSHUerdENWYCwqQ44J+N8W+eXr0KuaCQmbliFhk2PfQ5ob1A3zx+F0MybO/A/D62kT2lFhZ43AypN+yDUD8hMyW7Sf3cXrRW0ruB6m98OprIWIc9/5aMlHlXX2aSonNiMgcgoZlyfdFmW+LPJMMs+ZVIJJZVh5OX3CUGnXWXBeAglymjdvjk8++QQvvfRS5c5AXc7ZfT86sBIbTzXBucG3QFcvHHkHFsFv+0/Q7fsb4cbjaJH0k1rMtbvC0up6mJsPA4KrwFneXrgPZ7MTUD0iEO+O6aEaJjhDWOPTKquz6ower97eC1WZ1Skk12TGddPWYNcJa+lJBe1JM2DKtS0xqJULBkQTVXD2aapoe2lmdIgcyRbkxMXFqYvkFTlZpsoPUuUim238v3TNLC+7zpCrVq2q0kgye3RBJc0mfTEJNNq3b4/9+/cX+xinzUBdzueHB2vPMeZZYAgKBVpeoy3GFGDX78DW2UDCUvgdWQ0cWQ3/vycCjQcAbW4EmlwFGBzXZW7fyTR8vuKQWp88rBWiwpyXAujbIh7taidg85FkfLEiEc8OaeG09/JEU5fuU0FOVIgBt15WB346Hfz8dPDX6eDvhwLrOvU1dVvw69ZtUtI59e+tOJBmwsM/bMUdR1JVB70APbu/k+txRnQnY6BD5HBSfWQLcmJiYly9O1QOwcHa+a0EO/J7LG8Zm12BTkBAgJpRWmaTHj58uNomJ2lyX2ahLuuHb9u2bRg8eDA8hS1jkp590SCYoEig/ShtST0ObPsJ2DYbOLEN2POntsjBq8U1QJubgLo95ey3QhHuM3O3I9dkQb/mcRjQwrlNHWzz6twxfR2+WXMY91zeAHHhntMa3Jl2n0jFB//uU+uTr2mJYe1qlvu1JOPod3QTdhsa4dNlhzBj5SFsOpKMj25tj1pVyl+qSUQegO2liRzONianIsMdyPVsvz/5fZY30LH7rFtKyj777DN89dVX2LVrF+6//341B4F0YRO33357oWYFL774ohpbc/DgQdWOetSoUaq99NixY+Epwq2BTkZ2XvEPiqgB9HgYuG85cP8qoOejQGRtrf560zfAV0OBt5sD390C/PMCsOV74PgmICejzPvxzZpErE04h2CDP164pmWlpGEvbxKruroZc834dAk7sNlK1h7/cYsKOPu3qIZr2tao8Gv664AnBjTB57d3QmSwAVuOJGPIB8vx7+7C2VMq3bJ9p9Hnjf/w6+Zjrt4VIjvaS3OMDpGjsVzNszni92f34I6bbroJp0+fxqRJk1T9Y7t27TB//vz8BgWJiYmqE5vN+fPnVTtqeWyVKlVURmjlypWqNbWnCA3Uokhp5Vwm1VoA1V4ArpwEJK4Ctv4A7JwLpJ8A9szTloKi6gCxzYDYpkBsc+t6k/wDX3aeCVPm7VZX+sXDfRtX2pV+27w6o79cy6yOlbTc3n4sVQUkrwxv5dA/pP1aVMMfD/XE
g7M2YuvRFNw1Yz0e6NMQE/o3gV7q3ahER85lYtysTUjJysXEX7ahXe0o1I3R2sMTuXd7aWZ0iIgcrVyj2KVMrbhStcWLFxe6/84776jFk10oXStjoGMjAV+9Htoy+A3gyBrg9B7g9G7t9tQuIPMMkJyoLfsWFH5+RC1kRTXC36cikZUWhw66mujRtRvu6a21u64svRtXVVmdTYnJ+GTJQTznw2N19pxIw7v/aJO0Pj+0BeIiHB/01Y4OwY/3dcOrf+7CV6sO4+PFB7Dh8Hl8cEt7p7yftzDmmvDAtxtVkCOxZ2aOCU/8uBXf39NVjY0icktsL01ETlKvXj2MHz9eLb7K6V3XvIGtdC01qwJ92PWBQP3e2lJQxpnCwY/tVrI/qUcRnHoUMhpquG08sLSR3lMVqNoYiGkEVG1iXW8MVKkL+Dt+4LBkLB7t1wS3S1Zn9WHc66NZnTyTGU/8pJWs9W0Wh2vbl39cTmkC9f6q2USnetH4v5+3Yk3COQx+fxnev7k9ujeq6rT39WQv/rET246loEqIAdNGdcSdM9Zh7aFz+HJFAsb2qtyLA0RlxglDiaiAPn36qGqpd999t8KvtW7dOoSG+nZVAwOdMogKCVC3361NhNliUSf9DruyHlpVWyTrY5WVY8Lrc1djy6a1aOx3DL0jz6Bf7HkEnt8HpB7TskCJslw0SaufHqhS3xr4NNSCH1sQJO9RgRKrXo2rokOdKGxMTMa0xQcxaajvZXU+W5agyskigvR4dUTrSqn9Hdq2BlrWiFCZit0n0jDqizXq8/fgFY2YpSjg5w1HMWtNovqIv3dze3RpEINnrm6OZ+Zsx+t/70GfpnFoFBfm6t0kKsxihs42TjMw0tV7Q0QeQBpTSWMvvb70U/jY2MqZ39Gdsei/DK7rUEvNbSITPH639gj6vLkY7/2zD5k5dpaylbF99PCPVmD6xmRsQhPEXX43Bj72JQLv/BWYsBOYeBS4ZzEw4nPg8qeAliOA6q0BQwhgzgPO7tPGAK38APj9YWD6IODNRsBrdYHP+gJz7gP+exVY8R6w9jNg8yxgxxxg7wLg0HLg2EYto5R8BMg8B+Qa1ayWtrE64ts1h3Eq1QhfIr+XdxZqJWuThrZEtUosIWsQG4Y5D/TADR1rqc/gWwv34o4Z63AuI6fS9sHdO+A9M3ebWn+kb2P0bqL9YZeW3xKg5+SZ8diPW1RGjsidGExZF+4wo0Pk8+644w4sWbIE7733njrvkmXGjBnq9q+//lLj3GX6leXLl+PAgQMYNmyYGiMfFhaGzp07459//rmkdO3dApkheZ3PP/8c1157repo1rhxY/z2229l2jcJrsaMGYP69eur1s9NmzZV+3mxL7/8Ei1btlT7KfPfFBzqIi2/7733XrXPQUFBaNWqFf744w84EzM6ZRAZYsDUUR2x/tA5vPznLjWvzDv/7MWstYfx2ICmKhCS+VAqGqH/tOEoJv26A1m5JjU557s3tUPPxheVKcnBsEZ7bSnIbAbSjgNn9gFn91tv9wFn9gMpR7Q5f46t1xZ76fwAQyh6BYRgVagfknMNSP8sDnFNWmkZpOgGQHR9bT3Q+66aywny4z9tRY7JjD5NY3FdB+eVrBUnOMAfb9zQFp3rR2PSr9uxdO9pXP3+Mnx4a3t0rBsNX5VqzMX932xUXQElwHn4ysaF/qC/fn0bDHhnqepi98nSgyoTRuQu9GZroKMPAvRa5QAROYecZ8n5lStIt9yyVIFI4LB3714VAEjXYrFjxw51+3//939488030aBBA9Xc68iRI2qqlldeeUUFFTNnzsTQoUOxZ88e1KlTp9j3mDx5Ml5//XW88cYb+OCDDzBy5EjVDTk6uuRzCZlOplatWvjxxx/V3ETSWOyee+5RwcyNN96oHjN16lTVnfl///sfBg0ahJSUFKxYsSL/+bItLS0N33zzDRo2bIidO3eWu210WTHQsYOMl5jzQHf8sTUJr83fjaPns/DkT1sx
fcUhPDO4+aVBSRlJ2+rn5m7HL5u0drg9GsXgnZva2TcORhofRNbSloZXFP5abhZw9sCFwEcCopxMIDfDepuptbmWW3msbd1kzRhYzKqOXLoDydy08ZIHTEsENhQRNIXGaUGPBD8qCCqwHhJdofI5V/lieYI6UZaxWlMqqWStODd2qo02tSLxwDcbcfBMBm76ZDX+b1AzjOlZ3+faaMpB68kftyLhTAZqRAapCwMXl/PFRwbjhaEtVUZHmkhc0TQOLWpw0De5B4MpU1thNofI6STIaTHpb5e8984XByIkoPRT7sjISDVnpWRbqlevrrbt3r1b3Urg079///zHSmDStm3b/PsvvfQS5syZozI0Jc1teccdd+CWW25R66+++iref/99rF27FldddVWpk0dLkGQjmZ1Vq1Zh9uzZ+YHOyy+/jMceewyPPPJI/uMk0yQk2yTvI1PTNGmiVQhJ0OZsDHTsJCeTMm5iQMtqmLnyMN7/dx92JaWqsRNytV9mtG9SrewHrZ3HUzFulnbSKudo0kb4/j6NKpwhKsQQDFRvpS32MOVdEgxZcjIwc+lObNm1F3X9TqKp4Qy6RKUiyngEuqxzQMYpbZEOcxeT9qm2zI/cBkdrzRNkbJG6ta3rtXXbtkL3CzxWbuUEISjKaVdD959KV6ViQrrNyYmzqzWrHoHfHuqpmhRI0C1ZxnWHzuH169uqlte+QgLQ+TtOwOCvw8ejOiI6tOjPwIgONdXjFu48qQKeXx/sgQA9q3bJ9fS20jW2liaiUnTq1KnQ/fT0dLzwwgv4888/kZSUhLy8PGRlZalpXkrSpk2b/HVpVBAREYFTp06VaR8++ugjVZom7yHvlZOToxonCHmN48ePo2/fvkU+d/PmzSojZAtyKgsDnQp0xbq7dwNc37GWCna+XnUYi/ecViVFN3Wug0f7Ny4xIyNXo79dk6g6RckYguoRQXj/lva4rL4blSFJgOEfCQRdGCQr4dfo27ph5f4zeGbudnU1HVnAFU1j8dJVtVHLcgI4lwCcTwDOHQTOHdLWpYmCTJ6atEVbHE3GKEnAExxl3618b1KaJ1mrggtksJ8ZL85ejbC8ZHRvGI0bmuqB1KQiH6sFXgFa8JUfiAVomTYnCAvUq3bT8nl56Y+d+HvHSew+sRzfju1SaXMsuZIEdlP+2p0fgMp8OSVdnHj12taq9FQuSnzw7z5VcuorjiVnISY0AEEG55YHUAXG6DCjQ1Qp5WOSWXHVe1fUxd3THn/8cSxcuFCVszVq1EiNm7n++utV8FFaZubiY6SUlZXm+++/V+/51ltvoVu3bggPD1flb2vWaBe25f1LUtrXnYWBTgVVCQ3A80Nb4vZu9fDaX7vVlWPpzvbb5mO47/KGqq2tjK+4eFzBxJ+34c9tSer+lc3i8OYNbYu9Iu2OpMXxX4/0wtTFB9Ty357T6HfwrGpYMKbncBguntxSSuLOHy4QACUA2WmAORcw5WqNFNSt3M8rsN12P+/Sr8mSk64FGqrsLlMry3MQ+a3NlBWJV6Wq8O1yvIjO3xr8SNCjLxwMqW0G+Pvp0SM1E/7p32rlfbYArKQgzRCs/jjJ565trSjVle3w2Uzc8tlq/HBPN9SIcn3myVlOp2XjwW83wmS24Jq2NXBb17qlPic2PBAvD2+tJmKVeYn6Na+GtiUER94izZiL279Yo/4GfXpbJ6/+XHh0Rodz6BA5nRwzy1I+5mpSuiYD/0sjY1+kDE0aC9gyPIcOaRPLO8OKFSvQvXt3PPDAA/nbpCGCjQQ+0vxg0aJFuOKKK4rMJB09elSNQarMrI77/8Y9RP2qoZh2W0d1pVlKiWRMh5Q8SdbmsQFNMMLasEC2j/tuI46cy4LeT6fGV9zVo75HtgqWK8SP9m+Ca9rVwDNztmH1wXP431+7MXfTMbxybWt0rFulcPlcXDNtcSSzScsUZSUDxmQ7b1O0IKmsJPMji+S1bOvqvuyHBF5FXEWxmKSbAZBXfJc6
eQU1umvfnrLvi3+gNfCJRNugKCyKj8Dy3GycTtVj5Ydf4Kr29REWFqH93NUSUsztRdvk+5HvQy3W7ykv+8J6we2mgtsLfL3AvhUK0OQ9KjiOSBpDPPzdJpxKy1btou0ZM3V1m3j8tT1elftJCdsfD/X06iyH2WzBY7O34MDpDJUx1vt73t8Yb2ewNSNg6RoRWUmwIFkSCVqkm1px2RbpmPbLL7+oBgRyHHzuuefKlJkpL3k/aXjw999/q/E5X3/9tZqnR9ZtpJTuvvvuQ1xcXH7jAQmQHnroIVx++eXo3bs3rrvuOrz99tsqCyXjj2TfSxsfVBEMdBysszQsuL87/tiWpDI8UjbyhLVhweVNY/H5soNqwslaVYLx4a0dSiy58RQNY8Pw3d1d8cvGY3j5z51qvpfrpq7ErV3q4KmBzVTXOqfx8weCq2iLveQPgm2yvgJBjAk6jPx8LdYlpqB7w1jMHNMFurKUoFksWuBlC3ok+yS3BTNQtqAg/zG5yMvJwqa1q9C+RQPoZX8KBmQSjF0cnEnwJEFG+kltsSad+tn+R0vX83VwP5LBujj4uSRzZf26/F6lLFB+nvllghbM33IUNQ6dwMgA4OEODRG6PaHA4yyFywmlk1VAqBbAyW1AKF7tFoCjB87h2KlkfDh/Ex6/ur32XvaS95IA0JZJtI1ju3hdHiOfK3kPye6pdVuQ7G/dblsvsL3gc9T7yedKspkm6/da4L5aty15+feX7k5C/J4TuMdgxui2tREXeukVNnItjtEhootJedjo0aPRokULNQ5m+vTpRT5OgoW77rpLZVmqVq2Kp556CqmpqU7br3vvvRebNm3CTTfdpIITaWgg2R1pe20j+200GvHOO++o70P2S8rpbH7++We1XZ6bkZGhgh3p0OZMOosMFnFz8ouTThTSpk4GTdkrNzcX8+bNU234Lq5NdCZjrglfrTyED//bjzTjhTl3ZE6e/13XxisHjp/PyMGUv3Zh9vqj6r60yX5uSHNVYuQpXcFkkLuMewkN8Mffj/Z2+pgXuz6f8t9VSv6KCoKyU5Gcmoq5a/cjLzsD1YIt6N84HEEWOSHPsi6ZF93KkmENDopgG2skzR5U2Z2t9M52G3jpNsleXRyoycm3u9IHAwHWYMgQemFdvjf5Xi7uRmj7+RX3M3NXz54uV9OOiv799VaOOC4lfH4nmpz8HehyHzDoNafsJ1F5uercyRHkZDshIUFlG2S+FvJMJf0ey/o3mBkdJ5KymHsvb4gbOtXG+4v24Z9dJ3FP7wZqTIGnnPSXZ8ySdP+SUj0pZ5OymUe+36zmCHp5eCvUjSk8mK4iYw+OJxtRJzrkkjFQFXHoTAbe+Fsb5P701c3db2C/fG6knr+Ymn7JD17RSWs7fSLViKbHwvHdPV1LHv8lwZNkl2wztBcMaBzxOZXXl7FUtoxUcRmr/HXJWpkLZDX8YMyzYNPRFMgUCPFVwtC4WkSB8kFdgcxIgXJCW0Ai31d+O3Vt3ZyTDj9b2WJelrZkni3f9yc/K1X+JwFSSOF1CZZUlsl0aYbKlo0ptG65dLtqdqG3Zn2kA6HfRfdtmSHtfmaeBasOpSDbpEOt6DC0qRNzITNEbprRYTMCIiJnYKBTCeQk84VrWqrFV3RtEIN5j/TCp0sO4oP/9mPZvjNq4saH+zbG3b0alKm9rzRtOHwmEwlnM3D4TAYOnc3EIVk/m4Ez6dp4mPAgPW7oWBu3daurxklVdEyDzIskk092bxiDWy8rfsItdybBpAQ3N32yCntOpmHk52vw3d1dEBVSTLAjgYI+UFucQV5fTuTUyVztcmVGr5+2EtuNqWhfJ0o1W0AF20OnZ+Xgmnf+QWpqCkZ1qIoJl9csEBRZMzeSzZHyt/wARjI+wYWDGVmkO6GbSM/Ow4iPV2CvMV2NkZOS0or+rKgyuq4xU0ZErnXfffepiTyLMmrUKEybNg2eyH2O0OSVLbgf6tsYQ9rWUBOiLt9/Bm/8vSe/
WYG0Rk7JylWBi7Splq5hklHRgplMnM0ouUViSIC/Kgn8ckWCWno3icXobnXRp2lcueYhmrnqENYeOqde97Xr2nh01k2Cvll3d8XNn67On+fp2zFdnTteykkm/74D24+lqgsGH93awSFz4EQEB+ClGzrjti/W4v2NOejSIQ49GpVvwl93IVXIT/y4BXtPpiMuPBBTRzrmZ0XOozdzwlAicg8vvviiGj9TFE8uW2agQ5Vy0v31mMvw6+bjqlnBvlPpuPGTVYgKMSA5M7fE58oYn/pVQ1SWQl6nbkwI6sVot6EBeizZd1rNYfTfnlNqDiNZakcHY1SXurixU21VSlcWEmy9Nl/rejZxUDPUjnazkrVykK5kksmRYEcChdu/XIOvx3ZBRJDnBDtS8vjd2iMqKfTeze0c2h65V+NYjOpaB9+sTlSZvL/G93LYz0YyKz+sO4KDp9PxSN/GiItwfo341CUH8Nd2bQLVqaM6VMp7UsXoTdZujGwvTUQuFhcXpxZvw0CHKoVkR4a3r4k+TWPx2vzd6uTVFuTIHCf1rAFMvaqh+YGMrMvEmCW5ommcWhLPZuKbNYfVyaW07pbJJN9euFc1QRjdvR5a1bww6WlxJWtZuSZ0bRCNkV1Kn5fFUzSuFo5v7+6CWz5djS1HU3DHl2tVF7nSfq7uQDJRMs5LjO/bRAUmjjZxUHMs3XsGiecy8fIfO9X4sopISsnCjBWHMGtNItKytQYki3adwme3d0LrWsV/Bitq8Z5TKlsqJl/TCh3rutHEw1Qsg8mW0WGgQ0TkDO5/tkNeRcaJTBnRRk2mKle9JagJdcBJd52YEDw9uDke7dcEv285jq9WHcKO46n4ccNRtcjYjtHd6mFQ6+qqpK4gCZDWJJxTMxe/fl1bj5zTqCTNqkfgm7FdcOtna7AxMRl3Tl+LGXde5pCfu7PI+Kz7v9mA7DwzLm8Si4eubOSU95GfwRvXt8HNn61WnQKvalUdVzarZvfr7Diegs+XJajPXp5Za3LQIDZU9RE4eCZDjTF644a2KvB2NMlGytxC0sfglstqq7bu5BnYXpqIyLlYwE0uIaVoLWtEOvxkWzqw3di5tpoM8uf7u2NYuxqqlGdTYjLG/7AZPf73L978ew+OJ2snGEfOZapJToVM3ioBkzeSn/U3Y7qo5g3rDp3HmK/WISvH5LZjTZ78catqPlEzKhjv3tTOqcFnlwYxatJe8dTP25CcmVPm/ZRMyqjP1+Dq95djzqZjKsjpUj8aX4zuhH8evRxzx/VQWUwJ2CQYkY5+kkF0lMycPNz79QakGvPUnFy+1PDEqyYMZekaEZFTuO8lXaIKlspJ1ylZnr26Bb5fm4hv1ySqlssyr5GMZ+jfvBpOpRmRmWNSjRGk7bc3k9KpmXddpgbgrz54DmNnrsMXozurNujuRDIj83doY00+GtmhzOOsKuKJgU3VOK+DpzPw/G878N7N7Yt9bHaeCb9tPq72U7raCWl+IfNjSUfBtgUmAZYxP/IzlnLNT5cexEf/HVDNAt65qV2FywdVQPjTVjVBr4xlmzaq4yXZSnJvbC9NRORcDHTI68kYIOn+dn+fhli486Qqa5MTfTmZFkEGP7x+XRuvK1krSvs6VfDVXZ1x+xdrsWL/Wdzz9QZ8eltHlwU70jpaxuJsO5aCbUdT1K0teJg0tKXKUlQG+f7fvrGdas0sTTOualkdg1rHF3pMSmauKnOUSYBPpWWrbdKh7+bOdXBnj3rFNrCQIEjKKptWC8fEX7apz+B1H6/E56M7VajpxWfLDuKPrUnQ+2nNB6pHsvmAR8kzwt9inUiapWtERE7BQId8ht7fT528yrL3ZJpqJ/3f7tOY0L+JanzgK2Sg+vQ7L8PoL9eqLnUPfLtRnSg7OxtQVFAjHfhMRZRyjexSB6MqeayJBFUSDEvW5Zm529G5frTKlEh54xfLEzB7/RGV/RPVIgJxZ4/6uOWyOogMLluntus61kL92FBVaibB3DUf
LsfUUR3VnFP2Wr7vTH7J5fNDW6BzPTYf8DjGVHVjgQ66gDBX7w0ReYl69eph/PjxaiEGOuSjmlQLx8vDW8NXSaneF3d0wl0z1uHf3acwbtYmfDyyAwz+fpUe1MSEBqiueG1qRarb1jUjHdpG2h4yoa10SZNysEd/2KxKz/7angTbbjerHq7K04a2rVGuOWo61KmC38b1wD0zN6ifiYzvmTyspV2d/iTwGvfdRrVPN3SshVFeXnLptXK0zCUCwwA/DpclInIGBjpEPqp7w6r4/PbOuOurdaqcSgbLv39L+0LBjowDkbbb6cY81S5ZbqVbXpr1Nt2Yq923fk0Gxe87mWZXUBMfGeQ2k7NKVuutG9ti2IcrsGzfmfztvRpXxT29G6Bno6oV3tf4yGDMvrcbnvx5q+rS9syc7dhzIg3PDWlRaqApDSSk3FBas7etFYmXhrdym58d2UdnzegggONziIichYEOkQ/r2biqGqMjGQaZbHLgu0th8POzBjNaEFPeJmHuHtSU1KHu2aub462FezGgRXWM7VUfzeMjHN4d8P2b26kMkcx/M3PVYew7ma6yasU1X5Cg8/9+2aoyZVXDAlTZm7s1kqByZHTYcY2IrD799FO88MILOHr0KPwKZHqHDRuGmJgYPPPMM5gwYQJWr16NjIwMNG/eHFOmTEG/fv3K9X5vv/02pk+fjoMHDyI6OhpDhw7F66+/jrCwC+W0K1asUO+7du1aBAYG4rLLLsP333+PKlWqwGw2480331T7feTIEVSrVg333nuvery7YKBD5OP6NI3DtNs6qLEj0nWsKNKnQbqEhQcZ1G1YkD7/NsK2HmhAaKC/GmDvKUFNce7oUV8tziQ/mwevaITGcWGqTG7VwbMY9tEK1aRASisvJuOEpFGCNB/46NYOLivvIwcxaoGOJTACnvm/hMjDyGRjudZJeiubIUT+6Jf6sBtuuAEPPfQQ/vvvP/Tt21dtO3fuHObPn4958+YhPT0dgwcPxiuvvKKCjpkzZ6rgZM+ePahTx/5xrX5+fnj//fdRv359Few88MADePLJJ/Hxxx+rr2/evFntx1133YX33nsPer1e7ZvJpI1XnThxIj777DO888476NmzJ5KSkrB7tzZ+1F0w0CEiNUnmogl9sDMppVAwE269lclUPTVocXcDWlbHzw90x9iv1iPxXCZGfLwS793cDn2bX5i4dOX+M5hibT4g2SaZ+4e8JKPD0jWiyiFBzquOn7S5TJ4+DgSU3vRIsiSDBg3CrFmz8gOdn376CVWrVsUVV1yhApO2bdvmP/6ll17CnDlz8Ntvv2HcuHF279b4Ag0LpInByy+/jPvuuy8/0JHsTqdOnfLvi5Yttfna0tLSVPDz4YcfYvTo0Wpbw4YNVcDjTjgCkogUmSz1qlbx6NGoqpoLpmFsGOIighASoGeQ42TNqkfgt3E91WSjUi44duZ6TF18QJWrHT0vzQc2qTFPIzrUxOju9Vy9u+QAumzrGJ0gBjpEdMHIkSPx888/Iztbm8bg22+/xc0336yCHMnoPP7446pkLSoqSpWY7dq1C4mJieV6r3/++UcFVDVr1kR4eDhuu+02nD17FpmZmYUyOkWR95V9LO7r7oIZHSIiNxAdGoBvxnbBC7/tUJPbyiSje06kYv/pdJzLyFHlgK9e25pBp7fIZkaHqNLLxySz4qr3LiMpRZOLXH/++Sc6d+6MZcuWqdIwIUHOwoUL1biYRo0aITg4GNdffz1ycnLs3qVDhw5hyJAhuP/++1UpnIzRWb58OcaMGaNeLyQkRL1+cUr6mjthoENE5Cak69or17ZGs/gIFfDM3Xw8Pwia5sKJXckJrBkdC5sREFUOuUhUhvIxVwsKCsKIESNUJmf//v1o2rQpOnTokN8Y4I477sC1116r7kuGRwKW8tiwYYNqJvDWW2/lNz6YPXt2oce0adMGixYtwuTJky95fuPGjVWwI18fO3Ys3BUDHSIiN3Nb17poGBuq
JnPNyM7Dh7e2R002H/AqbC9NRCWVr0m2ZceOHRg1alSh4OKXX35RWR/J7j/33HMqWCmPRo0aITc3Fx988IF6PQmipk2bVugx0mygdevWqkmBjN0JCAhQzQikaYKMG3rqqadU8wLZ3qNHD5w+fVrts2SF3AXH6BARuek8R0ufvAJLnrhCrZN3MXV7CCsbPglzyxGu3hUicjNXXnmlKiWTbmq33nproXbQ0rCge/fuKjgZOHBgfrbHXm3btlWv99prr6FVq1YqgyStqgtq0qQJFixYgC1btqi20t26dcOvv/6quq8JCbQee+wxTJo0SY0buummm3Dq1Cm4E51FCgHdXGpqKiIjI5GSkoKICPvT/BKxSls+aclnMBicso9E5cXPJ3nz319vxeMSeTtP/owajUYkJCSotslSCkaeqaTfY1n/BjOjQ0REREREXoeBDhERERGRF5FStLCwsCIX21w4voDNCIiIiIiIvMg111yDLl26FPk1TytFrAgGOkREREREXkQmAA0PZ1dHlq4REREREZHXYaBDRERERF7HAxoLk5N/fwx0iIiIiMhr2MagZGZmunpXqAJsv7+KjCniGB0iIiIi8hr+/v6IiorKn7wyJCQEOp3O1btFdmRyJMiR35/8HuX3WV4MdIiIiIjIq1SvXl3d2oId8jwS5Nh+j+XFQIeIiIiIvIpkcOLj4xEXF4fc3FxX7w7ZScrVKpLJsWGgQ0REREReSU6WHXHCTJ6JzQiIiIiIiMjrMNAhIiIiIiKvw0CHiIiIiIi8jt6TJgxKTU0t1/NlEJq0qZPnV6QXN5Ez8PNJ7sz2d5cT7xXG4xJ5O35GyRuOTR4R6KSlpanb2rVru3pXiIh8kvwdjoyMdPVuuA0el4iI3P/YpLN4wGU6s9mM48ePIzw8vFwTPknUJwejI0eOICIiwin7SFRe/HySO5NDhBxIatSoAT8/Vjvb8LhE3o6fUfKGY5NHZHTkG6hVq1aFX0f+o/I/K7krfj7JXTGTcykel8hX8DNKnnxs4uU5IiIiIiLyOgx0iIiIiIjI6/hEoBMYGIjnn39e3RK5G34+iXwP/9+Tu+NnlLyBRzQjICIiIiIisodPZHSIiIiIiMi3MNAhIiIiIiKvw0CHiIiIiIi8jk8FOjKp29y5c129G0RF4ueTyDfx/z65M34+yZN5XaDz0UcfoV69eggKCkKXLl2wdu1aV+8SkfLCCy+oA0bBpVmzZq7eLSKqBDw2kbvisYm8mVcFOj/88AMmTJig2iFu3LgRbdu2xcCBA3Hq1ClX7xqR0rJlSyQlJeUvy5cvd/UuEZGT8dhE7o7HJvJWXhXovP3227j77rtx5513okWLFpg2bRpCQkLw5ZdfFvl4OejEx8dj69atlb6v5Jv0ej2qV6+ev1StWrXYx/LzSeQdeGwid8djE3krrwl0cnJysGHDBvTr1y9/m5+fn7q/atWqQo+VqYMeeughzJw5E8uWLUObNm1csMfki/bt24caNWqgQYMGGDlyJBITEy95DD+fRN6DxybyBDw2kbfSw0ucOXMGJpMJ1apVK7Rd7u/evTv/fl5eHkaNGoVNmzap1GzNmjVdsLfki6Quf8aMGWjatKkqDZg8eTJ69eqF7du3Izw8XD2Gn08i78JjE7k7HpvIm3lNoFNWjz76KAIDA7F69eoSU7NEjjZo0KD8dbkSJgeXunXrYvbs2RgzZozazs8nkW/i/31yFR6byJt5Tema/Mfz9/fHyZMnC22X+1JvatO/f38cO3YMf//9twv2kuiCqKgoNGnSBPv378/fxs8nkXfhsYk8DY9N5E28JtAJCAhAx44dsWjRovxtZrNZ3e/WrVv+tmuuuQazZs3C2LFj8f3337tob4mA9PR0HDhwQA3qtOHnk8i78NhEnobHJvImXlW6Ju07R48ejU6dOuGyyy7Du+++i4yMDNXppqBrr70WX3/9NW677TbVaeT666932T6T73j88ccxdOhQVRJw/Phx1blG
rvTecssthR7HzyeRd+GxidwZj03kzbwq0Lnppptw+vRpTJo0CSdOnEC7du0wf/78SwaBCvkPKlfV5D+sdMAZMWKES/aZfMfRo0fVgePs2bOIjY1Fz549Vb2zrF+Mn08i78FjE7kzHpvIm+ks0i+QiIiIiIjIi3jNGB0iIiIiIiIbBjpEREREROR1GOgQEREREZHXYaBDREREREReh4EOERERERF5HQY6RERERETkdRjoEBERERGR12GgQ0REREREXoeBDpGD3HHHHRg+fLird4OIiEjhcYl8HQMdIiIiIiLyOgx0iOz0008/oXXr1ggODkZMTAz69euHJ554Al999RV+/fVX6HQ6tSxevFg9/siRI7jxxhsRFRWF6OhoDBs2DIcOHbrkitvkyZMRGxuLiIgI3HfffcjJyXHhd0lERJ6CxyWioumL2U5ERUhKSsItt9yC119/Hddeey3S0tKwbNky3H777UhMTERqaiqmT5+uHisHj9zcXAwcOBDdunVTj9Pr9Xj55Zdx1VVXYevWrQgICFCPXbRoEYKCgtRBSA42d955pzpYvfLKKy7+jomIyJ3xuERUPAY6RHYeUPLy8jBixAjUrVtXbZOraEKupGVnZ6N69er5j//mm29gNpvx+eefq6tpQg44chVNDh4DBgxQ2+TA8uWXXyIkJAQtW7bEiy++qK7GvfTSS/DzY+KViIiKxuMSUfH4SSWyQ9u2bdG3b191ELnhhhvw2Wef4fz588U+fsuWLdi/fz/Cw8MRFhamFrmiZjQaceDAgUKvKwcTG7nSlp6ersoLiIiIisPjElHxmNEhsoO/vz8WLlyIlStXYsGCBfjggw/wzDPPYM2aNUU+Xg4KHTt2xLfffnvJ16TumYiIqCJ4XCIqHgMdIjtJqr9Hjx5qmTRpkioVmDNnjkrzm0ymQo/t0KEDfvjhB8TFxanBnCVdYcvKylJlBmL16tXqKlvt2rWd/v0QEZFn43GJqGgsXSOyg1whe/XVV7F+/Xo1yPOXX37B6dOn0bx5c9SrV08N5NyzZw/OnDmjBnyOHDkSVatWVR1tZNBnQkKCqoF++OGHcfTo0fzXlU42Y8aMwc6dOzFv3jw8//zzGDduHOugiYioRDwuERWPGR0iO8jVr6VLl+Ldd99VnWzkqtlbb72FQYMGoVOnTupgIbdSGvDff/+hT58+6vFPPfWUGigq3XBq1qyp6qkLXkmT+40bN0bv3r3VwFHpoPPCCy+49HslIiL3x+MSUfF0FovFUsLXicjJZL6C5ORkzJ0719W7QkRExOMSeQ3mH4mIiIiIyOsw0CEiIiIiIq/D0jUiIiIiIvI6zOgQEREREZHXYaBDREREREReh4EOERERERF5HQY6RERERETkdRjoEBERERGR12GgQ0REREREXoeBDhEREREReR0GOkRERERE5HUY6BAREREREbzN/wN9/niYWpR0IQAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 1000x500 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot_learning_curves(history, sample_step=500)  # x-axis is training steps"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.818553Z",
     "start_time": "2025-06-26T01:45:37.816716Z"
    }
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:48:40.300725Z",
     "start_time": "2025-06-26T01:48:39.548524Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(87.14, 0.35521284489631655)"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Evaluate the model on the test set\n",
    "test_accuracy = evaluate_model(model, test_loader, device, loss_fn)\n",
    "test_accuracy\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
