{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "-wLtZz7AJYjh",
        "outputId": "d21d8e58-a8f1-4a2a-bdfd-b755d0ec4a9e"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "--2024-11-09 09:00:04--  http://www.ehu.eus/ccwintco/uploads/6/67/Indian_pines_corrected.mat\n",
            "Resolving www.ehu.eus (www.ehu.eus)... 158.227.0.65, 2001:720:1410::65\n",
            "Connecting to www.ehu.eus (www.ehu.eus)|158.227.0.65|:80... connected.\n",
            "HTTP request sent, awaiting response... 301 Moved Permanently\n",
            "Location: https://www.ehu.eus/ccwintco/uploads/6/67/Indian_pines_corrected.mat [following]\n",
            "--2024-11-09 09:00:04--  https://www.ehu.eus/ccwintco/uploads/6/67/Indian_pines_corrected.mat\n",
            "Connecting to www.ehu.eus (www.ehu.eus)|158.227.0.65|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: 5953527 (5.7M)\n",
            "Saving to: ‘Indian_pines_corrected.mat’\n",
            "\n",
            "Indian_pines_correc 100%[===================>]   5.68M  6.41MB/s    in 0.9s    \n",
            "\n",
            "2024-11-09 09:00:05 (6.41 MB/s) - ‘Indian_pines_corrected.mat’ saved [5953527/5953527]\n",
            "\n",
            "URL transformed to HTTPS due to an HSTS policy\n",
            "--2024-11-09 09:00:06--  https://www.ehu.eus/ccwintco/uploads/c/c4/Indian_pines_gt.mat\n",
            "Resolving www.ehu.eus (www.ehu.eus)... 158.227.0.65, 2001:720:1410::65\n",
            "Connecting to www.ehu.eus (www.ehu.eus)|158.227.0.65|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: 1125 (1.1K)\n",
            "Saving to: ‘Indian_pines_gt.mat’\n",
            "\n",
            "Indian_pines_gt.mat 100%[===================>]   1.10K  --.-KB/s    in 0s      \n",
            "\n",
            "2024-11-09 09:00:06 (83.5 MB/s) - ‘Indian_pines_gt.mat’ saved [1125/1125]\n",
            "\n",
            "Collecting spectral\n",
            "  Downloading spectral-0.23.1-py3-none-any.whl.metadata (1.3 kB)\n",
            "Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from spectral) (1.26.4)\n",
            "Downloading spectral-0.23.1-py3-none-any.whl (212 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m212.9/212.9 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hInstalling collected packages: spectral\n",
            "Successfully installed spectral-0.23.1\n"
          ]
        }
      ],
      "source": [
        "# Download the Indian Pines dataset. Use HTTPS directly: the HTTP URLs\n",
        "# only 301-redirect here (see the captured wget output above).\n",
        "! wget https://www.ehu.eus/ccwintco/uploads/6/67/Indian_pines_corrected.mat\n",
        "! wget https://www.ehu.eus/ccwintco/uploads/c/c4/Indian_pines_gt.mat\n",
        "# %pip (rather than !pip) installs into the running kernel's environment\n",
        "%pip install spectral"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "1UcPmfUkJ_kr"
      },
      "outputs": [],
      "source": [
        "class_num = 16     # number of output classes (Indian Pines ground truth)\n",
        "windowSize = 25    # spatial size of each input patch\n",
        "K = 30             # spectral bands kept after PCA (cf. Hybrid-Spectral-Net)\n",
        "rate = 16          # squeeze ratio of the channel-attention bottleneck\n",
        "\n",
        "class HybridSN(nn.Module):\n",
        "  \"\"\"HybridSN: 3x Conv3d -> Conv2d -> SE-style channel attention -> MLP head.\n",
        "\n",
        "  Input : (batch, 1, K, windowSize, windowSize) float tensor.\n",
        "  Output: (batch, class_num) raw logits. No softmax is applied here because\n",
        "  nn.CrossEntropyLoss already includes log-softmax internally.\n",
        "  \"\"\"\n",
        "  def __init__(self):\n",
        "    super(HybridSN, self).__init__()\n",
        "    self.S = windowSize\n",
        "    self.L = K\n",
        "\n",
        "    # 3-D convolutions over (spectral, height, width); no padding, so each\n",
        "    # layer shrinks the volume by (kernel - 1) along every axis.\n",
        "    self.conv1 = nn.Conv3d(in_channels=1, out_channels=8, kernel_size=(7, 3, 3))\n",
        "    self.conv2 = nn.Conv3d(in_channels=8, out_channels=16, kernel_size=(5, 3, 3))\n",
        "    self.conv3 = nn.Conv3d(in_channels=16, out_channels=32, kernel_size=(3, 3, 3))\n",
        "\n",
        "    # Probe the output shape of the 3-D stack so the 2-D conv and the first\n",
        "    # dense layer adapt automatically if windowSize or K are changed.\n",
        "    inputX = self.get2Dinput()                       # (1, 32, 18, 19, 19) for the defaults\n",
        "    inputConv4 = inputX.shape[1] * inputX.shape[2]   # merged channels: 32*18 = 576\n",
        "    self.conv4 = nn.Conv2d(inputConv4, 64, kernel_size=(3, 3))\n",
        "\n",
        "    # SE-style attention: squeeze to 64/rate channels, then expand back to 64\n",
        "    self.sa1 = nn.Conv2d(64, 64//rate, kernel_size=1)\n",
        "    self.sa2 = nn.Conv2d(64//rate, 64, kernel_size=1)\n",
        "\n",
        "    # Fully-connected head. A 3x3 conv without padding shrinks each spatial\n",
        "    # dim by 2, so the flattened size is 64*17*17 = 18496 for the defaults\n",
        "    # (computed instead of hard-coded so it tracks windowSize/K).\n",
        "    flatSize = 64 * (inputX.shape[3] - 2) * (inputX.shape[4] - 2)\n",
        "    self.dense1 = nn.Linear(flatSize, 256)\n",
        "    self.dense2 = nn.Linear(256, 128)\n",
        "    self.dense3 = nn.Linear(128, class_num)\n",
        "\n",
        "    # Dropout: each unit is zeroed with probability p during training only;\n",
        "    # its weights are kept and simply not updated while it is dropped.\n",
        "    self.drop = nn.Dropout(p = 0.43)\n",
        "    # Kept for reference but intentionally unused in forward(): applying a\n",
        "    # softmax before CrossEntropyLoss made the loss stop decreasing.\n",
        "    self.soft = nn.Softmax(dim=1)\n",
        "\n",
        "  def get2Dinput(self):\n",
        "    \"\"\"Pass a dummy zero tensor through the 3-D convs (without recording\n",
        "    gradients) and return it, to discover the shape after conv3.\"\"\"\n",
        "    with torch.no_grad():\n",
        "      x = torch.zeros((1, 1, self.L, self.S, self.S))\n",
        "      x = self.conv1(x)\n",
        "      x = self.conv2(x)\n",
        "      x = self.conv3(x)\n",
        "    return x\n",
        "\n",
        "  def forward(self, x):\n",
        "    \"\"\"Forward pass; x is (batch, 1, K, windowSize, windowSize).\"\"\"\n",
        "    out = F.relu(self.conv1(x))\n",
        "    out = F.relu(self.conv2(out))\n",
        "    out = F.relu(self.conv3(out))\n",
        "\n",
        "    # Merge the channel and spectral axes so a 2-D conv can be applied:\n",
        "    # (N, 32, 18, 19, 19) -> (N, 576, 19, 19)\n",
        "    out = out.view(-1, out.shape[1] * out.shape[2], out.shape[3], out.shape[4])\n",
        "    out = F.relu(self.conv4(out))\n",
        "\n",
        "    # Squeeze: global average pooling down to (N, 64, 1, 1)\n",
        "    weight = F.avg_pool2d(out, out.size(2))\n",
        "\n",
        "    # Excitation: bottleneck -> ReLU -> expand -> sigmoid (weights in [0, 1])\n",
        "    weight = F.relu(self.sa1(weight))\n",
        "    weight = torch.sigmoid(self.sa2(weight))   # F.sigmoid is deprecated\n",
        "    out = out * weight\n",
        "\n",
        "    # Flatten to (N, 18496) and run the MLP head\n",
        "    out = out.view(out.size(0), -1)\n",
        "\n",
        "    out = F.relu(self.dense1(out))\n",
        "    out = self.drop(out)\n",
        "    out = F.relu(self.dense2(out))\n",
        "    out = self.drop(out)\n",
        "    out = self.dense3(out)\n",
        "\n",
        "    # Return raw logits: CrossEntropyLoss = log-softmax + NLL, so applying\n",
        "    # another softmax here would corrupt the loss.\n",
        "    return out\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "VnFksUbbKHlh"
      },
      "outputs": [],
      "source": [
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "import scipy.io as sio\n",
        "from sklearn.decomposition import PCA\n",
        "from sklearn.model_selection import train_test_split\n",
        "from sklearn.metrics import confusion_matrix, accuracy_score, classification_report, cohen_kappa_score\n",
        "import spectral\n",
        "import torch\n",
        "import torchvision\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "import torch.optim as optim"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "LOKMVsifKQM7"
      },
      "outputs": [],
      "source": [
        "# Apply a PCA transform along the spectral axis of hyperspectral cube X\n",
        "def applyPCA(X, numComponents):\n",
        "    \"\"\"Reduce X of shape (H, W, B) to (H, W, numComponents) with whitened PCA.\"\"\"\n",
        "    newX = np.reshape(X, (-1, X.shape[2]))\n",
        "    pca = PCA(n_components=numComponents, whiten=True)\n",
        "    newX = pca.fit_transform(newX)\n",
        "    newX = np.reshape(newX, (X.shape[0], X.shape[1], numComponents))\n",
        "    return newX\n",
        "\n",
        "# Patches cannot be extracted around border pixels, so zero-pad the cube first\n",
        "def padWithZeros(X, margin=2):\n",
        "    \"\"\"Zero-pad X of shape (H, W, B) by `margin` pixels on each spatial side.\"\"\"\n",
        "    newX = np.zeros((X.shape[0] + 2 * margin, X.shape[1] + 2* margin, X.shape[2]))\n",
        "    x_offset = margin\n",
        "    y_offset = margin\n",
        "    newX[x_offset:X.shape[0] + x_offset, y_offset:X.shape[1] + y_offset, :] = X\n",
        "    return newX\n",
        "\n",
        "# Extract a patch around every pixel, in a layout the downstream model expects\n",
        "def createImageCubes(X, y, windowSize=5, removeZeroLabels = True):\n",
        "    \"\"\"Return (patchesData, patchesLabels): one windowSize x windowSize patch per pixel.\n",
        "\n",
        "    With removeZeroLabels, background pixels (label 0) are dropped and the\n",
        "    remaining labels are shifted down by 1 so they start at 0.\n",
        "    \"\"\"\n",
        "    # pad X so border pixels also get full patches\n",
        "    margin = int((windowSize - 1) / 2)\n",
        "    zeroPaddedX = padWithZeros(X, margin=margin)\n",
        "    # split patches\n",
        "    patchesData = np.zeros((X.shape[0] * X.shape[1], windowSize, windowSize, X.shape[2]))\n",
        "    patchesLabels = np.zeros((X.shape[0] * X.shape[1]))\n",
        "    patchIndex = 0\n",
        "    for r in range(margin, zeroPaddedX.shape[0] - margin):\n",
        "        for c in range(margin, zeroPaddedX.shape[1] - margin):\n",
        "            patch = zeroPaddedX[r - margin:r + margin + 1, c - margin:c + margin + 1]\n",
        "            patchesData[patchIndex, :, :, :] = patch\n",
        "            # (r, c) index the padded cube, so subtract the margin for the label\n",
        "            patchesLabels[patchIndex] = y[r-margin, c-margin]\n",
        "            patchIndex = patchIndex + 1\n",
        "    if removeZeroLabels:\n",
        "        patchesData = patchesData[patchesLabels>0,:,:,:]\n",
        "        patchesLabels = patchesLabels[patchesLabels>0]\n",
        "        patchesLabels -= 1\n",
        "    return patchesData, patchesLabels\n",
        "\n",
        "def splitTrainTestSet(X, y, testRatio, randomState=345):\n",
        "    \"\"\"Stratified train/test split with a fixed random state for reproducibility.\"\"\"\n",
        "    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=testRatio, random_state=randomState, stratify=y)\n",
        "    return X_train, X_test, y_train, y_test\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "iE9R6cUbKRp0",
        "outputId": "8aa774d1-2ba8-4af6-8121-9668630d13ba"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Hyperspectral data shape:  (145, 145, 200)\n",
            "Label shape:  (145, 145)\n",
            "\n",
            "... ... PCA tranformation ... ...\n",
            "Data shape after PCA:  (145, 145, 30)\n",
            "\n",
            "... ... create data cubes ... ...\n",
            "Data cube X shape:  (10249, 25, 25, 30)\n",
            "Data cube y shape:  (10249,)\n",
            "\n",
            "... ... create train & test data ... ...\n",
            "Xtrain shape:  (1024, 25, 25, 30)\n",
            "Xtest  shape:  (9225, 25, 25, 30)\n",
            "before transpose: Xtrain shape:  (1024, 25, 25, 30, 1)\n",
            "before transpose: Xtest  shape:  (9225, 25, 25, 30, 1)\n",
            "after transpose: Xtrain shape:  (1024, 1, 30, 25, 25)\n",
            "after transpose: Xtest  shape:  (9225, 1, 30, 25, 25)\n"
          ]
        }
      ],
      "source": [
        "# Number of land-cover classes in the Indian Pines ground truth\n",
        "class_num = 16\n",
        "X = sio.loadmat('Indian_pines_corrected.mat')['indian_pines_corrected']\n",
        "y = sio.loadmat('Indian_pines_gt.mat')['indian_pines_gt']\n",
        "\n",
        "# Fraction of labelled samples held out for testing\n",
        "test_ratio = 0.90\n",
        "\n",
        "# Spatial size of the patch extracted around each pixel\n",
        "patch_size = 25\n",
        "# Number of principal components kept by the PCA reduction\n",
        "pca_components = 30\n",
        "\n",
        "print('Hyperspectral data shape: ', X.shape)\n",
        "print('Label shape: ', y.shape)\n",
        "\n",
        "print('\\n... ... PCA tranformation ... ...')\n",
        "X_pca = applyPCA(X, numComponents=pca_components)\n",
        "print('Data shape after PCA: ', X_pca.shape)\n",
        "\n",
        "print('\\n... ... create data cubes ... ...')\n",
        "X_pca, y = createImageCubes(X_pca, y, windowSize=patch_size)\n",
        "print('Data cube X shape: ', X_pca.shape)\n",
        "print('Data cube y shape: ', y.shape)\n",
        "\n",
        "print('\\n... ... create train & test data ... ...')\n",
        "Xtrain, Xtest, ytrain, ytest = splitTrainTestSet(X_pca, y, test_ratio)\n",
        "print('Xtrain shape: ', Xtrain.shape)\n",
        "print('Xtest  shape: ', Xtest.shape)\n",
        "\n",
        "# Add a trailing singleton channel axis (layout inherited from the Keras version)\n",
        "Xtrain = Xtrain.reshape(-1, patch_size, patch_size, pca_components, 1)\n",
        "Xtest  = Xtest.reshape(-1, patch_size, patch_size, pca_components, 1)\n",
        "print('before transpose: Xtrain shape: ', Xtrain.shape)\n",
        "print('before transpose: Xtest  shape: ', Xtest.shape)\n",
        "\n",
        "# Transpose to the PyTorch layout (N, channel, band, H, W) expected by Conv3d\n",
        "Xtrain = Xtrain.transpose(0, 4, 3, 1, 2)\n",
        "Xtest  = Xtest.transpose(0, 4, 3, 1, 2)\n",
        "print('after transpose: Xtrain shape: ', Xtrain.shape)\n",
        "print('after transpose: Xtest  shape: ', Xtest.shape)\n",
        "\n",
        "\n",
        "\"\"\" Training dataset\"\"\"\n",
        "class TrainDS(torch.utils.data.Dataset):\n",
        "    \"\"\"Wraps the module-level Xtrain/ytrain arrays as a torch Dataset.\"\"\"\n",
        "    def __init__(self):\n",
        "        self.len = Xtrain.shape[0]\n",
        "        self.x_data = torch.FloatTensor(Xtrain)\n",
        "        self.y_data = torch.LongTensor(ytrain)\n",
        "    def __getitem__(self, index):\n",
        "        # Return the sample and its label at the given index\n",
        "        return self.x_data[index], self.y_data[index]\n",
        "    def __len__(self):\n",
        "        # Number of samples in the dataset\n",
        "        return self.len\n",
        "\n",
        "\"\"\" Testing dataset\"\"\"\n",
        "class TestDS(torch.utils.data.Dataset):\n",
        "    \"\"\"Wraps the module-level Xtest/ytest arrays as a torch Dataset.\"\"\"\n",
        "    def __init__(self):\n",
        "        self.len = Xtest.shape[0]\n",
        "        self.x_data = torch.FloatTensor(Xtest)\n",
        "        self.y_data = torch.LongTensor(ytest)\n",
        "    def __getitem__(self, index):\n",
        "        # Return the sample and its label at the given index\n",
        "        return self.x_data[index], self.y_data[index]\n",
        "    def __len__(self):\n",
        "        # Number of samples in the dataset\n",
        "        return self.len\n",
        "\n",
        "# Build the train and test DataLoaders\n",
        "trainset = TrainDS()\n",
        "testset  = TestDS()\n",
        "train_loader = torch.utils.data.DataLoader(dataset=trainset, batch_size=128, shuffle=True, num_workers=2)\n",
        "test_loader  = torch.utils.data.DataLoader(dataset=testset,  batch_size=128, shuffle=False, num_workers=2)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "y0z1hgZKKUo8",
        "outputId": "8718ea2f-0cb5-4d69-b276-ab2d90de166b"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "[Epoch: 1]   [loss avg: 21.2164]   [current loss: 2.5170]\n",
            "[Epoch: 2]   [loss avg: 20.6201]   [current loss: 2.5016]\n",
            "[Epoch: 3]   [loss avg: 20.2622]   [current loss: 2.3412]\n",
            "[Epoch: 4]   [loss avg: 19.9118]   [current loss: 2.2952]\n",
            "[Epoch: 5]   [loss avg: 19.6088]   [current loss: 2.4777]\n",
            "[Epoch: 6]   [loss avg: 19.2350]   [current loss: 2.1339]\n",
            "[Epoch: 7]   [loss avg: 18.8081]   [current loss: 2.0650]\n",
            "[Epoch: 8]   [loss avg: 18.2922]   [current loss: 1.6383]\n",
            "[Epoch: 9]   [loss avg: 17.7322]   [current loss: 1.4445]\n",
            "[Epoch: 10]   [loss avg: 17.1411]   [current loss: 1.3012]\n",
            "[Epoch: 11]   [loss avg: 16.5059]   [current loss: 1.1375]\n",
            "[Epoch: 12]   [loss avg: 15.8654]   [current loss: 1.0186]\n",
            "[Epoch: 13]   [loss avg: 15.1858]   [current loss: 0.8005]\n",
            "[Epoch: 14]   [loss avg: 14.5252]   [current loss: 0.6921]\n",
            "[Epoch: 15]   [loss avg: 13.8774]   [current loss: 0.5441]\n",
            "[Epoch: 16]   [loss avg: 13.2445]   [current loss: 0.3835]\n",
            "[Epoch: 17]   [loss avg: 12.6371]   [current loss: 0.3206]\n",
            "[Epoch: 18]   [loss avg: 12.0793]   [current loss: 0.2365]\n",
            "[Epoch: 19]   [loss avg: 11.5729]   [current loss: 0.2215]\n",
            "[Epoch: 20]   [loss avg: 11.0926]   [current loss: 0.2498]\n",
            "[Epoch: 21]   [loss avg: 10.6367]   [current loss: 0.1803]\n",
            "[Epoch: 22]   [loss avg: 10.2166]   [current loss: 0.1278]\n",
            "[Epoch: 23]   [loss avg: 9.8260]   [current loss: 0.2018]\n",
            "[Epoch: 24]   [loss avg: 9.4665]   [current loss: 0.1437]\n",
            "[Epoch: 25]   [loss avg: 9.1244]   [current loss: 0.0355]\n",
            "[Epoch: 26]   [loss avg: 8.8029]   [current loss: 0.1145]\n",
            "[Epoch: 27]   [loss avg: 8.5000]   [current loss: 0.0473]\n",
            "[Epoch: 28]   [loss avg: 8.2250]   [current loss: 0.1588]\n",
            "[Epoch: 29]   [loss avg: 7.9590]   [current loss: 0.0467]\n",
            "[Epoch: 30]   [loss avg: 7.7127]   [current loss: 0.0235]\n",
            "[Epoch: 31]   [loss avg: 7.4773]   [current loss: 0.0208]\n",
            "[Epoch: 32]   [loss avg: 7.2583]   [current loss: 0.0854]\n",
            "[Epoch: 33]   [loss avg: 7.0527]   [current loss: 0.0210]\n",
            "[Epoch: 34]   [loss avg: 6.8587]   [current loss: 0.0327]\n",
            "[Epoch: 35]   [loss avg: 6.6759]   [current loss: 0.0209]\n",
            "[Epoch: 36]   [loss avg: 6.5031]   [current loss: 0.0711]\n",
            "[Epoch: 37]   [loss avg: 6.3358]   [current loss: 0.0973]\n",
            "[Epoch: 38]   [loss avg: 6.1777]   [current loss: 0.0576]\n",
            "[Epoch: 39]   [loss avg: 6.0264]   [current loss: 0.0421]\n",
            "[Epoch: 40]   [loss avg: 5.8852]   [current loss: 0.0573]\n",
            "[Epoch: 41]   [loss avg: 5.7502]   [current loss: 0.0410]\n",
            "[Epoch: 42]   [loss avg: 5.6189]   [current loss: 0.0568]\n",
            "[Epoch: 43]   [loss avg: 5.4952]   [current loss: 0.0314]\n",
            "[Epoch: 44]   [loss avg: 5.3743]   [current loss: 0.0162]\n",
            "[Epoch: 45]   [loss avg: 5.2582]   [current loss: 0.0293]\n",
            "[Epoch: 46]   [loss avg: 5.1540]   [current loss: 0.1266]\n",
            "[Epoch: 47]   [loss avg: 5.0523]   [current loss: 0.0247]\n",
            "[Epoch: 48]   [loss avg: 4.9544]   [current loss: 0.0217]\n",
            "[Epoch: 49]   [loss avg: 4.8587]   [current loss: 0.0275]\n",
            "[Epoch: 50]   [loss avg: 4.7654]   [current loss: 0.0194]\n",
            "[Epoch: 51]   [loss avg: 4.6767]   [current loss: 0.0109]\n",
            "[Epoch: 52]   [loss avg: 4.5901]   [current loss: 0.0245]\n",
            "[Epoch: 53]   [loss avg: 4.5058]   [current loss: 0.0033]\n",
            "[Epoch: 54]   [loss avg: 4.4252]   [current loss: 0.0547]\n",
            "[Epoch: 55]   [loss avg: 4.3480]   [current loss: 0.0191]\n",
            "[Epoch: 56]   [loss avg: 4.2717]   [current loss: 0.0068]\n",
            "[Epoch: 57]   [loss avg: 4.2000]   [current loss: 0.0134]\n",
            "[Epoch: 58]   [loss avg: 4.1294]   [current loss: 0.0106]\n",
            "[Epoch: 59]   [loss avg: 4.0617]   [current loss: 0.0372]\n",
            "[Epoch: 60]   [loss avg: 3.9963]   [current loss: 0.0021]\n",
            "[Epoch: 61]   [loss avg: 3.9342]   [current loss: 0.0014]\n",
            "[Epoch: 62]   [loss avg: 3.8748]   [current loss: 0.0429]\n",
            "[Epoch: 63]   [loss avg: 3.8161]   [current loss: 0.0056]\n",
            "[Epoch: 64]   [loss avg: 3.7595]   [current loss: 0.0069]\n",
            "[Epoch: 65]   [loss avg: 3.7041]   [current loss: 0.0162]\n",
            "[Epoch: 66]   [loss avg: 3.6499]   [current loss: 0.0121]\n",
            "[Epoch: 67]   [loss avg: 3.5976]   [current loss: 0.0088]\n",
            "[Epoch: 68]   [loss avg: 3.5465]   [current loss: 0.0338]\n",
            "[Epoch: 69]   [loss avg: 3.4975]   [current loss: 0.0163]\n",
            "[Epoch: 70]   [loss avg: 3.4510]   [current loss: 0.0174]\n",
            "[Epoch: 71]   [loss avg: 3.4041]   [current loss: 0.0228]\n",
            "[Epoch: 72]   [loss avg: 3.3590]   [current loss: 0.0187]\n",
            "[Epoch: 73]   [loss avg: 3.3151]   [current loss: 0.0310]\n",
            "[Epoch: 74]   [loss avg: 3.2715]   [current loss: 0.0010]\n",
            "[Epoch: 75]   [loss avg: 3.2293]   [current loss: 0.0054]\n",
            "[Epoch: 76]   [loss avg: 3.1880]   [current loss: 0.0227]\n",
            "[Epoch: 77]   [loss avg: 3.1479]   [current loss: 0.0128]\n",
            "[Epoch: 78]   [loss avg: 3.1091]   [current loss: 0.0009]\n",
            "[Epoch: 79]   [loss avg: 3.0705]   [current loss: 0.0022]\n",
            "[Epoch: 80]   [loss avg: 3.0332]   [current loss: 0.0482]\n",
            "[Epoch: 81]   [loss avg: 2.9966]   [current loss: 0.0345]\n",
            "[Epoch: 82]   [loss avg: 2.9609]   [current loss: 0.0364]\n",
            "[Epoch: 83]   [loss avg: 2.9273]   [current loss: 0.0363]\n",
            "[Epoch: 84]   [loss avg: 2.8937]   [current loss: 0.0106]\n",
            "[Epoch: 85]   [loss avg: 2.8631]   [current loss: 0.0903]\n",
            "[Epoch: 86]   [loss avg: 2.8314]   [current loss: 0.0168]\n",
            "[Epoch: 87]   [loss avg: 2.8005]   [current loss: 0.0038]\n",
            "[Epoch: 88]   [loss avg: 2.7706]   [current loss: 0.0355]\n",
            "[Epoch: 89]   [loss avg: 2.7403]   [current loss: 0.0061]\n",
            "[Epoch: 90]   [loss avg: 2.7114]   [current loss: 0.0308]\n",
            "[Epoch: 91]   [loss avg: 2.6821]   [current loss: 0.0024]\n",
            "[Epoch: 92]   [loss avg: 2.6549]   [current loss: 0.0037]\n",
            "[Epoch: 93]   [loss avg: 2.6276]   [current loss: 0.0015]\n",
            "[Epoch: 94]   [loss avg: 2.6009]   [current loss: 0.0145]\n",
            "[Epoch: 95]   [loss avg: 2.5743]   [current loss: 0.0157]\n",
            "[Epoch: 96]   [loss avg: 2.5485]   [current loss: 0.0126]\n",
            "[Epoch: 97]   [loss avg: 2.5244]   [current loss: 0.0044]\n",
            "[Epoch: 98]   [loss avg: 2.5001]   [current loss: 0.0561]\n",
            "[Epoch: 99]   [loss avg: 2.4758]   [current loss: 0.0132]\n",
            "[Epoch: 100]   [loss avg: 2.4521]   [current loss: 0.0026]\n",
            "[Epoch: 101]   [loss avg: 2.4285]   [current loss: 0.0018]\n",
            "[Epoch: 102]   [loss avg: 2.4056]   [current loss: 0.0251]\n",
            "[Epoch: 103]   [loss avg: 2.3843]   [current loss: 0.0016]\n",
            "[Epoch: 104]   [loss avg: 2.3618]   [current loss: 0.0026]\n",
            "[Epoch: 105]   [loss avg: 2.3401]   [current loss: 0.0036]\n",
            "[Epoch: 106]   [loss avg: 2.3185]   [current loss: 0.0079]\n",
            "[Epoch: 107]   [loss avg: 2.2984]   [current loss: 0.0844]\n",
            "[Epoch: 108]   [loss avg: 2.2775]   [current loss: 0.0032]\n",
            "[Epoch: 109]   [loss avg: 2.2570]   [current loss: 0.0031]\n",
            "[Epoch: 110]   [loss avg: 2.2376]   [current loss: 0.0054]\n",
            "[Epoch: 111]   [loss avg: 2.2180]   [current loss: 0.0024]\n",
            "[Epoch: 112]   [loss avg: 2.1999]   [current loss: 0.0140]\n",
            "[Epoch: 113]   [loss avg: 2.1810]   [current loss: 0.0100]\n",
            "[Epoch: 114]   [loss avg: 2.1628]   [current loss: 0.0001]\n",
            "[Epoch: 115]   [loss avg: 2.1450]   [current loss: 0.0015]\n",
            "[Epoch: 116]   [loss avg: 2.1270]   [current loss: 0.0098]\n",
            "[Epoch: 117]   [loss avg: 2.1093]   [current loss: 0.0117]\n",
            "[Epoch: 118]   [loss avg: 2.0920]   [current loss: 0.0079]\n",
            "[Epoch: 119]   [loss avg: 2.0747]   [current loss: 0.0005]\n",
            "[Epoch: 120]   [loss avg: 2.0581]   [current loss: 0.0047]\n",
            "[Epoch: 121]   [loss avg: 2.0413]   [current loss: 0.0038]\n",
            "[Epoch: 122]   [loss avg: 2.0249]   [current loss: 0.0140]\n",
            "[Epoch: 123]   [loss avg: 2.0089]   [current loss: 0.0196]\n",
            "[Epoch: 124]   [loss avg: 1.9932]   [current loss: 0.0506]\n",
            "[Epoch: 125]   [loss avg: 1.9779]   [current loss: 0.0014]\n",
            "[Epoch: 126]   [loss avg: 1.9624]   [current loss: 0.0001]\n",
            "[Epoch: 127]   [loss avg: 1.9473]   [current loss: 0.0047]\n",
            "[Epoch: 128]   [loss avg: 1.9336]   [current loss: 0.0007]\n",
            "[Epoch: 129]   [loss avg: 1.9187]   [current loss: 0.0106]\n",
            "[Epoch: 130]   [loss avg: 1.9046]   [current loss: 0.0054]\n",
            "[Epoch: 131]   [loss avg: 1.8905]   [current loss: 0.0477]\n",
            "[Epoch: 132]   [loss avg: 1.8768]   [current loss: 0.0012]\n",
            "[Epoch: 133]   [loss avg: 1.8629]   [current loss: 0.0046]\n",
            "[Epoch: 134]   [loss avg: 1.8495]   [current loss: 0.0008]\n",
            "[Epoch: 135]   [loss avg: 1.8366]   [current loss: 0.0336]\n",
            "[Epoch: 136]   [loss avg: 1.8259]   [current loss: 0.2747]\n",
            "[Epoch: 137]   [loss avg: 1.8147]   [current loss: 0.0070]\n",
            "[Epoch: 138]   [loss avg: 1.8026]   [current loss: 0.0068]\n",
            "[Epoch: 139]   [loss avg: 1.7905]   [current loss: 0.0084]\n",
            "[Epoch: 140]   [loss avg: 1.7781]   [current loss: 0.0109]\n",
            "[Epoch: 141]   [loss avg: 1.7665]   [current loss: 0.0008]\n",
            "[Epoch: 142]   [loss avg: 1.7545]   [current loss: 0.0026]\n",
            "[Epoch: 143]   [loss avg: 1.7426]   [current loss: 0.0141]\n",
            "[Epoch: 144]   [loss avg: 1.7310]   [current loss: 0.0055]\n",
            "[Epoch: 145]   [loss avg: 1.7196]   [current loss: 0.0530]\n",
            "[Epoch: 146]   [loss avg: 1.7082]   [current loss: 0.0028]\n",
            "[Epoch: 147]   [loss avg: 1.6970]   [current loss: 0.0003]\n",
            "[Epoch: 148]   [loss avg: 1.6860]   [current loss: 0.0005]\n",
            "[Epoch: 149]   [loss avg: 1.6751]   [current loss: 0.0019]\n",
            "[Epoch: 150]   [loss avg: 1.6642]   [current loss: 0.0012]\n",
            "[Epoch: 151]   [loss avg: 1.6534]   [current loss: 0.0104]\n",
            "[Epoch: 152]   [loss avg: 1.6430]   [current loss: 0.0037]\n",
            "[Epoch: 153]   [loss avg: 1.6326]   [current loss: 0.0097]\n",
            "[Epoch: 154]   [loss avg: 1.6224]   [current loss: 0.0153]\n",
            "[Epoch: 155]   [loss avg: 1.6122]   [current loss: 0.0099]\n",
            "[Epoch: 156]   [loss avg: 1.6024]   [current loss: 0.0025]\n",
            "[Epoch: 157]   [loss avg: 1.5929]   [current loss: 0.0056]\n",
            "[Epoch: 158]   [loss avg: 1.5830]   [current loss: 0.0062]\n",
            "[Epoch: 159]   [loss avg: 1.5732]   [current loss: 0.0052]\n",
            "[Epoch: 160]   [loss avg: 1.5634]   [current loss: 0.0017]\n",
            "[Epoch: 161]   [loss avg: 1.5540]   [current loss: 0.0022]\n",
            "[Epoch: 162]   [loss avg: 1.5447]   [current loss: 0.0027]\n",
            "[Epoch: 163]   [loss avg: 1.5354]   [current loss: 0.0060]\n",
            "[Epoch: 164]   [loss avg: 1.5263]   [current loss: 0.0011]\n",
            "[Epoch: 165]   [loss avg: 1.5174]   [current loss: 0.0184]\n",
            "[Epoch: 166]   [loss avg: 1.5084]   [current loss: 0.0036]\n",
            "[Epoch: 167]   [loss avg: 1.4997]   [current loss: 0.0042]\n",
            "[Epoch: 168]   [loss avg: 1.4909]   [current loss: 0.0002]\n",
            "[Epoch: 169]   [loss avg: 1.4830]   [current loss: 0.0001]\n",
            "[Epoch: 170]   [loss avg: 1.4753]   [current loss: 0.0168]\n",
            "[Epoch: 171]   [loss avg: 1.4671]   [current loss: 0.0070]\n",
            "[Epoch: 172]   [loss avg: 1.4590]   [current loss: 0.0084]\n",
            "[Epoch: 173]   [loss avg: 1.4507]   [current loss: 0.0009]\n",
            "[Epoch: 174]   [loss avg: 1.4429]   [current loss: 0.0004]\n",
            "[Epoch: 175]   [loss avg: 1.4354]   [current loss: 0.0054]\n",
            "[Epoch: 176]   [loss avg: 1.4277]   [current loss: 0.0035]\n",
            "[Epoch: 177]   [loss avg: 1.4198]   [current loss: 0.0024]\n",
            "[Epoch: 178]   [loss avg: 1.4121]   [current loss: 0.0002]\n",
            "[Epoch: 179]   [loss avg: 1.4043]   [current loss: 0.0005]\n",
            "[Epoch: 180]   [loss avg: 1.3969]   [current loss: 0.0012]\n",
            "[Epoch: 181]   [loss avg: 1.3894]   [current loss: 0.0008]\n",
            "[Epoch: 182]   [loss avg: 1.3818]   [current loss: 0.0002]\n",
            "[Epoch: 183]   [loss avg: 1.3746]   [current loss: 0.0005]\n",
            "[Epoch: 184]   [loss avg: 1.3672]   [current loss: 0.0028]\n",
            "[Epoch: 185]   [loss avg: 1.3599]   [current loss: 0.0027]\n",
            "[Epoch: 186]   [loss avg: 1.3528]   [current loss: 0.0008]\n",
            "[Epoch: 187]   [loss avg: 1.3457]   [current loss: 0.0012]\n",
            "[Epoch: 188]   [loss avg: 1.3386]   [current loss: 0.0004]\n",
            "[Epoch: 189]   [loss avg: 1.3316]   [current loss: 0.0007]\n",
            "[Epoch: 190]   [loss avg: 1.3248]   [current loss: 0.0012]\n",
            "[Epoch: 191]   [loss avg: 1.3179]   [current loss: 0.0032]\n",
            "[Epoch: 192]   [loss avg: 1.3111]   [current loss: 0.0009]\n",
            "[Epoch: 193]   [loss avg: 1.3045]   [current loss: 0.0005]\n",
            "[Epoch: 194]   [loss avg: 1.2980]   [current loss: 0.0005]\n",
            "[Epoch: 195]   [loss avg: 1.2913]   [current loss: 0.0002]\n",
            "[Epoch: 196]   [loss avg: 1.2850]   [current loss: 0.0000]\n",
            "[Epoch: 197]   [loss avg: 1.2786]   [current loss: 0.0121]\n",
            "[Epoch: 198]   [loss avg: 1.2729]   [current loss: 0.0326]\n",
            "[Epoch: 199]   [loss avg: 1.2666]   [current loss: 0.0082]\n",
            "[Epoch: 200]   [loss avg: 1.2605]   [current loss: 0.0005]\n",
            "Finished Training\n"
          ]
        }
      ],
      "source": [
        "# Train on GPU if available (Colab: Runtime -> Change runtime type)\n",
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
        "\n",
        "# Move the network to the selected device\n",
        "net = HybridSN().to(device)\n",
        "# CrossEntropyLoss includes log-softmax, so the net outputs raw logits\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "optimizer = optim.Adam(net.parameters(), lr=0.00037)\n",
        "\n",
        "# Training loop\n",
        "net.train()\n",
        "total_loss = 0\n",
        "for epoch in range(200):\n",
        "    # FIX: the original printed total_loss/(epoch+1) as 'loss avg', i.e. the\n",
        "    # cumulative loss over ALL epochs divided by the epoch index -- not an\n",
        "    # average batch loss. Track a true per-epoch average instead.\n",
        "    epoch_loss = 0.0\n",
        "    for i, (inputs, labels) in enumerate(train_loader):\n",
        "        inputs = inputs.to(device)\n",
        "        labels = labels.to(device)\n",
        "\n",
        "        # Zero the accumulated gradients\n",
        "        optimizer.zero_grad()\n",
        "        # Forward pass + backward pass + optimizer step\n",
        "        outputs = net(inputs)\n",
        "\n",
        "        loss = criterion(outputs, labels)\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "        epoch_loss += loss.item()\n",
        "        total_loss += loss.item()   # kept in case later cells read it\n",
        "    print('[Epoch: %d]   [loss avg: %.4f]   [current loss: %.4f]' %(epoch + 1, epoch_loss/len(train_loader), loss.item()))\n",
        "\n",
        "print('Finished Training')\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "mUVHbmjYLM1G",
        "outputId": "eb64048b-94a0-480b-d4a1-c97cf5c35f3c"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "              precision    recall  f1-score   support\n",
            "\n",
            "         0.0     1.0000    1.0000    1.0000        41\n",
            "         1.0     0.9863    0.9510    0.9683      1285\n",
            "         2.0     0.9946    0.9866    0.9906       747\n",
            "         3.0     0.9953    1.0000    0.9977       213\n",
            "         4.0     0.9886    0.9931    0.9908       435\n",
            "         5.0     0.9819    0.9909    0.9864       657\n",
            "         6.0     1.0000    1.0000    1.0000        25\n",
            "         7.0     1.0000    1.0000    1.0000       430\n",
            "         8.0     0.9000    1.0000    0.9474        18\n",
            "         9.0     0.9942    0.9863    0.9902       875\n",
            "        10.0     0.9704    0.9946    0.9823      2210\n",
            "        11.0     0.9757    0.9794    0.9776       534\n",
            "        12.0     1.0000    0.9730    0.9863       185\n",
            "        13.0     0.9956    0.9965    0.9961      1139\n",
            "        14.0     0.9858    0.9971    0.9914       347\n",
            "        15.0     0.9467    0.8452    0.8931        84\n",
            "\n",
            "    accuracy                         0.9848      9225\n",
            "   macro avg     0.9822    0.9809    0.9811      9225\n",
            "weighted avg     0.9849    0.9848    0.9848      9225\n",
            "\n"
          ]
        }
      ],
      "source": [
        "count = 0\n",
        "net.eval()\n",
        "# 模型测试\n",
        "for inputs, _ in test_loader:\n",
        "    inputs = inputs.to(device)\n",
        "    outputs = net(inputs)\n",
        "    outputs = np.argmax(outputs.detach().cpu().numpy(), axis=1)\n",
        "    if count == 0:\n",
        "        y_pred_test =  outputs\n",
        "        count = 1\n",
        "    else:\n",
        "        y_pred_test = np.concatenate( (y_pred_test, outputs) )\n",
        "\n",
        "# 生成分类报告\n",
        "classification = classification_report(ytest, y_pred_test, digits=4)\n",
        "print(classification)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "_QN2aQ2TLU7s",
        "outputId": "f502b693-ff80-40ba-91b4-af568616db83"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "<ipython-input-9-935d004c0cb5>:23: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)\n",
            "  outputs[i][j] = prediction+1\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "... ... row  0  handling ... ...\n",
            "... ... row  20  handling ... ...\n",
            "... ... row  40  handling ... ...\n",
            "... ... row  60  handling ... ...\n",
            "... ... row  80  handling ... ...\n",
            "... ... row  100  handling ... ...\n",
            "... ... row  120  handling ... ...\n",
            "... ... row  140  handling ... ...\n"
          ]
        }
      ],
      "source": [
        "# load the original image\n",
        "X = sio.loadmat('Indian_pines_corrected.mat')['indian_pines_corrected']\n",
        "y = sio.loadmat('Indian_pines_gt.mat')['indian_pines_gt']\n",
        "\n",
        "height = y.shape[0]\n",
        "width = y.shape[1]\n",
        "\n",
        "X = applyPCA(X, numComponents= pca_components)\n",
        "X = padWithZeros(X, patch_size//2)\n",
        "\n",
        "# 逐像素预测类别\n",
        "outputs = np.zeros((height,width))\n",
        "for i in range(height):\n",
        "    for j in range(width):\n",
        "        if int(y[i,j]) == 0:\n",
        "            continue\n",
        "        else :\n",
        "            image_patch = X[i:i+patch_size, j:j+patch_size, :]\n",
        "            image_patch = image_patch.reshape(1,image_patch.shape[0],image_patch.shape[1], image_patch.shape[2], 1)\n",
        "            X_test_image = torch.FloatTensor(image_patch.transpose(0, 4, 3, 1, 2)).to(device)\n",
        "            prediction = net(X_test_image)\n",
        "            prediction = np.argmax(prediction.detach().cpu().numpy(), axis=1)\n",
        "            outputs[i][j] = prediction+1\n",
        "    if i % 20 == 0:\n",
        "        print('... ... row ', i, ' handling ... ...')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 504
        },
        "id": "YMaPhMJyLZwt",
        "outputId": "98dee306-ef1b-435c-e54b-abe6d44d5b5c"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.10/dist-packages/spectral/graphics/spypylab.py:796: UserWarning: Failed to create RectangleSelector object. Interactive pixel class labeling will be unavailable.\n",
            "  warnings.warn(msg)\n"
          ]
        },
        {
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAboAAAGwCAYAAADfbKDjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABKu0lEQVR4nO3de5QU5YE+/udFYCCEmcmQZS4JRDbHs+AlCqJdRM9+TZwT1NZIZJPFM1HW+DYbARXJUUM2Q46MyQQ3mxANl9iVY/QsxMSzgRjmBH8EEokJ1eCMJPESJCdEic7AZgkzgmFE5v39Ud013TN9qe6u6rr08/HUyHRXV781l3rmfeu9CKWUAhERUUiN8boAREREbmLQERFRqDHoiIgo1Bh0REQUagw6IiIKNQYdERGFGoOOiIhCjUFHREShxqAjIqJQY9AREVGoeRZ069evx7nnnosJEyYgEolg3759XhWFiIhCTHgx1+UPf/hD3Hrrrdi0aRMikQjWrVuHp556CgcPHsTUqVMLvn5oaAhvvvkmJk+eDCFEBUpMRER+oZTCW2+9hZaWFowZY6O+pjxw+eWXq2XLllmfnz17VrW0tKjOzk5brz9y5IgCwI0bN27cqng7cuSIrcwYiwp755130N3djVWrVlmPjRkzBq2trdi7d2/W1wwODmJwcND6XFmV0CMAal0srcv66zI/T37aD6DO+jD6Nf111q5UJXbdCqy9+BPAM/O9LkrFzcczkP/fU6gb8VPfj348jzvx+K4nPCqZi67ehUf6r0Ydf9Hzmjx5sq39Kh50f/3rX3H27Fk0NjZmPN7Y2Ig//OEPWV/T2dmJBx54IMsztQh00CWLrpKtryL94dq0HUa8JsBnTCWaNB4YN3EcMG6i10WpuIkYh9osP/W1qMUkjMf4SR4UynWTUMtf9ILs3roKRK/LVatWob+/39qOHDnidZEco4QZcLzTSLZFuzL/n+t5N9/b62MQFaHiQff+978f55xzDo4ePZrx+NGjR9HU1JT1NTU1Naitrc3YAkuJ4a1Sb5lnowDqimb+P9fzpSgUQl1RBlWZpCa9LkLVqXjT5fjx43HppZdi165dWLBgAQCzF+WuXbuwfPnyShfHMyObK4Hh4BEOJpBSQCKW/TktrswSsDpJKXZCspwgTb0+2lX+cQJKN3Svi1B1Kh50ALBy5UosXrwYc+fOxeWXX45169bh1KlTuO2227wojieyZYuTAZeSiAF6jj8gDRFDRDHnyANVFHJSkww3j3kSdP/6r/+K//3f/8Xq1avR19eHSy65BDt27BjVQSVU0poqVZZKVLJu5U7aAdB1Y/SDhgYNKm8zarayEpF9DDnveRJ0ALB8+fKqaqoEsnc8UTCzzaswMUQMOrKEIMxfUIE4GHXkmipuwqTK8SzoqsKImlLeuHCpJpePrhs5mzUBwNCAiBLg5DM+5VZIuHHcXMdkyFEFMOgqIFvHkyDQDR2IASpX/8xkFTVo5xUUKvlRKAAiBmzozdzBrZBw47gMNPIQg85lQb/HpUtAlznOQBjQEUGwz9DHFBBLCEQ3RtGxwevCEAUXg84NFRwjV4gWVzBEDBIl3BBPa9dM3VBP79RiQEdcaaNyzj9nH3y6biDa2+N1MYgCjUHntGTIFWqudGPMXHYCEQfeQ4MZmOn39HRDhy4MSKll7Kvi4H09t7ETB5FtDDqXFLrOV6zviYNNpzqy9FwxtFF1RW14sAS5hSFHZFuwg66/LvsMxx70YCw0Ti5jVzg7Zm7kUbKN0Sv57VIHUwIyBuhSwtCStbksY/Ok1GDERDLsshVWBP6+ZcWx9kZUlmAH3cglLDyevLHQBM2ujJnLdc5i9PNGDNCk/Xn2DF2HlkplAUQAIK5DUwoykeeeny4hkchRLgPIcl+P8sgXcgzBrPw4G4kfy1Qtgh10IyiRrLVYN8hcTr5ixskhWRy3ypQ6rvVFyPwaCAVAxmBoWV
+dlWZIQAwPGBfJDwpALPsY84Jj86TUINi06ZxsIcfw82Wg+LFM1SIQy/QUo8ILA9h6Ty9XClBKICEFjJjwz6zpiRhUnv+8rpkHXpWHHNFIoarReVFHsHU/rhIXbqsmN4JQ0HM1I3pEQkcsx1+3UpNQQkGwxkdEDglV0I2SXs1ydO2bEi7CbqddgeOXNI7OBVknl05naNAi8bxf4rBHoPWdFAqQ/vojhSiIwht0wt2xalwZ3D1SJCAMCUPPEc56eO/xmS0AClIkAEMg2hP1tiky1/2+1OO8H0gBEN6gg3kpdKseFc7LbHmk1ArX2PLsbw08NzRI5F5HT+nh/foLZa4o0bGhF+jxQYAUWsXcqZBjYJKLQh10QNoFsZhmzAJNk74cB+aDaceKCbls+9t5vZQaElIka3VpfPlNKZ4SQELqaF+anvLZpwDTpER0Tof9gHAyTIo5lp19GXK2mG0ZlemtFaY75aEPOgBZx5TZfk1xTw1TRexbBvP6npzn3qOwK7YmVzZdwkAsY6YWHSoUv5gCyRqrsnNPNY7o9UCX3YM7GSbFHKvQvqzNFUGZP/tuD1XQjFANA6qOoEsaNc7OrfeBu6uFj37DHD0uK6SSIZdtnJ45G0vFiuA6YX3ITykB0WyYtTq7/BgofiyTTwkFbLi+F70uN2s3GxpUJDxz1lZV0AnYz7hSv78cAkaVIgAgHkF7LH+dTpMSErrrF0civ6qqoAPcrYhXbMwcEWBV/QpNdqN0HcKQiKK3wJ5E4VR1QecmV6f4Itfl+84FuQVHAEAkji49ATTn33dDb4fZkhiS5sRc80ty3snqwqBzAPMtDBQSUuSc9FpF9IDfrxBQcVt7AbEEojl6ejqmQh1QcoUZQ666MOjKxJALB6UEEkJCyuwXwETQO7vYHX2hBKQEejs2uFuekNQYAdYOg4BBF3Bhay6VUoPUJLQiOnLqkIjr8dIGFyS/fgKANETONfY0KaHiOkSur3VIxvEBgNQBdGWv0emGjjkdUXQhPEFVLoac/zHoypTRFdyjvPFq/JxbNGUuDVTcaZVwsVFIDkuIAQLIN0pCQkdCmOP3somEZSLq5LqDucbxKQCiuR3RXpebNsuUqmWxtkUAgy480tejC7hKziOaMS4vx5xjqVqeLrMPjpdyeCLq4H/184/jE8o836KGKhTTTOnQvbtUuNkJOYZh+DHoyLeS872YU35locWTQ/MrmC65BsdLkYBQKuffGWlL1waaeX65vycZdGnO2VkMD+7dMeTCj0FHvmVNcNybo2NELAEVr2x85JzuzNAgE7lXXkdEhSHnhgPbTjaoODZcf739KcqIXMKgc5A5E5dLa+D5mJtzXeqGju05mhQ39naZ3eFHVKPsLIYLINmxJPt6b7nOye5jI49lxAS00UW1hCADR1ECiBld6NLb8+5njd0DKlajY3NldWHQOSh1byNkHSELquiEzgCu752D7c09Zs+/WALpQ9+kJguv46MAkZCQWgIwtKy9UBw/J10CEhA5Dhv8cXrZCMQjQDxSYO4WodDTLio6RRlDrrow6FzgyfUqBJ1QrPXoCtje3JP2/56MTiTXR+egvcCYNwEFCQFdyvxdLR1idXjJcW2VUgvVTPEptienhgA0A9jQYXZGGcnjtk/W/oKPQRdw6cv0jHhwlGJ/WQ1NIqJERWoahq5bs5LoClDJsW26nkB8REKkanS56LoBhYgV/kIo/49zS8SgFDLH6fm9zA7SVQSGyD4/mQ7p6ZeBIRd8DLqAy9qbL8dVYU5HtKhbIJqUUCKe+4BJ5d6jSw3IhhYHlIAR07ExGkV0I4De6Kj5GfOFHAD0bujAxi4A10fR094FAzFoPluvbtTq6tnG6cWSY9oqX7yKMlcVETmb+5XQSxklSWRh0LmpmObEAN/Uc+p+loJZe9QMiWjHHGzv6QEKhNpI25t7gJ5moNms+UU7zL/IjZiefA8gERO5hsxVTPrXLH2cXgYJIF7gfmNI5B27V+C1hZ
oW2fRIDDqXFMq4SuWaUMmGTaGgbQeMmP2rZld8O0Q8GM1ndpozNRUBYI55g837gV4yNAASiJh3sRCIb4QHCoUYQ44YdC4pdEmq2KLgIq15U6Dg2mUZ9OBcWAs1Z6bGuZn/dr04jrBm95ACSlOMOaISMeg8lBF22aqARSahK8MafHx1HVmLS//8+t45QLQLPXO6zCEHMIOj0kMhypEqq5RacvJub8vjO0pAS0hIGzOAS+ie1OzYrOoPVRd0frkTJkb9Y1hJgZUcG2Y4eDHUIaH7uDvEyFrc9uYeK+y2N/dgYxew9A5ldmRQgEzwghIqAoDSoSP/QntKCXPcZGVKlYHNqv5QVUFnzYjhYdoVNdytyM4s0hDoiDq3jlgUHVBd212LObvj5oqRHn5Lm9utnpYKQEKTnndCIeeIzA95dzSknnPS7hSGTnhVVdAFhdnd2sEDVmg155JohtnVPh4HlG4GkgA6SijuyKbMDb0dEAkJhbjZ09IofLGjcIro+SeiNpuJGXRhxaDzSLZKpcjx77L5NOSkJhFRmjltWlwgJgxI6OiJFjnjfdLIpswuRBHt6cLG6PVAbxR6gTkXKZxSnbFyLLFntvBI3ZOmTaoMBl2F5VuguhqJVC8LocyB3VIiOqej5Gmfru+dk/lAV9Q8VLQLKHOtUDcnryb35foVUwJI2GjaTMdmzmBh0PlIoVoe2dPcvhRSN5uj5kQ7zMmfHcCQCycBALqy1VEtIQWbvwNojNMH7OzsxGWXXYbJkydj6tSpWLBgAQ4ePJixz+nTp7Fs2TJMmTIF733ve7Fw4UIcPXrU6aJQlZI6oOmAjgiWNrO5kuwQtv6LxM3eyBQsjtfonn32WSxbtgyXXXYZ3n33XXzpS1/CJz7xCbz88suYNGkSAOCee+5BV1cXnnrqKdTV1WH58uW46aab8Otf/9rp4gRGtiZNlVryx4k38HOHlBwKzXaSvh+iXeYpNs+BGW0qhOsBkNeEUIARyzt2z80xewoCiS6JnvbSjq/rRub6f1XC8aDbsWNHxuff//73MXXqVHR3d+Of//mf0d/fj+9973vYsmULPv7xjwMAHnvsMcyaNQuGYUDT/D81U6WNzMCSLt4B/Mm2E3IpzXO6cMdShTtSY6akgNCN5P2+4J07+ZOCgIgAeraeLcL84yrhZu9NAWiImxOgF0kpAR060OFCuXzO9Xt0/f39AICGhgYAQHd3N86cOYPW1lZrn5kzZ2L69OnYu3dv1qAbHBzE4OCg9fnAwIDLpfYH12t5HsgYO+fgvQ5z9pPkfGcG0NuxAVH0uB5y7KBSXfIP2zN/OXVh2J5KtdiaXzlzngoBQMUBXG8t+1ctfwO6GnRDQ0NYsWIFrrjiClx44YUAgL6+PowfPx719fUZ+zY2NqKvry/rcTo7O/HAAw+4WVTfcGwOTD83VSbHzkHqUHrcnNUFKCu9NSmBeDzZezWG9q4e6IZuLU1kNms6U/x0DDlKJwBARSBjhZNO181ArGQPTgUB0S7NqccksKGrOpoxXQ26ZcuW4cUXX8Rzzz1X1nFWrVqFlStXWp8PDAxg2rRp5RbPlxybtcXHP726iiCeDHQDAhAS0hDmYt/pq4XbvEcHANHeHmBjBzZGAUPvgpacR00iga5md0KOaBQB2Jl+2/zZ1yreg1MIQEGHQBxST8ChDsm+51rQLV++HNu3b8eePXvwwQ9+0Hq8qakJ77zzDk6cOJFRqzt69CiampqyHqumpgY1NTVuFZU8kJr9RRgSgMz6C58+d2U+1n49zbgeczAHXdYb6InMYxdqamRTJJXLVsOEADSlYOgxa9LxdDrSxpg6zJwSTyAWlGU8HOB40CmlcOedd2Lr1q345S9/iRkzZmQ8f+mll2LcuHHYtWsXFi5cCAA4ePAgXn/9dcybN8/p4lCAWL3B0mqjdmt0dvcrFGIMOaocgUiOFpwIdARkKchAcDzoli1bhi1btuAnP/kJJk+ebN13q6urw8SJE1FXV4
fbb78dK1euRENDA2pra3HnnXdi3rx57HFZxTb0Jgd2lzgjClHgMMgqxvGg27hxIwDgqquuynj8sccew7/9278BAL71rW9hzJgxWLhwIQYHBzF//nxs2ODcrPtkk0cdVoyYgKbMphmpJTJaLYsZUpCSMe1XtAvaHSpQi8YSeUFKDboEst6+1gwIHy/RVSxXmi4LmTBhAtavX4/169c7/fZUSHq4ZQu5SoSfLiF1CSABGBqQo7mwmM4oqQHjS3vazXt/zpWWKHQEAMSRd/LdMP0eca7LalMoxFwKufTxc2Z3aj3zueS6POm1s2Jrd0ub281j6X5ZXtceN9blIyoo35ieEIUcwKDzHcfG0fmM1CQ0AzD07GOGtPZmSG2OtUyPUxMxB0ZqbGEOOrOQqGQMOp8JY8gBgGYAiOiI5LhgK92s5QkFICERrbJpilJjC3ML09/XRJXFoPOZMK1Ll15DkToQ0WxcrgVgSB0d0SiuT3882aRaSmeVoAjRt57IVxh0PhWWi57UgYiemivC3llpuho1k7VcmkDvhg6gh1OcEFFxHF+PjihFahKRuLnSV3HRLcy5itK2OCJuFZOIQo41umpVzjCC5NRassBctJqB5E1HB+qnSqA5AYie4aV3RjZjbujtwFLdQPELmHiDvS2JKoNBV62sKf1LCDsBQOnZ1+QaxaGZ2QWgQ0EpgYSQqdEIw7qi6AIQndMBobYHpu03Nc9hHIEpMlHgVGXQhanDR1bpAZYvzEqs0ZW7pE7J7wlhrqllIKNXZnrN7vquKIwegfS6kts1p2Lmx8xWFh2SDbNELqqqoDNn7fa/ojJEDS/EqseM0YuN+ni5Hrep5PQPiUJtrCXSpIQso8aabdZ6InJeVQUdEL7mISUUYkhAGgJzeqNVNdC60BRhqVqgrRbWUuhxABIoMq8YcESVJZSdySl9ZmBgAHV1dV4XwxcUkkEHHZqU2NDr3Ejrpc3tUHok2WvSPwwJdMzJHujRLmDp9u1mb023JQe3G0W2jOojklE3dKiIXpEiE4VJf38/amtrC+7HoAu69CFnQjk6s4o5HZkfJ70bPc4uU2XKbDYZmx1kyiGEQqhm0CWqELtBV3VNl6GTcX109mLpRacTe/wRChkdZMo9kg/OhyisGHREAaZgb37UYiqMWY+Z68V53tvRSqrdEw055bsbCcHAoCMKKmU2nRqxwpc+AXsD91Xy48hjallfX+C9nZorAEAiBgCx8g8WcEKpytx/DhkGHVFQCQUYMWQs0Z6FDgml7DexJqTIOGa+1+u6kXWIhQ6JiO7cCtW6bmBO2uDJahw1E+0CjFjmGFGyh3Nd5qB8vBFVmtnpBhlbpX4YBYA4Ilja3oyervbKvCmFCmt0Ofj5dkCuex8+LnKoVGPDkRQJxJA5A4xEomLvLyCgNABKRywBoKcKq3RUMgZdPn5MuwJd2f1Y5DAJ/fRxWZQz+4uTzF7ACtKowm8ClYVNl0REFGoMOiIiCjU2XeZgzQqSDdsHicqWbWgcJ4ghN4Q36IodYKpExi9Z1llBmG9EjkmNAdRk+vCIOAdFk+PCG3SiuDkIhY2VsM1aHrL3SGAtj6goSgnEhAFDH+7skpACEX/0faEQ4T26NEIVHrOmROZGRM4xa3eqouP0qomyPhbYCs2bHjDhrdGVQqgsrZUiszlz5POClTkip0hNIoYEIAwo+G+JqKATUDBk4a+pBuXTlUtKw6BLytbMKVLL3tj5ZrM5kyhDavWhYqTG7I1cs4/KpwBACSRiGHFfNAfbFz//Y9NlIVnaM9PjS2B0cyabNIkAJGIQSuXcEpyjuSIUklO2QSUn9y7MiInktS7HfwFr2mSNzo4RNbORVXrmGtFoUpNAnmnC9BFTipE7RDLkDClgFPHHhRETSOSo+GkI1mLBDLo8cvXaFEJh5N28Uff28o3Dy3rQIP19RJSfX6YNq2bWCKtkq5St5sqkQvsaMQFNt9c27Yc8ZNNliUSyGSC1ZWvShMjc2K
RJRBWTrMlBwdaahSUf30YPTq+xRleCXB1XbC1smd7kqbI8wZodEZVJAYAwmys1KYuqzdmhSQk7N1kNXQd05fkkAKzROajQOLxszZus5RGRU6xxcso/fzML62LoXY2PNTonFRiHB3BWMSJyT2qcXCIGGJ7fGTMZMYEEkHP8nnPr0OfGoHNIqePwUsMTgCzTi/nlTzIi8r2ix8lVQMFOLbpekfF6bLp0W3p7Zq5dMNx5hc2ZRFSMUsbJVRsGXSUIs8Hczo8gM46IiiGU2WSZkMKd3pUhwKZLF6U3Z4oimiFTzZlFjcMjykOkBvjaoCUkjJAOg+vqaYeUWnIwe36alNjQ24GuaAUKVoJyxslVGwadT3E9PHKSFAnoRuELoUBqRpPwJV20C2jv0hGxOdBZCgn4NOQAjBonx6DLjUFHFHKpWUqkZvcV/g+5aFdpr9MhoaFwzgkFwNCxoaP093Kdi+PkwoZBFyBW70xvi0HkHSWgQ8FoLu234A5sR9zuxA5KR6K5pLdxTSSOIPwd4juuB93Xv/51rFq1CnfffTfWrVsHADh9+jS+8IUv4Mknn8Tg4CDmz5+PDRs2oLGx0e3iBBoDjqpesk3fduU0+wEc37ViGHIlcbXX5f79+/Hd734XH/nIRzIev+eee/DTn/4UTz31FJ599lm8+eabuOmmm5x9cyXMcWx5NiKiYuWb2bGYfQtt5BzXanQnT55EW1sb4vE4HnzwQevx/v5+fO9738OWLVvw8Y9/HADw2GOPYdasWTAMA5pW+t9q6VTW3hwmq6cShQJXeadcCk7DZ/M41uuS145cP28KAiJt59R+xXb71+LJQdR+rFUGkGtBt2zZMkSjUbS2tmYEXXd3N86cOYPW1lbrsZkzZ2L69OnYu3dv1qAbHBzE4OCg9fnAwEDB98/38+GHZSPIOQw5sit9Do6SfmyEgjBiZo/MrE8Pv4MCEBOJZGeg4jqLSD2BuOJ1yimuBN2TTz6Jnp4e7N+/f9RzfX19GD9+POrr6zMeb2xsRF9fX9bjdXZ24oEHHnCsfAIwmzYB6yqZa+25oLHVm4zK4v6EReSk1M98rt+NQr8T2V6Xbb09PUuYpe+nFzM40cY4P7LP8Xt0R44cwd13343NmzdjwoQJjhxz1apV6O/vt7YjR46Uf9DUvFshCThg1PJ3ozZyTjn3Xni/pjrwj0r/cLxG193djWPHjmHOnDnWY2fPnsWePXvwne98B8888wzeeecdnDhxIqNWd/ToUTQ1NWU9Zk1NDWpqapwuKoDqa8Ycmev8ZSyOF18vJViLLIVffrSLqsmRKxwPuquvvhq///3vMx677bbbMHPmTNx///2YNm0axo0bh127dmHhwoUAgIMHD+L111/HvHnznC4OpRHI8cuf7eodopouVR+/hBz5g+NBN3nyZFx44YUZj02aNAlTpkyxHr/99tuxcuVKNDQ0oLa2FnfeeSfmzZvnWI9Lym1UfGWbbFp5uxow0UgMLiqHJzOjfOtb38KYMWOwcOHCjAHj5A9CKKSWkGXTJvkJfx6pFBUJul/+8pcZn0+YMAHr16/H+vXrK/H2vlHMDPL55L6vqEatmDByhfPRrxj9bOp+kALMGl/ymKzlEVEQca7LCnFq+IK5avnopEutLpy+8OLwMh55jpfvvax/DAd0xmroREQBwKALu7RmyIyH7b4+VZsbcRzORkJEQcGgCzmV9jGlqIZIMbx/WTNKEBF5hEEXYm6NGkjdv/PjCATWMoloJAZdFVLJlR0yHyxu5LzfMo75RkS5MOiqUbaxc6mnKloQ5+QcDE9EVc/V9ejIf4TKvoWFEqM3Ir+SnLy5IlijI1NyCEF65tkZh+cn2coZogynEOI8mJXBoCMAqVpdZizYGYcXBNlqdWGqxRJRfmy6pPyEylgvJmjLxuTKaDZpElUP1ugor9Q0YOmyzcziZyOLGqSgJqLyMegop5zj8MLQngk2afpRtlW6icrFoKOyjcwGv0cgO634l2
7oQCRedQsik7t4j47Kki0gghgavKj6g6Hr5rRzSmSdo5WoFKzRkSOE1UUTo1Y6sPapbJFKwuZMf2BnIXISg46cIRQMKaBJCSTM+yzCGH5aaXHfj8ljkyZRODHokkbN/Ui2mSMQBGLCgJRa1n0SUkckoGNjnaxd8Mes+khNcmC4x3iPDsg+b5SPNz816wgoKKEgoCCR+5dZkxIZA/KSG6/7FHYMOe9VfY1OWB+Cwy/FTd2Wi4kEDJH/l9nQgBgSGZ3HdUOH0Pw9VMGpkjHQibxT9UFH5TFiAkDhphnz+cx9pGbkqQOGS7Y1/NiMSVQZoWy6tBrFVHg2lTYHV1iujxI6DClGn2dYTnAE/9ZbicItlDU6oQApEpCJ7B0jgigBCYU4hFCBm4IrJetNeV3C0GPWpwkgec8vgCdoA9fNI6q8UAZdiq4bhXcKCAkdCSmgdJXsIRq8IBgZctmaO6Umc049FrwzpmJpUkLFhbVEFJETQh10YaIbOqQmEUMCUJFQX/WlSEA3Rs556P9xeFQ+qUkIA2aTdhyh/jmnygnlPbow0nXD7KJvaDBiIrTNX7qhA4YGCd3aDA1IyPBd8eyOKKkmqe+3rhvsrUOOYdBRIGhSYmT/oqCOwyslu6os74gcxaZLCgRDMzvkZCzjYsQgIghkCgSwyESBxaDzMSm1UHWoKVVmp5Xhf5vj8Hgjh4jyY9OljzHk8qu2cXhEVBoGnUNyTWZMpZOajdWmdQlDxKwtgRhzjnzF1s8xuYpNlw5h7ct59qYVyyQ1CQE1qrsix+GRVzips/cYdBQ6ucbhMeqIqhODLiDYNGpPavLo9JhLzbbCWh1RdeI9uoCRmoQW512ooiViAFQoxuERUXFYowuS5LI2rJkUzxyHFwPEcF3PkAKCLZpEoRfaoNMNPWPF63ydRVLNgn7qUJJrDJ1u6IhrEQ9KFFy5OgPokGDSEYVfKIPO7HAXh1BxIBHLCLxs/BRwKX4sU9gYGgApsjQFs8pMFCahvEcngOE12yJxW68pt7MHO4s4ryLjj0aMwzMEx+ERhU0oa3RA8j6WEuYinoaELDSURTdQ1mW13NcXOryUMKQGPVI93QfdHn+Uaxye1UUzTZV8yYlCKbRBJ9I/RABdBX/QphYBlODl1m0xJIAEx+ERhUVogy5lOPDCIUSn4kv5xuHxi09OKdQsH/w/y/0l1EEX1HstvJ76UCKWnDB6+LsjmH5UBs1Azj4EbCp3liudUd544w189rOfxZQpUzBx4kRcdNFFeP75563nlVJYvXo1mpubMXHiRLS2tuLQoUPOFkKZt1qCuAU1oMMsNQ4vIaS1mSsneF0yCioV0QEhsm6CPX8d5XiN7m9/+xuuuOIKfOxjH8PPfvYz/MM//AMOHTqE973vfdY+Dz30EB5++GE8/vjjmDFjBtrb2zF//ny8/PLLmDBhgnOFEQG8Cin+dHtFajJrB5X0x9L3SY3DU2lXJH73qBDHehMrAcQAQw94Q2cFqq+OB93atWsxbdo0PPbYY9ZjM2bMsP6tlMK6devw5S9/GTfeeCMA4IknnkBjYyO2bduGRYsWOV0kIlvs9PIcGXpSCgiOw6Mi6IbuTNgJQAtBk4LS3f9lcbzp8umnn8bcuXPx6U9/GlOnTsXs2bMRjw+3Qx8+fBh9fX1obW21Hqurq0MkEsHevXuzHnNwcBADAwMZG1G5yr3YSOijxuFJkQjBpYec5NZ4UGF9DPZWib8JHa/R/elPf8LGjRuxcuVKfOlLX8L+/ftx1113Yfz48Vi8eDH6+voAAI2NjRmva2xstJ4bqbOzEw888IDTRQ0EhdEtsLxR7Yxyx+llH4cHjsMLMyVgxADNkFaAFZx5ievRec7xoBsaGsLcuXPxta99DQAwe/ZsvPjii9i0aRMWL15c0jFXrVqFlStXWp8PDAxg2rRpjpTX9xRgxEY/xqumf2Udh6d0fs9CwPyDRQE6IKXgVH0B4XjQNTc34/zzz894bNasWfif//
kfAEBTUxMA4OjRo2hubrb2OXr0KC655JKsx6ypqUFNTY3TRQ0MTQ7/9agbOhQnIvat3OPw+NdJufzQt0wAUBAQyrw/JvXEiO/2aNmaLnVIRDharmIcD7orrrgCBw8ezHjs1VdfxYc+9CEAZseUpqYm7Nq1ywq2gYEBJBIJ3HHHHU4Xh8gfOA6vLALK/NKpytzTKSTVFB1H5koiGf0qBKApjpPzA8eD7p577sFHP/pRfO1rX8NnPvMZ7Nu3D48++igeffRRAIAQAitWrMCDDz6I8847zxpe0NLSggULFjhdHCJfyLUeXqTMFk2zthDumkHqPnVMJCBjAPTR90ArKX0wSf5S5E6zsM3Y5HeOB91ll12GrVu3YtWqVVizZg1mzJiBdevWoa2tzdrnvvvuw6lTp7BkyRKcOHECV155JXbs2OHsGDoKDavZFpk3/4Nykz/venhxfdQ1u5jrn27oQFqzdjq7nSWCQAlAGLo5qXlccLgpFcWVKcCuv/56XH/99TmfF0JgzZo1WLNmjRtvTyGkGYCKmEshJWIA9Aos4eMycxyentFvxVwbz167loKA0NRwjI1YkDeOZMeYgBNAajkSQAk2+1HRQj3XJYWHoeuAluoMoMxmwIBLjcPLeExPQLfZrVakfcz+vAIMHaFYKjHV1seQoxIw6Ig8krVJU5POjZ00e28AMvOVum6EojmTyK5QrjBO3ik0C0RFVg0vktdlSn9/CX14HF76VkrXemGuXxjRYW2ariBDUcUjso81OnJUoQ4ifuxA4nWZ0t8/1zi8UmcJGPkKBYHEqHdIe68qrOmxY0v4MeiIqogAAF2NqiAmqnCWj2TDrq39KNjYdElUdUTGf4BARPfBtCMesDPtMAUfa3RUFi1uToOEEfeZAO+bBKtBvniye5EWAJQS0BOZDZd+HoeXft4MIyqEQUclS/X4HjkNUkJqoRjn5nvKHEJgxLJf6lUcEMWkHTKnqwrLODwiBh2VJTXB7chHDT3449x8TyhIJIAcHUvsj8jLPiZPQBWYrpgoGBh0VLbUpBUZnxtA+gVYahISOpszXZB9XTwJTUqouJ5z2n9VaK5GJaBDQcYyhyP4bRxeEJox7d4B9Wv5g45BR44TI5MPqWYwIOwTEJdDatLRPwQMDYjBSNb6slH5w06YYZj5kLlAIofiFUdAQdkdx8C0cxyDjtyR7ZdaGBmDo8NQu3MynJz8egwfK/sxpSaRkBoiBd5y1HcxucK2juxDEbyu6aX+vPJTVljNx4UW1OOAPtcw6KgihFDJqa0iQCIGIyQ1gjCEdTFSs4pF9MwOSMKIeT7DDFEuHEdHlaGE2aQJAUSyL0ZZjYIWDgJmT86R4/D4PSU/Y9BRxSmRnIJKM0Zd6IN24S+XEzVCr79myYW/oRs6NAPWpkPmnGrMbSptI2LTJVWcOUBZA4Qa1UGl2poCneCPr5kAtMxY0RUgBcfhkfdYo6PKU1aDl6sX6UrUdLyuTfnF8Di84W1kj02vsFZHrNFR5YnhrtYxTQIuhV0lajr+qE35k7kyeMQX4/BKW/uBwoJBRxWnAMSEASBhDiL3ukA+4fQ4Os9xHB75RLiDLkTjUry+ABb6UtoeC6vMzRDD51Nq81+lmg3L+doXE15OdUzx+mclXdDG4VE4hTbogpxxYsS/1YhRvULp3rTDKAEhVHKOr2RZUNw9kNRLIyiuO7rQ47DuBCkBVWjwrQOE1Zmi9ItvpUPHTyGXDcfhkRdCG3QBzrlMwj/nYt5zEek5N/x4kUcq65xE2UewTTckJzZ2kMj8ACD5h1JEAVwpgVwS2qCrRqnZR1yRrCKPDDnyUHINOakVvzK4DkCPx6F077+bqXF4wki/X6sjHtF8+8NW6PfMp8WuWgy6kDBbhFz89RIZ/yMfMOfO1qEX2Qw8rFL1YjtGjMPz6V9UyvpQeL8RFVfyEIMuRPg7VV2yNQMWfwB/GLUeXoGcG/lcpcbKiWTS5VrsNkWLc0CDn4Qi6KplQG
i5vzbWzO4q9SFAfPoXPvlD6kcjYD/VVCGBDzrz76Yi1noKKPP+W+lNTVaTSzLgcq9R5k96obXTqlSQL+z8XlKlBD7ohEremwpaDaVIysEajdns4v54q0JjuuyO+ZKaHDW7RliVMg4uiD/6SrgzW0m24wXwy0MO41yXVcKagVCJ5P0D9xW6YPt9zJcXqu1rovJsRE5h0FWTZNr5ZbJdonwYduQUBh1VVBBmv7BqFEFsEwyI1FRwuTZH3wu8H1jtGHRUUYFomlNAQgIwYjCq49YgOaBStwSoeAw6ohEEFKBLc2WFIAQz+UJqbB0Dz38YdEREDio0mJwqj0FHROQA1uT8K5xBF/LB40TkP0asxKE7vF65LpxBx95yROSBkpoteb1yXTiDjoiIKIlBR54rdmxdvtk0yt2g2JmAKGwCP9clBV/RXfgVkIgBmnRh8HkC0JNDC4goHKo36FSeiaDzPecmr943aIQCEIOU5YVRzgmUM1a6JhqNPSyDpXqDLl+geBU2DLmKqubB4PybqjRKwH4vSS6h6BuO36M7e/Ys2tvbMWPGDEycOBEf/vCH0dHRAaWGf6uUUli9ejWam5sxceJEtLa24tChQ04XhYhyYMiVRgAQwubmdWHJ4njQrV27Fhs3bsR3vvMdvPLKK1i7di0eeughPPLII9Y+Dz30EB5++GFs2rQJiUQCkyZNwvz583H69GnnCsKxKUREBBeaLn/zm9/gxhtvRDQaBQCce+65+MEPfoB9+/YBMGtz69atw5e//GXceOONAIAnnngCjY2N2LZtGxYtWuRMQfgnKxF5SCG5GoNQyYWTeU3yiuM1uo9+9KPYtWsXXn31VQDAb3/7Wzz33HO49tprAQCHDx9GX18fWltbrdfU1dUhEolg7969WY85ODiIgYGBjI2c5/YSOkFYoqdY/JpRLkIlh62wdclzjgfdF7/4RSxatAgzZ87EuHHjMHv2bKxYsQJtbW0AgL6+PgBAY2NjxusaGxut50bq7OxEXV2dtU2bNq24QqX/oBX7Q+fmD2mhY1f4F8Ttzhlh7PzBrxnlI9JrcUpk38h1jgfdj370I2zevBlbtmxBT08PHn/8cXzjG9/A448/XvIxV61ahf7+fms7cuRIcQdI/2HzU/NBobL4qaxEVBwBqFTXyyyb4O93xTh+j+7ee++1anUAcNFFF+G1115DZ2cnFi9ejKamJgDA0aNH0dzcbL3u6NGjuOSSS7Ies6amBjU1NfYLkforyYkfJP4wUpVjpaN0eb90qS8sv8Cuc7xG9/bbb2PMmMzDnnPOORgaGgIAzJgxA01NTdi1a5f1/MDAABKJBObNm+dMIYRiQBHlEYRrawCKWBaVrNWlb0H4vgSR4zW6G264AV/96lcxffp0XHDBBXjhhRfwzW9+E5/73OcAAEIIrFixAg8++CDOO+88zJgxA+3t7WhpacGCBQucLg4RZVHs34G8/jpPpH3MfIyc5njQPfLII2hvb8fSpUtx7NgxtLS04N///d+xevVqa5/77rsPp06dwpIlS3DixAlceeWV2LFjByZMmOB0cSgHHRJSS/ucnR6IKKQcD7rJkydj3bp1WLduXc59hBBYs2YN1qxZ4/Tbkx0CiCsNEArCiMHQCr+EiCiouExPFUpNYwQIIBK3HvfbmK0gjFHz29eMiEZj0AH+ujM/csyfEslgcr9zjd+aL90sT86VC4rkt68ZEY1WfUGXLdQq2UOzUKiOKItAciwOBG9UO4gBRVQ9qi/ovB52YOf9R8yY4JeAYzNd8fg1I/Je9QWd3yUDTgizHuenVlXWgorHrxmR9xh0PmTejxNcuNGHWEMjCp7qXWHcD3JV1xQTzq9YQyMKHgadx4RQowKPOUdE5Bw2XXotlWrps5p7XCS/YDMhETmBNbpKS+9NKRQUhw3kxGZCInJC9dXofNCNUQhlrVXlfWmIiMItPDW6YgLMD2HndQGIiKpE4INOCe/HgBeLNTkiosoJfNAJ60NwBK
y4RESBVn336IhysNPLkz1BiYKHQUeUZKeXJ3uCEgUPg46qFmtnRNWBQUdVi7UzourAoCMiolBj0BERUagx6IiIKNQYdEREFGoMOiLKSyggYJMPEWUI/MwoROQeBhyFAYOOiLJLm0RWcDVgCjAGHVHAubUYh0g7MHOOgoz36MhRhWYb8eNsJF6XqZz3Fy5uXPWewoI1OnJUodlG/Dgbiddl8vr982HAURgw6IIu1SOu1EX5EjEnS0NE5DsMuoBTQiGBGIDSAsvQnC0PEZHfMOgCTijAEP5u/qLSeH3vkCgs2BmFyMc0A4DUs94rU9ZH9zeOp6MgY42OyKd0SCASQUSL59ynUgEkFNgzhQKLQUdVT2oSOiR0Q4eh+6cJWFMRKIjhNMsRNKX2Q7LLrXF6RJXCoCNXSE0G6r6hlBr0CBDxUeccpZsJw5whKg/v0ZHFyc4PQQo5ANDiyhoY7beNiMrDoCNL0MKJiMgOBh05ht3hiciPGHTkmGJqhLlCkWFJRE5jZxTyRK5Q9EXzqc8WGhVCOb58gBLu99Yk8gvW6MgSttpUyecjlK+2suYyzXOKRNWCNTqy+KI25aByz8cvYeC3cWwVG6Reofeh8GONjlxR6B5c2GqP1cKcIKUC046l/knkgKKDbs+ePbjhhhvQ0tICIQS2bduW8bxSCqtXr0ZzczMmTpyI1tZWHDp0KGOf48ePo62tDbW1taivr8ftt9+OkydPlnUi5L5iwqnQPTg7tS2GoT8pJVxvsR3+B1H5ig66U6dO4eKLL8b69euzPv/QQw/h4YcfxqZNm5BIJDBp0iTMnz8fp0+ftvZpa2vDSy+9hJ07d2L79u3Ys2cPlixZUvpZUEVUumkzbE2pROSNou/RXXvttbj22muzPqeUwrp16/DlL38ZN954IwDgiSeeQGNjI7Zt24ZFixbhlVdewY4dO7B//37MnTsXAPDII4/guuuuwze+8Q20tLSMOu7g4CAGBwetzwcGBootNhERVSlH79EdPnwYfX19aG1ttR6rq6tDJBLB3r17AQB79+5FfX29FXIA0NraijFjxiCRSGQ9bmdnJ+rq6qxt2rRpThabfITNlf6gxPDmu94wREVyNOj6+voAAI2NjRmPNzY2Ws/19fVh6tSpGc+PHTsWDQ0N1j4jrVq1Cv39/dZ25MgRJ4tNPsLmSh9RAiI1fk8oqFQHEd47o4AJRK/Lmpoa1NbWZmxErP25S4jhia7NB4Y7iFi1PaIAcDTompqaAABHjx7NePzo0aPWc01NTTh27FjG8++++y6OHz9u7UNkB2t/FZTelpm2MewoCBwNuhkzZqCpqQm7du2yHhsYGEAikcC8efMAAPPmzcOJEyfQ3d1t7bN7924MDQ0hEok4WRwicooAxMjN6zIR2VR00J08eRIHDhzAgQMHAJgdUA4cOIDXX38dQgisWLECDz74IJ5++mn8/ve/x6233oqWlhYsWLAAADBr1ixcc801iMVi2LdvH379619j+fLlWLRoUdYelxRchZoW2fQYDtkqe0R+UvTwgueffx4f+9jHrM9XrlwJAFi8eDG+//3v47777sOpU6ewZMkSnDhxAldeeSV27NiBCRMmWK/ZvHkzli9fjquvvhpjxozBwoUL8fDDDztwOuQnhZoW2fQYbObE0CNSLTU3J5GPFB10V111FZTK/aMshMCaNWuwZs2anPs0NDRgy5Ytxb41EfmIsD4MUxDJqbsYd+Qfgeh1SeXxUxOhX8ril3KEkRCKzZjkKwy6KuCnJkK/lMUv5QgbgWStDoKdVcg3GHRE5CiR3FLVOdbsyGsMOiJyhbUoelrgEXmBQUdErhCpjxxzRx7jCuNEZHGj1sWQI6+xRkdEGcw5nEWyUwlR8DHoiGhYKuDY3EghwqAjz3FMm7usMW0QBTfBJXgohHiPjjzHMW3us7t+quL4Nwoh1uhCgrWi4lXV10wJa3xboY0obBh0IcFaUfH4NSOqDgw6IiIKNd6jo6oV5q
ZLzkJCNIxBR1VNM8z/qzB2p1fJXpRKhPP8iGxi0yVZ3K7h+K0GpUPC0HUgEg9xCHBMHBGDjixud87wa+ePMDXzWSsFKI6JI0ph0BGFjLDaKUOU4ERlYNARhQzvxxFlYtAREVGoMeiIiCjUOLyAKOAyOtP4tN1SKQEB+51jhIIvz4OCiUFHFAbKv5Mxm5mlYIhYUa+LIMzDPqiSGHQhITVZdvd9J47hJL+Vx89838kyEUNPtLe41zQLgN9+cgCDLiScCAS/hYqb5ZGaDMc1NCiDACNx9LTbL6vUJDSlAD0g50e+xqCjqqQbOuCzmVqKZXeNOa+J5Edds3+PTgeghH+bYylY2OuSbPPbFF4UoDXkrLZV+1sgzosCgUFHtvmtaZOIyA4GXRVjDY2IqoFQSgVu5teBgQHU1dV5XQxfUFCIIYGQdK3ISVNx656NU+esQ0JXkbShZ6nGMlXEiK/KED4dH0fkpf7+ftTW1hbcj0EXdAq+uyi7IaP7vIPnbB1SZBwefpv4nzlHNJrdoGOvy6CrxgugC+csRv7bZ19UnxWHKFAYdBRITle4GCRE4cWgo+BRcH5RUbYNEoUWg44CSSnhzI205IhrwVmEiUKLQRdEqc4Yfusx4aLMXpFOHlgFY3oRP1KAqqKfQd9Ktkbwpzg3Bl0AKQAJIaEb1fSjLaE0/jL7iRIKMUNA1w2vi1LVpDCg85cjLwZdQOmGjjkdUa+LURFLm9sBGe5xgkGl6waiczq8LkZVk2iGLhQUp03LiTOjEBEFmCYlJBIQhkTwRkVXBmt0VN14f44CbkNvBxAFNnQAiWavS+NPrNEFiTI/CCjeFylGvjBjZwoKuK6ouQFIXhcU/DiNnZcYdEGhACMGGIhBJlgLySkVarnCjTU4CjFdAtIQZlMmo85SdNDt2bMHN9xwA1paWiCEwLZt26znzpw5g/vvvx8XXXQRJk2ahJaWFtx666148803M45x/PhxtLW1oba2FvX19bj99ttx8uTJsk8m7HTdQE+0F3M6omZzRcgtbW43O6IUI1VDS6+p5fo3UYh0RYHonA709kShy+RYUwJQQtCdOnUKF198MdavXz/qubfffhs9PT1ob29HT08PfvzjH+PgwYP45Cc/mbFfW1sbXnrpJezcuRPbt2/Hnj17sGTJktLPItQUFBSMGABD87owFWfoutnhMhL3uihEgbGhtwMiIQGloKpk4vd8ylq9QAiBrVu3YsGCBTn32b9/Py6//HK89tprmD59Ol555RWcf/752L9/P+bOnQsA2LFjB6677jr85S9/QUtLy6hjDA4OYnBw0Pp8YGAA06ZNK7XYwaIUZGJ4rFI11ORSetq7IHUgoqvMAeNODZhPza6iBGdGKYGCgpAJDi/woWjX8L97utpDO87ON6sX9Pf3QwiB+vp6AMDevXtRX19vhRwAtLa2YsyYMUgkEvjUpz416hidnZ144IEH3C6qr6SWijFiAhqMqgq4UZya7mskNmNSSHWlDbFt79Krfpydq51RTp8+jfvvvx8333yzlbp9fX2YOnVqxn5jx45FQ0MD+vr6sh5n1apV6O/vt7YjR464WWzvKSAhYX7QuQo4EZWO4+xcrNGdOXMGn/nMZ6CUwsaNG8s6Vk1NDWpqahwqmf8pADEJQJfQmHOls1MT5A17CjmOs3Mp6FIh99prr2H37t0ZbahNTU04duxYxv7vvvsujh8/jqamJjeKExzJ9koBQEJUd3Nluew2d3JSZwq5VDNmtMvsua2UBlFlTZmON12mQu7QoUP4+c9/jilTpmQ8P2/ePJw4cQLd3d3WY7t378bQ0BAikYjTxQkMhcxxchwQXibefyMaRZdALIGqG2dXdI3u5MmT+OMf/2h9fvjwYRw4cAANDQ1obm7Gv/zLv6Cnpwfbt2/H2bNnrftuDQ0NGD9+PGbNmoVrrrkGsVgMmzZtwpkzZ7B8+XIsWrQoa4/LaiGUOQv5nGiHOU4OrM1ZGFpEZeuKAlGY4+
y6JKDiAqJKqnRF1+ief/55zJ49G7NnzwYArFy5ErNnz8bq1avxxhtv4Omnn8Zf/vIXXHLJJWhubra23/zmN9YxNm/ejJkzZ+Lqq6/GddddhyuvvBKPPvqoc2cVKNU9Ti4fXTeg6waEEUtOakREVLyia3RXXXUV8g29szMsr6GhAVu2bCn2rcNJAbFUU6UO1uRG0JOdcoQRg3L67wC3hi0Qka9w9QKPpC6vieQ4ORot1Rmn6GnA7GLIEVUFBp0XFCCgYMTMmpxMNlmyAwoRkfMYdF4QChIJABISOrh2NhGRe8qa69IrAwMDqKur87oYpUuuK2fEhrs8adIcGS45QjwrXUWgRHLcj1NzXaZwrssiKSglkIgBPb3RjOmmKBi6etqh4lrge136Zq5LykKYy6emU7oOoQBdsX6XjRKMIV9QABIxxBKADoOdpygQuPCqR0SWLeuD3IByQq7QrCecFaVohgb09kQRndPB2hwFAmt0FRS4NmIfSLVOllSjszN8gFOAEYUeg66C2Ju9SMn5+AAkpysqMpD4BSciMOgqjxdf5zhZE+NNQKLQ4j068jUhlNkjMj3U3LoXSJRH+qrdFCys0ZH/Zam5MZeo0tjxJrhYoyMiolBj0BERUagx6IiIKNQYdEREFGoMOiIiCjUGHRERhRqDjoiIQo1BR0REocagIyKiUGPQERFRqHEKMCKyTygAMa9LQUXINkdnc7tW+YJ4iEFHRAUpBSRigCZj0HSJKFcWD4ylze0woEGLD6+cYi7VWD0zxgqlVODWjRkYGEBdXZ3XxSieApfpKUXqF7J6fi99x5BAT+/wrMac4Dg42pc2Q1N6KH9/+vv7UVtbW3A/1uiIqCBNSmzo6GXA+Vi0y6y9SZnZLKkZsuqXW2TQERGFhNSBeDzzMR16VYccwKAjIgoNKTWIak+1LBh0NIoy71QX+SJhrgROROQzDDoaRUBBIlHUa/SEhGLYEZEPMegqLRBdes2wktBtv8La06Xzq/ab6URUOgZdJYlUhPibUALxIodCqIiAgHs5zpAjolIx6CosEBdsq/ZURGk51I2IfIpzXRIRUagx6IiIKNQYdEREFGoMurBTqQ8B34LQi4dCLdsqABQM7IwSckooCCP4y6oooQAIdnYhz3Cez+Bi0FUBqcmixsT5jQ4JoQIyBJGIfIdNl0REFGoMOiIiCjU2XRJRYZE4etoFmr0uB+XUMwdYGlGAzjb+kRh0RFSQEgKCXV99T7HDVlZFN13u2bMHN9xwA1paWiCEwLZt23Lu+/nPfx5CCKxbty7j8ePHj6OtrQ21tbWor6/H7bffjpMnTxZbFCKqkOEp4bj5eWPIZVd00J06dQoXX3wx1q9fn3e/rVu3wjAMtLS0jHqura0NL730Enbu3Int27djz549WLJkSbFFISIiKqjopstrr70W1157bd593njjDdx555145plnEI1mDj555ZVXsGPHDuzfvx9z584FADzyyCO47rrr8I1vfCNrMA4ODmJwcND6fGBgoNhiExFRlXK81+XQ0BBuueUW3HvvvbjgggtGPb93717U19dbIQcAra2tGDNmDBKJ7It9dnZ2oq6uztqmTZvmdLGJiCikHA+6tWvXYuzYsbjrrruyPt/X14epU6dmPDZ27Fg0NDSgr68v62tWrVqF/v5+azty5IjTxSYiopBytNdld3c3vv3tb6OnpwdCOHdbtKamBjU1NY4dj4iIqoejQferX/0Kx44dw/Tp063Hzp49iy984QtYt24d/vznP6OpqQnHjh3LeN27776L48ePo6mpycniUAl0SPP/hg5D98e0YRI64kKxRxkRlcTRoLvlllvQ2tqa8dj8+fNxyy234LbbbgMAzJs3DydOnEB3dzcuvfRSAMDu3bsxNDSESCTiZHGoRLqKANCg+ejboTgIlohKVHTQnTx5En/84x+tzw8fPowDBw6goaEB06dPx5QpUzL2HzduHJqamvBP//RPAIBZs2bhmmuuQS
wWw6ZNm3DmzBksX74cixYtytrjkipPCfiu9uS38hBRcBTdGeX555/H7NmzMXv2bADAypUrMXv2bKxevdr2MTZv3oyZM2fi6quvxnXXXYcrr7wSjz76aLFFISIiKqjoGt1VV10FpexPBfTnP/951GMNDQ3YsmVLsW9NRERUNM51WSVSnUyIiKoNgy7khBKAUFA2Vy0VQkH34006IqISMejCTpgf7A9rZMgRUbgEcuHVYu4REhFRONnNgkAG3VtvveV1EYiIyGN2s0CoAFaPhoaGcPDgQZx//vk4cuQIamtrvS6SYwYGBjBt2rRQnRfPKRh4TsHAczJrcm+99RZaWlowZkzh+log79GNGTMGH/jABwAAtbW1oflmpwvjefGcgoHnFAzVfk51dXW2jxvIpksiIiK7GHRERBRqgQ26mpoafOUrXwnd8j1hPC+eUzDwnIKB51S8QHZGISIisiuwNToiIiI7GHRERBRqDDoiIgo1Bh0REYUag46IiEItsEG3fv16nHvuuZgwYQIikQj27dvndZFs6+zsxGWXXYbJkydj6tSpWLBgAQ4ePJixz+nTp7Fs2TJMmTIF733ve7Fw4UIcPXrUoxIX7+tf/zqEEFixYoX1WBDP6Y033sBnP/tZTJkyBRMnTsRFF12E559/3npeKYXVq1ejubkZEydORGtrKw4dOuRhifM7e/Ys2tvbMWPGDEycOBEf/vCH0dHRkTE5rt/Pac+ePbjhhhvQ0tICIQS2bduW8byd8h8/fhxtbW2ora1FfX09br/9dpw8ebKCZ5Ep3zmdOXMG999/Py666CJMmjQJLS0tuPXWW/Hmm29mHMNv5wQU/l6l+/znPw8hBNatW5fxuBPnFcig++EPf4iVK1fiK1/5Cnp6enDxxRdj/vz5OHbsmNdFs+XZZ5/FsmXLYBgGdu7ciTNnzuATn/gETp06Ze1zzz334Kc//SmeeuopPPvss3jzzTdx0003eVhq+/bv34/vfve7+MhHPpLxeNDO6W9/+xuuuOIKjBs3Dj/72c/w8ssv47/+67/wvve9z9rnoYcewsMPP4xNmzYhkUhg0qRJmD9/Pk6fPu1hyXNbu3YtNm7ciO985zt45ZVXsHbtWjz00EN45JFHrH38fk6nTp3CxRdfjPXr12d93k7529ra8NJLL2Hnzp3Yvn079uzZgyVLllTqFEbJd05vv/02enp60N7ejp6eHvz4xz/GwYMH8clPfjJjP7+dE1D4e5WydetWGIaBlpaWUc85cl4qgC6//HK1bNky6/OzZ8+qlpYW1dnZ6WGpSnfs2DEFQD377LNKKaVOnDihxo0bp5566ilrn1deeUUBUHv37vWqmLa89dZb6rzzzlM7d+5U/+///T919913K6WCeU7333+/uvLKK3M+PzQ0pJqamtR//ud/Wo+dOHFC1dTUqB/84AeVKGLRotGo+tznPpfx2E033aTa2tqUUsE7JwBq69at1ud2yv/yyy8rAGr//v3WPj/72c+UEEK98cYbFSt7LiPPKZt9+/YpAOq1115TSvn/nJTKfV5/+ctf1Ac+8AH14osvqg996EPqW9/6lvWcU+cVuBrdO++8g+7ubrS2tlqPjRkzBq2trdi7d6+HJStdf38/AKChoQEA0N3djTNnzmSc48yZMzF9+nTfn+OyZcsQjUYzyg4E85yefvppzJ07F5/+9KcxdepUzJ49G/F43Hr+8OHD6Ovryzinuro6RCIR357TRz/6UezatQuvvvoqAOC3v/0tnnvuOVx77bUAgnlO6eyUf+/evaivr8fcuXOtfVpbWzFmzBgkEomKl7kU/f39EEKgvr4eQHDPaWhoCLfccgvuvfdeXHDBBaOed+q8Ard6wV//+lecPXsWjY2NGY83NjbiD3/4g0elKt3Q0BBWrFiBK664AhdeeCEAoK+vD+PHj7d+iFMaGxvR19fnQSntefLJJ9HT04P9+/ePei6I5/SnP/0JGzduxMqVK/GlL30J+/fvx1
133YXx48dj8eLFVrmz/Sz69Zy++MUvYmBgADNnzsQ555yDs2fP4qtf/Sra2toAIJDnlM5O+fv6+jB16tSM58eOHYuGhoZAnOPp06dx//334+abb7Zm+g/qOa1duxZjx47FXXfdlfV5p84rcEEXNsuWLcOLL76I5557zuuilOXIkSO4++67sXPnTkyYMMHr4jhiaGgIc+fOxde+9jUAwOzZs/Hiiy9i06ZNWLx4scelK82PfvQjbN68GVu2bMEFF1yAAwcOYMWKFWhpaQnsOVWTM2fO4DOf+QyUUti4caPXxSlLd3c3vv3tb6OnpwdCCFffK3BNl+9///txzjnnjOqtd/ToUTQ1NXlUqtIsX74c27dvxy9+8Qt88IMftB5vamrCO++8gxMnTmTs7+dz7O7uxrFjxzBnzhyMHTsWY8eOxbPPPouHH34YY8eORWNjY+DOqbm5Geeff37GY7NmzcLrr78OAFa5g/SzeO+99+KLX/wiFi1ahIsuugi33HIL7rnnHnR2dgII5jmls1P+pqamUR3X3n33XRw/ftzX55gKuddeew07d+7MWLctiOf0q1/9CseOHcP06dOta8Zrr72GL3zhCzj33HMBOHdegQu68ePH49JLL8WuXbusx4aGhrBr1y7MmzfPw5LZp5TC8uXLsXXrVuzevRszZszIeP7SSy/FuHHjMs7x4MGDeP311317jldffTV+//vf48CBA9Y2d+5ctLW1Wf8O2jldccUVo4Z9vPrqq/jQhz4EAJgxYwaampoyzmlgYACJRMK35/T222+PWpH5nHPOwdDQEIBgnlM6O+WfN28eTpw4ge7ubmuf3bt3Y2hoCJFIpOJltiMVcocOHcLPf/5zTJkyJeP5IJ7TLbfcgt/97ncZ14yWlhbce++9eOaZZwA4eF6l96HxzpNPPqlqamrU97//ffXyyy+rJUuWqPr6etXX1+d10Wy54447VF1dnfrlL3+pent7re3tt9+29vn85z+vpk+frnbv3q2ef/55NW/ePDVv3jwPS1289F6XSgXvnPbt26fGjh2rvvrVr6pDhw6pzZs3q/e85z3qv//7v619vv71r6v6+nr1k5/8RP3ud79TN954o5oxY4b6+9//7mHJc1u8eLH6wAc+oLZv364OHz6sfvzjH6v3v//96r777rP28fs5vfXWW+qFF15QL7zwggKgvvnNb6oXXnjB6oFop/zXXHONmj17tkokEuq5555T5513nrr55pu9OqW85/TOO++oT37yk+qDH/ygOnDgQMY1Y3Bw0LfnpFTh79VII3tdKuXMeQUy6JRS6pFHHlHTp09X48ePV5dffrkyDMPrItkGIOv22GOPWfv8/e9/V0uXLlXve9/71Hve8x71qU99SvX29npX6BKMDLogntNPf/pTdeGFF6qamho1c+ZM9eijj2Y8PzQ0pNrb21VjY6OqqalRV199tTp48KBHpS1sYGBA3X333Wr69OlqwoQJ6h//8R/Vf/zHf2RcMP1+Tr/4xS+y/v4sXrxYKWWv/P/3f/+nbr75ZvXe975X1dbWqttuu0299dZbHpyNKd85HT58OOc14xe/+IVvz0mpwt+rkbIFnRPnxfXoiIgo1AJ3j46IiKgYDDoiIgo1Bh0REYUag46IiEKNQUdERKHGoCMiolBj0BERUagx6IiIKNQYdEREFGoMOiIiCjUGHRERhdr/D8qgWC364UO4AAAAAElFTkSuQmCC",
            "text/plain": [
              "<Figure size 500x500 with 1 Axes>"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        }
      ],
      "source": [
        "predict_image = spectral.imshow(classes = outputs.astype(int),figsize =(5,5))"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
