{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "nn search",
      "provenance": [],
      "collapsed_sections": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "code",
      "metadata": {
        "id": "tr6P-dCTdd7y",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# getting the data\n",
        "import torchvision\n",
        "import torchvision.datasets as datasets\n",
        "mnist_trainset = datasets.MNIST(root='./data', train=True, download=True, transform=None)\n",
        "\n",
        "# exmaple from https://github.com/pytorch/examples/blob/master/mnist/main.py\n",
        "from __future__ import print_function\n",
        "import argparse\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "import torch.optim as optim\n",
        "from torchvision import datasets, transforms\n",
        "\n",
        "\n",
        "class Net(nn.Module):\n",
        "    def __init__(self, activation):\n",
        "        super(Net, self).__init__()\n",
        "        self.conv1 = nn.Conv2d(1, 10, 5, 1)\n",
        "        self.conv2 = nn.Conv2d(10, 50, 5, 1)\n",
        "        self.fc1 = nn.Linear(4*4*50, 64)\n",
        "        self.fc2 = nn.Linear(64, 10)\n",
        "        if activation == 'relu':\n",
        "            self.activations = F.relu\n",
        "        else: # softmax\n",
        "            self.activations = F.softmax\n",
        "\n",
        "    def forward(self, x):\n",
        "        x = self.activations(self.conv1(x))\n",
        "        x = F.max_pool2d(x, 2, 2)\n",
        "        x = self.activations(self.conv2(x))\n",
        "        x = F.max_pool2d(x, 2, 2)\n",
        "        x = x.view(-1, 4*4*50)\n",
        "        x = self.activations(self.fc1(x))\n",
        "        x = self.fc2(x)\n",
        "        return F.log_softmax(x, dim=1)\n",
        "    \n",
        "def train(args, model, device, train_loader, optimizer, epoch):\n",
        "    model.train()\n",
        "    for batch_idx, (data, target) in enumerate(train_loader):\n",
        "        data, target = data.to(device), target.to(device)\n",
        "        optimizer.zero_grad()\n",
        "        output = model(data)\n",
        "        loss = F.nll_loss(output, target)\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "        if batch_idx % args.log_interval == 0:\n",
        "            print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n",
        "                epoch, batch_idx * len(data), len(train_loader.dataset),\n",
        "                100. * batch_idx / len(train_loader), loss.item()))\n",
        "\n",
        "def test(args, model, device, test_loader):\n",
        "    model.eval()\n",
        "    test_loss = 0\n",
        "    correct = 0\n",
        "    with torch.no_grad():\n",
        "        for data, target in test_loader:\n",
        "            data, target = data.to(device), target.to(device)\n",
        "            output = model(data)\n",
        "            test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss\n",
        "            pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability\n",
        "            correct += pred.eq(target.view_as(pred)).sum().item()\n",
        "\n",
        "    test_loss /= len(test_loader.dataset)\n",
        "\n",
        "    print('\\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.format(\n",
        "        test_loss, correct, len(test_loader.dataset),\n",
        "        100. * correct / len(test_loader.dataset)))\n",
        "    \n",
        "    return correct / len(test_loader.dataset)\n",
        "\n",
        "  "
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "IjRU2_Pwdl39",
        "colab_type": "code",
        "outputId": "55fc976f-6e16-429d-f639-f928f7b4bd97",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 163
        }
      },
      "source": [
        "!pip install scikit-optimize"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Requirement already satisfied: scikit-optimize in /usr/local/lib/python3.6/dist-packages (0.7.4)\n",
            "Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.6/dist-packages (from scikit-optimize) (0.14.1)\n",
            "Requirement already satisfied: numpy>=1.11.0 in /usr/local/lib/python3.6/dist-packages (from scikit-optimize) (1.18.2)\n",
            "Requirement already satisfied: scikit-learn>=0.19.1 in /usr/local/lib/python3.6/dist-packages (from scikit-optimize) (0.22.2.post1)\n",
            "Requirement already satisfied: scipy>=0.18.0 in /usr/local/lib/python3.6/dist-packages (from scikit-optimize) (1.4.1)\n",
            "Requirement already satisfied: pyaml>=16.9 in /usr/local/lib/python3.6/dist-packages (from scikit-optimize) (20.4.0)\n",
            "Requirement already satisfied: PyYAML in /usr/local/lib/python3.6/dist-packages (from pyaml>=16.9->scikit-optimize) (3.13)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "lrMeW0VMdlKv",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import skopt\n",
        "from skopt import gp_minimize, forest_minimize\n",
        "from skopt.space import Real, Categorical, Integer\n",
        "from skopt.utils import use_named_args\n",
        "\n",
        "from skopt.plots import *\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "NTSoG0YZdsa4",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "\n",
        "dim_num_batch_size_to_base = Integer(low=2, \n",
        "                                     high=7, \n",
        "                                     name='log_batch_size')\n",
        "dim_learning_rate = Real(low=1e-6, high=1e0,\n",
        "                         prior='log-uniform',\n",
        "                         name='lr')\n",
        "dim_activation = Categorical(categories=['relu', 'sigmoid'], \n",
        "                             name='activation')\n",
        "dimensions = [dim_num_batch_size_to_base,\n",
        "              dim_learning_rate,\n",
        "              dim_activation]\n",
        "\n",
        "default_parameters = [4, 1e-1, 'relu']\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "jlBElCg3dwUn",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "@use_named_args(dimensions=dimensions)\n",
        "def main(log_batch_size=6, lr=1e-2, activation='relu'):\n",
        "    # Training settings\n",
        "    class Args:\n",
        "        seed = 0\n",
        "        test_batch_size = 1000\n",
        "        epochs = 10\n",
        "        momentum = 0.5\n",
        "        log_interval = 15000\n",
        "        def __init__(self,\n",
        "                     log_batch_size, \n",
        "                     lr, \n",
        "                    activation):\n",
        "            self.activation = activation\n",
        "            self.batch_size = int(2**log_batch_size)\n",
        "            self.lr = lr\n",
        "        def __repr__(self):\n",
        "            return str(self.__dict__)\n",
        "    \n",
        "    args = Args(log_batch_size,lr,activation)\n",
        "    \n",
        "    torch.manual_seed(args.seed)\n",
        "\n",
        "    kwargs = {}\n",
        "    train_loader = torch.utils.data.DataLoader(\n",
        "        datasets.MNIST('./data', train=True, download=True,\n",
        "                       transform=transforms.Compose([\n",
        "                           transforms.ToTensor(),\n",
        "                           transforms.Normalize((0.1307,), (0.3081,))\n",
        "                       ])),\n",
        "        batch_size=args.batch_size, shuffle=True, **kwargs)\n",
        "    test_loader = torch.utils.data.DataLoader(\n",
        "        datasets.MNIST('./data', train=False, transform=transforms.Compose([\n",
        "                           transforms.ToTensor(),\n",
        "                           transforms.Normalize((0.1307,), (0.3081,))\n",
        "                       ])),\n",
        "        batch_size=args.test_batch_size, shuffle=True, **kwargs)\n",
        "\n",
        "    device = 'cuda'\n",
        "    model = Net(args.activation).to(device)\n",
        "    \n",
        "    optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum)\n",
        "    \n",
        "    print ('Args for this run:')\n",
        "    print (args)\n",
        "    for epoch in range(1, args.epochs + 1):\n",
        "        train(args, model, device, train_loader, optimizer, epoch)\n",
        "        finalacc = test(args, model, device, test_loader)\n",
        "    \n",
        "    return -finalacc # we will be miniming using scikit-optim"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "ndUryGlBeV_n",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import pickle"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "PL2SnjWAdysT",
        "colab_type": "code",
        "outputId": "6109cb22-7570-46a7-9b4f-8645cc8933ee",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        }
      },
      "source": [
        "meth = \"EI\"\n",
        "search_result = gp_minimize(func=main,\n",
        "                            dimensions=dimensions,\n",
        "                            acq_func=meth,\n",
        "                            n_calls=11,\n",
        "                            x0=default_parameters)\n",
        "\n",
        "with open(f'dump{meth}.pkl', 'wb')as f:\n",
        "    pickle.dump(search_result, f)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Args for this run:\n",
            "{'activation': 'relu', 'batch_size': 16, 'lr': 0.1}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.306580\n",
            "\n",
            "Test set: Average loss: 0.0706, Accuracy: 9806/10000 (98%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 0.023539\n",
            "\n",
            "Test set: Average loss: 0.0769, Accuracy: 9791/10000 (98%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 0.065227\n",
            "\n",
            "Test set: Average loss: 0.0906, Accuracy: 9768/10000 (98%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 0.665407\n",
            "\n",
            "Test set: Average loss: 0.0808, Accuracy: 9796/10000 (98%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 0.003527\n",
            "\n",
            "Test set: Average loss: 0.1118, Accuracy: 9726/10000 (97%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 0.021806\n",
            "\n",
            "Test set: Average loss: 0.1145, Accuracy: 9772/10000 (98%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 0.005830\n",
            "\n",
            "Test set: Average loss: 0.2506, Accuracy: 9584/10000 (96%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 0.000772\n",
            "\n",
            "Test set: Average loss: 0.1999, Accuracy: 9602/10000 (96%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 0.022891\n",
            "\n",
            "Test set: Average loss: 0.2854, Accuracy: 9522/10000 (95%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 0.297966\n",
            "\n",
            "Test set: Average loss: 2.3023, Accuracy: 1028/10000 (10%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 16, 'lr': 1.2405731505851404e-06}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.297523\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:28: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:30: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:33: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.297998\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.308806\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.272793\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.306485\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.294720\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.300804\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.289621\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.309064\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.322193\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 16, 'lr': 0.0016627832401626518}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.297523\n",
            "\n",
            "Test set: Average loss: 2.3014, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.310721\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.303543\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.315909\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.315267\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.301607\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.300102\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.308984\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.306923\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.272882\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 8, 'lr': 0.000250259968171663}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.282034\n",
            "\n",
            "Test set: Average loss: 2.3030, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.296160\n",
            "\n",
            "Test set: Average loss: 2.3020, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.304039\n",
            "\n",
            "Test set: Average loss: 2.3015, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.313205\n",
            "\n",
            "Test set: Average loss: 2.3013, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.310725\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.311152\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.298070\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.306862\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.297445\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.271445\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'relu', 'batch_size': 16, 'lr': 7.876839921687535e-06}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.306580\n",
            "\n",
            "Test set: Average loss: 2.3008, Accuracy: 1263/10000 (13%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.289078\n",
            "\n",
            "Test set: Average loss: 2.2924, Accuracy: 1576/10000 (16%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.280120\n",
            "\n",
            "Test set: Average loss: 2.2845, Accuracy: 1967/10000 (20%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.270039\n",
            "\n",
            "Test set: Average loss: 2.2767, Accuracy: 2507/10000 (25%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.278897\n",
            "\n",
            "Test set: Average loss: 2.2690, Accuracy: 2915/10000 (29%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.260105\n",
            "\n",
            "Test set: Average loss: 2.2610, Accuracy: 3210/10000 (32%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.262949\n",
            "\n",
            "Test set: Average loss: 2.2525, Accuracy: 3451/10000 (35%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.235998\n",
            "\n",
            "Test set: Average loss: 2.2432, Accuracy: 3722/10000 (37%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.250264\n",
            "\n",
            "Test set: Average loss: 2.2332, Accuracy: 4192/10000 (42%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.221809\n",
            "\n",
            "Test set: Average loss: 2.2220, Accuracy: 4635/10000 (46%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 64, 'lr': 0.004280209797910964}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.294867\n",
            "\n",
            "Test set: Average loss: 2.3019, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.303238\n",
            "\n",
            "Test set: Average loss: 2.3012, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.299560\n",
            "\n",
            "Test set: Average loss: 2.3011, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.310313\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.307085\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.295163\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.303569\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.308352\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.305216\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.290966\n",
            "\n",
            "Test set: Average loss: 2.3010, Accuracy: 1135/10000 (11%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'relu', 'batch_size': 16, 'lr': 0.0001440741976650451}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.306580\n",
            "\n",
            "Test set: Average loss: 2.0427, Accuracy: 6063/10000 (61%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.043445\n",
            "\n",
            "Test set: Average loss: 0.6924, Accuracy: 8306/10000 (83%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 0.702769\n",
            "\n",
            "Test set: Average loss: 0.3863, Accuracy: 8901/10000 (89%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 0.355774\n",
            "\n",
            "Test set: Average loss: 0.3016, Accuracy: 9136/10000 (91%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 0.752067\n",
            "\n",
            "Test set: Average loss: 0.2595, Accuracy: 9255/10000 (93%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 0.338356\n",
            "\n",
            "Test set: Average loss: 0.2279, Accuracy: 9345/10000 (93%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 0.238181\n",
            "\n",
            "Test set: Average loss: 0.1986, Accuracy: 9441/10000 (94%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 0.304795\n",
            "\n",
            "Test set: Average loss: 0.1818, Accuracy: 9492/10000 (95%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 0.139509\n",
            "\n",
            "Test set: Average loss: 0.1594, Accuracy: 9556/10000 (96%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 0.549375\n",
            "\n",
            "Test set: Average loss: 0.1466, Accuracy: 9569/10000 (96%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 32, 'lr': 2.8805992785087772e-06}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.299167\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.291761\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.307346\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.288673\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.299580\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.294148\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.301901\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.294024\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.308181\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.315027\n",
            "\n",
            "Test set: Average loss: 2.3049, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'sigmoid', 'batch_size': 32, 'lr': 1.1404608017651114e-06}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.299167\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.291747\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 2.307364\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 2.288636\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 2.299573\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 2.294133\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 2.301896\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 2.293969\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 2.308264\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 2.315214\n",
            "\n",
            "Test set: Average loss: 2.3050, Accuracy: 982/10000 (10%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'relu', 'batch_size': 64, 'lr': 0.02146525992373627}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.306639\n",
            "\n",
            "Test set: Average loss: 0.0741, Accuracy: 9771/10000 (98%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 0.078850\n",
            "\n",
            "Test set: Average loss: 0.0502, Accuracy: 9832/10000 (98%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 0.030611\n",
            "\n",
            "Test set: Average loss: 0.0462, Accuracy: 9852/10000 (99%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 0.011267\n",
            "\n",
            "Test set: Average loss: 0.0345, Accuracy: 9897/10000 (99%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 0.037650\n",
            "\n",
            "Test set: Average loss: 0.0329, Accuracy: 9894/10000 (99%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 0.010037\n",
            "\n",
            "Test set: Average loss: 0.0298, Accuracy: 9904/10000 (99%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 0.002660\n",
            "\n",
            "Test set: Average loss: 0.0286, Accuracy: 9907/10000 (99%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 0.004788\n",
            "\n",
            "Test set: Average loss: 0.0288, Accuracy: 9904/10000 (99%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 0.107029\n",
            "\n",
            "Test set: Average loss: 0.0294, Accuracy: 9908/10000 (99%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 0.005248\n",
            "\n",
            "Test set: Average loss: 0.0303, Accuracy: 9909/10000 (99%)\n",
            "\n",
            "Args for this run:\n",
            "{'activation': 'relu', 'batch_size': 8, 'lr': 5.515926828391339e-05}\n",
            "Train Epoch: 1 [0/60000 (0%)]\tLoss: 2.295138\n",
            "\n",
            "Test set: Average loss: 2.1610, Accuracy: 5531/10000 (55%)\n",
            "\n",
            "Train Epoch: 2 [0/60000 (0%)]\tLoss: 2.200764\n",
            "\n",
            "Test set: Average loss: 1.3279, Accuracy: 7441/10000 (74%)\n",
            "\n",
            "Train Epoch: 3 [0/60000 (0%)]\tLoss: 1.265671\n",
            "\n",
            "Test set: Average loss: 0.5350, Accuracy: 8552/10000 (86%)\n",
            "\n",
            "Train Epoch: 4 [0/60000 (0%)]\tLoss: 0.553201\n",
            "\n",
            "Test set: Average loss: 0.3752, Accuracy: 8954/10000 (90%)\n",
            "\n",
            "Train Epoch: 5 [0/60000 (0%)]\tLoss: 0.233170\n",
            "\n",
            "Test set: Average loss: 0.3154, Accuracy: 9098/10000 (91%)\n",
            "\n",
            "Train Epoch: 6 [0/60000 (0%)]\tLoss: 0.401724\n",
            "\n",
            "Test set: Average loss: 0.2758, Accuracy: 9204/10000 (92%)\n",
            "\n",
            "Train Epoch: 7 [0/60000 (0%)]\tLoss: 0.281369\n",
            "\n",
            "Test set: Average loss: 0.2435, Accuracy: 9309/10000 (93%)\n",
            "\n",
            "Train Epoch: 8 [0/60000 (0%)]\tLoss: 0.114637\n",
            "\n",
            "Test set: Average loss: 0.2223, Accuracy: 9361/10000 (94%)\n",
            "\n",
            "Train Epoch: 9 [0/60000 (0%)]\tLoss: 0.244357\n",
            "\n",
            "Test set: Average loss: 0.1991, Accuracy: 9448/10000 (94%)\n",
            "\n",
            "Train Epoch: 10 [0/60000 (0%)]\tLoss: 0.311125\n",
            "\n",
            "Test set: Average loss: 0.1829, Accuracy: 9494/10000 (95%)\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "C-DoVDiNXY5Y",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# import pickle\n",
        "# with open('dump2.pkl', 'rb') as f:\n",
        "#     q = pickle.load(f)\n",
        "\n",
        "# print(q)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "KPoLKU9NU_it",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import os\n",
        "import warnings\n",
        "import itertools\n",
        "\n",
        "import numpy as np\n",
        "import pandas as pd\n",
        "import matplotlib.pyplot as plt\n",
        "\n",
        "from sklearn.gaussian_process.kernels import Matern\n",
        "from sklearn.gaussian_process import GaussianProcessRegressor\n",
        "from scipy.special import ndtr\n",
        "from scipy.stats import norm"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "lSzZvGrsd3gi",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import matplotlib as mpl\n",
        "mpl.rcParams.update(mpl.rcParamsDefault)  # reset any styling left over from earlier runs\n",
        "\n",
        "plt.style.use('seaborn-paper')\n",
        "# NOTE(review): the 'seaborn-paper' style was renamed 'seaborn-v0_8-paper' in\n",
        "# matplotlib 3.6 and later removed -- update the name if the runtime is upgraded.\n",
        "# %matplotlib inline\n",
        "\n",
        "# fig, ax = plt.subplots(figsize=(16, 6))\n",
        "# ax = plot_convergence(search_result, ax=ax)\n",
        "# plt.savefig(f\"conv{meth}.svg\", ax=ax)\n",
        "# ax.set_ylabel(r\"min $f(x)$ after $n$ queries\")\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Mm3HoQSSA2OG",
        "colab_type": "code",
        "outputId": "eb597ee5-96ab-44b5-90fc-a2015422d2df",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 53
        }
      },
      "source": [
        "# func_vals holds the minimized objective values; the later plots treat the\n",
        "# negated values as accuracies, so presumably the objective was -accuracy --\n",
        "# TODO confirm against the objective function definition.\n",
        "fun = np.array(search_result.func_vals) * -1\n",
        "fun\n"
      ],
      "execution_count": 35,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "array([0.1028, 0.0982, 0.1135, 0.1135, 0.4635, 0.1135, 0.9569, 0.0982,\n",
              "       0.0982, 0.9909, 0.9494])"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 35
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "JGsDepQ0jWKk",
        "colab_type": "code",
        "outputId": "458aa0b8-e811-4dd0-ebbc-9b779599ca60",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 215
        }
      },
      "source": [
        "# Best accuracy seen after each evaluation (running maximum).\n",
        "# Vectorized: replaces an O(n^2) loop of np.max(fun[:a+1]) per step;\n",
        "# .tolist() yields plain Python floats, matching the old .item() output.\n",
        "out = np.maximum.accumulate(fun).tolist()\n",
        "out"
      ],
      "execution_count": 36,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[0.1028,\n",
              " 0.1028,\n",
              " 0.1135,\n",
              " 0.1135,\n",
              " 0.4635,\n",
              " 0.4635,\n",
              " 0.9569,\n",
              " 0.9569,\n",
              " 0.9569,\n",
              " 0.9909,\n",
              " 0.9909]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 36
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "82oHtDOelAJy",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "def nnsvm(rcParams, i=0):\n",
        "    \"\"\"Mutate the given matplotlib rcParams in place for paper figures:\n",
        "    serif (Ubuntu) fonts, thicker lines, and every font size offset by i points.\n",
        "\n",
        "    NOTE(review): despite the name, this only styles plots for the nn/svm\n",
        "    figures; it performs no model work. Consider renaming in a follow-up.\n",
        "    \"\"\"\n",
        "    rcParams['font.family'] = 'serif'\n",
        "    rcParams['font.serif'] = 'Ubuntu'\n",
        "    rcParams['font.monospace'] = 'Ubuntu Mono'\n",
        "    rcParams['font.size'] = 12 + i\n",
        "    rcParams['axes.labelsize'] = 12 + i\n",
        "    rcParams['axes.labelweight'] = 'normal'\n",
        "    rcParams['xtick.labelsize'] = 10 + i\n",
        "    rcParams['ytick.labelsize'] = 10 + i\n",
        "    rcParams['legend.fontsize'] = 12 + i\n",
        "    rcParams['figure.titlesize'] = 14 + i\n",
        "    rcParams['lines.linewidth'] = 2.7\n",
        "\n",
        "# shrink all fonts by 4pt for the compact paper layout\n",
        "nnsvm(plt.rcParams, i=-4)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "uMuoXFQtpVOo",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "SPINE_COLOR = 'gray'\n",
        "\n",
        "def format_axes(ax):\n",
        "    \"\"\"Strip the top/right spines, soften the remaining ones,\n",
        "    move ticks to the bottom/left, and add a light grid. Returns ax.\"\"\"\n",
        "    ax.spines['top'].set_visible(False)\n",
        "    ax.spines['right'].set_visible(False)\n",
        "\n",
        "    for side in ['left', 'bottom']:\n",
        "        ax.spines[side].set_color(SPINE_COLOR)\n",
        "        ax.spines[side].set_linewidth(0.5)\n",
        "\n",
        "    ax.xaxis.set_ticks_position('bottom')\n",
        "    ax.yaxis.set_ticks_position('left')\n",
        "\n",
        "    for axis in (ax.xaxis, ax.yaxis):\n",
        "        axis.set_tick_params(direction='out', color=SPINE_COLOR)\n",
        "\n",
        "    ax.grid(alpha=.25)\n",
        "    return ax"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "hEmWwhRJpWU3",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        ""
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "F7QeDwmxjzhy",
        "colab_type": "code",
        "outputId": "7cf27c07-13d9-44ae-a5d3-875bafe5ef25",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 434
        }
      },
      "source": [
        "# Convergence plot: best accuracy found vs. number of objective evaluations.\n",
        "plt.plot(range(1, len(out)+1), out, mfc='gold', marker='*', markersize=20, linestyle='-.', lw=2.7)\n",
        "plt.ylabel(r'Max Accuracy')\n",
        "plt.xlabel(r'Number of calls to evaluate $f(x)$')\n",
        "plt.title('Max Accuracy achieved for neural network task')\n",
        "format_axes(plt.gca())\n",
        "plt.savefig(\"conv.svg\", bbox_inches=\"tight\")  # plain string: the f-prefix had no placeholders\n",
        "plt.show()"
      ],
      "execution_count": 39,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAi0AAAGhCAYAAACtc4RMAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdd3xUVd4G8OdOKmmTBmkQAgkEiDRBSkIERBCkrmABFVDhRbG+Lq4oi+jK6qordhEFQdkVlQivgCCIBVKQ3ntCCYSW3tvMPe8fITeZ1JlkZu5M8nw/n3z0zL1z55mTSfLjnnPPlYQQAkREREQ2TqN2ACIiIiJjsGghIiIiu8CihYiIiOwCixYiIiKyCyxaiIiIyC6waCEiIiK7wKKFiIiI7AKLFiIiIrILLFqIiIjILrBoIWplhg0bhr///e/1bo+Pj4eHhwf0er0VUxlavnw5wsLCGtznv//9L0JDQ+Hh4YFPP/3UOsFU9scff0CSJOh0OrWjmJ0tvLeZM2fioYceUu31qXEsWsgkw4YNgyRJWLZsmcHj+fn58PT0hCRJSE5OtniORYsWQZIkfPbZZxZ/rdYmNjYWBQUFcHBwUDtKvXQ6Hf7nf/4H7777LgoKCjB37ly1I7UYr776KoYMGaJ2jEZduHDBar9vyHawaCGT9ejRo1bRsnr1anTs2NEqr6/T6fDFF1/Az88PS5cutcpr1qesrEzV12+trl27hqKiIvTt27fJx9Dr9ZBl2Yypmk4I0SLPnhirvLxc7QhkJ1i0kMnGjx+P69evY/fu3cpjS5cuxZw5cwz2u3r1KsaNG4eAgAB4enqiV69eWLt2rbL9jz/+gLu7O/bu3Qug4hf32LFjMX78eDR0H8/169cjPT0d33zzDY4cOYKEhASD7ZcvX8aDDz6I9u3bw8vLC3369MGBAwcAVBQ8S5YsQY8ePeDp6Yn27dvjrbfeAgCsWrUK7du3NzhWzX91Dhs2DE899RQeeOAB+Pj44JlnnkFJSQnuvfdehISEwNPTE5GRkfjkk08MjpOVlYW5c+eiU6dO8PT0RLdu3bB161YkJyfD0dERFy5cMNh/3LhxeO655+p8/8eOHcOIESPQtm1baLVaDBw4EL/99pvBPidPnsSECRMQGBgIrVaLQYMG4dKlS8r2/Px8TJs2DVqtFh06dDAo/uo6Tf/111+jd+/e0Gq1iIqKwrfffgsAkGUZHTp0wOrVqw1ef8mSJejVq5fS3rx5MwYOHAgfHx906dIFH374ocH+W7duRc+ePeHh4YE77rjDIGtN27dvR2RkJACgd+/e8PDwQGpqKvR6Pd555x107doVWq0W/fv3x5YtW2q9r2+//RZdu3aFm5sbbty4Uev4lZ+DZcuWISwsDFqtFvfeey/y8vKUfXJycvDEE0+gY8eO8PPzw913341z584p2+saggsLC8Py5csBVJ0lWLFiBXr37g03Nzfs27cPf/zxB6Kjo+Hn5wcfHx/ccccdOHToUL19UVPle4yLi0PXrl3h6emJkSNHIi0tTdmnpKQEL7/8MsLDw+Hj44Pbb78dBw8eBFAx5PbGG29g165d8PDwgIeHB+Lj4zF58mS89NJLyjGGDx8Of39/pej77rvvEB4ermzftWsXYmNj4ePjg06dOmH+/PkoLS016ItFixZh9OjR8PT0xLvvvlvrvSQnJ6N79+544YUX6iwuo6KiAFR9Bh5//HEAwCeffIJbbrkFXl5eCAwMxMMPP4yMjAzleb///jv69+8PrVYLPz8/xMTEIDs7u87+/PLLLxEUFIQdO3Y03vlkHYLIBEOHDhULFiwQr7zyipg5c6YQQoj4+HgRGhoqUlJSBABx9uxZIYQQly5dEj/88IPIz88XZWVlYvny5cLR0VEcO3ZMOd7bb78tQkNDRUZGhnjttddE586dRXZ2doMZhg0bJiZNmiSEEGLgwIFi6t
SpyraioiLRpUsXMXPmTJGeni70er04fvy4uHDhghBCiAULFojw8HCxe/duodfrRWZmpkhKShJCCLFy5UoREhJi8FqLFi0SMTExBu/fzc1N/PTTT0Kv14vCwkJRVFQkvvzyS5GdnS30er3YtGmTcHZ2Fj///LMQQghZlkVsbKwYM2aMuHjxopBlWaSkpIjjx48LIYQYOXKkePnll5XXuHjxonBwcBAnT56s8/0fPXpUbNu2TRQVFYmSkhKxaNEi4eXlJa5fvy6EEOLatWvCz89PvPTSSyI3N1fodDqxZ88ekZ6errwHLy8v8euvvwq9Xi/i4uKERqNRvm+///67ACDKy8uVfunQoYPYu3ev0Ov1Ij4+Xnh6eor4+HghhBALFy4UQ4cONcjYvXt38cEHHwghhPjtt9+EVqsV27dvF3q9Xhw9elS0b99e/Oc//xFCCHHu3Dnh7Owsli1bJsrKykRiYqLw9/cXHTt2rPczcP78eYPPmhBC/Pvf/xYhISFi//79ory8XKxZs0Y4OTmJ/fv3G7yvSZMmiYyMDFFSUiJ0Ol2tY69cuVI4ODiI5557ThQVFYkrV66IiIgI8corryjfz2HDholp06aJzMxMUVJSIv72t7+J7t27i7KyMqWPFyxYYHDcjh07ii+++MIgf3R0tEhNTRU6nU6UlJSIhIQEkZiYKEpLS0VeXp6YPXu2CA0NFaWlpXV+b2qq3D5t2jSRk5MjcnJyRHR0tJg+fbqyz4wZM8SIESPEpUuXRHl5ufjoo49E27ZtlZ+7mp95IYRYunSpuPXWW4UQQhQUFAh3d3cRFhYm9uzZI4QQ4rHHHhOPP/64EKLi8+vm5ibee+89UVpaKs6cOSN69Oghnn32WYO+CAgIEElJSUKWZVFYWGjw3n7//XcREBAgli9fbtJnQAgh4uLixOnTp4VerxcXLlwQAwYMEA888ICyPTg4WHz55ZdClmVRWloqkpKSREFBgdI3Dz74oNDr9WLevHmia9eutY5P6mLRQiap/GV86dIl4enpKbKzs8W0adPE66+/Xu8vkep69eolPvzwQ4PHJk+eLHr37i3c3d3FwYMHG3z9EydOCABi8+bNQgghVqxYIZydncWNGzeEEEKsXbtW+Pr6ipKSklrPlWVZeHh4iLVr19Z5bGOLluq/AOszYcIE8fzzzwshhNi7d6+QJEnJWNO6detEUFCQ8oeoriKgMVqtVmzYsEEIIcQ777wjoqKi6t136NCh4pFHHjF4zN/fX3z77bdCiNp/GHv27Ck+++wzg/1nzZolHnvsMSFExR+P6kVPQkKCcHFxEZmZmUIIIcaPHy/mz59v8PzFixeLESNGKP9f+Qex0vPPP29y0dK1a1fx/vvvG+w3YcIEMWfOHIP3derUqXqPK0TF58DFxUUpQIQQYt68eWL06NFCCCH2798vnJycRH5+vrJdp9MJV1dXpZAztmipLGzrk5WVJQCII0eOGLyHxoqWixcvKo99/PHHolu3bkIIITIyMursg4iICLF69WohRN1FS0pKipAkSaSnp4uNGzeKoUOHimeeeUa8/vrrQgghQkNDxbp164QQQrzxxhuiT58+Bs9ft26daNOmjZBlWemLmp+JyuxLly4V7dq1E7/99luDfWPM75vK1/b19VXaYWFhYsGCBeLy5cu19p0xY4aYOHGiGD9+vBg+fLjIyspq8NhkfRweoiZp3749hg8fjn//+9/48ccf8dhjj9XaJzs7G7Nnz0anTp3g5eUFb29vHD9+vNYp+eeffx6HDx/GhAkT0KdPnwZfd+nSpQgNDcVdd90FALj//vvh6uqqnHY/f/48wsLC4OLiUuu5GRkZKCgoUIYWmqpTp04G7dLSUsybN08ZlvD29saWLVuU93n+/Hn4+Pigbdu2dR5vwoQJcHBwwIYNG6DT6bBixYpaQ23Vpaam4oEHHkBoaKjSr3l5eQav19h7DA4ONmi7u7sjPz+/zn3Pnj2Lv/71r/D29la+1q
xZgytXrgCoONU/YsQIrFixAkDFlT/33HMPfH19led/8MEHBs//17/+hatXrwKoGM6r2ac128a4dOmSwRAFAERERCA1NdXkY/v7+8PJyUlpV++fs2fPQqfToX379sr78fPzUzKYomaWI0eOYPz48QgJCYGXl5eyva5hrIZU//5Wz145aXXgwIEG34+0tDRcvny53uN17twZnTt3xi+//IJt27Zh1KhRGDVqFLZu3YrTp08jLS0Nd9xxB4D6vw/FxcVIT0+v971Xeu2113DPPfdg+PDhJr3nSuvWrUN0dDTatWsHLy8vPPzww8jKylKuhtuwYQPOnTuHfv36ISIiAosWLTIYCv3zzz+xefNmLF68GD4+Pk3KQJbDooWa7IknnsAbb7yBMWPGICgoqNb2+fPn49SpU9ixYwdyc3ORk5ODqKgog/kqubm5mDFjBh577DFs2rQJGzdurPf1CgsL8fXXX+P69esIDg5GYGAgwsPDUVxcjGXLlkGWZYSFheHChQt1TpD19/eHh4cHzpw5U+fxPT09UVhYaPBY5R/m6jQawx+bJUuWYOPGjdi4cSOys7ORk5ODMWPGKO8zLCwM2dnZBuPq1Tk4OGD27Nn4/PPPsWnTJpSXl2Py5Mn19sPs2bMhyzL27t2LvLw8ZGdnw8vLy+D1zp49W+/zTRUYGIhPP/0UOTk5yldBQQE2b96s7DNr1ix89dVXyMrKwtq1azFr1iyD58+fP9/g+fn5+Th+/DiAigK45pyemm1jdOjQASkpKQaPpaSkIDQ01OCxmt8/UwUGBsLZ2Rnp6ekG76m4uBhTp04FUPuzpNPp6iw8ama59957ER4ejmPHjiEvLw/nz58HgAbneJmaHagojqpnLyoqwvz58+vMVGnkyJHYtm0btm3bhrvuugvDhg3DwYMHERcXhwEDBkCr1QKo+D5Un98DVHwf2rRpY1C41/c68fHx2L59O55++ukG33ddz798+TLuvfdePP3000hNTUVeXp4y36ryWD179sQ333yDa9euIS4uDp999hlWrlypHOPOO+/E8uXLMW7cuFpzxUh9LFqoye666y788ssveO+99+rcnpubCzc3N/j5+aG8vBwfffSR8ocKqPglMmPGDERGRuKLL77AsmXLMH369Fq/8Cr997//RXFxMfbv349Dhw4pXzt37kRqaip++uknjBs3Dj4+Ppg7dy4yMjIghMCJEydw8eJFSJKEp59+Gi+99BL27dsHIQSysrKwa9cuAEDfvn2Rn5+P7777DrIs448//jCYOFyf3NxcuLi4oG3btpBlGWvXrsW2bduU7f3790d0dDQeeeQR5V+z58+fx8mTJ5V9Zs+ejd9//x2vvfYaZs6cCWdn5wZfz8PDAz4+PigsLMRLL72EgoICZfv06dNx+fJlLFy4EPn5+dDr9di3b1+9RVNjnnvuObz++uvYu3cvZFlGaWkp9u7di/379yv7TJo0CeXl5ZgxYwYCAwMN/pX87LPP4qOPPsKvv/4KnU4HnU6HY8eOYefOnQCAqVOn4ujRo1i+fDl0Oh3+/PNPfP311ybnnDVrFv7973/j0KFD0Ol0+P7777F582aDAsochgwZgltuuQVPPPGEUohkZ2fjhx9+QFFREYCK7/mGDRtw5coVFBcXY/78+UZdIZObmwsvLy9otVpkZWXhr3/9q1mzd+zYEZMmTcKTTz6JixcvAqiYlL1lyxblzFdgYCBSU1NRUlJi8NxRo0Zh/fr1yMzMxK233gp3d3cMGDAA77zzDkaNGqXsN23aNJw+fRofffQRysrKkJKSgoULF2LWrFmQJKnRjGFhYUhISEBCQgIeeuihevutbdu20Gg0OH36tPJYQUEBZFmGv78/XF1dcfbsWbz55pvK9rKyMqxcuVI546PVauHg4ABHR0eDY8+cOROrVq3CPffcg3Xr1jWamayHRQs1mSRJGDFiRK0rbiotXrwYxcXFCAgIQFhYGK5fv46YmBhl+1tvvYUjR4
5g9erVkCQJU6dOxUMPPYTJkyejuLi41vGWLl2K6dOnIyoqCoGBgcrXoEGDMGHCBHz66ado06YNfvvtNxQUFKBnz57QarV48MEHkZWVBQD4xz/+gVmzZuHBBx9UrmiKj48HUHEK/OOPP8a8efPg7e2NZcuW4ZFHHmm0H+bNm4cOHTqgY8eOCA4Oxq+//opJkyYZ9NOPP/6IoKAgDB48GJ6enrj77rsNhhKCgoIwYcIEHD58uMGhIQD48MMPcfjwYfj4+KBHjx4ICQkx+B4EBARg586d2L9/Pzp16gQ/Pz88/fTTtf4IGevZZ5/Fq6++iscffxy+vr4ICQnBCy+8YHAmwdnZGQ8//DA2bdqERx991OCP06RJk7B69Wq88soraNeuHdq1a4dZs2YpRVTnzp2xfv16vP/++/D29sbLL7+MJ554wuSczz//PJ588klMmTIFvr6+eOutt7Bu3Tr079+/Se+7Pg4ODvjll1/g5uaGgQMHwtPTE71798b69euV9/2///u/6NevH7p3747IyEhEREQgJCSk0WN/+eWXWLt2LTw9PTFo0CCMGTPGrNkB4JtvvkG/fv0wcuRI5Wq3L774QjkTcf/99yMyMhLBwcHw9vZWrs674447UFhYiDvvvFN5n3fddRdyc3MNipaOHTti27Zt+O6779CuXTvccccdGDNmDN5++22jMwYEBGDHjh1IS0vDxIkTlWKwujZt2uCNN97ArFmz4O3tjblz56Jbt2548803MX36dHh6emLGjBm1FouLi4tDVFQU3N3dMXToUMycORMzZsyodfwJEyZgw4YNmD17tjL8TOqThLnOOxJRs7z44os4cOAAfvnlF7WjEBHZJMfGdyEiS0tNTcWKFStqrXdCRERVODxEpLIHH3wQPXr0wLRp0ywyHEBE1FJweIiIiIjsAs+0EBERkV1g0UJERER2gUULERER2QUWLURERGQXWlTRsmnTJrUj2AxZlpGRkVHnLd3JfNjP1sF+th72tXWwn5umRa3TUt8N34iIiGxK2Rlo9LkAfNVOYryym/dtc+6qWoQWdaaFiIjILhRsgnPZtsb3syX5G4ECdUc0WLQQERFZmVS4Cc5lW9WOYZqCjRVfKmpRw0NEREQ2T58NFCfCCYDQ5wAaOxgiupm54v9zAAdvVWLwTAsREZE1FWyBBB0k6IDCLWqnMU7BFgC6ii8VM7NoISIisqbCqnkhUqGdXPVaPaeKQ0QsWoiIiKxF6G6etbipcGvFY7asZuaCn1XLzKKFiIjIWooSADlHaUpytbkitqpGZqiY2apFS25uLgYMGAAPDw8cO3bMYJter8ejjz6K2NhYPPfcc9aMRUREZB11Da2ofEVOo2wos1WLFjc3N/z000+YMmVKrW2bNm1CcHAw4uPjUVhYiF27dlkzGhERkeXZUAFgNBvKbNVLnp2cnNC2bds6tyUlJWHs2LEAgNGjRyMxMRGDBw+u91g6nQ56vd7gMVmWuSTyTbIsQwjB/rAw9rN1sJ+th33dRLmrIN14GpIoMv25ZWeAU1KDuwjJDaLdx4B2RhMD1sGGMms0xp1DsZl1WrKzs+Hl5QUA0Gq1yMrKanD/hIQE7Nixw+Axb2/vRp/XWsiyjPz8fAghjP4wkOnYz9bBfrYe9nVTTYCDNhKe+Y/BUX/arEfWOXRDvudy6PWRgFn/xtlOZn9/f6OOazNFi7e3N/Ly8gBUzH3x9W14sZ0hQ4bUOhMTFxfX6PNaC1mWIUkSfHx8+IvHgtjP1mG3/WwD92oxlVxyCo5yHrx8O9pPX9tMPw8G/PdB3HgSUt7XZjmi8JoBTbuPodW4YUXCeZTpzX0GrB0c8T1u176K7m7rzXLE6pnNzWaKlujoaGzfvh233347tm7dikceeaTB/R0dHeHoaBhfo9HYzw+ZFUiSxD6xAvazddhlPxf+BEgS4NpN7STGK9oM5/JiaDQD7KevbamfNR5A8FeA23Dg+pNAU4ZeAEByAwI/haSdgc
pBmA9/TUZ+qWUuNX4Tj+HkX8eiTdYzZs1sblb/RN59993Ytm0bZs+ejVWrVmHOnDkAgHHjxiE1NRWxsbFwdXVtcD4LEZFdsIF7tZiK98Rpvut5JVh37k68fWo1zuV1NPn5eqceQNhe885fMUK55/SK13XuYfqTnaOsktnqZ1o2b95s0J45c2ZFEEdHrFq1ytpxiIgsw0bu1WIS3hOnSXKLyrHrXAaSUjKRmJyBlPTCm1tc8KXDu1g8eCmmRPxq1LHWnr0Td42Ig5eL1nKBG+Jys2C6NhfI+8q452hnAgGfABYYDqrJZoaHiIhaFOVeLai4V4vXVFXjGOXmPXEAQBRuAbwfVDmQEVTo5+IyPfZeyEJiSgaSkjNx7EouhKh73xK9K5Ydu8fooqVjl1fQxsWzzm1LH+oHnQWv6mrj5FDxPxo3wO8F44sW3xesUrAALFqIiCyj5r1a7KFoqXlPHHsoWqzQz+V6GUcu5yAxueJMysHUHKMnxLo4avDErQeMfq0B/r8BDrF1bhvSxbgrbMwib63x++avBVwWWS5LNSxaiIjMrb57tUg2/Cu3vnvi2FNmM/WzLAucvp6PxOSKIZ/d5zJRWKZv/IkAHDQSerXXIibcH9ERfrg11Aeul+YBZUa+eN73gL91CoAG5ZtQtFgxsw1/GomI7FR992pxG6pepsbUd08cO8rc3H6+mluMxT+dxJ8pmcgsNLbKACIDPBEd4YeYcH8M7OwLT1enqo2lx4GyE4ZPkNwgt/sYhYUF8Cicb7i4W9kJoPRExdwStdSTGYGfAkLUvirKiplZtBARmVt9y57bcgHAzPBydcLWY9egk+uZoHJTe582ypmU6HB/tPV0qX/nmsMszlFAyPeAUzeU6rPg7jcc0tX7DYsEKw631Km+zJVFSZsBQNq9qmS2k4vwiYjsiA3dq8VoLTxzXkk5fjlxHa9uOI7R7+9Efkl5rX3cXRzRN7T21Uf+Hs4Y3zsY/7qnJ+L/NhwJL96Bt6b0wsQ+IQ0XLIDhMIt2JhC2x/CMROXVOl7VLhXO+77hY1qaDWfmmRYiImPlrGr6gmFG3KtFOQVvzrUumBkA4AVgpAYY2RUo6uSC8+feRlSPZ2rtFx3uj5NX8zGwky+iI/wRE+GHyABPSFITlkurHGZprI80bkDwKsBtWMX7VnOIyMYz80wLEZGxvGc2ffGtxlhqcS5mNnA6OxQTNr2HuJQRdW7/n9s74+ArI7Fi5m14bEgndAv0alrBAlQMs5jSR9XftykTYc3JxjPzTAsRkSmasvhWI5LSx+Gb1BdRLhcB2Ifhke3wwIDQWvv9duo6vtt7SWn/z+3h6NfRp9Z+n+1IwcHUbIPHnDTLMK3jvxDt/5NZMte1oFjN1333vj7wcKn9Z+b57w6hsKzx5ejNnXlt8p1YuOtxlMquiKpnoq17HXmbTONeMbRiyhomlZ+v7E/Ml8MUNp6ZRQsRkalqnhpv4r1aispdsPDPufghZQSA3JtfQKCXa537p2YWYevx60p7Up+QOvc7lJpjsF+lTUefwJSILvjHwKVwcyptcuZk6S30Cnq20df9l04G6pjy8dvpG8gpqj2npC7myFw51JF+LQbvT/XA4M5+0Lo5Nf685vJ7oWnPq1zcTQ02npnDQ0RETdWMYYzKYYof6hmmsJS45DsxYdN7OJNd+0xOYyozp4n7LJCsfs3JXH2oY+6wCIy+JdA6BQtZBIsWIqLmqOtKikasPXsnJm5aguTcJvwRNoPk3FBM2LQEccnGF0z2mLnOK1/IrklC1HfHBPuzZs0aTJ1qB0tlW4Esy8jKyoKvr6/93F7eDrGfrcMu+rn0OHD+FqN2zQs6BNmpe73bnR01cHOuPXpfUq5HSXnVyqxuzo5wdqzdHwWlOuiMWGZeU3YCXtf6mpzZ2Nf1cnWCRlN7EmtuUTkEmvanx5TM6HTcZgsWu/hM2yDOaSEiMgcT7tXiVf5/gLa3yS/h6uQA18qb2jWgrsmvdSr8P6
Nf25jMxr5us4ZnTMis+iJtZHYs74iIzMHUe7XYAmYmO8OihYioueq4V0tRuQv+Gv+/KPD5vOLqleoqF+JSU333xAn4EvkeH0LYUWYErQICV9pmP5NZsWghImquGkNDlVfZnCj6CzwCZtd9hZFai4dVquv+Mjevsil1nQoRutuuMtd7JZfamcmsWLQQETVXtT+M1a+yiQn3q3iQ95cxD3vMTGbFooWIqDmq3atl7ZXX8ULicyjRVywOF9PFv2q/ygXpKocx1By6qH5/maBVQNDKuldAZWayMSxaiIiao9q9Whx9ZuLO7u3g4eIIR42EAWG+tffn/WWaxh4zk9nxkmciouaodq+Wv/QF/tK3PXR6GcnpBfXfx4b3lzGdPWYms2PRQkTUHHXcb8XRQYNugV4NP4/3lzGNPWYms+PwEBEREdkFFi1ERERkF1i0EBERkV3gnBYiomZau+8Sdp3LxJAIf8RE+CPAy1XtSEQtEosWIqJm2nLsGn47dQPrDqQBALY/fzsi2nmqnIqo5eHwEBFRM5TrZew+l6m0/T2c0dnfQ8VERC0XixYiomY4n1EIvRBKe3C4PzQaScVERC0Xh4eIiJqha4AnDi8ahQMXc5CYnIG+od5qRyJqsVi0EBE1k4ujAwaH+2Fw5Q0SicgiODxEREREdoFFCxEREdkFFi1ERERkF1i0EBE10R+nbyCrsEztGEStBifiEhE1QWZBKWau3AsA6BHkhfv6t8fMmE4qpyJq2XimhYioCXZVW1DuxNU8XMktUTENUevAooWIqAkSkzMM2jER/iolIWo9WLQQETVBYnLVmRYnBwm3hfmomIaodWDRQkRkoktZRUjNKlLat4b6wM2ZUwSJLI1FCxGRiTg0RKQOFi1ERCZKqFW0cPl+Imtg0UJEZAJZFtiVUjWfxcPFEb3a8yaJRNbAooWIyASnruUjs9qCcgM7+cLJgb9KiayBP2lERCZISuF8FiK1sGghIjIBJ+ESqYdFCxGRkcp0Mnafz1La/h4u6BrgoWIiotaFRQsRkZEOX85BUZleacdE+EGSJBUTEbUuLFqIiIyUcJZDQ0RqYtFCRGQkTsIlUheLFiIiIxSW6nAwNUdph/m5IcS7jYqJiFofFi1EREbYcz4LOlkobbI+x9kAACAASURBVJ5lIbI+Fi1EREaovXQ/ixYia2PRQkRkhOrrs0gSMLgz7zdEZG0sWoiIGpFRUIpT1/KVdlSwF3zcnVVMRNQ6sWghImpEUrUbJAIcGiJSC4sWIqJGJNZcnyWcRQuRGli0EBE14vT1qqEhZwcNbgvzVTENUevlqHYAIiJbt35uNM7eKEBicgYyCkrRxtlB7UhErRKLFiKiRkiShK4Bnuga4Kl2FKJWjcNDREREZBesWrS8+OKLiI2NxcMPP4zy8nLl8eLiYowfPx5Dhw7FiBEjcP36dWvGIiIiIjtgtaLl8OHDSEtLQ3x8PLp164a4uDhl25YtW3DLLbdgx44dmDlzJlasWGGtWERERGQnrFa0JCUlYdSoUQCA0aNHIzExUdkWERGBwsJCAEB2djb8/Xk5IRGp7/iVXCTfKIAQovGdicjirDYRNzs7G0FBQQAArVaLrKwsZVuXLl1w4sQJREVFQQiBPXv2NHo8nU4HvV5v8Jgsy5Bl2bzB7ZQsyxBCsD8sjP1sHWr189s/n8KOMxkI9HJBdLg/XpvQA+4uLfv6BX6mrYP9bEijMe4citV++ry9vZGXlwcAyM3Nha9v1ToHX331FYYMGYJXX30VcXFxeP311/HWW281eLyEhATs2LGj1mtUL4ZaM1mWkZ+fDyGE0R8GMh372TrU6OdyvYzd5yp+n1zLK8XOMzdQUhCC0kLJKq+vFn6mrYP9bMjYERarFS3R0dFYsmQJpk+fjq1btyImJkbZJoRQAvv7+yM3N7fR4w0ZMgSDBw82eCwuLs6gGGrNZFmGJEnw8fHhD4QFsZ+tQ41+3nM+CyW6qn8FD+nSFn5+Lf8mif
xMWwf7uWmsVrT06dMHAQEBiI2NRWhoKObNm4c5c+Zg2bJlmDZtGu6//37ExcVBr9cbNRHX0dERjo6G8TUaDb/51UiSxD6xAvazdVi7n0P93DF/TDckJmdg74UsxET4t5rvMT/T1sF+Np0kWtAMszVr1mDq1Klqx7AJsiwjKysLvr6+/IGwIPazdajdz6U6PYQAXJ1a/kq4avd1a8F+bpqWPaOMiMgMXBxbfrFCZA9Y3hEREZFdYNFCREREdoFFCxEREdkFzmkhIqomPb8UUz5LQnS4H2Ii/BEd7g9fd2e1YxEReKaFiMhAUkoGLmYWYc2eS3jqm4P4Iv6c2pGI6CYWLURE1SQmZxi0Y8J5LzQiW8GihYjoJiEEEpMzlbazowb9w3xUTERE1bFoISK66WJmEdJyipV2/44+rWJBOSJ7waKFiOimhJpDQxEcGiKyJSxaiIhuSkph0UJky1i0EBEBkGWBpJSq+Syero7oGaJVMRER1cSihYgIwImrecgpKlfagzv7wUEjqZiIiGpi0UJEhDoudebQEJHNYdFCRAROwiWyByxaiKjVK9XpsfdCltIO8HJBeFt3FRMRUV1YtBBRq3fgYg5KymWlHRPhD0nifBYiW8OihYhavVqXOnPpfiKbxKKFiFo9zmchsg8sWoioVcsrKceRy7lKO7ytOwK1riomIqL6sGgholZt97ks6GWhtHmWhch2sWgholaN67MQ2Q8WLUTUqlUvWjQSMKizn4ppiKghLFqIqNW6kVeCszcKlHbP9t7QtnFSMRERNYRFCxG1Wom1LnXmWRYiW8aihYharcTkTIP2EM5nIbJpLFqIqFUSQhjMZ3Fx1ODWjj4qJiKixrBoIaJWSRbA8yO7YlKfYLT1dEH/MB+4OjmoHYuIGuCodgAiIjU4aCTc278D7u3fAUII5BXr1I5ERI3gmRYiavUkSYLWjVcNEdk6Fi1ERERkF1i0EBERkV1g0UJERER2gRNxiajVefK/B1CulxET4Y+YCH+Et3WHJElqxyKiRrBoIaJWpaRcj+0nr6NUJ2PbiesI0roiaf4dasciIiNweIiIWpUDF7NRqpOVdnS4P8+yENkJFi1E1KqkF5TCu9rlzTERvN8Qkb3g8BARtSoT+4RgfK9gnLiah8TkDAzpwvsNEdkLFi1E1OpoNBJuCdHilhCt2lGIyAQcHiIiIiK7wKKFiIiI7AKLFiIiIrILLFqIqFXIKCjFt3tScSmrSO0oRNREnIhLRK3CzjPpmL/uKACgg28b/H1sD9wVFahyKiIyBc+0EFGrkJCcofz/paxieLk6NbA3EdkiFi1E1OIJIZCUnKm0XZ00uLWjt4qJiKgpTCpacnJyLJWDiMhiUtILcS2vRGnfFuYLF0cHFRMRUVOYVLR06dIFs2fPxuHDhy2Vh4jI7JJSMgzaMRFcBZfIHplUtCQnJyMqKgr33XcfYmNj8f3330Ov11sqGxGRWSScrVG0hLNoIbJHJhUtWq0Wzz33HE6fPo0FCxZg3rx5CA0NxT//+U8UFhZaKiMRUZPpZYE/z1XNZ/F2c0KPYC8VExFRU5k8ETcvLw/vv/8+nn32WURFReGjjz7CtWvXcNddd1kiHxFRsxxLy0VeiU5pD+7sBweNpGIiImoqk9ZpmTNnDn788UdMmTIFGzZsQGRkJADgnnvuQffu3S0SkIioOapf6gxwPguRPTOpaOnatStOnz4Nrbb2nVF/++03s4UiIjIXTsIlajlMGh66/fbbodFUPSUvLw/79u0DAAQFBZk3GRFRM5WU67H3QrbSDvFugzA/NxUTEVFzmFS0zJkzB25uVT/wbm5uePzxx80eiojIHPZfzEaZTlba0eF+kCTOZyGyVyYVLbIsw8GhakEmR0dH6HS6Bp5BRKQezmchallMKlqcnZ1x9uxZpX3mzBk4OfH+HURkm5JqFC3REX4qJSEiczBpIu6iRYswZMgQjBkzBgCwdetWrFy50iLBiIiaI7eoHEfScp
V21wAPtPN0VTERETWXSUXL2LFjER8fj+3btwMAFi5ciPDwcIsEIyJqjl3nMiFEVZtDQ0T2z6SiBai47Llr166WyEJEZDaJNeezcOl+IrtnUtFy48YNLFq0CIcPH0ZJSdUdUw8cOGD2YEREzZFYbX0WB42EgZ19VUxDROZg0kTcxx57DGFhYcjIyMBrr72G4OBgjB071lLZiIia5GpuMc6lV90PrXd7LTxdedEAkb0zqWi5dOkSXnzxRbi4uGD8+PFYt26dMr/FGC+++CJiY2Px8MMPo7y83GDbt99+izvuuAPDhg3Drl27TIlFRGQgMTnToM35LEQtg8mXPAOAq6srMjMz4ejoiIyMjEaeVeHw4cNIS0tDfHw8unXrhri4OGXblStX8OOPP+LXX3/FH3/8gcGDB5sSi4jIQK35LCxaiFoEk+89lJmZiYceeggDBw6El5cX+vXrZ9Rzk5KSMGrUKADA6NGjsXLlSkydOhUA8PPPP8PFxQUjR45EUFAQli5dCg8PjwaPp9PpoNfrDR6TZRmyLNfzjNZFlmUIIdgfFsZ+tg5T+lkIYVC0uDpp0Lu9F79HRuJn2jrYz4aq3yKoISYVLf/5z38AAM8++yz69++P7OxsjB492qjnZmdnK/cn0mq1yMrKUrZdv34dGRkZ+OWXX7B06VJ8/PHHmD9/foPHS0hIwI4dOwwe8/b2NjhuaybLMvLz8yGEMPrDQKZjP1uHKf2cVVgOV8eqpfr7hniiMC8XhQ08h6rwM20d7GdD/v7GnQ01umjR6/Xo2bMnTpw4AQCIiYkxKZC3tzfy8vIAALm5ufD19TXYNnz4cEiShBEjRmDx4sWNHm/IkCG1hpHi4uIMjtuaybIMSZLg4+PDHwgLYj9bhyn97OsL/PFCAK7kFCMpJRO+7s78vWACfqatg/3cNEYXLQ4ODmjbti2KiooMbpporOjoaCxZsgTTp0/H1q1bDYqemJgYvPPOOwCAQ4cOoXPnzo0Hd3SEo6NhfI1Gw29+NZIksU+sgP1sHab2c3tfd9zn627hVC0TP9PWwX42nUnDQxEREYiJicG9995rMOfkmWeeafS5ffr0QUBAAGJjYxEaGop58+Zhzpw5WLZsGXr16oUOHTpg2LBhcHFxwX//+1/T3wkRERG1aCYVLbIso0+fPgY3TTTlNu+VZ1MqLVu2TPn/N954w5QoRERE1MqYVLTw5ohERESkFpOKlq+//rrOx6dPn26WMEREzbHuwGV0buuBniFaOGiMPwtMRPbBpKJl48aNyv+XlJQgISEBgwYNYtFCRKorLtNj/g9HUaaX4enqiEl9QvD6pFvUjkVEZmRS0bJ27VqD9vnz57FgwQKzBiIiaop9F7NQpq9YqCu/RIcyHRftImppmnWdVadOnXD8+HFzZSEiarKEGkv3R0f4qZSEiCzFpDMtGzZsUP5fr9dj9+7dcHFxMXsoIiJTPTSwIzr4uCEpJQO7UjIRHc77DRG1NCYVLe+9917VEx0dERERge+++87soYiITNXB1w0PDeqIhwZ1hCwLaDgRl6jFMalo+f333y2Vg4jIbFiwELVMJs1p+fzzzw1uSJiZmYkvvvjC7KGIiIiIajKpaPn0008Nbjzm5+eHTz/91OyhiIiIiGoyqWgRQtR6TK/Xmy0MEZGpyvUyZLn27yYianlMKlqCgoLw/fffK+3vvvsOQUFBZg9FRGSs9QfTcNs/t+Opbw5gzZ5UZBaUqh2JiCzE5KuHJk2ahL/97W8AADc3N/z4448WCUZEZIyk5AxkFpZh05Gr2HTkKsLbesDPg0sxELVEJhUt3bt3x4kTJ3D69GkAQGRkJBwcHCwSjIioMUIIJKZkKm03Zwf06eCtYiIisiSThoc2btyI/Px89OjRAz169EBeXh5++uknS2UjImrQ2RsFSM+vGg4a0MkXzo7NWuibiGyYST/dCxcuhLd31b9ivL29sXDhQrOHIiIyRsJZw6X7Y7gKLlGL1qx/kkiSxK
uHiEg1SSk1ipYIFi1ELZlJRYunpyeSkpKUdmJiIjw9Pc0eioioMTq9jD/PVS126evujG6B/H1E1JKZNBH37bffxl/+8hd069YNQggkJydj/fr1lspGRFSvw5dzUVCqU9rR4X5cvp+ohTOpaBk8eDBOnjyJXbt2AQCio6Ph4eFhkWBERA1JSubQEFFrY/KcFh8fH9x9990ICwvDP/7xD4SEhFgiFxFRgxJqFC1DWLQQtXgmFS0FBQVYvnw5Bg0ahL59+8LFxQXbt2+3VDYiojoVlelwMDVHaXfwbYMOvm4qJiIiazCqaElMTMSjjz6KDh064Oeff8bChQsRFBSEN998Ez179rR0RiIiA3svZKNMLyttXupM1DoYVbTExsbi8uXLOHnyJOLi4jB27FhoNFzAiYjUwfksRK2TUZXHihUrUFxcjNtuuw0LFizA2bNnLZ2LiKheNeezRIf7qZSEiKzJqKLlkUceQXx8PLZv3w6dTofbb78d165dwxdffIHs7GxLZyQiUmQVluHE1Tyl3T3IizdIJGolTBrjiYyMxFtvvYXLly9jzZo12LhxIzp06GCpbEREtexKyYQQVe0YnmUhajVMWqelkoODAyZOnIiJEyfi6tWr5s5ERFSvxJpL93fhfBai1qJJRUt1QUFB5shBRNZSdqbiv85d1c1hirIz0OhzAfgisdp8FkeNhAFhvurlIiKr4iVARK1N/kagYJPaKUxTsAnOZdtwObsIFzOLlIf7hnrD3aXZ//YiIjvBooWotSnYWPFlR6TCTXAu24rElEyDx3mpM1HrYlLRkpCQUOuxtWvXmi0MEVmYPhsoTgSKEgB9TuP724KbmZ3Kd+Pg+QsGm1i0ELUuJhUtM2fOxNtvvw0AKC8vx9y5c5U2EdmBgi0AdBVfhVvUTmOcgi2QoIMEHRyLtyoPuzs7oE8HbxWDEZG1mVS07NmzBwkJCbj77rsxePBgaDQaJCYmWiobEZlbYbW5LPYyRFQt88B2Scr/D+jkCycHjnATtSYm/cT7+vpi+vTpSEpKwrVr1/DUU0/B2dnZUtmIyJyE7uaZlpsKfq54zJbVyDym02HMGBSCzv7uHBoiaoVMmnb/9NNPIzExEfv27cOpU6cwevRoLF68GA899JCl8hGRuRQlAHK1eSzyzfktbkPVy9SYGpmdpRy8NioXmDQMsiwaeCIRtUQmnWkpLS3Frl27EBERgXHjxmHnzp1YunSppbIRkTnVNRxk60NEDWTWaCQrhyEitZlUtHz++edwcam6x0doaCh27txp9lBEZAEtrGghotbH5FWZ9uzZg0OHDqGkpER57JlnnjFrKCIyQc4q4PqTgChqdNdays4Apxo5YyG5AYGfAtoZTYpXJ3vMTESqM6loeeONNxAXF4fU1FQMHToUv/zyC0aMGMGihUhN3jOBNgOAtHuBshPmPbZzFBDyPeDSw7zHtcfMRKQ6k4qWb775Bvv27cOgQYPwww8/4PTp03j55ZctlY2IjOXSAwjbC1ybC+R9ZZ5jamcCAZ8AGjcAwKlrefhu7yXzHPsmR+krjG23GH28fjTPAWtkJqKWxaSixdXVFa6urpBlGUIIREZGIiUlxVLZiMgUGjcgeBXgNqzpQy8ASvWucGn/Wa2hldTMIqxMvNDsmDV9gdmYEtEJ/xz8GVwcShp/Ql04HETUKpg0EbdNmzYoLy9Hnz59MG/ePLz33nvQ6/WWykZETeE9s+Ksi7PpwyOns0Ox+PhXVv/jH5d8Z8XrNiEznKMq3i8LFqIWz6SiZenSpSgrK8O7776LvLw8JCYmYvXq1ZbKRkRNVTlc5GX8H/K1Z+/ExE1LcLW4swWD1e9qcWeTM0M7Ewjbw/krRK2EJIRoMSs0rVmzBlOnTlU7hk2QZRlZWVnw9fWFRsOlzi3F5vu59Dhw/hajdk12/xOlDt3g6eKEUL/ac0LySspxKatpQ07GUF7XhMzodJwFi5nZ/Ge6hWA/N41Rc1qef/75BrcvWb
LELGGIyMzyjL8Le0SbnwH/gfVu93J1QlSw1hypGmZCZuSvBVwWWS4LEdkUo8q7999/H4mJifD09IRWq631RUQ2Kt+EAiDve8vlMIU9ZiYiqzDqTMuvv/6KL7/8Et988w3uu+8+PProowgPD7d0NiJqjtLjtdZAKSp3wcI/5+IvfYMwxPNVwyuMyk4ApSfUHW6pIzMkN8jtPkZhYQE8CudDsrXMRGQ1Rp1pGT58OFavXo39+/cjNDQUDz74IIYPH47du3dbOh8RNVWNYZbT2aGYsOk9bLhwJ3p0f7buK4xMOcthCTWHhqpdGVTqOhUidLftZSYiqzFp9o+XlxcmTpyIiRMn4tSpUzh16pSlchFRc1X7Y155ZVBybiiGR7aDr7tz3VcYqT3cUr0AqevKIFvMTERWY1TRotfrsX79eowbNw4jR46Eg4MDDhw4gBkzuC4CkU2qHGaR3PBlyiK8kPgcSvSuAIDJ/dpX7Ve5IF3gyooF2iqHW1TOjKBVQNDKule2taXMRGRVRs1pCQkJQWhoKB555BHExMQAANLT05Geng4A6NWrl+USEpHp8tYCzlHI9vkP3vjqCoCKlQ183Z0xPLJd7f2r3wtIrStybmY2+r5BtpCZiKzKqKLF1dUV6enpePvttyFJEqov7SJJEs6dO2exgETUBBp3IGwP1iVdh05OUx6e0DsYzo71nGCtHHrJ/sRKIWu4mdmk+wapnZmIrMqoouXChQsWjkFEZuX3AgDgh/2XDR6eUn1oqC4aN+W5VtfU11UzMxFZFZfhI2qhTl7Nw4mreUo7MsATUcFeKiYiImoeFi1ELVTNsyyT+4VAkiSV0hARNR+LFqIWSKeX8X+HrihtjQRM6hOiYiIiouZj0ULUAu08m46MglKlfXvXtmjn5apiIiKi5mPRQtQC/bA/zaA9+dZGJuASEdkBFi1ELUxuUTl+OXFdaXu6OmJkjwAVExERmQeLFqIWZuORKyjTy0p7XK9guDo5qJiIiMg8rFq0vPjii4iNjcXDDz+M8vLyWtv/9a9/oX///taMRNTi/HCg5tosnIBLRC2D1YqWw4cPIy0tDfHx8ejWrRvi4uIMtufn5+Po0aPWikPUIqWkF+Bgao7S7uTvjltDfVRMRERkPkatiGsOSUlJGDVqFABg9OjRWLlyJaZOnaps/+CDD/DUU0/h6aefNup4Op0Oer3e4DFZliHLcj3PaF1kWYYQgv1hYbbWz3H7Lhm07+kbDCGEwa037JGt9XNLxr62DvazIY3GuHMoVitasrOzERQUBADQarXIyspStuXm5uLo0aP4+9//bvTxEhISsGPHDoPHvL29DY7bmsmyjPz8fAghjP4wkOlsqZ/1ssC6akNDEoDhndxbxM+ELfVzS8e+tg72syF/f3+j9rNa0eLt7Y28vIolxXNzc+Hr66tse//9940+w1JpyJAhGDx4sMFjcXFxBsdtzWRZhiRJ8PHx4Q+EBdlSPyckZ+B6fpnSHtTZDz3CglRMZD621M8tHfvaOtjPTWO1oiU6OhpLlizB9OnTsXXrVsTExCjbkpOTsWvXLgDA2bNn8c9//hMLFixo8HiOjo5wdDSMr9Fo+M2vRpIk9okV2Eo/62Wga4AHzlwvAFBxc0S1M5mTrfRza8C+tg72s+msVrT06dMHAQEBiI2NRWhoKObNm4c5c+Zg2bJlWL16tbJf//79Gy1YiKi24d3aYVhkWxxLy8P6g2kYfUug2pGIiMzKakULALzzzjsG7WXLltXaZ9++fdaKQ9TiSJKEnu216Nleq3YUIiKz4zkpIiIisgssWoiIiMgusGghIiIiu8CihciOCSEw9fM/8c7WUziXXqB2HCIii2LRQmTH9l3Mxq5zmfjk9xTc8e4O/P3/eCsMImq5WLQQ2bEf9hveHLFHEK8aIqKWi0ULkR3LKqxaAdfZUYOxvVrGCrhERHWx6jotRGRen0/vj8vZRVh/IA0FpTpo2zipHYmIyGJYtBDZufY+bn
h6RBe1YxARWRyHh4iIiMgusGghIiIiu8CihYiIiOwCixYiO3MsLRcvrTuCfReyIIRQOw4RkdVwIi6Rnfl+3yWs2VPxFebnhg8e6IveHbzVjkVEZHE800JkR0p1emw4fEVpp+UUo4Ovm4qJiIish0ULkR35/dQN5BSVK+3hke3g6+6sYiIiIuth0UJkR+JqLNs/pV97lZIQEVkfixYiO5FRUIo/TqcrbV93ZwyLbKdiIiIi62LRQmQnfjx0BTq56mqhCb2D4ezIH2Eiaj34G4/ITtS8ozOHhoiotWHRQmQHTl7Nw4mreUo7MsATUcFeKiYiIrI+Fi1EdqDmWZbJ/UIgSZJKaYiI1MGihcjG6fQy/u9Q1dosGgmY1CdExUREROpg0UJk43aeTUdGQanSvr1rW7TzclUxERGROli0ENk4rs1CRFSBRQuRDcspKsP2EzeUtperI+7sHqBiIiIi9bBoIbJhG49cRZleVtrjegfD1clBxUREROph0UJkw2pdNXQrh4aIqPVi0UJko1LSC3DoUo7S7uTvjltDvVVMRESkLhYtRDaq9lkWrs1CRK0bixYiG6SXBdYfTFPakgT8hUNDRNTKsWghskG7UjJxNbdEaQ/u7IcQ7zYqJiIiUh+LFiIbFLf/kkGba7MQEbFoIbI5+SXl+Pn4NaXt7uyA0bcEqpiIiMg2sGghsjFHL+dCFlXtMT2D4ObsqF4gIiIbwd+ERDYmOsIfe1++E5uOXsEP+y9zbRYioptYtBDZIK2bEx4c2BEPDuyodhQiIpvB4SEiIiKyCyxaiIiIyC6waCEiIiK7wKKFyEas/vMifj52DWU6ufGdiYhaIU7EJbIBxWV6vLXlFApKdfBxc8J9t3XAS2O6qx2LiMim8EwLkQ3YduIaCkp1AIDsonJczSlp5BlERK0PixYiG/DzsWsG7clctp+IqBYODxHZgPcf6IOJp24gbn8aTl7Nw5AIf7UjERHZHBYtRDbAxdEBo28JwuhbglCq08NBI6kdiYjI5nB4iMjGuDg6qB2BiMgmsWghIiIiu8CihYiIiOwCixYilZTq9Ph2Typyi8vVjkJEZBdYtBCp5LeTNzB/3VHc9s/teOqbAzh6OVftSERENo1FC5FKfjhwGQBQppOx6chVXM0tVjkREZFtY9FCpIKMglL8cTpdafu6O2NYZDsVExER2T4WLUQq+PHQFehkobQn9A6GsyN/HImIGsLfkkQqiNt/2aA9hcv2ExE1ikULkZWduJKHk1fzlHa3QE9EBXupmIiIyD6waCGyssoJuJUm39oeksRl+4mIGsOihciKyvUyfjyUprQdNBIm9g1WMRERkf1g0UJkRTvPpCOjoExp397FH+08XVVMRERkP1i0EFlRraEhTsAlIjIaixYiK8kpKsP2EzeUtperI+7sHqBiIiIi+8KihchKNh65ijK9rLTH9Q6Gq5ODiomIiOyLVYuWF198EbGxsXj44YdRXl51k7iNGzdi4MCBGDJkCJ599llrRiKymh/2175qiIiIjGe1ouXw4cNIS0tDfHw8unXrhri4OGVb7969kZiYiISEBNy4cQP79u2zViwiq0hJL8ChSzlKu5O/O24N9VYxERGR/XG01gslJSVh1KhRAIDRo0dj5cqVmDp1KgAgNDRU2c/Z2RkaTeO1lE6ng16vN3hMlmXIslzPM1oXWZYhhGB/WJix/Ry375JB+56+wRBCQAhRzzOoOn6erYd9bR3sZ0PG/N0HrFi0ZGdnIygoCACg1WqRlZVVa5+9e/fixo0buPXWWxs9XkJCAnbs2GHwmLe3d53HbY1kWUZ+fj6EEEZ/GMh0xvSzXhZYV+2qIQnA8E7u/KyagJ9n62FfWwf72ZC/v79R+1mtaPH29kZeXsXS5bm5ufD19TXYfvnyZTz33HNYv369UccbMmQIBg8ebPBYXFxcreO2VrIsQ5Ik+Pj48AfCgozp5/izGbieX7U2y6DOfugRFmStiC0CP8/Ww762Dv
Zz01itaImOjsaSJUswffp0bN26FTExMcq2/Px8PPDAA1i2bBnatWtn1PEcHR3h6GgYX6PR8JtfjSRJ7BMr53KRvQAAFA5JREFUaKyf1x9MM2hP6dee35Mm4OfZetjX1sF+Np3VeqpPnz4ICAhAbGwsjh8/jsmTJ2POnDkAgPfffx/nz5/HU089hWHDhtUa9iGyV/kl5fj5+DWl7ebsgNG3BKqYiIjIflntTAsAvPPOOwbtZcuWAQAWLlyIhQsXWjMKkVVsOXoNJeVVE+3u7hkEdxer/tgREbUYPCdFZEFxddzRmYiImoZFC5GFXMoqwp7zVVcIhXi3wcBOnChORNRULFqILKS9TxusfXwwHritAzxdHDH51hBoNJLasYiI7BYH14ksRJIk3Bbmi9vCfLFofJTBfYeIiMh0LFqIrKCNswPagDdHJCJqDg4PERERkV1g0UJERER2gUULkZmdupaHy9lFascgImpxOKeFyMz++dNJxJ/NwODOfpjcrz0m9gmGkwP/fUBE1FwsWojM6GpuMRKSMwAAu85l4lxGAf7SN0TlVERELQP/+UdkRusPpkGIqvakviFw4NosRERmwTMtZDvKzlT817mrujlMUXYGGn0ugIqVbu++JQiFpTqsP5CGK7klmMJl+4mIzIZFC9mO/I2AJAG+z6udxHgFm+BcVgzgNgBAmL87XrirG/46MhIHL+WgS4CnuvmIiFoQDg+R7SjYWPFlR6TCTXAu21rrcY1GQr+OPiokIiJquXimxRgtYNjC5umzgeLEm/+fAzh4q5vHGDczOwEQ+hxAYyd9TURkp3imxRj5G4GCTWqnME3BJjiXbVM7hfEKtgDQVXwVblE7jXEKtkCCDpI9ZSYismMsWozRgoYtbFZhtaLQXvq6Wuai7B8hql82REREZseipTGVwxZFCRXDFvagctiifLd9ZBa6m2dabir4ueIxW1Yjsy5vC+7+4Hd8vjMFN/JKVAxGRNRysWhpDIctLK8oAZCrFVdytfkttqpGZm+XAniJPXhj8yl8+keKisGIiFouFi2NsfNhC6nQDubi1NWvtt7XdeQb0WEPAGBKP67NQkRkCSxaGtIChi1QuNX2M7egoqVboCeigr1UCERE1PLxkueG1Dds4TZUvUyNqZFZUjNzzirg+pOAaMIdj8vOAKcaWf5ecgMCPwW0M5oUr07NyByuTcPPo4cCpxvYyRKZiYhaCRYtDanvDIAtFy22lNl7JtBmAJB2L1B2wqyHLtF0R5rbKpQVdUOwczm0bZxq7XM5uwj5JVVnmbq084BjHXdbPnM9H3q58sqfe+Ds3gPti2bART5l1sxwjgJCvgdcepj3uERErQSLlobUVwC0+7f1sxjL1jK79MAe8TP8y55HZ+c4sxxy7dk7sfDPx1GiTweQjo+n9cW4XsG19ntz8yn8dPSq0t739zvh7+FSa7+pn/+JzMIyg8dcHRZj8eClmBLxq1kyQzsTCPgE0LiZ53hERK1Q6yxaWtmwhaUy7z6XiS3HriGzsAxZhaWYNyoSfUNrL13/8Y6r2HlmJqZEtMc/Bi6Fm1Opqe8AAFBU7oKFf87FDykjmvR8U5ToXTEv4X/x57WezcrM4SAiIvNpnUWLBYctLDYEYMHMeXIkfs//COdSOyKr8BiyCsuQWViKrMIyZBWW4S99Q7BgbO33c/p6PlYlXVDa9/UvqrNo8XN3BgDEJd+JQ+ld8emwf6GrT6pJGU9nh+LJP+YjOTfUtDfXTM3JzOEgIiLzap1FC1DxhyRsL3BtLpD3lVkOebLoHsRfexW6VGcAyUY/T4KEJ4aF13pclgU+21m15keItxYTe5k3c9VQSxmAs3XucyO/7rMMvjeLkUpZNYZY6tovOTcUEzYtMWno5WDuJGzOWIAhUW0wpI7tYX7udT5vaGRbtPWsGg5ydXKoc7/7b+uAojJ9AwnCsDbjNozVLEYf7Y9GZeZwEBGR+bXeogWo+IMSvApwG9b0oRfUHLYw8V
/jADQS6ixaBIC3f666FCX6/9u7+6Coyr4P4N9VeUcCdtFIUYY0wGIXcKVcRG51YgilJxsVU2zMsTQraobRwcYEQ6OZTMt5GnOmxBKiZBJfbvPWcZSBkjFNdsQgBwszMtRkA9+AWK7nD9vz8Lq7IHA45/5+ZpzZc/Y6Z7/XdWbwt9d1dvdhLf4ncswAZLbvxi3HxQgANN7t/qPVc/SBCA/0gdbLFf7//NO5TgBqI53KGhW5CVF9mK1YYAxyqt2axDDnTti8CahxsmjxX82ChYion/13Fy0297H0IteyxWBk9nEfAa23G0b7uHf7vH6sL/792jSpEOlpJiNqnF/XZaPrRc4HvlkIuGU6336gNBY633aoZCYiUhEWLTZ9WC76/6WV7v9TH3D/ZL5avQyjxVdOHfKf3xKRV7MGE8b6ICbMtcPsh9bLDVrve/v8vFzh0s3Hg9vzdhuBx8Y80LfsN3tRADTuAXRDoABQYmYiIhVh0dLeME9Au9rpomX8xPX4+JHQgYujAXa9MEXa9vV07aaRJ9xHZwB1zhUtidPfR+KTMt8Y2vxj19kh26dshOi67NVSCTRXyntDaw+Z20b9L27fvgXv2xnQDLXMREQqw6Kls14sAcTojgO6uAGLotFo8K/QUQ7bPfD3PudPOhSWLTqPcedP2XS37CV37p4yu4Sh2VoPL+0MaP5IGVqZiYhUhr891FlvlwCGAqVlbp/3gaVA8PcdZyRsS3U+7b7bRO7cSsxMRKQyLFra62nZInAX8GDuvcft2ZYA5NTTssXonbjpvQ1iqGW25bWNa2Bu95+ysX2yyzbucuZWYmYiIhVi0dJed0sAwafvfZup79J7j1073aPQm1mOgWAnc7P7cxDjTg2tzI2FHcfVkfbjLlduJWYmIlIhFi3tKXEJQGmZh3l1zeiIrQ+dZ40GixIzExGpEIsWGyUuASgxs7aPX7pm+2SXHJSYmYhIhVi02ChxCUCJmYmIiPqIRYuNEpcAlJiZiIioj/g9LTZ9ncaXe9miL7hsQURECsSZFiIiIlIEFi1ERESkCCxaiIiISBFYtBAREZEiqOpG3Pr6ehQUFMgdY0gQQqC+vh7+/v7QaDRyx1EtjvPg4DgPHo714OA4dzRy5EjMmTPHYTuNEEIMQh4aZM3NzXj33XeRkZEBNzc3ueOoFsd5cHCcBw/HenBwnPuGy0NERESkCCxaiIiISBFYtBAREZEisGhRqeHDhyM+Ph7Dhw+XO4qqcZwHB8d58HCsBwfHuW94Iy4REREpAmdaiIiISBFYtBAREZEisGghIiIiRWDRQkRERIrAooWIiIgUgUULERERKQKLFhX6/vvvMXXqVEyfPh3PPfcc/v77b7kjqVpBQQECAgLkjqFqxcXFmDVrFmbMmIGioiK546hWW1sbli5diri4OEybNg0//fST3JFUo6GhATExMfD29sb58+cBAIWFhTCZTJg1axZqa2tlTqgMLFpUKCgoCMePH0dJSQmCg4Oxf/9+uSOpltVqRWFhIYKCguSOolp3797F+++/j8OHD+PEiROYO3eu3JFUy2w2o7m5GaWlpcjJycGWLVvkjqQanp6eOHToEObNmwcAaG1txZYtW1BcXIy3334b2dnZMidUBhYtKhQYGAgPDw8AgKurK4YN42UeKAUFBZg/fz7HeACVlZXBw8MDycnJmDt3Lurq6uSOpFpjx46FEAJCCFgsFuh0OrkjqYaLi0uHGdnq6mqEh4fD1dUVsbGxOHfunIzplIN/aVXs119/xdGjR5GcnCx3FFWyWq3Ys2cPUlJS5I6ialevXsXFixdx8OBBvPjii8jKypI7kmrpdDq4uLggLCwMr732GlatWiV3JNWyWCzw8fGRtq1Wq4xplINFi0o1NjZiyZIl2LVrF1xcXOSOo0p5eXlYsGABZ1kGmK+vL2JjY+Hq6opZs2bhxx9/lDuSah09ehQjRozAhQsX8PXXXyM9PV3uSKrl6+uLxsZGaZu/QeQc/r
VVodbWVixcuBCZmZkIDQ2VO45qVVZW4vPPP0diYiKqq6uRlpYmdyRVmjJlCqqqqiCEgNlsRkhIiNyRVEsIAa1WC+DerEtDQ4PMidRr4sSJqKqqQktLC06ePAm9Xi93JEXgDyaq0O7du/HGG28gIiICAPDyyy9zCWOAGY1GnDlzRu4YqvXRRx/hq6++gkajwc6dO/Hwww/LHUmVWltbsXjxYtTV1aG5uRlbtmyByWSSO5ZqJCUlwWw2Y/z48VixYgU8PDzw4Ycfwt3dHZ999hlv6HcCixYiIiJSBC4PERERkSKwaCEiIiJFYNFCREREisCihYiIiBSBRQsREREpAosWIiIiUgQWLURERKQILFqIiIhIEVi0EPWz4OBghIWFobW1VdpnNBpRXFzcr6+j0Wjw119/9es5Hdm/fz/Cw8MRGRmJioqK+z5f+z70tj9ZWVloamq67wz9oT+uxf32Jz8/HxEREYiOjobZbMb8+fNRVlbm8Li4uDjU1NT0+XWJBhOLFqIB0NzcjE8//VTuGA61L6yc8fHHH2P9+vUwm83Sz0TIZcOGDUOmaOkP99OftrY2pKen45tvvsHZs2fR0tKC+vp6TJ061eGx6enpyMzM7NPrEg02Fi1EAyArKwvZ2dm4c+dOl+c6vyvX6XS4dOmS9NymTZvw+OOPIzg4GPv27UNOTg6MRiMmTpzYZbZm8+bNiIqKwiOPPIL8/Hxp/+nTpzFz5kwYjUZERUWhsLCww+tnZmZiypQpWLt2bZd8R44cQXR0NPR6PeLj41FZWQkASEtLQ2lpKd58881uf4+mrKwM06ZNg8FggF6vx/79+wEAixcvhtFohF6vx+zZs1FXV2d37O7evYuUlBRMmjQJBoMBCQkJXdqsXLkSwL1ZgsjISFy7ds1u9s56Gp9Nmzbh1VdfldrdunUL/v7+uH79utN9sXd9ezpHT/2xdx1tLBYLwsLCcPPmTSQnJ2P79u3YsWMHFi1aJLVZvnw5MjIyAACXL19GeHg4SktLAQCzZ8/G4cOH+eOIpAyCiPrV+PHjRXl5uUhNTRUbN24UQggxefJkceLECSGEEACExWKR2mu1WlFTUyM998EHHwghhDh27Jjw8vISubm5Qggh9uzZI4xGo3QcALFu3TohhBA///yz8PPzEzU1NcJisYjIyEhx5coVIYQQ169fF0FBQaK2tlY6bsOGDd1mv3r1qvD39xfnzp0TQgiRl5cnwsPDRVtbmxBCiPj4eFFUVNTluBs3bohRo0aJkpISIYQQVqtV3LhxQwghxLVr16R2OTk5YsWKFR36YBsL2+O9e/eKhISEDufuTudxdJTdxt74XL58WQQEBIimpiYhhBA7d+4Uzz77rHRsT33prh827a+vs+dwlLOzgoICkZKSIm2HhISIiooKafvKlSsiICBAnD17Vjz22GPiyJEjHY6fMWOGOHjwYJfzEg01I+QqlojULjs7GzExMdK7aGfZfpHbaDTi9u3bWLhwIQAgJiYG1dXVHdouX74cABASEoLp06ejpKQEOp0Ov/zyC5566qkObS9cuIAxY8YAAJYtW9bta586dQoRERHS0s/ixYvxyiuv4Pfff8fYsWN7zFxWVobQ0FDExcUBAIYNGwZ/f38AwBdffIHdu3ejqakJTU1N0Ol0dvtvMBhQVVWFVatWIT4+HklJSXbb9zb7yZMnexyfmTNnIioqCgcOHMD8+fOxa9curF69WmrT2750x9lz2Mtpu442Z86cweTJk6Xt2tpajB49WtoODAzEypUrYTKZkJ+f32X26sEHH0RtbW2v+0I02Fi0EA2Q4OBgLFq0CBs3buywf/jw4bBardJ25/sY3N3dpXadtx3dg6LRaCCEwKOPPoqTJ0/22M7b29v5jtyHb7/9Ftu2bUNZWRlGjRqFAwcOYP369XaPCQkJQWVlJY4fP45jx45hzZo1MJvN8PPz65dMjsZn2bJlyM3NxeTJk3Hx4kUkJib2qi/2rm9vxsOZ62
jzww8/4K233pK2PT09O7zun3/+iQMHDsDHxwfjxo3rcnxTUxM8PDwcvg6R3HhPC9EAWrduHfLy8nDlyhVp34QJE3Dq1CkAwN69e3H79u0+nz83NxcAcOnSJZSWliIuLg4mkwk1NTU4duyY1M5sNqOlpcXh+Z544glUVFTg/PnzAIAvv/wSY8aM6fLOvjOTyYTq6mrpPom2tjbU19fDYrFg5MiR0Gq1aGlpwY4dOxxmqK2thUajwdNPP43NmzdDCIHffvutS7uRI0d2uA/D2eyOxueZZ57B6dOnkZOTg9TUVIwYce+9nbN9sXd97Z2jc3+cvY5CCJSXlyM6Olrap9frceHCBQBAQ0MDkpKSsHbtWrz33ntIT0/vkrmqqgoGg6Hb/hANJSxaiAaQTqdDWloa/vjjD2nf1q1b8frrryM6Ohrl5eXQarV9Pr/VakVUVBQSEhKwbds2BAcHw8/PD4cOHcI777wDg8GASZMmISMjA21tbQ7PFxAQgPz8fDz//PPQ6/XYvn07CgsLodFo7B7n5+eHoqIiZGRkQK/XIzo6Gt999x0SExMRGhoqLR1FRkY6zFBRUYHY2FgYDAZERUVhyZIl0Ov1Xdqlp6fjySeflG5cdTa7o/Fxc3PDggUL8Mknn+CFF16QjnO2L/aur71zdO6Ps9exuroaWq0Wvr6+0r558+bhyJEjuHPnDubMmYOXXnoJKSkpSE1NRWNjI/bt2ye1vXTpEqxWK4sWUgSNEELIHYKIiPrPrVu3YDKZUFZWBi8vL7ttMzIyMGHCBOn+KKKhjDMtREQq4+3tja1btzr1pXEPPfRQjzdmEw01nGkhIiIiReBMCxERESkCixYiIiJSBBYtREREpAgsWoiIiEgRWLQQERGRIrBoISIiIkVg0UJERESKwKKFiIiIFIFFCxERESkCixYiIiJShP8Db7E4rpv/kPYAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 640x440 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "-_5l0xmEismv",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Load the previously dumped weights/results object from disk.\n",
        "# SECURITY NOTE(review): pickle.load can execute arbitrary code when\n",
        "# deserializing -- only load 'dumpEI.pkl' if it comes from a trusted source.\n",
        "import pickle  # local stdlib import: no visible cell imports pickle, so make this cell self-contained\n",
        "with open('dumpEI.pkl', 'rb') as f:\n",
        "    w = pickle.load(f)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "o_MYCsNSi9-R",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Inspect the unpickled object via the notebook's rich display\n",
        "# (bare last expression -- intentionally not wrapped in print()).\n",
        "w"
      ],
      "execution_count": 0,
      "outputs": []
    },
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "x2EkCGO3i-zN",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        ""
      ],
      "execution_count": 0,
      "outputs": []
    }
  ]
}