{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": [],
      "gpuType": "T4"
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [
        "## Single Item Recommender System\n",
        "\n",
        "Here is a [better rendering](https://nbviewer.org/github/facebookresearch/Pearl/blob/main/tutorials/single_item_recommender_system_example/single_item_recommender_system.ipynb) of this notebook on [nbviewer](https://nbviewer.org/)."
      ],
      "metadata": {
        "id": "77bs19g1jVzC"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# Auto-reload edited Python modules so local changes take effect without restarting the kernel.\n",
        "%load_ext autoreload\n",
        "%autoreload 2"
      ],
      "metadata": {
        "id": "nFomZD4OjZLK"
      },
      "execution_count": 1,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Installation\n",
        "If you haven't installed Pearl, please make sure you install Pearl with the following cell. Otherwise, you can skip the cell below."
      ],
      "metadata": {
        "id": "TD8BKr3SjcI1"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# Remove any previous Pearl installation and checkout, then install the\n",
        "# latest version from source. Skip this cell if Pearl is already installed.\n",
        "%pip uninstall Pearl -y\n",
        "%rm -rf Pearl\n",
        "!git clone https://github.com/facebookresearch/Pearl.git\n",
        "%cd Pearl\n",
        "%pip install .\n",
        "%cd .."
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "5i2jE98RjhK1",
        "outputId": "6516257f-cb11-4331-aabd-c176e761b99b"
      },
      "execution_count": 2,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\u001b[33mWARNING: Skipping Pearl as it is not installed.\u001b[0m\u001b[33m\n",
            "\u001b[0mCloning into 'Pearl'...\n",
            "remote: Enumerating objects: 6412, done.\u001b[K\n",
            "remote: Counting objects: 100% (1561/1561), done.\u001b[K\n",
            "remote: Compressing objects: 100% (379/379), done.\u001b[K\n",
            "remote: Total 6412 (delta 1325), reused 1269 (delta 1181), pack-reused 4851 (from 1)\u001b[K\n",
            "Receiving objects: 100% (6412/6412), 55.14 MiB | 12.56 MiB/s, done.\n",
            "Resolving deltas: 100% (4379/4379), done.\n",
            "/content/Pearl\n",
            "Processing /content/Pearl\n",
            "  Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
            "  Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
            "  Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
            "Requirement already satisfied: gym in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (0.25.2)\n",
            "Collecting gymnasium[accept-rom-license,atari,mujoco] (from Pearl==0.1.0)\n",
            "  Downloading gymnasium-1.0.0-py3-none-any.whl.metadata (9.5 kB)\n",
            "Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (1.26.4)\n",
            "Requirement already satisfied: matplotlib in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (3.7.1)\n",
            "Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (2.1.4)\n",
            "Collecting parameterized (from Pearl==0.1.0)\n",
            "  Downloading parameterized-0.9.0-py2.py3-none-any.whl.metadata (18 kB)\n",
            "Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (2.32.3)\n",
            "Collecting mujoco (from Pearl==0.1.0)\n",
            "  Downloading mujoco-3.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (44 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m44.4/44.4 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hRequirement already satisfied: torch in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (2.4.1+cu121)\n",
            "Requirement already satisfied: torchvision in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (0.19.1+cu121)\n",
            "Requirement already satisfied: torchaudio in /usr/local/lib/python3.10/dist-packages (from Pearl==0.1.0) (2.4.1+cu121)\n",
            "Requirement already satisfied: cloudpickle>=1.2.0 in /usr/local/lib/python3.10/dist-packages (from gym->Pearl==0.1.0) (2.2.1)\n",
            "Requirement already satisfied: gym-notices>=0.0.4 in /usr/local/lib/python3.10/dist-packages (from gym->Pearl==0.1.0) (0.0.8)\n",
            "\u001b[33mWARNING: gymnasium 1.0.0 does not provide the extra 'accept-rom-license'\u001b[0m\u001b[33m\n",
            "\u001b[0mRequirement already satisfied: typing-extensions>=4.3.0 in /usr/local/lib/python3.10/dist-packages (from gymnasium[accept-rom-license,atari,mujoco]->Pearl==0.1.0) (4.12.2)\n",
            "Collecting farama-notifications>=0.0.1 (from gymnasium[accept-rom-license,atari,mujoco]->Pearl==0.1.0)\n",
            "  Downloading Farama_Notifications-0.0.4-py3-none-any.whl.metadata (558 bytes)\n",
            "Requirement already satisfied: imageio>=2.14.1 in /usr/local/lib/python3.10/dist-packages (from gymnasium[accept-rom-license,atari,mujoco]->Pearl==0.1.0) (2.35.1)\n",
            "Collecting ale-py>=0.9 (from gymnasium[accept-rom-license,atari,mujoco]->Pearl==0.1.0)\n",
            "  Downloading ale_py-0.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (7.6 kB)\n",
            "Requirement already satisfied: absl-py in /usr/local/lib/python3.10/dist-packages (from mujoco->Pearl==0.1.0) (1.4.0)\n",
            "Requirement already satisfied: etils[epath] in /usr/local/lib/python3.10/dist-packages (from mujoco->Pearl==0.1.0) (1.9.4)\n",
            "Collecting glfw (from mujoco->Pearl==0.1.0)\n",
            "  Downloading glfw-2.7.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38-none-manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
            "Requirement already satisfied: pyopengl in /usr/local/lib/python3.10/dist-packages (from mujoco->Pearl==0.1.0) (3.1.7)\n",
            "Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (1.3.0)\n",
            "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (0.12.1)\n",
            "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (4.53.1)\n",
            "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (1.4.7)\n",
            "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (24.1)\n",
            "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (10.4.0)\n",
            "Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (3.1.4)\n",
            "Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib->Pearl==0.1.0) (2.8.2)\n",
            "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->Pearl==0.1.0) (2024.2)\n",
            "Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas->Pearl==0.1.0) (2024.1)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->Pearl==0.1.0) (3.3.2)\n",
            "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->Pearl==0.1.0) (3.10)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->Pearl==0.1.0) (2.2.3)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->Pearl==0.1.0) (2024.8.30)\n",
            "Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch->Pearl==0.1.0) (3.16.1)\n",
            "Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch->Pearl==0.1.0) (1.13.3)\n",
            "Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch->Pearl==0.1.0) (3.3)\n",
            "Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch->Pearl==0.1.0) (3.1.4)\n",
            "Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch->Pearl==0.1.0) (2024.6.1)\n",
            "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib->Pearl==0.1.0) (1.16.0)\n",
            "Requirement already satisfied: importlib_resources in /usr/local/lib/python3.10/dist-packages (from etils[epath]->mujoco->Pearl==0.1.0) (6.4.5)\n",
            "Requirement already satisfied: zipp in /usr/local/lib/python3.10/dist-packages (from etils[epath]->mujoco->Pearl==0.1.0) (3.20.2)\n",
            "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch->Pearl==0.1.0) (2.1.5)\n",
            "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch->Pearl==0.1.0) (1.3.0)\n",
            "Downloading mujoco-3.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.3 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.3/6.3 MB\u001b[0m \u001b[31m91.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading parameterized-0.9.0-py2.py3-none-any.whl (20 kB)\n",
            "Downloading ale_py-0.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m85.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading Farama_Notifications-0.0.4-py3-none-any.whl (2.5 kB)\n",
            "Downloading glfw-2.7.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38-none-manylinux2014_x86_64.whl (211 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m211.8/211.8 kB\u001b[0m \u001b[31m19.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading gymnasium-1.0.0-py3-none-any.whl (958 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m958.1/958.1 kB\u001b[0m \u001b[31m60.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hBuilding wheels for collected packages: Pearl\n",
            "  Building wheel for Pearl (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
            "  Created wheel for Pearl: filename=Pearl-0.1.0-py3-none-any.whl size=217554 sha256=5ef3dd5dee9fd717490bf02d1e3aa130cae342a5e7644c7fdef04e04eb39f4ba\n",
            "  Stored in directory: /tmp/pip-ephem-wheel-cache-tia9ebiq/wheels/83/80/1d/d9211ba70ee392341daf21a07252739e0cb2af9f95439a28cd\n",
            "Successfully built Pearl\n",
            "Installing collected packages: glfw, farama-notifications, parameterized, gymnasium, ale-py, mujoco, Pearl\n",
            "Successfully installed Pearl-0.1.0 ale-py-0.10.1 farama-notifications-0.0.4 glfw-2.7.0 gymnasium-1.0.0 mujoco-3.2.4 parameterized-0.9.0\n",
            "/content\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Import Modules"
      ],
      "metadata": {
        "id": "BdZf6tkjjlQF"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import random\n",
        "from typing import List, Optional, Tuple\n",
        "\n",
        "import numpy as np\n",
        "\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "from pearl.action_representation_modules.identity_action_representation_module import (\n",
        "    IdentityActionRepresentationModule,\n",
        ")\n",
        "from pearl.api.action import Action\n",
        "from pearl.api.action_result import ActionResult\n",
        "from pearl.api.action_space import ActionSpace\n",
        "from pearl.api.environment import Environment\n",
        "from pearl.api.observation import Observation\n",
        "from pearl.api.space import Space\n",
        "from pearl.history_summarization_modules.lstm_history_summarization_module import (\n",
        "    LSTMHistorySummarizationModule,\n",
        ")\n",
        "from pearl.neural_networks.sequential_decision_making.q_value_networks import (\n",
        "    EnsembleQValueNetwork,\n",
        ")\n",
        "from pearl.pearl_agent import PearlAgent\n",
        "from pearl.policy_learners.sequential_decision_making.bootstrapped_dqn import (\n",
        "    BootstrappedDQN,\n",
        ")\n",
        "from pearl.policy_learners.sequential_decision_making.deep_q_learning import (\n",
        "    DeepQLearning,\n",
        ")\n",
        "from pearl.replay_buffers.sequential_decision_making.bootstrap_replay_buffer import (\n",
        "    BootstrapReplayBuffer,\n",
        ")\n",
        "from pearl.replay_buffers import BasicReplayBuffer\n",
        "from pearl.utils.functional_utils.experimentation.set_seed import set_seed\n",
        "from pearl.utils.functional_utils.train_and_eval.online_learning import online_learning\n",
        "from pearl.utils.instantiations.spaces.box import BoxSpace\n",
        "from pearl.utils.instantiations.spaces.discrete_action import DiscreteActionSpace\n",
        "import matplotlib.pyplot as plt\n",
        "\n",
        "set_seed(0)"
      ],
      "metadata": {
        "id": "Lp6pRjTDjpDo"
      },
      "execution_count": 3,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Load Environment\n",
        "This environment's underlying model was pre-trained using the MIND dataset (Wu et al. 2020).\n",
        "The model is defined by class `SequenceClassificationModel` below.\n",
        "The model's state dict is saved in\n",
        "tutorials/single_item_recommender_system_example/env_model_state_dict.pt\n",
        "\n",
        "Each data point is:\n",
        "- A history of impressions clicked by a user\n",
        "- Each impression is represented by a 100-dim vector\n",
        "- A list of impressions and whether or not they are clicked\n",
        "\n",
        "The environment is constructed with the setup below. Note that this is a contrived example\n",
        "to illustrate Pearl's usage, agent modularity and a subset of features,\n",
        "not to represent a real-world environment or problem.\n",
        "- State: a history of impressions by a user (note that we used the history of impressions\n",
        "  instead of clicked impressions to speed up learning in this example).\n",
        "  Interested Pearl users can change it to the history of clicked impressions, with a much longer\n",
        "  episode length and more samples, to run the following experiments.\n",
        "- Dynamic action space: two randomly picked impressions\n",
        "- Action: one of the two impressions\n",
        "- Reward: click\n",
        "- Reset every 20 steps."
      ],
      "metadata": {
        "id": "0SdYqSnhp4h4"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "class SequenceClassificationModel(nn.Module):\n",
        "    \"\"\"LSTM-based model of the environment's click probability.\n",
        "\n",
        "    Encodes a user's impression history with an LSTM, concatenates the LSTM\n",
        "    output at the final time step with a candidate impression embedding, and\n",
        "    maps the result through an MLP to a scalar in (0, 1) via a sigmoid.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(\n",
        "        self,\n",
        "        observation_dim: int,\n",
        "        hidden_dim: int = 128,\n",
        "        state_dim: int = 128,  # NOTE: unused by this model; kept for interface compatibility\n",
        "        num_layers: int = 2,\n",
        "    ) -> None:\n",
        "        super(SequenceClassificationModel, self).__init__()\n",
        "        self.lstm = nn.LSTM(\n",
        "            num_layers=num_layers,\n",
        "            input_size=observation_dim,\n",
        "            hidden_size=hidden_dim,\n",
        "            batch_first=True,\n",
        "        )\n",
        "        self.mlp = nn.Sequential(\n",
        "            nn.Linear(hidden_dim + observation_dim, hidden_dim),\n",
        "            nn.ReLU(),\n",
        "            nn.Linear(hidden_dim, hidden_dim // 2),\n",
        "            nn.ReLU(),\n",
        "            nn.Linear(hidden_dim // 2, 1),\n",
        "        )\n",
        "        # Zero-initialized default (h, c) states registered as buffers so they\n",
        "        # move with the module across devices and appear in the state dict.\n",
        "        self.register_buffer(\n",
        "            \"default_cell_representation\", torch.zeros((num_layers, hidden_dim))\n",
        "        )\n",
        "        self.register_buffer(\n",
        "            \"default_hidden_representation\", torch.zeros((num_layers, hidden_dim))\n",
        "        )\n",
        "\n",
        "    def forward(self, x: torch.Tensor, action: torch.Tensor) -> torch.Tensor:\n",
        "        # x: (batch, seq_len, observation_dim) impression history (batch_first LSTM);\n",
        "        # action: (batch, observation_dim) candidate impression embedding.\n",
        "        batch_size = x.shape[0]\n",
        "        # Expand the default (h, c) buffers to (num_layers, batch, hidden_dim).\n",
        "        h0 = (\n",
        "            self.default_hidden_representation.unsqueeze(1)\n",
        "            .repeat(1, batch_size, 1)\n",
        "            .detach()\n",
        "        )\n",
        "        c0 = (\n",
        "            self.default_cell_representation.unsqueeze(1)\n",
        "            .repeat(1, batch_size, 1)\n",
        "            .detach()\n",
        "        )\n",
        "        out, (_, _) = self.lstm(x, (h0, c0))\n",
        "        # Summarize the sequence with the LSTM output at the last time step.\n",
        "        mlp_input = out[:, -1, :].view((batch_size, -1))\n",
        "        return torch.sigmoid(self.mlp(torch.cat([mlp_input, action], dim=-1)))\n",
        "\n",
        "\n",
        "class RecEnv(Environment):\n",
        "    \"\"\"Contrived recommender environment driven by a pre-trained click model.\n",
        "\n",
        "    State: the last `history_length` chosen impressions (100-dim each).\n",
        "    Action space: two pre-sampled candidate impressions per step (dynamic).\n",
        "    Reward: a Bernoulli click drawn from the model's (scaled) probability.\n",
        "    Episodes terminate every T = 20 steps.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(\n",
        "        self, actions: List[torch.Tensor], model: nn.Module, history_length: int\n",
        "    ) -> None:\n",
        "        self.model: nn.Module = model.to(device)\n",
        "        self.history_length: int = history_length\n",
        "        self.t = 0\n",
        "        self.T = 20  # episode length\n",
        "        # Pre-sample a fixed pair of candidate impressions for each step so the\n",
        "        # dynamic action space is the same on every episode.\n",
        "        # clone().detach() instead of torch.tensor(k): avoids the PyTorch\n",
        "        # copy-construct warning when k is already a tensor.\n",
        "        self.actions: List[List[torch.Tensor]] = [\n",
        "            [k.clone().detach() for k in random.sample(actions, 2)]\n",
        "            for _ in range(self.T)\n",
        "        ]\n",
        "        self.state: torch.Tensor = torch.zeros((self.history_length, 100)).to(device)\n",
        "        self._action_space: DiscreteActionSpace = DiscreteActionSpace(self.actions[0])\n",
        "\n",
        "    @property\n",
        "    def action_space(self) -> ActionSpace:\n",
        "        return DiscreteActionSpace(self.actions[0])\n",
        "\n",
        "    @property\n",
        "    def observation_space(self) -> Space:\n",
        "        return BoxSpace(low=torch.zeros((1,)), high=torch.ones((1,)))\n",
        "\n",
        "    def reset(self, seed: Optional[int] = None) -> Tuple[Observation, ActionSpace]:\n",
        "        # Keep the state on the model's device (as in __init__); previously the\n",
        "        # reset state was created on CPU, forcing extra transfers in step().\n",
        "        self.state = torch.zeros((self.history_length, 100)).to(device)\n",
        "        self.t = 0\n",
        "        self._action_space = DiscreteActionSpace(self.actions[self.t])\n",
        "        return [0.0], self._action_space\n",
        "\n",
        "    def step(self, action: Action) -> ActionResult:\n",
        "        action = action.to(device)\n",
        "        action_batch = action.unsqueeze(0)\n",
        "        state_batch = self.state.unsqueeze(0).to(device)\n",
        "        reward = self.model(state_batch, action_batch) * 3  # To speed up learning\n",
        "        # Clamp: the 3x scaling can push the sigmoid output above 1, which\n",
        "        # would make np.random.binomial raise ValueError (p must be in [0, 1]).\n",
        "        true_reward = np.random.binomial(1, min(reward.item(), 1.0))\n",
        "        # Slide the history window: drop the oldest impression, append the action.\n",
        "        self.state = torch.cat([self.state[1:, :].to(device), action_batch], dim=0)\n",
        "\n",
        "        self.t += 1\n",
        "        if self.t < self.T:\n",
        "            self._action_space = DiscreteActionSpace(self.actions[self.t])\n",
        "        return ActionResult(\n",
        "            observation=[float(true_reward)],\n",
        "            reward=float(true_reward),\n",
        "            terminated=self.t >= self.T,\n",
        "            truncated=False,\n",
        "            info={},\n",
        "            available_action_space=self._action_space,\n",
        "        )\n",
        "\n",
        "    def __str__(self) -> str:\n",
        "        return self.__class__.__name__\n",
        "\n",
        "\n",
        "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
        "device_id = 0 if torch.cuda.is_available() else -1\n",
        "\n",
        "# Load the pre-trained environment model and impression embeddings.\n",
        "# weights_only=True is sufficient for tensor-only checkpoints and silences\n",
        "# torch.load's unsafe-pickle FutureWarning.\n",
        "model = SequenceClassificationModel(100).to(device)\n",
        "model.load_state_dict(torch.load(\"Pearl/tutorials/single_item_recommender_system_example/env_model_state_dict.pt\", weights_only=True))\n",
        "actions = torch.load(\"Pearl/tutorials/single_item_recommender_system_example/news_embedding_small.pt\", weights_only=True)\n",
        "history_length = 8\n",
        "env = RecEnv(list(actions.values())[:100], model, history_length)\n",
        "observation, action_space = env.reset()\n",
        "assert isinstance(action_space, DiscreteActionSpace)\n",
        "\n",
        "# experiment code\n",
        "number_of_steps = 50000\n",
        "record_period = 400"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "BYdPfGgZp8HN",
        "outputId": "a0b7719f-5ecd-43e1-e0fd-7e9275b4a0d5"
      },
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "<ipython-input-4-4a6e0c4f9da6>:105: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.\n",
            "  model.load_state_dict(torch.load(\"Pearl/tutorials/single_item_recommender_system_example/env_model_state_dict.pt\"))\n",
            "<ipython-input-4-4a6e0c4f9da6>:106: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.\n",
            "  actions = torch.load(\"Pearl/tutorials/single_item_recommender_system_example/news_embedding_small.pt\")\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Vanilla DQN Agent\n",
        "Able to handle a dynamic action space, but not partial observability or sparse rewards."
      ],
      "metadata": {
        "id": "RIh8ZV-BqAAW"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# create a Pearl agent\n",
        "\n",
        "# Actions are already dense embeddings, so the identity representation\n",
        "# module passes them through unchanged.\n",
        "action_representation_module = IdentityActionRepresentationModule(\n",
        "    max_number_actions=action_space.n,\n",
        "    representation_dim=action_space.action_dim,\n",
        ")\n",
        "\n",
        "# DQN-vanilla\n",
        "agent = PearlAgent(\n",
        "    policy_learner=DeepQLearning(\n",
        "        state_dim=1,  # the environment's observation is the 1-dim click signal\n",
        "        action_space=action_space,\n",
        "        hidden_dims=[64, 64],\n",
        "        training_rounds=50,\n",
        "        action_representation_module=action_representation_module,\n",
        "    ),\n",
        "    replay_buffer=BasicReplayBuffer(100_000),\n",
        "    device_id=device_id,\n",
        ")\n",
        "\n",
        "info = online_learning(\n",
        "    agent=agent,\n",
        "    env=env,\n",
        "    number_of_steps=number_of_steps,\n",
        "    print_every_x_steps=100,\n",
        "    record_period=min(record_period, number_of_steps),\n",
        "    learn_after_episode=True,\n",
        ")\n",
        "\n",
        "# Save the learning curve, then plot episodic returns against env steps.\n",
        "torch.save(info[\"return\"], \"DQN-return.pt\")\n",
        "plt.plot(record_period * np.arange(len(info[\"return\"])), info[\"return\"], label=\"DQN\")\n",
        "plt.legend()\n",
        "plt.show()"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        },
        "id": "GDnAlQQNqC7z",
        "outputId": "3fae3bea-06af-4ef6-dc19-76e0e0a666b1"
      },
      "execution_count": 5,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "episode 5, step 100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 10, step 200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 15, step 300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 20, step 400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 25, step 500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 30, step 600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 35, step 700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 40, step 800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 45, step 900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 50, step 1000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 55, step 1100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 60, step 1200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 65, step 1300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 70, step 1400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 75, step 1500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 80, step 1600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 85, step 1700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 90, step 1800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 95, step 1900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 100, step 2000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 105, step 2100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 110, step 2200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 115, step 2300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 120, step 2400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 125, step 2500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 130, step 2600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 135, step 2700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 140, step 2800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 145, step 2900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 150, step 3000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 155, step 3100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 160, step 3200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 165, step 3300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 170, step 3400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 175, step 3500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 180, step 3600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 185, step 3700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 190, step 3800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 195, step 3900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 200, step 4000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 205, step 4100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 210, step 4200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 215, step 4300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 220, step 4400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 225, step 4500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 230, step 4600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 235, step 4700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 240, step 4800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 245, step 4900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 250, step 5000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 255, step 5100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 260, step 5200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 265, step 5300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 270, step 5400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 275, step 5500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 280, step 5600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 285, step 5700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 290, step 5800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 295, step 5900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 300, step 6000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 305, step 6100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 310, step 6200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 315, step 6300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 320, step 6400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 325, step 6500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 330, step 6600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 335, step 6700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 340, step 6800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 345, step 6900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 350, step 7000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 355, step 7100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 360, step 7200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 365, step 7300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 370, step 7400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 375, step 7500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 380, step 7600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 385, step 7700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 390, step 7800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 395, step 7900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 400, step 8000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 405, step 8100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 410, step 8200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 415, step 8300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 420, step 8400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 425, step 8500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 430, step 8600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 435, step 8700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 440, step 8800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 445, step 8900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 450, step 9000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 455, step 9100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 460, step 9200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 465, step 9300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 470, step 9400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 475, step 9500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 480, step 9600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 485, step 9700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 490, step 9800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 495, step 9900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 500, step 10000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 505, step 10100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 510, step 10200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 515, step 10300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 520, step 10400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 525, step 10500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 530, step 10600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 535, step 10700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 540, step 10800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 545, step 10900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 550, step 11000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 555, step 11100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 560, step 11200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 565, step 11300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 570, step 11400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 575, step 11500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 580, step 11600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 585, step 11700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 590, step 11800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 595, step 11900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 600, step 12000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 605, step 12100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 610, step 12200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 615, step 12300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 620, step 12400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 625, step 12500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 630, step 12600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 635, step 12700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 640, step 12800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 645, step 12900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 650, step 13000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 655, step 13100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 660, step 13200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 665, step 13300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 670, step 13400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 675, step 13500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 680, step 13600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 685, step 13700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 690, step 13800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 695, step 13900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 700, step 14000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 705, step 14100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 710, step 14200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 715, step 14300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 720, step 14400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 725, step 14500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 730, step 14600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 735, step 14700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 740, step 14800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 745, step 14900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 750, step 15000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 755, step 15100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 760, step 15200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 765, step 15300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 770, step 15400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 775, step 15500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 780, step 15600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 785, step 15700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 790, step 15800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 795, step 15900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 800, step 16000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 805, step 16100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 810, step 16200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 815, step 16300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 820, step 16400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 825, step 16500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 830, step 16600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 835, step 16700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 840, step 16800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 845, step 16900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 850, step 17000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 855, step 17100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 860, step 17200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 865, step 17300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 870, step 17400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 875, step 17500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 880, step 17600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 885, step 17700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 890, step 17800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 895, step 17900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 900, step 18000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 905, step 18100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 910, step 18200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 915, step 18300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 920, step 18400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 925, step 18500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 930, step 18600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 935, step 18700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 940, step 18800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 945, step 18900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 950, step 19000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 955, step 19100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 960, step 19200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 965, step 19300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 970, step 19400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 975, step 19500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 980, step 19600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 985, step 19700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 990, step 19800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 995, step 19900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1000, step 20000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1005, step 20100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1010, step 20200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1015, step 20300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1020, step 20400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1025, step 20500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1030, step 20600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1035, step 20700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1040, step 20800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1045, step 20900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1050, step 21000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1055, step 21100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 7.0\n",
            "episode 1060, step 21200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1065, step 21300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1070, step 21400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1075, step 21500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1080, step 21600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1085, step 21700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1090, step 21800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1095, step 21900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1100, step 22000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1105, step 22100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1110, step 22200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1115, step 22300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1120, step 22400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1125, step 22500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1130, step 22600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1135, step 22700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1140, step 22800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1145, step 22900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1150, step 23000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1155, step 23100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1160, step 23200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1165, step 23300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1170, step 23400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1175, step 23500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1180, step 23600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1185, step 23700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1190, step 23800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1195, step 23900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1200, step 24000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1205, step 24100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1210, step 24200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1215, step 24300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1220, step 24400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1225, step 24500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1230, step 24600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1235, step 24700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1240, step 24800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1245, step 24900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1250, step 25000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1255, step 25100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1260, step 25200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1265, step 25300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1270, step 25400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1275, step 25500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1280, step 25600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1285, step 25700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1290, step 25800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1295, step 25900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1300, step 26000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1305, step 26100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1310, step 26200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1315, step 26300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1320, step 26400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1325, step 26500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1330, step 26600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 1335, step 26700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1340, step 26800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1345, step 26900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1350, step 27000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1355, step 27100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1360, step 27200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1365, step 27300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1370, step 27400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1375, step 27500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1380, step 27600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1385, step 27700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1390, step 27800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1395, step 27900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1400, step 28000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1405, step 28100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1410, step 28200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1415, step 28300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1420, step 28400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1425, step 28500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1430, step 28600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1435, step 28700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 1440, step 28800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1445, step 28900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1450, step 29000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1455, step 29100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1460, step 29200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1465, step 29300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1470, step 29400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1475, step 29500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1480, step 29600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1485, step 29700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1490, step 29800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1495, step 29900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1500, step 30000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1505, step 30100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1510, step 30200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1515, step 30300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1520, step 30400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1525, step 30500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1530, step 30600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1535, step 30700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1540, step 30800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1545, step 30900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1550, step 31000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1555, step 31100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1560, step 31200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1565, step 31300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1570, step 31400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1575, step 31500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1580, step 31600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1585, step 31700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1590, step 31800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1595, step 31900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1600, step 32000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1605, step 32100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1610, step 32200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1615, step 32300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1620, step 32400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1625, step 32500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1630, step 32600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1635, step 32700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1640, step 32800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1645, step 32900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1650, step 33000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1655, step 33100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1660, step 33200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1665, step 33300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1670, step 33400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1675, step 33500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1680, step 33600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1685, step 33700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1690, step 33800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1695, step 33900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1700, step 34000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1705, step 34100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1710, step 34200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1715, step 34300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1720, step 34400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1725, step 34500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1730, step 34600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1735, step 34700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1740, step 34800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1745, step 34900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1750, step 35000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1755, step 35100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1760, step 35200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1765, step 35300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1770, step 35400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1775, step 35500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1780, step 35600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1785, step 35700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1790, step 35800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1795, step 35900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1800, step 36000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1805, step 36100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1810, step 36200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1815, step 36300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1820, step 36400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1825, step 36500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1830, step 36600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1835, step 36700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1840, step 36800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1845, step 36900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1850, step 37000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1855, step 37100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1860, step 37200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1865, step 37300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1870, step 37400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1875, step 37500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1880, step 37600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1885, step 37700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1890, step 37800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1895, step 37900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1900, step 38000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1905, step 38100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1910, step 38200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1915, step 38300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1920, step 38400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1925, step 38500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1930, step 38600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1935, step 38700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1940, step 38800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1945, step 38900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1950, step 39000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1955, step 39100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1960, step 39200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1965, step 39300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1970, step 39400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1975, step 39500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1980, step 39600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1985, step 39700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1990, step 39800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1995, step 39900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2000, step 40000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2005, step 40100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2010, step 40200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2015, step 40300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2020, step 40400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2025, step 40500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2030, step 40600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2035, step 40700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2040, step 40800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2045, step 40900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2050, step 41000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2055, step 41100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2060, step 41200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2065, step 41300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2070, step 41400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2075, step 41500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2080, step 41600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2085, step 41700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2090, step 41800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2095, step 41900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2100, step 42000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2105, step 42100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2110, step 42200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2115, step 42300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2120, step 42400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2125, step 42500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2130, step 42600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2135, step 42700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2140, step 42800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2145, step 42900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2150, step 43000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2155, step 43100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2160, step 43200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2165, step 43300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2170, step 43400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2175, step 43500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2180, step 43600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2185, step 43700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2190, step 43800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2195, step 43900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2200, step 44000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2205, step 44100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2210, step 44200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2215, step 44300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2220, step 44400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2225, step 44500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2230, step 44600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2235, step 44700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2240, step 44800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2245, step 44900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2250, step 45000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2255, step 45100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2260, step 45200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2265, step 45300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2270, step 45400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2275, step 45500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2280, step 45600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2285, step 45700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2290, step 45800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2295, step 45900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2300, step 46000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2305, step 46100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2310, step 46200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2315, step 46300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2320, step 46400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2325, step 46500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2330, step 46600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2335, step 46700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2340, step 46800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2345, step 46900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2350, step 47000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2355, step 47100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2360, step 47200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2365, step 47300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2370, step 47400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2375, step 47500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2380, step 47600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2385, step 47700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2390, step 47800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2395, step 47900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2400, step 48000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2405, step 48100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2410, step 48200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2415, step 48300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2420, step 48400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2425, step 48500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2430, step 48600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2435, step 48700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2440, step 48800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2445, step 48900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2450, step 49000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2455, step 49100, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2460, step 49200, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2465, step 49300, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2470, step 49400, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2475, step 49500, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2480, step 49600, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2485, step 49700, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2490, step 49800, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2495, step 49900, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2500, step 50000, agent=PearlAgent with DeepQLearning, BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<Figure size 640x480 with 1 Axes>"
            ],
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiYAAAGdCAYAAAAmK7htAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAADBvklEQVR4nO29ebxdVXk+/uwz3vne3CQ3uSEBgiQgBBAEESOKBUGgKG2tVmkjarW2iYWv1UqcrdZYq3xrW6XVCvn2p4hDGSxgLAIhMpcIQgQShoREMpHpzveM+/fHOe/aa6+99nTOPsO+930+n3zg3nvOPvvsvfZa73re531ewzRNEwwGg8FgMBhtgESrT4DBYDAYDAaDwIEJg8FgMBiMtgEHJgwGg8FgMNoGHJgwGAwGg8FoG3BgwmAwGAwGo23AgQmDwWAwGIy2AQcmDAaDwWAw2gYcmDAYDAaDwWgbpFp9AkFQLpexe/du9Pb2wjCMVp8Og8FgMBiMADBNE2NjY1i0aBESiWBcSCwCk927d2PJkiWtPg0Gg8FgMBg1YNeuXVi8eHGg14YKTNatW4ebb74Zzz77LDo7O/GGN7wB//AP/4ATTjjB833/9E//hOuuuw47d+7EvHnz8M53vhPr1q1DR0dHoM/t7e0FUPlifX19YU6ZwWAwGAxGizA6OoolS5aIdTwIQgUm9913H1avXo2zzjoLxWIRn/rUp3DhhRfi6aefRnd3t/Y9N954I6655hpcf/31eMMb3oBt27bhyiuvhGEYuPbaawN9LqVv+vr6ODBhMBgMBiNmCCPDCBWYbNiwwfbz+vXrMTQ0hM2bN+NNb3qT9j0PPvggVq5cife+970AgGOPPRbvec978Mgjj4T5aAaDwWAwGLMAdVXljIyMAAAGBwddX/OGN7wBmzdvxqOPPgoAePHFF3HnnXfikksuqeejGQwGg8FgzEDULH4tl8u4+uqrsXLlSqxYscL1de9973tx4MABvPGNb4RpmigWi/jIRz6CT33qU67vyeVyyOVy4ufR0dFaT5PBYDAYDEaMUHNgsnr1amzZsgX333+/5+s2btyIr3zlK/j2t7+Ns88+G88//zyuuuoqfOlLX8JnP/tZ7XvWrVuHL37xi7WeGoPBYDAYgUGb5lKp1OpTiR2SySRSqVSkVh6GaZpm2DetWbMGt912GzZt2oSlS5d6vvbcc8/F61//evzjP/6j+N33v/99fPjDH8b4+Li2rlnHmCxZsgQjIyMsfmUwGAxGZMjn89izZw8mJydbfSqxRVdXF4aHh5HJZBx/Gx0dRX9/f6j1OxRjYpomPvrRj+KWW27Bxo0bfYMSAJicnHQEH8lkUhxPh2w2i2w2G+bUGAwGg8EIhXK5jO3btyOZTGLRokXIZDJs4hkCpmkin8/jlVdewfbt27Fs2bLAJmpeCBWYrF69GjfeeCNuu+029Pb2Yu/evQCA/v5+dHZ2AgBWrVqFo446CuvWrQMAXHbZZbj22mtx+umni1TOZz/7WVx22WUiQGEwGAwGo9nI5/Mol8tYsmQJurq6Wn06sURnZyfS6TReeukl5PP5wP5kXggVmFx33XUAgPPOO8/2+xtuuAFXXnklAGDnzp22iOkzn/kMDMPAZz7zGbz88suYP38+LrvsMvz93/99fWfOYDAYDEYEiGKXP5sR9fWrSWPSbNSSo2IwGAwGwwvT09PYvn07li5dGslOf7bC6zrWsn5zmMhgMBgMBqNtwIEJg8FgMBiMtgEHJgwGg8FgxAzUc84wDKTTaSxYsABvfetbcf3116NcLtte++CDD+KSSy7BnDlz0NHRgVNOOQXXXnutw7fFMAx0dHTgpZdesv3+8ssvFzrSZoADE0as8eALB/Dj/93V6tNgMBiMpuNtb3sb9uzZgx07duDnP/853vKWt+Cqq67C7//+76NYLAIAbrnlFrz5zW/G4sWLce+99+LZZ5/FVVddhS9/+c
v4kz/5E4dth2EY+NznPteKryNQs/Mrg9EO+PiPf4PdI9N4w/FzsXgOl/sxGIzaYZompgqtcX/tTCdDe6hks1ksXLgQAHDUUUfhjDPOwOtf/3qcf/75WL9+Pd7znvfgQx/6EN7+9rfjO9/5jnjfn//5n2PBggV4+9vfjh//+Md497vfLf62Zs0aXHvttfjEJz7h2W6mkeDAhBFrjE5XdgXjuWKLz4TBYMQdU4USTvrcL1ry2U//3UXoytS/JP/e7/0eTjvtNNx8882YO3cuDh48iI9//OOO11122WVYvnw5fvjDH9oCk5UrV2Lbtm245pprcPvtt9d9PrWAUzmMWCNfquRSi6W2r3pnMBiMpuDEE0/Ejh07sG3bNgDAq1/9atfX0WtkrFu3Dhs2bMCvfvWrhp6nG5gxYcQaxWpgUiiVfV7JYDAY3uhMJ/H0313Uss+OCqZp2tJCXnZluv42J510ElatWoVrrrkGDzzwQGTnFRQcmDBii1LZRNm0/p/BYDDqgWEYkaRTWo1nnnkGS5cuxbJly8TPb3jDG7Sve81rXqM9xhe/+EUsX74ct956awPPVA9O5TBiC5klKXAqh8FgMHDPPffgqaeewh/90R/hoosuwuDgIL7xjW84Xvezn/0Mzz33nGsZ8JIlS7BmzRp86lOfcpQVNxocmDBii6LEkjBjwmAwZhtyuRz27t2Ll19+Gb/+9a/xla98Be94xzvw+7//+1i1ahW6u7vx7//+77jtttvw4Q9/GE8++SR27NiB733ve7jyyivxoQ99CJdcconr8deuXYvdu3fjl7/8ZRO/FQcmjBijUJQYkzJrTBgMxuzChg0bMDw8jGOPPRZve9vbcO+99+Kf//mfcdtttyGZrGhW3vnOd+Lee+/Fzp07ce6552Lp0qX48z//c1xzzTW2EmIdBgcH8clPfhLT09PN+DoC3MSPEVvsH5vG6/7+bgDAf6w6ExectKDFZ8RgMOKE2djEb3p6Gu94xzuwa9cu3HfffZg/f34kx+QmfgwG7CXCRWZMGAwGwxcdHR247bbbsGrVKmzatKnVp6NF/OXHjFkLWfxaZI0Jg8FgBEJHRweuueaaVp+GK5gxYcQWciUOG6wxGAzGzAAHJozYQk7fMGPCYDAYMwMcmDBii0JRZkxYY8JgMGpDDGpA2hpRXz8OTBixRYEZEwaDUQfS6TQAYHJyssVnEm/Q9aPrWS9Y/MqILWQfE2ZMGAxGWCSTSQwMDGD//v0AgK6uLluPGYY3TNPE5OQk9u/fj4GBAeGdUi84MGHEFjJLwowJg8GoBQsXLgQAEZwwwmNgYEBcxyjAgQkjtuByYQaDUS8Mw8Dw8DCGhoZQKBRafTqxQzqdjowpIXBgwogt5HJh7pXDYDDqQTKZjHyBZdQGFr8yYouirbswa0wYDAZjJoADE0ZsUeDuwgwGgzHjwIEJI7awdRdm51cGwxMjUwX88b89iP/voR2tPpVZh//a/Dv80XUPYv9Yc7v0xhUcmDBiC9n5tcRN/BgMT2x+6RD+d8dh/OCRna0+lVmHHz+2C5tfOoyHXjjY6lOJBTgwYcQWeYklYcaEwfBGrlAJ3semiy0+k9mHXJXd5XkqGDgwYcQWsviVNSYMhjfy1edlPMeBSbORF4EJM7tBwIEJI7aQOwoXOZXDYHiCFsfxXJF7wzQZFBSyQ3UwcGDCiC3yJRa/MhhBQc9LqWxiqlBq8dnMLuQ5lRMKHJgwYosiG6wxGIEhV7GNs86kqeBUTjhwYMKILeT0DT/wDIY3ZIZxjHUmTUWuWGGouHVGMIQKTNatW4ezzjoLvb29GBoawuWXX46tW7f6vu/IkSNYvXo1hoeHkc1msXz5ctx55501nzSDAdgnWmZMGAxv5CXGhCtzmgtmTMIhVK+c++67D6tXr8ZZZ52FYrGIT33qU7jwwgvx9NNPo7u7W/uefD6Pt771rRgaGsJPf/pTHHXUUXjppZcwMDAQxfkzZjGKXC7MYASGXF
7PqZzmgjZRHJgEQ6jAZMOGDbaf169fj6GhIWzevBlvetObtO+5/vrrcejQITz44INIp9MAgGOPPba2s2UwJBRKbLDGYASFzJiM57iLbrNQLpti41TkDVQg1KUxGRkZAQAMDg66vuZnP/sZzjnnHKxevRoLFizAihUr8JWvfAWlkrsqPJfLYXR01PaPwVBRsJUL8wPPYHhBDuRHmTFpGrh6MDxqDkzK5TKuvvpqrFy5EitWrHB93Ysvvoif/vSnKJVKuPPOO/HZz34W3/jGN/DlL3/Z9T3r1q1Df3+/+LdkyZJaT5MxgyF7AvBOhMHwRp6rclqCPHdBD42aA5PVq1djy5YtuOmmmzxfVy6XMTQ0hO985zt47Wtfi3e/+9349Kc/jX/7t39zfc/atWsxMjIi/u3atavW02TMYMgPORusMRjesKdyODBpFqgVAMDzVFCE0pgQ1qxZg9tvvx2bNm3C4sWLPV87PDyMdDqNZDIpfvfqV78ae/fuRT6fRyaTcbwnm80im83WcmqMWYRCmVM5DEZQyIE8BybNg8yY5Is8TwVBKMbENE2sWbMGt9xyC+655x4sXbrU9z0rV67E888/j7IUKW7btg3Dw8PaoITBCApO5TAYwZGTfUymWfzaLMhMFTMmwRAqMFm9ejW+//3v48Ybb0Rvby/27t2LvXv3YmpqSrxm1apVWLt2rfj5L//yL3Ho0CFcddVV2LZtG+644w585StfwerVq6P7FoxZCRa/MhjBUWAfk5bAFpjwBioQQqVyrrvuOgDAeeedZ/v9DTfcgCuvvBIAsHPnTiQSVryzZMkS/OIXv8D/+T//B6eeeiqOOuooXHXVVfjkJz9Z35kzZj1sGhMWlTEYnshzKqclkAOTPM9TgRAqMAnSkXLjxo2O351zzjl4+OGHw3wUg+EL7pXDYAQHO7+2BnnJGoM3UMHAvXIYsYXMmBQ4d8tgeMImfuXApGnI2TQmvIEKAg5MGLGFXJVT4twtg+EJLhduDeTARL4HDHdwYMKILWQxX4F3Iow2Q7ls4pWxXKtPQ0DulTMaw6qc8VwRI1PxO+88MyahwYEJI7aQS+9YY8JoN1x33wt43Vd+ibue3tfqUwEA5IuW1mE8VwykGWwXlMsmLvnmr3D+N+6LnXuqvSonXufeKnBgwogt7N2F+YFntBee3j0K0wR+8MhLrT4VAPaKENMEJvPu/craDRP5InYemsSB8RxGY8aa2Kty4hMMthIcmDBii3yJGRNG+4LG5/3PHcCRyXyLzwYoKK6jcarMkTUxcUuH5NnWIDQ4MGHEFjJjwsZFjHYDLULFson/+W3r0zmqh8Z4Lj7MgxxExU1AKp8vM7vBwIEJI7aQNSZs9cxoN8g7+9uf2tPCM6mgoCzocWJM5HONG2OSk7Q9Bd5ABQIHJozYQt6JlM2KQI7BaBfIu+MHnj+AwxOtTedQr5y+joqvZpwCE1sqJ2asA/fKCQ8OTBixhbpzYpM1RjtBdSb+xW/3tuxcTNMUC+Tcnkrn9jh5mchNB+PGOthTOfE691aBAxNGbKHma1kAy2gnkLfOGUcPAADuaGE6Rw7iB7srXd3j5P46bkvlxGsDInd1Zo1JMHBgwoglTNN07D54N8JoJ1DK4R2vOQoA8OALB3FwvDWGa/KunQKTsRgxJjK7E7fnnLsLhwcHJoxYQseOMGPCaCfQIvSq+T1YcVRfNZ3Tmuoceac+lwKTGLm/jk7PDI0JMybBwIEJI5bQ7ZriNmExZjZI85RKGrj0lEUAgDtblM6hxTFhAP1daQBxTuXEawOiNvGLk+Nuq8CBCSOW0Ald4zZhMWY2iDFJJw1cesowAODBFw5gqgWOq7Q4ppMJ9GYrVTlxEr/KnitxYx1U3xWep/zBgQkjlpBztR3phON3DEarQQxeOpnAksFOAJWy9ol88wMCWswzqQR6snEvF47Xc64GJnELrFoBDkwYsQQ93AkDyCSrgUnM1PqMmQ2qykklEjAMA6mEAa
A1Cyu5vmZTCfR2VFI5cRK/jsW4Kkd13I2beLcV4MCEEUsUpN1oSgQm/MAz2gcWY1IJSFLV/7ZiYc1LqZyeqsHaeIzEr3JgEreFnRmT8ODAhBFLFET+PtHSnSiD4QYajxQ4pxOtSznKqZzeuKdy4saYqBoTnqd8wYEJI5ag3WgqKVHkMZuwGDMboiqnOj6TLWRMbOLXaionTuLXODu/5hypHJ6n/MCBCSOWsDEmnMphtCGK0hgFKloToDULK31mxpbKiU9gYisXjltgUrBXYXFg4g8OTBixhNCYJForKmQwdDBNUwTKpC0hrUlLxK9FZ1XOeL4Yi8aXpbKJCanEOm7MqCp+5Q2UPzgwYcQSNDmlU4mWigoZDB3kxYe0Je0gfs0kE+itMiZmi0qXw0JNOcUtlcPi1/DgwIQRS9DklEoYSLZQVMhg6CCPRQpIKJXTih2zLH7NphKCvYmDzkQ9x7g5PDsDE56n/MCBCaPlGJsu4IYHtmPvyHTg98jlwjTJcq8cRrtApu+twKTy31bsmK1yYQOGYcTKZE3VwsQtFeJI5cQssGoFODBhtBw3//plfPG/n8Z1G58P/B5ZWJhs4YTPYOggLz5WKqd1zF5eYkwACAFsHAITtdmgykC0O+h8s6nWiZ/jBg5MGC3HyFRl4jkwkQ/8noJULkwTPzMmjHYB7eoTBpBI2MWvrRinlvg1CQDozcanZFh1qI2bloxKtYml4g2UPzgwYbQcNImHaW4myoUTEmPCgQmjTWAFztYU20pmL6+40MapZNiRyokR41AqmyIQ7a4GJnELrFqBVKtPgMEg2jtMYGJV5RhIGLQT5Qee0R4QqcZqMFL5/xaKX5V0guX+2v629Gq6KU6pEDnt1JVJVn8Xn/NvFTgwYbQctKOYLAQPTOiBTyUSoLk/ThMWY2aDAmeZMSERbGsZk2pgQoxJDFI54zl78BQnxkEOTJgxCQ4OTBgth5XKCT5J0nvSyQSqhAlrTBhtA8uZ2GJM2kL8moyf+FVN5cRpA5IrVTZbhgF0piuMSZxSUa0CByaMlkMwJmFSOUrOXP4dg9FqiAZ+CYsxobROa8Wv1cCkKn6NQ2BC4teEAZTNeD3nsrEdMWZq+TDDiVDi13Xr1uGss85Cb28vhoaGcPnll2Pr1q2B33/TTTfBMAxcfvnlYc+TMYNB1PZ0mFSO1LmVdqJx2kkxZjZEAz8pcLZE2q30MVFTOfHRmAx0ZQDEy8ckJ2l70i1kzOKGUIHJfffdh9WrV+Phhx/GXXfdhUKhgAsvvBATExO+792xYwc+/vGP49xzz635ZBkzE/UyJqkW7kQZDB2KUtM8QisXpoLiYxIrjUk1MJnTVWF54lRuK5dpp7l1RmCESuVs2LDB9vP69esxNDSEzZs3401vepPr+0qlEq644gp88YtfxK9+9SscOXKkppNlzEwIjUmhBNM0YRiGzzskjUkigbJZ+f9W7EQZDB2KJSdj0lLxq1KVEyvn1xwFJhkAE7FiHPIaxiRuBnGtQF0+JiMjIwCAwcFBz9f93d/9HYaGhvDBD34w0HFzuRxGR0dt/xjthy0vj+DTtzyFV8ZydR2HJnHTBKYLwR5aUZWTNEQqpxSjCYsxs0GeOrLGJNVCI8CC5JQMAL0dMdKYVEuarVROfBZ22XG3lb2S4oaaA5NyuYyrr74aK1euxIoVK1xfd//99+N73/sevvvd7wY+9rp169Df3y/+LVmypNbTZDQQ3/3Vi/jBIzuxYcueuo4jP6hTAXUmwsckmbB6kPADz2gT6MTZNE5bsTDlHOLX+KRySPw62E2pnPg857L4lcZCgRkTX9QcmKxevRpbtmzBTTfd5PqasbEx/Nmf/Rm++93vYt68eYGPvXbtWoyMjIh/u3btqvU0GQ3E4cnKTiYoy+EGeQc5GbBkWC7HTCXZYI3RXihI4mxCO/qYxMFgzdKYxJAxkQJCuva8gfJHTeXCa9aswe23345NmzZh8eLFrq974YUXsGPHDl
x22WXid2VSq6dS2Lp1K171qlc53pfNZpHNZms5NUYTMV6d1OrdAdoYk4AC2IJtoq38f5xyz4yZDWGwJju/tlL86saYxCKVY6/KiRNjIjNVFJjGqdy5VQgVmJimiY9+9KO45ZZbsHHjRixdutTz9SeeeCKeeuop2+8+85nPYGxsDN/85jc5RRNz0IRRL1MhP6iBUznSjpSmKc7dMtoFRUXTAUBKObaOMckojMlEvoRS2RSlzO2GYqks5gRK5cRpYc8VK+cui1/jVFXUKoQKTFavXo0bb7wRt912G3p7e7F3714AQH9/Pzo7OwEAq1atwlFHHYV169aho6PDoT8ZGBgAAE9dCiMeoPx0lIxJ0JJhwZgkDJgm70QY7YWCpionSSnHlpYL25v4AZXnuL8z3fRzCoKJnDUfxNHHxJ7KoVRefM6/VQilMbnuuuswMjKC8847D8PDw+Lfj370I/GanTt3Ys+e+sSQjHhgXDAm9T1opZpSOdUdKavdGW2IoqYqp5VN/CwRZsUWPZtKCvaknQWwo9V0cTaVEJbucVrYZabKmqd4A+WH0KkcP2zcuNHz7+vXrw/zkYw2RblsYjwfEWNSCs+YyDn8ksjdxmfCYsxsFHRVOW3gYyKfT29HCgcn8m2tM6GgqbcjFUuNhsyYkL6nwN2FfVGXjwlj9mIiXwTFqfUyJvIOIqjGRBa/trIMk8HQQVeV00rxa15xfgWsdE4729JbgUnaun4xes7lwKSVGqO4gQMTRk2Q6d9oUzlhy4UTQrjHFCmjXVCUNFCEVgbQahM/wKrMGW1jxoTKmXuyKXH94uScKju/trK7dNzAgQmjJsj0b72BSaGWVI4kLozjTooxsyE0Jpomfq0IoNWqHEDql9PWgUnl3HqyKek5j09gYjXxSyLTwlRe3MCBCaMmyLuseieKUk1VOVaTNDHh8wPPaBNYVTntkcpRfUwAoCdbqcSJWvz6/z20A6uufzSwkN0Leo1JfDYgNkt67oIeGByYMGpClKkcObCZDqkxqTAm3F2Y0V4QPiaJNhG/Ks6vANCdrVS5TEQcmNzw4A5s2vYKHt95uO5jEZvT05ESVS1xYhxkS3qhMYnR+bcKHJgwaoJM/9a7g6mNMaGqnASSCd6JMNoLJHC0MSYtKhc2TdNiGFMaBifi8yGmZLpYP2NCqZzebEpsQOKUss1pqnLilIpqFTgwYdQEucdGSzQmZZpomTFhtB8sZ2KdxqS54zQv7dDtgUljmsrRM1xvDy3ApSonRhsQe1UOb6CCggMTRk2QUzn1TrRyQBE8lWMZWCWZImW0GayqHGcTv2ZroeQqloxG8xL1cyMYk4DPshfG5FROMn7ltrLoOM3i18DgwIRRE8YirMqxW9IHLRe2NCa0E2HGhNEuoA6y6TYQv8o7dF1gko/wfIqlsliMcxEwMXK5MAV5phmfZz1XDc7k7sJxYnxaBQ5MGDVhLMKqHPn9YcuFbaKymExWjJmPoqZXTqsMtogxSSUMJLTdjqM7H9kgMQrGRFeVA8SHdaAgLSt1F47LubcSHJgwaoLsFlm3wZq0gwju/Go5a6aExoQfeEZ7wOou7KzKafZuv6CpyKn8HP1CKZcIR8GYyIGJfP5xEcDam/jFr6qoVeDAhFEToi0XrqWJn9X7QzTHYoqU0SYg9k5u4tcq8WNO42ECNCaVIzOekWpMsmnBOAHx8SySnV/jWFXUKnBgwqgJ9lRO88uFi1IOnylSRruhKAXOhFaLX52MSfSpHHtgEoXGxHJ+TSbkVE48FnebwRpvoAKDA5NZgiCdocMgSvFroZYmflLenHZSUVLkUV8vRvPRynvo2cSvRamcrIMxaUAqpyCncqLQmFRSxr0dKRiGIbEO0Z1zI8eJZbCWlBgq3kD5gQOTWYDHdhzCmV/+JW59/OXIjhlVuXC5bEKeFwKncspSd+GIrZ6LpTLe/q8P4EP/+Vgkx2M0H8/vH8NZf383bnhge0s+nxbOlLaJX5MZE01nYU
AuF45uYZ6KkDEplMriGNTXJ2rWYSJXxO994z6svfmpSI6nwq4x4dYZQcGBySzA/c8fwMGJPDZteyWyY0bVxE8NaqYKJZQDHK8odReOmjHZP5bDUy+P4JfP7GPmJKZ4bMdhHBjPYePW6MZ8GMjjk9AqKr9QdKaVKj9Hv4OXy/3rZUzkOaa72gk56rTttn1j2H5gAnc/sy+S46nI2TQmbLAWFByYzAJQ2iUKi2jrmFZVTj2MiW736HeepmnauremIqZ3adIzTRaqxRW02JZbFFgWdOXCYlFtsvjVlTGJfgdvS+XUyZgQK9uZttIgUafDiNFo1HMuC49ZCxccHJjMAtDOo96JglAqm5iQKNt6ynR1E4KfAFae2GXGJKrJRT4+TyLxhFhwWrQ7lXs5ERqhjwgCP/Fr41I59W2ERslcrZrGARB5IzwKHKK25SfkixqDNd7s+IIDk1kA2nlExZhMKO6s9Uz+JY0rpZ/ORJ7YG1EuLE96+QZNWIzGghiTVjmEWlVjssakVc6vlhmhjMakcqLzMRmXGvgRonZPpXNslCDVbklvOVQHSVfPZnBgMgtAO4+oGBO5IgeIRmNiGFYbdr/KnELR+rxUIhF5Kkee9DgwiScsir41909XlZNslfjV1ceksamcehkT2VyNEPWzTjqYRrEYso+JzbmWzSA9wYHJLEDUjMl4pIGJVb3QlalMQL6pnEYzJtLxubQvnii0mjERTfx0FvBtwpikok/l2MWv0WhM9KmciBiTgjVOoh4rxVIZdMhMKmFr6MheJt7gwGQWgAKJKAyPALsdPVCn+LX6gCYTBjozFcbEr5Gflb83YBiy+NWMpIpGzjczYxJPNFrU6AdLnK3pLhzROA0KV8ZEONFGaUlvHat+jYllrkaI2tZd3nhErSeTAzO5XLgRnzXTwIHJLADtPKIwPAKsCYNQz06D3ptOJNCZrgQmfhOaWoope0VEseuRFzMu7Ysn6L61ijHRVeXYdsxNPK+cq/i1cm5RsoJTBWtuqJehFRqTjrT4neWeGxVjYp1j1MGCvKnJJBOxdK5tFTgwmQUYi5oxmbbnfqNI5SSTMmPiV5Vjn/TlXWkUEz6LX+OPXKsZk5IVcBPkIKWZARMtgg7GJBV9askmfq27XLhalZOVUznRMiYyqxF1eoUCvmTCQCqZaJhz7UwEByYzHOWyaTEmETTVAiwGZqCrspOpz8eEmp0Z6AocmLgzJtEEJpL4tRSd90u74OB4rumfeWA8Fzh9cWgiX/fnUUDZuqocJ2Ni3zE3b2FqZionyiZ+Y9NO8WvUjfDkwKRRjIms7RGpqCIzJl7gwGSGQy7tnY5o90+MyUBnBkCdPiaSxoQCE78JTe4sDCipnAh2PUUbYzKzJpAfPPISXvvlX+Knm3/XtM/csGUvzvzyL3HdfS/4vvaHj+7EGV+6C7c8Xt/5tVr8agXPTvEr0Fzxo7v4NXrDr+lC9OXCjdSYyOntqIXuuq7OQrzLjIknODCZ4ZBLe/PFciT18+T6Gi1jkkBHOhhjIr8HUHaiETzw8gQ106pynth5BACw+aVDTftM+qz7nzvg+9pn94wCAJ7ePVrXZ7acMdEYrCUTBozqUG3mwuTXKyfKdKXqY1KPyJc0KpTiBay0bVSBXd7GmEScytEEJplU9CzVTAQHJjMccrM9IJqFdqx6zP7OSmBSn/jVoryDp3LsjIlhRNthWJ70GuUI2SocrKZJdh+Zbtpn7h6pfNa2feO+ry1U71+9u20aIy3zMSFRt0v6pJmMiS6lIP8cpQ5HfXbruY/CC0Z2z43YC8auMYmaMam6vrZBv6S4gQOTGQ7VDK3evC8gpXKiYExsqZwKZTsVsFw4rTGvimInUpzBPiYUmOwZmWraZ+45UvmsA+M5HPbRj9DiUO84bbnzq8bHBLDGaTPPK695XoDoG+IBzme3HgGsugEBou83JJ9f1M+6MFdLOwXQzJh4gwOTGQ652R5Q/060ckxVYxJtubCf82tR46
op2z3Xi/wMdn4l4euekeYxJnulz9q2b8zztcWIGJNWVuWUy6Yw1ko1IRjwg7vzq2WwFpWvivrs1lMyXNCkoKxUTvQak8hTORptT0a65gx3cGAyw6GmciJhTJSqnFIdhlFEedsN1oKlcjKaiocoHvjiDNaYUMXL2HTRMTYagVLZxL4xqwrouf3e6RwKOusdp0L82oIFQNaPyFU5QPTdcYPAauKnP5coz8eRyqmLMbFX3wFyKif6qpyoUzmyHT0h1YA2ADMRHJjMcKj28VF4mYyJwCQjflcrU6HTmPg18dP1IaFJN2qNyUxiTKbyJdvCsbcJ6Zz9Y9O2e/KcL2NCqZz6rnsrnV/l8SP7mADRd8cNAvqsbEqvMYnyfNRnNwrGRK66S0XMOMjPd6NSOfaqnOr5cxM/T3BgMsOhakyicH8VVTmdliNjqUbGRKRlEoZI5QQ2WEvoGJNoq3JmUi744ITdv6QZAlg1ZeQngKUFp95x2spyYTkwURmTKEXaQeGWyrE1lYugLD5fLItAsLu6yYhEY5JybkCiS+U0sCpHk4oS/Ylm0IanEQgVmKxbtw5nnXUWent7MTQ0hMsvvxxbt271fM93v/tdnHvuuZgzZw7mzJmDCy64AI8++mhdJ80IjjFHKqf+B4JYmDndUmBS40Qrl/4K8aufxqTsvhNhxsQdB8ftwtNmCGD3VIMfWqie2+/NmND9i44xaf79s6VyFPFr1Dv+IHAVv0rnFgVbID+3xKbWxZhUgyVdVUtUjINNYxLxs57TVENFXVU0UxEqMLnvvvuwevVqPPzww7jrrrtQKBRw4YUXYmJiwvU9GzduxHve8x7ce++9eOihh7BkyRJceOGFePnll+s+eYY/1FROFIzJuCgXtlI5tVLmRZvGpDIcA6dyEk61fhQP/Ey1pFcdVZshgKXg55xXzQUAHBjPezq70rWvV/xK961soqkN8wA7C2gYamDSfI2BG2NiGIZUMhxBYFJ9blMJQ7i11qMVogBPDqiivn42jUnEwYLOYC3N4tdASPm/xMKGDRtsP69fvx5DQ0PYvHkz3vSmN2nf84Mf/MD283/8x3/gv/7rv3D33Xdj1apVIU+XERZqVU69O9FiqSxSLSR+BWoXGcoak850ZTgG7i5s20n5i193HpxEoVzGq+b3eB9fmqCiTuU8sesIjp3bZdPnNAsHFCv6PSFTOS8dnEChZOL4Ie/rZ/uMavBz3PweLNk3hl2HprBt3xhef9xc7etpUa+3fYJcWVUqm46USiOha+BHED4mbVAuDFTOMV+KJpVDz21nJolsOrpUTkrjnhuZ+NVWLtwYg7VsSjaI43LhIKhLYzIyMgIAGBwcDPyeyclJFAoFz/fkcjmMjo7a/jFqg1p5US9jMpGz3t/XkRZOljUzJtLu0rKk935o6T06itctlVMum/jD6x7AO/71gcDdiwEgF+EE8tvdI7j8Ww/gb378m8iOGQbEVJAeZ3eIVE6hVMbl33oA7/jX+30ZLRnEmCzs68DyoV4A3gJY2rXWz5hY59hsAazQRiT0gYD8mmaej8qYAJL7awTnQxuWznQSHdXPij6VE+31s+nJImZHdUxVOmLn2pmKmgOTcrmMq6++GitXrsSKFSsCv++Tn/wkFi1ahAsuuMD1NevWrUN/f7/4t2TJklpPc9bDWS5c38M3Vu34mU0lkEklkDTqE/NZqZyE5PwalDHRGS/pv1+uWMaB8TzGc0WMThW0r1GPD0Sbynn5cGWR/t3h5pmbyaDAZFmV8dgbIpWz/cAEDk8WMJEv4XeHJwO/jxiTRQMdWLagEph4CWALEZULywtOswWwQjelYUxaKX7NahiTdISpHLpnXREzJvZUTrQLu8zMNayJn0a8O9NsCKJGzYHJ6tWrsWXLFtx0002B3/PVr34VN910E2655RZ0dHS4vm7t2rUYGRkR/3bt2lXrac56UFUO7ZLrnfAp0KEccrJOMZfcXThorxydv4HfhC8zRX7BmZwOin
KyEotuBDqfWnCgKn5dcVQ/gHAak+ekYGJ3iPdRumhhf6cIiLxM1izxa71VOdY9bBVjopqryb9rpsZAPC8axoS8gKJJ5VBvm1Q0jIkuldNAS/qoS3ipM3lGG1hxYOKFUBoTwpo1a3D77bdj06ZNWLx4caD3fP3rX8dXv/pV/PKXv8Spp57q+dpsNotsNlvLqTEUkMZkbncG+8dydVPkVivyir4klTCQQx0+JtLkQ4xJrlhGqWzamvPJEHbfNsbEe8KXv7dfOqvYIMZECDsjqIyqBYeq5cInL+rDTzdXgsyx6YK4l16Qg4mg/ifFUhn7x6qMSX+HYNe8TNaiEL+WyqZtPDadMaFAQDN+Uy2oynDrlQNYz02UqRyZMamHoS3oUrYRB3a2wKRBqRzZPyZqg7iZilCMiWmaWLNmDW655Rbcc889WLp0aaD3fe1rX8OXvvQlbNiwAWeeeWZNJ8qoDcRwzOupBHp1MyZKK/JknQ+azJhQuTDgfZ6Wj4mzV44rYyJNkP6MSWO8DWjybxVjQn1ylszpEg0Yg7ImcplvUP+T/WM5lM3KvZ3Xk8XxQz0wjEpKSRXiEmg8FMtmzbtKleVqdmlmsezOmLRCY0CLr078mo5Q8zJVqIpfJY1JPZo2XSonyvMFVEv65lXlcCrHG6ECk9WrV+P73/8+brzxRvT29mLv3r3Yu3cvpqasHdSqVauwdu1a8fM//MM/4LOf/Syuv/56HHvsseI94+P+nUYZ9YMCiXm9lcCkbsYkZw9MaPKNQmPSITW78krniM6tmgnLbREiWhXwnyxlSncmMSbkYzLYk8FwfyWVuvtIMPZD1oUE9T+h1y3o60Ci2nJgyZyu6vH06Rw5GKl1rKrvazZjYjkTOxmTegP52s7HX/waRaA0la98TqUqp5rKqXGsm6YprpGNGaXrF8H5lstmw9K2gJslPYtfgyBUYHLddddhZGQE5513HoaHh8W/H/3oR+I1O3fuxJ49e2zvyefzeOc732l7z9e//vXovgVDi1LZxER1gZ/XUylPrbcMk1JDDo1JzeXCds8H0cjPIzDRpXKSPu3Ep8MwJsX6F0ev404XS0331gAs59d53VkRmAQRwOaLZew4YHkVBWVZiFlZNGDpyZYvqOhMnnMRwMoLdq3snhpMNnsRsFI57gxFK3xMVEt6wApWoliUSbTelUmiI0Vp2druoRwwpLSpnPrPV2UtInd+1TAmGe6VEwihNCZBJtONGzfaft6xY0eYj2BECLkiZ37UqZxqYEI7mHKtTfwUgVtXJompQgmTBffKHN2O1E8UF0pjUm7MLorO2zQr/59JNc9bYzJfFAHZYE8GwwOdAIIJWbcfmLBdk6CBCQU9w/2d4nfLFvTil8/s92BMpMCkxqBQvWdNZ0zK9jEtoxW9UnQpEet8oqsSmZI0Jh11VuXI91DXnTcKxkk9t8hTOZruwpamhxkTL3CvnBkMCkyyqYRIvdTLAIiqnIg0JjJjAkB0GPZiTPRlhN7nEa4qpzHiV3nyb7bOhNI42VQC3ZkkhvsqLMaeAKkcCiIGuzPiPUE2KeSTQuwMEIQxkQLIqBiTFolf9VU5zd0xl8tWSsQrlRMJY1K9Xx3ppGBnat0IyeeTDmELEAbqBqVx5cJOgzVmTLzBgckMhpx26RAq+XpTOc6qHMBycA0LWWMCIFCH4aK2XNg7lSMHZH7XwC5+jTAwCXEOUYOEr3O7MzAMQzAme0f92Q8yRHvj8fMAABP5kqMHkw5UKiwHJsuqJmvb9o9pgxuZTq9Vn6Des1rZvFpRFDtlj6qcJu2Y5WBYF5jQ76LRmGgYk5pZr8r5GAZs1XlRajTUc2tOKqf5zr9xBAcmLcYTu47g50/t8X+hgnyxjJse3Yldh9zNruQKGhKjRVUu3BNSY/LSwQn86H93utLstCsK0mG4oNGY+DEm+RC6EVsTv0hTOTIb0Nwd08FqFczcakovjPiVhK+nLu4XbQiC2NnvGbU8TAjHD/UgYQ
BHJgt4RVOZI6ddatUnqPe32RqTgmAB3ctzm7Uw5V2YB3E+LqmcIPOLCiswSUni1/oYk3QiYes3FKWPiTMwiboqp+pjomk2ylU53uDApMVYc+Ov8Zc/+HWoCQAAfvnMPlxz81P46oZnXV8zlrPYDRKj1W+wVmFhRFVOwK6+f/ffT+OT//UU7n/ugO33NBkk1VSOV7mwZuK3AiR/jYkvY9LgqpzK+bSGMaF0DAUme0amfdMy26qlwssW9Aq9SBA7e0oTyeLXjnQSi6uVOTsOOMe8fI1qZUzUSb/5PibuGpNmi1/l8avzMXFL5dxdnV++cuczgT9LTuVY4tf6WC81mIrSx6RpqZwGNiGcqeDApMUgm/CgZZsEWmi8NAJjDWRMqConEVBjQjbsamdZVWNCXiaeGpOic9Lya44m6xX8roFclRMtY1J/mqJWkMZkbg8FJpUAYzJfwui0e1omVyzhpYOVAGL5gp7A1TyFUlkwIrL4FQC6s+6dZ21VObVWdDg0Jk32MdGkGgnNFr/KC7za6Riw3GBVVonml+0H3DvHq5iyGaxFxJgo6acou4g3PJVT/Q7ZtDOVw92FvcGBSYtBD8dBj1bw2vdVH/gRj74vcgVNNjLGxB6YBO39QaWqTvMru8akM0C/nKKmHXoy6Z1SCsOYyJNeo8SvzWZMyPV1bpUx6cwkrbSMB/ux/cAESmUTvdkUFvZ1WEyLTyC9b3QapllZEOkzCW6eM6Zpd2ytNd3VasZEVOXonF9bxJjo2BL59+pzSc9LmLYFZLDWlUnWPd+IyjslHZb20ZKFQaOrcnQ9iri7cDBwYNJCFEtlMWmGDUymRWDivoBT2qU3mxLmZfXu1C3dSmVRC1KVUy6bgilxBCYK7S00Jp7Or86qh7SPCNcWmPgZrDWqV44tOGoVY2K1eiAmw2vxIX3JsgU9MAwDiwb83yP/fWF/h2DVCML9UunPou4ia/fAaI+qHK/y3GYFSzoBpu581GCOrv3IVMG3qSbB1l24ToZWmMI5UjlxrMrh7sJhwYFJCyE/tAddLLr93js6VXDVCMhpl2ydhkfimK6MiftDfWSqAJqH1fr9oiOVU91phRS/Jn0ocpv4tQ3KhVutMQEknYmHkJUqcpZXOwMv7LO0KV7Yo/EwIbj1i1EX65oZk5Y7v3r4mDSZys+LZ0U/1VOqRG3iJ1/7oKzJlGjiFwVjok/lpH1E7mGgjpNmVOVEbak/U8GBSQshByaq9sIP9MDnS2XX3bdcQRMVY0IlyGF65VAaAXA3v1LFr15VObodKT3wQboL+zfxa7z4tdmMCY0vcgAGZAGse1qGPEyWVQOT4aqQ1U/8SqkeuVSY4Ca4LCiBSq0aE0dVTrMZE4+qnKi74/rBjzGxylf1qRwgWAUWYAnWK+XC9TImetbJzxYgDBpdlUNBoa4qp5kGe3EEByYtRN7GmITUmEjvPTKlf+94zkq7ZOtUyQOVB5cWVMGY+AQEAHBA+m5OYaJ9AupKV47rmcrR5PApsHGbXHI1MiaRNvGTdqVNZ0yqjNxgt5XKCZKWISM0MkYjBmSvTzWPF2Ni7RoV9kz5uV59AqFWj51aoWuZQGg2Y6Lr0CuDzlFXLkwIUoEFyKkc2TepvlSOqtOJNpXT4HLhglPfY4mNmTHxAgcmLYS8OB2cCJfKkSdtNwHsuJTKETuYOsSvE5KplsWY+JcLy2yQm8bEYkwqxwvk/KppjhWou7CvxsRelRNVX5tWMSamadoM1ghWWka/8EwXSthxsFKVQakcYkAm8yWMeuib9mhcXwluJlnqZB1VKqfZ+fy80EC5N/FrVrDkqzFJuqRypGckSD8lQE3l1DffuDUeTEfoA0PfkYqVog4WyZI+m7acX9M+GyhGBRyYtBD1pHLk945M6gOTsZzG+bWOnTqlhjrTSTGhpQKkcmT9TM4llWNZ0vuXC+uapKUS+l24+NwQlvTqd4lqwrIHJs1jTCbyJTFe5sqpnAFvvciLr0ygbAJ9HS
kMVbtTd6STQqfitZO2GBNdKkc/OavXveZy4XbxMfFs4tcsjYnT5Mt+PgFSOQEYE9M07U386nR+JXbRmcqJkDGpzgPd1TknymDBNE0XHxMuFw4CDkxaCHlHGDaVE4Yx6cmmxA6mUDJrnqhV11dA3gF6BCYyY6LszEQ+XqRyglTlOMWFVkopQConBGMCROdlYjdYa96O6VB1bHWkE8InBpCqco7o0zLP7beEr7IHBjEtXjtpq7OwLpWj15ioi3Vk4tdmW9IrbsYymq0xcFvgCRmXIFEen7sDaEzypbIQuMuMSV6qPAwDyxJAX9EVRWBHz3V3tjLnRBksyMfSiV+b7a0TN8zqwOSGB7bjr36wGfdu3d+wzzBNE//0y234n9/udfxNXiAPT+ZDPcDyrt8tMLE5v0p0Yq36BrWBHxCUMfFK5eircqY8ShR1wji/HiT2PjXuk4Jpmo4JSl3o7tv2Cr624dnQE65ckdRMxuSA8DDJ2n5PbMZUoaQdQ6rwlbDIRwCbL5ZxoMqSLdSlclzM8KISv4b1MRmZLGDdnc+EMhPzghU4ezEm9nPc/NIh/N1/P+3JFOpgmib+7b4XcO+z+jlMCDBdApOUS+m2nIIJksqRz7tSLmzNN7UIyN06IkdqsFadBygtHWlfLOlYWU25sLpBi+Qzi2Ws+/kzePD5A/4vbnPM6sDk8Z1HcOdTe/HiK9FMSDps3TeGf/rlc/jSHU87/iY/sGUTODIZnDWRgwvXwETDmAC170Tp/Ho70+J35FFR8nioPTUmZbvGpCOAJb1OXOi3Ew1alaMLsNRzXnfnM/j2xhew+aXDrsfRIUy/nihxSHF9JXSkk+irsl8HNOXquw5VAo/j5nXbfi8zLTqQ46vOXI1+D2iE0A7xa3M0Jv/3l9vw75texHc2vVjT56mwUo06jYmeyv+nXz6H6x/YjjtC9s16bv84vvrzZ7H6xl9rg126FmrZLcGNvZIX1iDiVxK+ppMG0smEbb6pJQgvuKZyrOtXr/aL5oGeakPSRjXstIlf6Xo3gDF5+MWD+Pf7XsTfh2gj0K6Y1YEJUWyNrJCYyFWOPaax/VYXpzA6kyCMiSx+TSUTglWodSdKnWiH+6xdcCDGRBL2uu1mBWMSoImfEBfKGpMQqRyvBU+3iKkLHd3LwyECSaB1GpODiuurDK/qCXouqISbsLDfW5tCAWx/Z0Zvg57UB5FeOocwCMOYlMsm7qwGA4dD6rzcQN9Dx5i4jVMaU8RSBQWV70/mS9ioYX4LPoxJxoWBkDcvY9NFwZa6QTZXA1D3fJPXbD7Un+sVwNL46qFUToSbBXp2UgnDZjDYSOfX0epYoPYfcQYHJojWq0IF7e51LIUaEB0IoTOxiV81gUmxVBasA5X21lvCR7lmmZ4PpDGxpXJ8NCZBxK9lUuxrGBM38WshWFAg72Q6XQR89H5dsOmFVlXlWOZqWcffMin9jhlwr+hYNOBdzUPjsb8zpf27my17VOXCDsbEY2w+9tJh7B+rBG4kFq8XBY+qHDeDMBpjYQMTOQVz+5NOtkXYovswJo5UjjI37fVhTaaFh4l1z4UAtoaxXnRJh8k/16szEYwJpXIi1P24PTtRWuqroOAwjFtvu2JWBybZZgQm1cE+XSw5qMd6GJOcj/hV3uFQ07RsnQwRLURyt9ggjIktleNCs6dqMFhLaapyghmsud9v+fxI76Iu2hTwjU+HW8gKtlROExmTcae5GiGTdH8G3PQJC/u8/U9GRWCS1v7drT9LVIyJsyrH/Th3PLlb/P94yEDTDSLVqKnKsQJolaGojAfyjQkK+Th3P7PfEdC7dekV5+PTK4fgJ4CdlBr4EWi+qYUxcfNfkX1N6k2H0JjvboTGxCUgTKe8qwfrgXzvw/Q4akfM6sCkGYwJDXbT1PQCUXYSYbxM/KpyaDffkU6IXVG9jInONMvPx6RUNnFo0ktjond+9dKYCJrX5mPiTZHmA6ZR5PPRBa6maYr3+9HbznOQDN
aayJgc0tjREzJS9YQKyvN7MSa6PP+IT2CScjFYU39uNGNSKpu4c4slSh8LeT/dUCgHYEwcfYEq5/zykalQ40oe71MFZzon57JzV8/HLTAh1tBPAEs7dFn0Wg9j4p7KiZIxUcSvkaZy9NfdLTCNAvJmLqhbb7tiVgcm1PUxytb2KuSHR905qJ4eYUqG/VI541JFDqFe0yOdaZYfY3JkMg957XLm/4mytWtM8kX3MkNrR+p0fg1isObJmEiOk7o0h1wWWU8qp5mMCQlb5QZ+BK9UTk4sDvZpYkFVYzRdKOOIxkPHNzBxmZzdFuuwCKox+d8dh/DKmLUZCHs/3VB0uW6AFcg7PFukZ/K5EOkc9Rqq6ZzAlvQuDRSPmdsFwF8AO+XFmNQw37g1QkwmDNBjX697qrMqJ8JUjqtBXOO6S8ubuaBuve2KWR2YNEVjItGN6gOqBgg1MyaaxUE08JNKe7PCZC389y2XTbFrGh6QGRND/F0HtWuyW8UELVay0FKXJy2VTREYpDRqd7ddlLzI5Ytl1/OVKWTd+JDZprA77FZpTA5pXF8JaY9UTsFlUetIJ8WxdBMgBSYDXc7Pk4/ncH51CDAby5jcUV3EX3/cIIAoUzkePiYuYlN7YBI8nUMs3JyuShB4z7P7bc+NW9ktIe2yOaNrSIGJ3w6cFkX5+a1nvvE675SLeDosrKoc0phE5/KsM1cD3IXfUUC2WAjq1tuu4MAEjU7luNP3dWlMfBmTarO9DlmMVjtjcnAij0LJhGFAuIAC/oyJygL5pXKyqYTYEekEsPL77d2Fvf0NVIbCbTduie4MMYnkXNJAYReyllXluJQLA5LGRFeN5FHRQa6xugmQWJQ+V8akmj4o64NUQlSpHB1jUiqb+Hk1jfMnZx0NoLK4RrGTFakcbRM/N4bC+twwAlgKHk9ZPIAlg52YKpRw77OviL/7W9J7p3KOrZaK7xkNpjHptKVyap9vvLQxohFivYxJ9TvS5s00o3MJtlI59oq2Rlbl2FI5zJjEFzThNtJTQl4o3RZH2u0Ercoplsq2QGBkquCI9GUPE4IlRgv/fWmgD/VmbbuYpE+ZrsoCORus2cWvhmGIyU2nM5G/t667sFuApN5jt1RKXgQmFmNSKOoDirEQ4tdy2W7c1iwfE9M0PTUmaY/g3FrUnIsDCWB3awITf42JvoKKnhVLpF2f+JWeb91i8+j2QzgwnkN/ZxoXnbxQ/D6sbkgHObhVoVuY1Od52/4wjIn1XS89ZREA4I6nLEGvf7mwS6BU3UQtnVsNTI7Ukcqp4T7mgzAmdaZeVPFrFMdUj60GhHS9owyCCCx+nSHI1tnPIQjkga7S97Q4km13UMZEfdCLZdNRxTImeZgQLPFr+B2MW7dYP8aEvhOdh6PVvWYSp345usocOUiwdxf2TuWoC69bKqUopXJ0NLccLIUSKXpQ943EWK4ozl91fgW8q3KsRS3p+NsiwZi4p3Lcq3L0O156ViiYrpkxqR6X0gq6sUmL90UnL0BnJil291HoTNz0EYD+eVHnn1o0JpmUgd8/dRhAJZ1DDTf9nF/TGo2RaZqSxqQamPiKXymVoysXjk5jUvldNO6vOU1gEpXe0K0qR04/R82ayHMTi19jjIxLfjVKFG27ZPsDSoOXFvuDGvdNHeQHnR5SNZ1Di2ZPViN+rYUxOaLvFutXlUMsEL3P3WDNGopdHiXDtMAbhpW+ASR6N4DBWuVn/WRZlMS4Wc2kLe9KwixijoqsJjEm5PralUk6jNIAaL8jwXINde78vdxf/cqF3Xa8NBYo/eh3je5/7gBWXf8odh2atP2eqomIeVPZvFLZxIZqGufSUyssAz0nUTAmNEblwJmg66ysBmB7RqaFWZYfxD1KJnDyoj4cM7cL04Uy7qla1PtV5dA5ys9lUdJxHTuvojEZzxU9GUJaFKNiTLxSOamIvEBUH5PKMSMKTKrNEx2BiVzuHPG6I8+XLH6NMSyNSeN2r3bxqz6PS7
vPI1OFQA/GtDTZ0OSvVkeMezAmtexg3BiTpOHHmFDPlMr7/DQmgDW5TWgWCbmzsOwqmvRgbopSIzF6nRtjQkZTqYShZROmFUfMoFBLEZvFmFCp9hwXISpN/GF8TABgYX+Ffdk35p7KGejy0Zi4sGcyY+IlRvz+wy9h07ZX8AulDxVpgrpcGJP9Y9M4MJ5HKmHgDa+aC8B6TprHmEhjShJKUoPEoAJYuW+UYRi49JQKa0LCXl1fKRk6S3p5LMzpyoi2BV6sCQkvO7XlwvU4v7q759a7sNP8W7FUiNZfhFJhbuJXIHqTNXXTFEWQ3SpwYIImil9VjUl18C7o64BhVPKOhzUVNiroQc+mEkJgqDImtLuRA5O6GBOXNvbCYtvlISPh5cK+ykLmJkyUd0b9Lt8J0Kd+Kj+776Lk79snduPejElaTuW4aExq9ZtQz6mR8AsSvHxMvISTlBbSlbj7pnKoKkcVv1bHAlHrZY33jwzSL6nMWl4sOFXGRPVLkfxZ6B7TczIegfur2xgF9NVj4nlOJ7BsQQ+A4OkctXrl0mo6596tlXQObbpcy4U1FVLy2MwkEyLVvNtDZ2KlcqTAJFV7qtwroBLXsN6qnAKlW5KR+4vorgdgL3duZCoH8HfrbWdwYIJGp3K8GBOL/qQdbRCdCR2nI53EgFtgknOKX+vTmFRTOQNqKsenKqf6fSzGRDXVKtuOA/gFJvoJKy1EuD6BSfXYfhqTtFwu7EK7j+eKgcsL1THWLMbEL63iVi5smqYnY0JCWrUcvFw2Q/iYKOJXhTEBvP1eKChSA5OCD2OiS7XQZ0bCmHhU5eh2+9PSArm82sl5W2DGhO5R5bgnDfdh6bxu5Ipl3P3sflcHVYJOR0XXPJNMIJEwxGbEqwR1UpfKSdfjY+KVyomGMZG9RtyM5mqFLrVFiKrcWYVqrxBnAeysDkzIYK2RLpx2kZu+KiebSloTfQCdCRm1daStVM6oqjGhqpyIxK9kSe0mfnWtyql+H6Ko1ZSGTmPiFZhYjIZ9wkp6TFZ03dNJq+LH7RrIjpO6RVvelZTKpqdDrQydxiQqzwQvUIrPj73QaX/o9HS77XlVs7bDE3mbJ8x4vij0Ce7BkFsqp6oNkSZzL78XCoqmlAmZ7hcdRw1WdamWKAMTb32EM4DOSc/zcmJM9gdjTNSUh2EYuOSUSpXRHU/u9i8X1jw3FpNQeQ9tKnQVWIRpbblwPYyJVyonIo2JxDxbRoPRPJNWlZKzX5RVCRUxY1L9TKryjLMAdlYHJs1gTLxMtazAJCEMq9QdqA4yBem2iHs6v4acKMplE/tG9akcP8aEGCCd+NU0Ta3GhNIOarAFwLUdOv2sY0zyUgDoN1kKwze5XNjjHgb1MtEtEM1I5wRNq6gBo3yfdIvDnO7K8Ypl0ybUJLO/TCphsyeX4WaGR/cuk0z49nUqlMriu6nBId0vWiQdjIkm1dIjUjnRaUz03YWdu2WZAT1+iBiTgKkcTdsAKhu+d+sr4vmrJZVDf1vU716BRdClLupxfs17pnKircppZioHaJyXCbFWx82vBLdxFsDO6sAkm7LszxsFL/W9nP8l86taGROnxsTp/ForY3JgPIdi2URCMVcDrB1XWbP7L5bKQjNDNubywygHETqNic7uvFB2LiqAH2NiTbR+k6W829X1ylEXwaDur3TcPonBaka/HP/SXX1wXpC6zeoWtWwqKXQZcjAtNC0unwe4T8xyisWvr9Nh6TPdNCYWY6LXssgsXV81gI/C/VVua6BCZw5GwVc2ZWlM9o3mtIyhCqr+kBfwVw/34rh53cgXy3h6z6jj77bzkfQaxHzJ5wNYTs9eqQErlRNtd+G0ZuxZLE/t7IbsHZNNJUTlWVSb1KmCUwxMsATHjRG/vmp+pcQ7zu6vszowaWYTP0BXsiozJpUFP4jGRMeYHJmyv0+UC0cgfqUJaai3w7EL9PIPoaDEMIChqvi1LBkLyTvZwBoTKo9U8vdpj7JlmZ
r2Y0xkCllUrHg4tgal/ukYHemkEL81o1+OCEzcxK9u3WWrC55h6BdYwLK4lwWwfpoWQF8JAshMg+EbQMrBkOoQLAITF8ZEp1+wUjn1i1/p87zSEGXTauMgMyZ9HWnBLgYRwOoYE8MwhAiW4NfED7ACQ8EwVq8fnY+X+JXSadF1F6bnPFjJdVjIz3RWanQaVaWMrtsywQpOowtMSmVTzGmvEozJLAlM1q1bh7POOgu9vb0YGhrC5Zdfjq1bt/q+7yc/+QlOPPFEdHR04JRTTsGdd95Z8wlHiWZX5Th75VgTgJuYUAc5J21V5dgXSJ3BWrZGxsRN+Aroc+YEqpqY05Wx0fo06cjvse1ePTUm+kmfHGh1gjJ5B9jhI8iTd9P6Xjn29wXdYct9Z+rt8hwGfoxJ2iVYlf0x5LJsGTRmD0nuvn6fVzmmPv0nHFMT/gGkHAypLJZqsKb2RSpoUi0UwEfRYdirKkcOwCkQUBmKZSEEsG56FjUw0eldKr+XDb+IMbFrTCgw2TMy7aqLonsgP+e1zjdA41M58jOdSSZcA/RaMeWZyoleQiA/A5TKmTVVOffddx9Wr16Nhx9+GHfddRcKhQIuvPBCTExMuL7nwQcfxHve8x588IMfxOOPP47LL78cl19+ObZs2VL3ydcLWnjULr9Rwm5Jr6/KyaYSmNfj3H26QWZMqFGaW7mwrSqnRsaEhK+LFOEr4K0xIXOvud0ZW1UAPZDye1J1lgunPQIkOZdM6bsgqRxdxYIjMAlYXipXR9SzkwyLET/xq0tVDp1v1iUFAFjdiuVWCkcCBCYin+/SbC+VNHz7rMitDuRUjlxN5FaVU9RU5Yhy4Sh9THS9cqRxS2M1JzEmALB8KLgA1s3v44QFvYLSB5xGX9b5yL4a+kCJBO+T+RJGXa6PzpK+1vkGkJhRbSqn/lQInVMyYSCVTIj5JLpUjgdj0oAOw3JFztJ5wRovtjNCBSYbNmzAlVdeiZNPPhmnnXYa1q9fj507d2Lz5s2u7/nmN7+Jt73tbfjEJz6BV7/61fjSl76EM844A//6r/9a98nXC3lSblSFhCdjIu1MBkOkcvw0JvliWWpQJYlfa9zB7K0KXxf2axgTjzLdA1KPFnlCpkVQfjCTRtDARC8sTEqBiXovLWraYkzcUznOcmF5AVV3524TtfO4TsakLTQmLs6vftUcgJXKOaTRmARK5TgCBuvaZ308MGyMiRSYyNVElvNrc6tyii46KMDODNJYk59nAKJkOIjJmlv1SiWds0j8rGsrANh9NWhRFuZg1XvfmUkKQbpbczhd6qIexsSt+k7+XT0Lu1p5JMZkROy5rqkhoREak+m8lb4k35kxH7fedkZdGpORkREAwODgoOtrHnroIVxwwQW231100UV46KGHXN+Ty+UwOjpq+9cIyJNuoypzvHxM5MmfxK8HJgKIX4k2lTQmcgWL7JhqKxcWefuwjInejh7w7up7qCrkndeThWEYjjJRWjASBpCwVeVkHN+JQNczo0xYcqCi7pBlXwax4LmlciRGRicMrbkqR1pARConIsbk1zsP45x1d+O/f7Pb8bfA4lcHY6Jf8GToBNv0eW6dhSvHdOuVY+1i/VJucjA0WbDugXyvqG9LkKqcXpeqnKl8Ce/41gP4/G3BGF7TNKXg2b1cGJAYCokBBSAEsEEqc7x8Sn5fSufo2gqIc1IWSplhJBBrotOZmKZVNm8rF66HMfF0z60/7aKyQkFM2/aPTmPlV+/Bqz51p/j32i/dhS0vjzheq+sdJM5fpJ0jZEwKlsanK5MSzzsJYE3TxIf/8zFcecOjkTcPbARqDkzK5TKuvvpqrFy5EitWrHB93d69e7FgwQLb7xYsWIC9e/e6vKOiZenv7xf/lixZUutpekKmNxulMwnqY6LbfbrB0qbIlvTW+16uTh6D3RlbTrvWpoVudvSAxXToNSb2rrbWrsQuflXZD/pOY7mic/FyMa+yT/hOzxAgKGNCgY9eY6
IyJkHLSy3NhiXsjIox2bj1FewZmXZYswNylYzekt6tZN6vxwoAwfJpq3JcxLaAR3dhkQIxrJSbS/Amp3Km8ta5y/fKlTEhZkYaQ1RWr97Pp14ewW92HcF/PvxSoCoHW6WZJpWTkBgKOg+x0aiOTWqct38s58sKyEyciuULevGWE+bjqIFOHDPY7fg7IaOwBboGdOSv8tALBx3vr3jyVP6/U8uYhB/nssZJhdV4sP5UDo2zIAZr9217BS8fmUKpbIp/ByfyeOD5A47XTnumcqIV2gLO8mQhWK6O2cd3HcH/PL0PG7e+gu0H3KUX7YKaA5PVq1djy5YtuOmmm6I8HwDA2rVrMTIyIv7t2rUr8s8A7LuMRgUmOuMi8bMUtVO+/shkwXcnMC3eJzEm05YLKe20llVz1QSxgwmbyqHARCN+9dKYuAUmQmNCO0tFeS+X1KqpErfcc0pTXUCQd6T+GhNrt6sTxNG16wvpeyEzEPVQ3DoQs6Tqk3LFkgik/H1M9OkvN9EkAK0uKoz41a1vUiopBZAui5o9leNkTAzDWlgD+Zi4pHKIDTJN4Odb9rh+J/U7AHp9ROVz7eclP8+A9/hXkfNYwAHge+87C/d/8i1aEaY4T0VMKualtHXMi1dYPXjUVKms8bGVC9c43wDeJnV+DTuDQA283RyQZTy3v5Ja+5OzluCRT52PPzpjMQBgQtNsdFLTO4iQjtgzBZDEtkolFQlgqXcSEK57datQU2CyZs0a3H777bj33nuxePFiz9cuXLgQ+/bts/1u3759WLhwoet7stks+vr6bP8agUTCSi80LpWjZ0wqrcWtnfxAZ1rspA5PerMmMmNCO9NS2RSLJKn5KVdNqMWJsVQ2hcZEJ3710pgcFKkchTER4leLurcfMyEWClVnInLP6nuk3anaG0VflaO/BpYw0iohzGkYk/lVP5egOVyZiYla/ErXSGXb6PeGYa/OkiG+o0taJZNyX9AGNSxfmHJh16qcpOEbvB20pXKsZn8iPSoJGh0+JrqqHJdyYflz5MndDfJi41ZmrXqZyM8znZfb+Hf7PLcAMpEwXKuqxPmIRdk9lXPeCfPRnUli98g0Ht91xPZ+WoQzqYTtWa7H+dW75Lp+HxO3VI7XMWnDd8rifizo68C83sr4n9RsTpptsKaKj4Vb75FplMsm7nzKGrtB2x20EqECE9M0sWbNGtxyyy245557sHTpUt/3nHPOObj77rttv7vrrrtwzjnnhDvTBsEtxx4V3LoLF0qWSC+bSiKRMCRbeu/ARIjlqk6mFPXTJEYRMeWqCbX0rjgwnkOpbCKZMMRiLMPLx+SQYEwq78u4aEx0E7ibANYtf59MGKD518GYSNS0NVl6MyaZlFu5cOX/rcAkqMbEmmijFr/SNVJLzSlI6M2mbBoeGa7Or4HEr5TKsdIqfhb4gN2lVy7llVMsfp47cjBkmtbrbIGJy9jUBbdksJYrlm33W34WH3vpsKv4Uxxb+iw3FkNNZamMCeAtAJchAhOP++QHdcOQk64hoSOdxAUnVVLyaoCm7tYJ9fTKKXgwQVH4mMibQkCyifdgYZ5TNnzdVXZIx5joqpQIDUnlFOyB0CJR4j2Fx3cdtpnjbQvY7qCVCDWaV69eje9///u48cYb0dvbi71792Lv3r2YmrIe1lWrVmHt2rXi56uuugobNmzAN77xDTz77LP4whe+gMceewxr1qyJ7lvUgUZ7mbhV5dgMfqrnoNuB6jCtlBeqkxgNvGVDCmPik8bQgcRuC3qzDmYD8PExoXLhKmOiVoC4aUx034ng2UPD5Vz0zq/eGpNUwtBWrNCEM7+38uAHTuVIKahGMSaHJ+19a/zM1QC4MoZqczgd6L4ekvrlBEnluKXd7OJXb33CAcUhme6LHFgmXcaDLrjtzloLiCweP6SI0e98yl0bJ38fw3AygQRH6kQ8z9aY9vLysX0eGax5iJT94DwfZyoHAC49pZLOufOpPbZx5lYaW0934X
yjUzmK4FiUC7uc63iuKLR7y6vzKn3fCWUOKEiusl1pJ1MZdcNAQDa4q3ye7NZ7ezWQpH5lMy6Vc91112FkZATnnXcehoeHxb8f/ehH4jU7d+7Enj1WRP2GN7wBN954I77zne/gtNNOw09/+lPceuutnoLZZiLjszOrF24+JnLelSYVCkzUSVeFaoAkFvHJAibzRew6VH2AXBiTMN9VCF8HnGkcQCrT1ZRb0w5+rqoxIfGri8YEgFbUK7/HS63vKn5NJSUBsLePSUoyXbJV5VTfN78nHGMip3KiZkzoGpWkzr5AsCBBZ7sP2DuvuoE6YpdNy78kiPg1rSmZBeR7KwmENfcpXyw7rjvtGGWmx03/VJTuMSGVTIgdv3xsGsPHzK14Q8iUuA5eHiYEcV6CMakGJjbGpLLAqONfRZDqKT+4p3Lsx3zT8vnoyaawR0nnuKUt6uouXHYPuNzE02EQNpVDi/lQb1YE+t3VdJva1VfW3OhSOW7l8vVALU8mjcnLR6bEmP2rt7wKALD9wETkfXqiRuhUju7flVdeKV6zceNGrF+/3va+P/7jP8bWrVuRy+WwZcsWXHLJJVGceyTw80uoF26MiUyXEs1OAlh/xsTusiizC89XBVpzuzPieASa+IplM7AHAAUmOg8TwJ2lkJus0Xk4xK8uGhP5O6klw3mJ0XA7F7dy4WwAxoQWi0zS0ApDpwVjUvlOgZ1fxUJvWGXbkTEm8kLqLN11q8gBLH8LhyW9j6gSqCz+JNQ8NJFDWWroF6RcGLCXDNvFr+6MCT0fyYQhPp92jHJZtlsfJ1GKqoyhXuH+ao05Yv2uOPtoGAaw+aXDntbsRZdUowwRQAuDNSdDQfdMVzIvw4tZCAq3VE5W0Rd1pJO44NVDAOzpHLe0RS3zDQBR8QK4NUKs38dErTyy0iv6Y6ppHMAKTCZy9ueYrkdS0jDaz9/7s2rBlJLKocDkxVcmsG80h95sCu86cwl6sikUSiZ2tHllTu1h9gxBo1M59mZdzv+XdyW63iM6uDImUwUhbFL1JYB94gsaiO2pTsKLXAITNx8TarKWMKyGbmlFz1DyELi5pXLEjlTnCOkyYcniQj8PEdnATef8qopfw5cLJ6SdZP1jzjRN2+Jlq5AJovdI6SlsmeHxwjzJ/XUsVxS6Ka/PtNmyy4yJ3MTPo1xYbnVg7Vo9GBNlF2w1grR/tx6N+ysFQa8e7sNZxwwC8GZNChpXWRWqj4ueMQmpMYkgleMMTJzHJNM2OZ3jZiYm29OH2fjJQbI+laMXT4eBWpWj6t9UbNPo9roplaMwJiK1lU5qhccUEDdS/KpaO7z15AXV7tXkkdPeAlgOTBrQt0CG/PDYGRPnLmmuxhdCB5UxGZAmMaIc1YocwD7xBaVXLcZEn8qh3Z9aCSOXChMjpD78tChpGZMul8DEpSqnchz9hCXvAP3SKDqNiZf4dTRgVY5N/JryTieFwXShbBu78tih9IoXe+E2/oOIXwG7LooCpI50wrHblqEz2wOUJn4e5cIUfM3ryYgdIk3MOsYkiPMrYHXitqdyKkHQ3O6s6D9zh0dg4pVqJDjErxrGxG38qxCtAyIRv1Iqp2pIqDnmucvmoTebwt7Rafx652EA8m7drqeQzylMOscemHhV5dShMVFYIat/jT7Y2bbfyZiQnkPVmIhSYZcS7UY4v6rpNNmtF7DM9paHMO9rJTgwaaL4Vd415DV06aDGSVOHnAhMKufeZ2NMKLJ3BiaJhOXNMR2UMRkJypgogcm43cMEcKZyaqnKcesNUvmdfocs07Z+wlMKfDIpp8ZEdrgkjcl4rhionYHdx8R90Q0L9frozM6CVMjUypgI99eJfKDPUz9XvldWUJjwTHcdkoJe2iGSxqSg1ZjYv5uuuzDgNFkrl03xWXN7Mrh4xUIYBvD4ziP43eFJ7ffyauBHSCnnlauDMfEyIgsK91SO85gd6STeWq3O+cljv8P2AxPiWnQpjIk834RhTPwqm4
JUtUzlS9h+YEL8U9Nv7hoTt1QObfgkxqQqmJ7M61M5boFJ2MBqPFe0fZf9o06jP53zLrEmvR0pvPH4+dXzr7Y7UCpznt8/1lbpHb25wSxCM1M5Wo2J9PDPC9hhWI325UmMTICWDzlTOZX3JJAvlQOZHpmmid8dps7CPuJXNTCp7jR1gQkFa0E0Jm6pHF3u2W0hkneA/oyJ5SybUdIc8uRKHgamWZmYurPej5Jc5WL5o9TPmDgCE409fCDxa6nSL4qo5+CMSVZ8LpUKe2laCLQ4y1U5VmrP8jHR3ScSh8/tyYrrqjImWZuPiZrK0bsHCy+TamByZKoAeuucrgwyqQRed+wgHtl+CL/47T588I1Ou4Siy7Ft313prpxTGFAgeFWOCNTrYEzUhVLueq7DpacO4+bHX8aPHtuFHz1mmV/qFmKab2phTBIulU26sSNjulDCeV+/F/tG7Ru8z1z6avz5uccBcHrHZDx0K6PTBcEcHy9VOloaE734VWeuBoQrFz48kceb/vFeh9j7m3/yGrzjNUeJn3U6n0X9HXhmzyguOnmheI7dUjnX3rUNdz6113aNWolZz5hYE3M0QkQVdkt6q1mg2kQKCFMubGdMaOHZMzItAgldKgcIZxP9zJ4x7B/LIZNKiAGtwo0xoYezR2oi6DBY8wgyrKoc+8RM10a32LoZd9m7C3sHBbJhlSoMlSfXOV2W3X+QyhyZMfErhQ0DtWpDZ3bmVSFDE5Zp2hfwoDvxeVLJcC2MibxrlAMGq3WAO2Mytzsj0geTolyYFmrDNbXnxpiQxoRM1qhUuK8jJa7TWcdWdCZuu0s/wzP6fpXzUKpgbOJX/fiXUenLU7/4VXU4FsGdS7Bz7rL5OOe4uejtSIl/83uzuHiF0zSzFlt6L1YU8Pcx2TsyLYKS3o6U+B6bXzrs+IwgqRwSvi7s67CNbdnHRGZNvToLV75X8F45Lx6YwNh0UZgk0jh8fOcR2+us9JG1QfrjMxfjxIW9+PCbrECD1oUdBybEMz6ZL+KeZ/cDAF5/3Fzfc2oGmDGh3WuDOr3qqh060kkHlQhY1Su+qRwXxoRyvvN6spjTrd+1+i3MMu54qtIU7i0nzBe7SRVueXxrxy316nHzMQnBmBAlu8jLHt+1XNifMZE1AqowlCbXVKLCevR2pHBksoDxXAGAPtVFoFLMdEpK5TSEMaktlQNUJmt1gg6qMTk4bgUmXpoW9XPle0UBQzIp9crR7LSFP053RrRLoMVArnZzrxjTV870KuLXA0LLYlW3yd4tOsjl5m5wiF+lppyEIKkcuZNyfT4misaENC8uOqFMKoEffvj1gY4dZr4heDUmrJyvt48JpeIW9nXg4U+dj//+zW589IeP25hotYOyVyrHzbCyK2v1YqJ5HZDZC5c5U+kZ5gW6F8uGevA//+fN+Pf7XsC6nz/rGBeiu7PE0rxtxTDetmLY9rrh/g70ZlMYq6aHTljYi3ue3Y/pQhlHD3bh5EV9vufUDDBjkm6w+FVdJAtqHtcaSFSVMzpd9EwtuTEmtLtS/Utk+FmyE0zTFGZScvt0FW7pE51GQZh5KVU5YcqFyR6fzIJkqLl7glyOKb6/y0QpypGlXjnFqkOpmsd166+ig83HJBUdY+LUmIRzYZUDD3miDJrKEcH0RC4UY6LLs8seIF7NFoWwusfSmIhyYYnpcfUxkdoOyCDxKy1sspZF/b5uXkNe3jyEpEhFeIhfXca/jIKPFiMo0sqGIUgDx6AIOt/IkFsT6GB1F9Yv7CRIp0BT1wXbqTFx1324tfjolgIPWWdC/9/hl8oJwJhMeVhDyJhWyoXdYBgGjlcEsFT6fempw77tC5qFWR+YNNOSHrAeiLyGvu3vTItJy6tfzrTiWqhS9W5pHEDuX+G9g3l6zyi2H5hANpXA+ScOub7OLY+vm9zcfEx0NLTuATRNU+R6F2k0L2runiDTtnTNCiVT61ZblGhk+dzlPHm2jsBEZkyiaOJH12eeZi
cfyIVVWkBzUjozaBnqXA1jEiQwySSdi4sIGJKGp0vxIalSpoPEr0oqJ5MKUpXjlsopVr8TaVmkwMQn3WqNaS/GpFrJVqf4Vd5M1RWYKOWrXuLXsAg638jwT+V4+5gQ40X3k6od5XumbgzpWdelh54TTtr2DV/FobjyPllnMilcWF0CE3G9/RkTP5dv6zODBSaA5Vz73L4xTOSsNA45+7YDODBpsPOrGvBMC8akKsiUHr5EwhBumm5eJpXmf3rGhKDzMCH4GYwRKIr+vROHPIWdbuJXnUbB8jGxO78mNUJB+k4T+ZKYMA9O5MVxF2gZE/3kItO2HTYvF+dkWZQEmGqawyqLrFZDKVUcXrB6uFhpiijGHO2oj5tXuedhUzmGoS+LVg2o3DC3Ro2JbnEp2sSv/ozJ3J6MoK7p3uh9TFQ2T69toqocEZgovZ4q/++XyglisGYtTKZpOhhQQD/+nZ8lByZRGKzZy4WjCEyCzjcyClI6VYe0ywaEQM8jbRxojB6eLFiNE5XxTXOHjjn3qnS0dCbWHDDtozFJeaSNVARlTKY0qRw3LFtgCWDvfnY/csUyjp3bPmkcgAOTxlflKA8PpRB0gjdAaiU/oaeKCyVTVApklcFKqJcxMU1TeDVc4hNF0wNdNmHrnyHvXAmqyM6rXFjWKdDiS3qCeT1ZLc2cFuyNnqWqlAvLXi7Oe07jIJVI2ILGQrHs0ALoDLncIO8COxrAmBw3vxtAhWkrlSuLHY0xP81HVsNe5DWpOB3EQj2ZF6Z6XmJbgqDj5SZ+UkWUl8bkkKQx6VJ8TOSUGQUm6vpVdDFB6xGpnGpTROlzCCIQq15nFUEs6WXxZrEsPc/S2JTvmRtrIjcsrIeCJy2VEL+Kucl/kfNDLYxJUTN3yLBSOfo5mwJL2jjM6cqIBp+Hq+nNvOIj5ZbKGZkqCCGtbsNHOhPZ/dWPvVA1Rl6w0sfVTaiLv82kj65FBq0P2/aP4Y4nKzrCdkrjAByYiMqLpmtMlHQMwW9HJj/gFO2rC8/yIffARIjRPHYwv909ipcOTqIjncDveaRxALs+RO6XI0+aBPXhL3poTJIJQ+SIySjMS/gqH0elSOXdkWwT7c2YVFoF0OIlp3JowqGFLIjJms3HJELGhK7NMXMrgUnZrFTqUDCXMCzthBvSOsYkYLXHYJdVNr3jYKVSJVBVjqarsb2Jn36nnSuWRDnv3O6soypH3yvHfgw3EzQ1lSN7mBDmSN9X18dGTke5fndJvCkHXvImRR7/boFJFBU5lffrNSZRMiZhigu82k4A7n5FBJUxSUpMNN1TtYOyWyrn+WoaZ7i/QwQ6MrrF+JNTOcHKhYP0ylFLyWXGRFcJRGyuFygweengJDZufQUAcOkp7jrCVoADk0YbrFUnKrIvthgTPV06V7L41kGeqOm9HWmrDFZuMqWDnyU7ANGN0i+NA9gnj5IPYxJGYwI4aUsSvg67mL1ZuXv3cmH5vzrGRC0llceHyPdW3y+qOAKkcmR6uhGMydzujGAqDk3kba6vCQ8hJuBksgB5gffeNaeSCfG5oQITTdAgB4VZF3dcWlhSCQN9nSmxkyTGJGdzftWPBzcTtD7lfpLAVfXioe+r8xtySxPJSEriTd3zTPDTmci6pXrgVpUTjfjVf75R4ZfKCcqYUKAJyNVjlXuqerW4pXKsFh/6zZ7Oy8Std5A4f9qgBVhzppQgh8aEPB/ZXheAMVnQl0VvR0pUEx03rxuvHnbfzLYCsz4wqaWcLSjkcj56SOiBcMvhW+I6fSpHCDBTdvqWJkuvNA4A33LZShqnSu8FiKJltkNOW+lSAaIpnuJjotOYAM6JefcRCky8zd7UCUsVGnsFBuqkKJ8zPfwkuAyTypGb+HkFRmEhl+ha3alDeoqkiEFyBiZBduM0Zun7BCkXFm3mNeXCqaRh83qRd4ayo7BhGIK6ph0j6Zc8uwuX9ekW8twZVxiTeUozzEGPnlYisPXqlSN0WWXbBkWl0v
0CE1GCXofwtfL+Johfw2hMit4Bl1VFpGccxpSqHEASaQvGJFhVDulL3AwrKfiQUzluFv3i/F38lnSYLtoZk55sSoxrGhfFktWWIojGxDAM2zrRbmkcgAOThjIm8iAnWpEWw5xLHtevkZ9Q8Cvvo0nMS/gKyGI0fSD21Msj2HVoCp3pJN5y4nzPYwEKY6Kx309rGBNaPLw0JoCzZJLs8d0YE7cdsio09kqlqLtpy6W1bE0S1e+klpd6wUptJT3Nw8JCdBDuSmOeVH0QpIEfQVeZpmO83DC3275wh7Ok1zAmiYQtraHrBUTMYqeoyqHuwta9pnFluuifVMbEUZWjKRcGIK6zTgdWKPsHC1aptOmoupDhVzIctG2AH+RUTkVcr08z1wK/+UYH0RbCJSjWsW0y1FQO4CwZVoMvtWSaoOsqLMMrleNrsBZEY5K3jw/DsDpq07M/KV3bIFU5gN1SgnpAtRNmfWCSbWBgIkfEIjBRxK/qpDLYY4/sVegU/AAwUM2hBmVM3KjVXz69D0AljRNESGVnTOSFzWmSpPae8TJYAywWaEQEJtRQ0C0wsftDALBPtNVr5lWuq5bJymWtag+MmsqFJTYgClO/UYkZETv5kJ4iGVFCHb4qB7BrMIBg4le9wVqVQZPKhQE7syRKeKvflShuIX6VGZOkns1zcxymHXa+yo5Ryb76/bx0YH4eHPLnFkuma0oXCJHKiUpjUjRtQaAqzK8FtaRy8pIIWgc/51divHo1qRy6Z2qvsozLMZ+vtvg43mXDJ1I5eZkxqbqw+mlMAgQmdN3kY6njYrr62YYRnOWideJV87txgs+a0Qqw82vKvlhGCXk3qKZyXDUm3d7ur267mQ+fexzmdmdwyQrv6NdPjPa7qsB0xVH9nschGIaBhFERXdotzZ15amGwJlI53pO4ahwnGgq69O0Rniole4BEmQC6Zh0ejIkqjJQZNXo9TRKivDQIYyL7mEhjrlw2fTUgbjBN0xaAWLvCPIqdpvi9H2hnqmNMgqQJVEYhGGNiHwuApcdKVwXKhlFhO3KFEtBp6WcAK1joUnxM5BSim/7JrUO1bJi16/CkGDcknCTQZ+t0YIG6C0s7fi/GhAI8N1t6P7+PoJAt0uUx0Crxq18qJyWdrw5jgjGxxiHNqwdU8asoF3aOR8Dyk5qvpPMI3aIqx6kxcW3i52JroMN03rkR7e/KAAcnLcZEKhUOmpL5w9MXY/NLh/EnZx3ddmkcgAOThhqsyTlQYh8EY6I0kSL4WV6rnYUJF5y0ABdUu356IeuzgxElksou0QupRKVRV9FGlzsZE7USw6sqB7A3MiuXTewbqQRr7qkcp6ZAV8XkyZgopaTy+JhSHB0tjUmAqpyidT3kRShXLAemX1VM5kviOvd3prXmX8EYE2dwHsb9Uy6n7cokAy2UKSWVU5b0WKlq+WtHKokpqewZsIIBCobo2qk+JumkoWHzqAJPz5gkEwa6M0lM5Et46WClY+5AV9rxfbx0YOr40X53qYePF2Pi18hPPGMRil/la11vigjwn290kBte6pD2WdjHNIyJmFfHXTQmmlROsWRtRtxactC8risXjiKVIxiTjDtjMhlC+CqO0ZXGv773jMCvbzZmfSqnkQZrsl+CSt+7MR9ewjqv9wWF3w5GbpAWFDqTNZ2luUphWhoTf/HrwYk88qUyDENvrgbIxkVOrQsgVTF5eGR4il+V0r3emlI5CdsiVE9lDk1MqYSBrkyyZnt4VZQMhNuNz5V2k0E+D5B1ApXrLe9+aUesCyApGCBBqhC/qoxJKmkbVzbGxIOpIxbspWqFkcoGAXJPK3fGJGivnFwAjYmfj0n9jIm1YZAD0ih20kJPFYYx8ZkXZI2ODuRD02MTv9p1QZZWT0nZSm0ZZO0G+ZWo6MnaNU5AkCZ+3uJdGWIzpHEFpnJ1Sh25fV4cwYFJIwMTyQWSBJMOgzVlt0PCurFcUSuOdNOYBIWV8/UOTHQTsht0TIVu0l
RtyP12l/LETGmc+T1ZjzJCp8GazoTKrQ+LaVo29WkhfrUqVtRr3xvG+VXSA6SkNEM9404WvhqGoW2oF0aImtOkcoLsxuWxEjgwUfVG0iRN10aXcjuoMiZpJZUjLazysNJpTHQmaLSYUenzvG4nhT/Y7a4Dc+tcLMMufq1fY1K/+NXawec8zqcWiAq0EOPctyrHr4kfMSZZp8ZEVOUoPlK6/jWTVRYklTBcrzEFxuM5p/jVrVdOyuf8ZYhUn40xqfonVccFCWTdNC1xxKwPTGhgNrIqR9fqPqfRYABAX2dKTMy6dM60oCBrG4TWDsYZ9JimKbwb1BJJL5DIsKQrF5adX5WUQUkSO+pgD0yqpcIu+hJAv5PSBYCiKke5BvL7UgpjYrOkd6RywjEm8vlEwZgQ5T9XEk7X1rdGH9D5QU77BSkVBpwCRntgUmW2NIzJQYXRk1M55bJpS+UYhpXOsVXleJigEWVPqRw9Y+JsCmcd23u3D1jl8cVyGW5VdkAYH5OIxK/laCtyAO/SfDf4iXq9NBqmaUqpHGsszuuxM9FW/6yqxkSjs6Jgoyvjrt3oztoDY8DShbgVD4RiTITbtKQxcaRyqmJbZkxmDhpZLmw3jKJdqcqY2AeTuvNVYVG/td06rx3MZN7K59fCmIRN5fhW5XRWuy1PFbCnKsoddknjVI6TcJyHyCVL18uNMdH1HrHR3ArtLizM80XbwqcDTUKyKZ7uHMJADT7kZmXhfEycz0AY/YJcLjwQmDGx23LLqRz6m87vhah4VfwKVK5lQVlwdF4mbk38AEuXQIGJTmulawpnHdufMRFprJK+Tw7Br1y4EamcMNVYQVCLy7EfE6TrTE3IFS2tm85gbWSqgOlCScwRNL51TSVpwXfTlwCyxqTyWtM0RQrIP5UThDFxakxoXqRn3C91FEdwYKJQylFC+CV4akyct8CLKlY73IaF1w6GAqGOdCLUINdZf+t2PWllV+LWfp4gV+VYjIlXYOJU6+us/936sMg7MJXZ0DEmtIiZpr2Jl4pSWU4R2QOTKBgTuk40bg5LfWu8XIAJ1CtH18QvLGMSPpVj97RJJgxNyk3SmIj+NVUfE+k5mMwXHUydLmgueuhn6J6+XA2EdVorXVM4QqAmfpLBlhdDQQuQW1WOn0NqUMgLZZTmakCtjIn3NfQyKCO2xDDsZmMDXRmR2qO5BHCmcuRggQStXR6BCQUtclUYjTXXVI4UmPphuuBMC6mMiZ/TbBzBgUkjGZOSB2NScO7kCZRG0an+6504vHYwB6V28mGEbzqmQqVKgXBN/AB9KmeRi+srIKWUfFI5bn1Y5OBUVOVIwtBp5Z5lUwkRbHnpTGxMTMoe8NQVmCgmanO60qLEdtfhSdvfvKAVv/rk+WXITdKCBiZqd2E5iCeojMl0oST8IsjvJ5EwxLWczJccLIKOMfFKt9BCQ2NzrialqWsKJ45d8g62K+dmfXd1TMloicbEJcVcK7xK893gV6ouB5uyKzBgub72ZFO2Mny5Xw6xr4AUwEodi+mYxJh0eyz4lvNr5bVTUkrHLVDQPW9uoLEvB+BqtZafpiWOmPWBSWMN1qxctqoxsRZu52DySuV4+R4EgZfGpJZSYUA/+etoZtVKuhhQYzJVKOGlQ5WF1s1cDZDKCH1SSm4dT+XePRSYpSU2QWVMDMMIZLKW16SIshGmcih9kkomxP/LZcR+SCuMiWmaNj8QP8iTfvCqHCWtV3IGqVmFMSEGMZNM2ISNXZLORD1vnSDaK93SqzRq06U0kwlDXGc1neOVJiLIRoDTGkaPII9/3fwUfRM/09FDpl5kPeYbN/inchLSa+2BCW0QdI0raV7bXd3kyCXlac0x6Vhe/cIsg7VqYFL9numk4RtYBXJ+9WBMjnAqZ+YiqMHavtFp3PHkHkeE7oWCNNmqpY80Aeh2JnM93F+nPXwPgsBL21BLRQ6gp8s9m/gV7YyJW4v43o6U2Jlu21vpWeHWWRjQp5R0Ph
EWW2G/Btbi6GR5ckW9GZYwWfMITORmXfRddYzJ/+44hAefP6A9xu8OT+LmX//Odo11OhJ1hx/Ox6RaLSVN9kF3zjRmgqSOALvgEpCDeGcASdf9kNInhyCXDKtMT1ITrHqV9Kp6Arcg3SoZtrOaBc33UJGUPFxyoheKPq1EX1PHmoTxmvFCY1M5lXu4b3Qa3974PL698Xn8vwd3CGZDB78UlRyIqZUt45oGfgQao8SYyMFgxhaYVI456SNirfytKn6tpn38OgtXzr8G8atssKZoj4jZCeLUHRfMnG9SIyxfD++I/qM3Po5HdxzC//vA6/Dm5f49ZAB7KkelNL0mAKtfjiaVUydj4pVCOCClcsIgocmZ6iz3VWOhgqQrcDtubzaF0emieEAXeqRydO3QdTn8rAtjItqty7oYTSpHFqL1BOiXIweodK1UAW6+WMb7rn8UxZKJzZ+9wLFz//xtv8Xdz+5HdzaFi05eCMBZlQPYg8pkwvAU7onvqASM8k4uaJpguL8Dz+8fd3XIVKGmciyhuJzKsTMm+8cqO101WOiU3F/V3bYun+9Vpt6rLGhuz4KbDkzH/KhIS4G8FwMqj/+RqQLm99rPJYw7rxd0qZyoApO+6jg+PFnA1zZsFb8/PJnH1Rcs177HrZcRQd44qIv76LS7YJWCyd1V6wGdKzVg3cMJwZi4z7U9EmNims62FTrI47pYKrsGsXKVWadNL2OlciqfWfb9zLiBGZMAjMnLR6bw6I5Dlf8/POX6OhVyWaKDMfGYAOb2uKv+rUZytaZy3EWXh2pM5XhV5ciTZlbJrZY8yjYJA5IdeMIAFvS6L3y63bGuKsePMdH295F9TKRrbzV+89oBOq+Feh/2jU5XNBKlMn6nGWMvvFLp2bHl5RHxOx1jMk8RogbRClnPgN0LRP6bH/72ohPx1+cvw1tOHAr0erUKQsdWqYwJ9S1ZOq/bdizRL6dQdFSV6Mz/vGzjgzIm81xKhq10oPt1k40A/QIBGv8jU865ILrAxDqfqKtyli/owTUXn4g/fu1i/PFrF+N1xw4CsI9jFX7fyx5EKIxJzlkqTKANH3Upl7+jvDnKh2FMquOlbFbGaZD3zOvJoiOdQNkEdlZT1DrIbrm6VE6hZGKqUPLtzRNHzHrGRJ4g3fqW/PypPeL/yVUwCOTJVhWdunUXBryrctys7IPCitZ14tfwrq+Ad1VOVpPKcWhMPHaX8qI71NsR2FGTYFXl+GtMdDs1OZiy2plbx+oL4GWS12gBRHBUvQ9ypcDekWm8erhP/Gyapvg7tWEH9IFJLWZn4jtWXS/pfJMJw/PeyDhlcT9OWdwf6LWAs+SzIH2mel4UvG1z6fQqvEzyZUcaQBY1ApVrKcrUfTQmhuHsk0Nwa+QXpCpHNgjz04x5CWCjt6QvaxnGemAYBj7y5leJnx984QDe+91HxL3UQdfOQj1mMmGgVDYdlTnUHsIrlbNbpHKs4xtGxUQtX7JKzidEzx0P8at03ybyRYcOTYdEwsDxQz3Y8vIotu0bx3Hz9Q0C5Y2TPD66MkmkEgaK5UqvLD8L/DiCGRNpcLqxJrc/KQUmAcy0CPaOstYka5reOxPVDEiGxZjUpzGh85Dh1ubdDypjUiyVQfOFTfzq0l3YTWMC2BdXL+Er4CLC1YiM1Z04QVdRkdYwJvKxgqVySAvgPAdKIZKzLWBRzYTDkwWxYDwnTeiW86t1v+TUQ1CzM7WhnmxS1ig4DNY0qRxVD/Xc/kpQtlzp9Go18is6xM4qYyKPDS/nV6AiKnYLzNSmcAQhrPU0WLOMAP0YE6/AJKr7lEk1LpWjgoLKXYcnbRUsMvxSOYC7gNRb/Fq5ZxTkq8GXmmqeCKDdSFTbQQAVnclU3jJl88Lyocp1eE7aaKigICeTTNjGoWEYtnExGSB9FDdwYOITmOw6NIkndh0RP4/WEJjIjMm0VDmgfj5h0MPAySozrjGVUz2Psun0AVD7kASFGhC4iSflRV7euQ
ZlTLyEr4C+HbpOZOyms6Hz0bnVyuJXm8akupB5jQvLE0Snn3AyJnuOTEPGbqm8ccfBCXHeevFreMZE9fIJU5FTKzLKIqATpMrBfLlsiqBsmcqYSLb0ao+flMLm2RxmPZxfAX2psPU3e1M4ghcbIz5XOJeGYEw0XiZRpXJSokLKrJuR9cO8niwGuzMwTSs1pyLI90prnnVA38CPQEwwBS/qd1R7bZGg1UtjAtht6YMGCTSGt7lcA8C7/Yjs8TQdgKWJGzgwkQa/riTv51v22H4O0heFYOuVI4kddY3lZNCkN54rOhZPtflUWMgPo3pstQ9JUNDERu6nbhoF+VoXy2YgjYm86x/2EL5WzsPJmOh2gG6VSVQ9IwsXaQIck+57p6Yqx4tJs6zDNedAjIkUfMhBClBJ7RDKJvDiKxMwTVMfmEiMSeDSXaVkPkyfnFohFkO1KkfjY5IrlvHykSlMFUrIJBM4ZrDLdixaBGSHVDr3hEGW9Kh+nnXPdeOuT+MWqoPaFI5g7fb9U44lqbuw2/NseVY4x1dkGhMpXRl1KkeHZUMVxmubC1vgl8oB3N1fx0T6xV1jQlCPr5qsBWFMANmWPlgqB7BYP0/GxMOfpL/LyZhwKmcGgXKLgF53ccdTewEAJ1Qj3DCpHFkIJ6cP5E6buoevN5sSk5fKmugEmGFg72xrnYdpmpbGpE4fk1zJCnhsi7zU06NQKmsFjyr6bYGJD2OiLRd2pszcGJOCSCc4GRN50ZMnCiuV4649yhedx1WbKe6WGRMllaP+/Nz+MUzkLVttd41JMAmZLPCV/9tIxsRRlaPRZsieO7SIHTe/27Ho04QspzvUHig6xsQvlTPP4znwq8rJeDEmOt8Ql+c5iMak3rSLnMLQGSNGjeWCLXALTCiQ99Lp2NkNgle5sDqvqYyJyuKR86tfZZuwpc+XAruw0jV48ZUJh4CXQEGrjn3Rp3JmjmQ09OjbtGkTLrvsMixatAiGYeDWW2/1fc8PfvADnHbaaejq6sLw8DA+8IEP4ODBg7Wcb0PgZrK269AkfrPrCBIG8M7XLgYAjIUQv9p8TKQSMXkXr6uaMAzD2pEpVLElmq1t4jAMQ0pPWAvzeM7Kz4ctF05JO0DALsqTv5/NxKgYLJUz0BWCMdE0E9SJjCkoUO+3znhLBCbVCU/N9xJlHERjoqtQIsZkryJ+laEyKNv2jYmFKpNM2Hbb8uRLluZ+UJ0odaZ0UUN1Adbpe6y+TiVX4StgLQzy4m05v1b+K+ufgEqFl07o3qPpSKuDmw4sSBO/tBRAT/swJjT+j2iqcqLqlUP3omxanhiNvPcWW+Cdygl6DWVQdZw+lWOf1xwaE+U5mAioFyFx7KQtleMdJBw10InOdBL5Uhk7Duorc6gMWLcJlb1Mgupa4oTQo29iYgKnnXYavvWtbwV6/QMPPIBVq1bhgx/8IH7729/iJz/5CR599FF86EMfCn2yjYKbLf2d1Wqcs5fOFSWKoRgTaUGSe+UEKcmzdmR2qrhexqTyXmIMrO9LzExnOhlaRKUyJm477lTCEIZR+ZLVbMvL88HGmPhpTBQ3UQDaNu6ujImGhqddFDEmakDYK8qFA4hfNcJOugeq+FUWJlNgsrDawHDbvnGhOehTSoLn1lCV42BMIkoReEHN5+vFrxS8lQXlrQpfK6+zMyZyNZGa3sv7pFq6M5apmVeALjeFk8dbURzffUyL5yUEY6Jr5BfVfZKvBbEEjWRMhL7CJ5UTtORahpf4tV8RM6vfMSWJkgFZYxKMMbFpTHxSOYmEgWU+6Rwx1/swJkHTR3FC6NF38cUX48tf/jL+4A/+INDrH3roIRx77LH467/+ayxduhRvfOMb8Rd/8Rd49NFHQ59so+AWmNxRDUwuPXXYWoDCaEwkIZyuGZyXgHWuy45sOgJxms5g7ECNHiaA0/bbTaNgGIYtj0uvd7OkB8KlctIaxkQn5FTTKARrQtQwJtWFQX34KZcdJDDRMSbTVQ
btgHSfpwtlW+M2Er+Ssd9z+8bEDnpAcVodqKFvjerl0wzGRNUIFDVMg40xqdL+xw/pGJPK68iiW75/jqocuscepn491YXG61mQm8IdltI5Xh4pBLmJnx9j4p3KiaYqR34/jeNGakyI9frd4SlRkivDmj+80mFOawDAOn9dKichtU4AnIGJu8bE+1pYGpOSCCaCsBfLhihA0zNHVsDhLn7lqpwacc4552DXrl248847YZom9u3bh5/+9Ke45JJLXN+Ty+UwOjpq+9dIqAZTALDz4CSe/N0IEgbwthULJSOtMFU51mQr6xJGp6y0gBvmuvgkWGmgOhgTTRO7QzV6mACWwNCPMZF/J2tMgpQLJxMGhnr9yoWduyhdlYFbvyCdORb9/2iVIlaFaEHKhfNSaouQlRi0fSMVViybSojrb/M1Ga38/5tPqAQmLx2axCtjlfeowUcyYWCwOvkGLRdWA/OoutZ6fqZaLqxhGug+TeVLooJDx5h0KeJXedypjInO+l4FPetejIncH0jWmXi5yhK0Tfxq0phEE0DKzx9ppRpVlQNU2CZKhekqc4KMP11fLEAKTFxYDnl+U6+bmtK0fEyCakyKIhUWJEigseymtdF1FibIVTncXbgGrFy5Ej/4wQ/w7ne/G5lMBgsXLkR/f79nKmjdunXo7+8X/5YsWdLQc9SJXx/ZXtHAnHnMIOb1ZNGb9a++UCFrFuTofMQlLSCDShUPOFI59VXlABW6GrDTw+Rg6VUi6QZV20HXUSdek4V2QTQmiwYqupJj53b5mn3pUjRaS/qUC2NSdKaWVIdSlTERjcGOTLl2CvZywZ0uloRvyaKBTpGuotSObK52ylH9GOhKwzSBX790GICeFVlSrVpZPMdbk0NQ/WWawpiIRnaK+NXWxK9yrZ/fP47pQhmZVALHzO2Gik4llSP7xSQdbJ5/kz1i5o5Wqn9U6FhNrz48BLkiya/KzjMw0Yiqa0FCSn01I5UDyGyBc1EOojFxq8rxcn4F7CyY08fE/qxTJ+sun8CEApfJXClwKgewmCO/VI7uWDQuDk7kxDzalZ7F4tewePrpp3HVVVfhc5/7HDZv3owNGzZgx44d+MhHPuL6nrVr12JkZET827VrV0PPUZfKoYmATL0olTNVKLmqqFUUpFROKpkQky7tvr1YD+Es6RC/1s+YHDe/MrnLu5VazdUAyQpeNIFzZ0zSUhBYkq6PG5bO68b33ncmrvvT1/qex4KqBoMYhsrn6MqFLUGkfC8LGsZEXZzVBeT4+T0Y7u/AZL6ETdte0Z6XlyV9rlAWYteFfR1Y2FcJJqhK5+BEHvliGYZR+X5kzPS/O9wDk//77tfg3/70DJy8qE97Pioc4tdS4xcnVWioa35Hn0/C41fN79EGp7Q7pfSXrOVRe+UEqQT7xrteg3/709f6OtnqdGBenYvFOUXEmESpBaLzJZagkUEpIAlgtYxJkFSOfc4BKkG8FZjoF2l5fnPXmFTcX2k96PZhIoipGM8VQ7EXpDHZfmBC22lY11mYQONCZlY7Mg1fzpuGhn+TdevWYeXKlfjEJz6BU089FRdddBG+/e1v4/rrr8eePXu078lms+jr67P9ayR0Nu00wInWlQVQtKvwQ1GJ/GmACSGlx8M/V1OOaJreTb+CQrdbORiJxkRJ5WgmW3lXovOu0OH8Vy/QVmOoILZBpjd1VUzyIiCzJgUNs6EGV+p1TyQMXHLKMABLk6RCN9F2aBiT4YEOYSK3t/o7Clrm9WSRSSXEZPbM3kp6UxeYLJ3XjbetGA7UJ0f+jiKVE9FO3AtpJZgtacWv9mutS+MA1iIgnDJtPVCsIBSw969yQ+X6LfT9DnM1RohBqnLkYMmvyo78KqYLZVexdhQOvapfTyM1JoC3AFb3HKrQVeVMFawSerf0i2we6SgXloLlScmV1t/HpMqYyD4mAQKTowY60Z1JolAy8dLBCcffveZ6eu5pfkgmjIaW9zcbDf8mk5OTSCgPaT
JZudCqJXqroGNMVAfBTCoh7eCClQyrlQbqDtAzMKG26tKkZzNmqyOVs1zjOnhIdBauhTFRnV+dVSgE+eEPsnsNg95sSuxuKBVidTmWUznW58mTvc6108mYOCeJS0+tBCa/fHqfNp2jTeVIVTk0uQz3d4iSaHJ/JeErpRfo3tGjE1Tg6gV1/Oea4PxqUfH2EnO7+NX++W7BaadCYcv3zKExiVA/o0/l+AcL9NmThZK4j24bjZ5MSohs1cqcKFNudK/HA8xNUcBKY2gYkyDBnTJ+AOvcE4Y7Y2FnTFxSOUXTYo6SCd/rS3PORL4UqIkfwTAMHC8CNOd1sBgTjfi1GrCKz0snA29E4oDQo298fBxPPPEEnnjiCQDA9u3b8cQTT2Dnzp0AKmmYVatWiddfdtlluPnmm3HdddfhxRdfxAMPPIC//uu/xute9zosWrQomm9RJ2hnLwcmNMjlsjPh8hmwMkel8FXGxGvAC5pY6l4qByb1lAvTzvP5fWMiOLQa+NWgManOnGVTZUw8xK/FYBqTMDAMA8NVTQpRnDqDtURC9nKRGBPNoqUuYLp87+lLBnDUQCcm8iVs3OpM5+iO2yE+vyS6nQ73d4oAhFiUPVLQAlj0LyGSwETJrYsdayNTOUoFhMUuujMm5BiqQl2E5OucVMZmkMAhKHQma4UgGhNN9Zjb85xIGJL7qz0w8UqZhgWd05RPaikq0Bz08hFnZU6QVI6wpJcYkzFJrOq2SA96iF9l3ROJWLt87Ogrr6mKX6VUTtDS3eUeLrhBNCaEmVSRA9QQmDz22GM4/fTTcfrppwMAPvaxj+H000/H5z73OQDAnj17RJACAFdeeSWuvfZa/Ou//itWrFiBP/7jP8YJJ5yAm2++OaKvUD/UXiGAlMqxBSb+FRgyVCEcLY5C/Orx8JNqXaaJqYrEMOqbWI+d14100sBEvoSXqztyYUdfQyonqeTxvXLfJIjNl8pa+r5e0AIuAhNRmm0/F51QVkeN+2lMgEpAdMkpFepfl87RlgtLGpM9QvzaIc6fWBQrMKkEXCprEEVgolrSN6NXjtoJWsdWOVM5LoyJMinbUzn2sRlkNx4UgtWUNg8FTYClQg3E/Z5nN51JlNVT6jEaWZUDVMqt5/dWrp+qMwmSylE9RwBpM+kifAXsbr6OcmHRZbss0vXdAZgPes1kroTJQvCqHMCbOfKqylHNE2daYBJaxnveeed5pmDWr1/v+N1HP/pRfPSjHw37UU2Dzvl1VDgIWoOcgpSxwKmc6gNWfYiyqsbE4+GnyH6yanPcmUlaCv5UfbRdOpnA0nnd2LZvHM/tG8fiOV0iAJpXB2NClQ9ejImsMdG1uq8XIjCpBlyWmZ39we1IJzE2XbS1ByhqAgj1O7hNAJeeugjf/dV23P3MPnG/CPQ9dQLcXLGEvSOVCWhhX6cYY3tGpqsVOfZUDjVBo/sVJWOSL5Vtna8b62NiVaYA+moW1RRviUuVjLqjlO+frvN15TX1jzldSb+VvvXSRyjBrs/z7BaYRCl+VYPQZugVli/owStjOWzbN4bXLBkQvw9jsCaLX70a+BEGpflNnRNk5pBYHL8GfoDFqkzki8KtNWjpLjGgesakciwdY9KRTiCTTIgxMJPM1QDulQNArzFRxa9AMJdPGSqtS4uRVZXjfvl7silxXqT69+o2GRay+KzSJ6fyGbUxJkSr2hkTr6qcgsSYRKUxASxmYc+oeypH/nlaMpnT7abV7+DGcp22uB9HDXRiMl/Cxq37bX/LaxZDou5Hp4siFbBooAML+rPivA9PFizGZMAq/T1eSmn0d0UXmAD2gNGr30u9UFvMq0E8YN8pHj+kr8gBnItAVseYKO0SvFItQaEK1E3TDFRppv7Nj52QPStkBEl5BEWzGRPAEuHL5bKmaQYSKAvGrSzP2ZXr4+U7MteLMUlabO5ECK2IKBfOl0LbwxNjsv3AhMPgUzTx0xzLMAybT9FM8jABODAB4JLK0WhMgp
hpyVB3Z0L8OuWvfK/0y7HvyKLs/Llcch0cyxXFhF2L+NXRK8dDoyBXgAiNSSNSOUcU8auLiNWWyik6A4igjIlhGEIEq6Zz9OJXezDckU6gvzONbCopKgd2H5my0jyS661cnTIQofgVqDwDzWBMiDUwzcq4ofGXdBG/elVlOVI5MmOStLN5QSvBgsASv1aCejmt4GUaqAYBfnox11RORL1yAE2w1GCNCSCzBVYao1Q2hSDYs7uwxkzRy/WVIM9vavAlszCkMQnEmEjlwpMh7eGH+zvQk02hWDaxQ6nMEa7ALs+h3KQzSAAVJ3BgAmuAyk6gY5p8ZVj3V2sHXnno1Z4efsp3VfUfJWNi+QiMieN3Z5I1lSE7euVQ6kLLmFi7EhG4RZnKcYhf9Z4cVipFSuVoaPgg4lfCpdWy4buf2S92O4CLj4ky8S/q7xR0vhDAHpmyPE5sgYm1SEdZlQNUFjvBeDXBkh6gCi1nUJiWGiYe7yJ8BZzXUid+bUhVTjUtMDpdrAba1ljyZEwStTEmTdWYNLgqB9AbjNmCuwACYtmHyM/1FQD6OtLi+rulrwolS2MSZMEnjcnhibwIqoJqPgzDEGNbTedM+VjNy89+PfYR7QgOTCA5v+rEr3IqhxiTgIGJZbPtJn71vvyUDyWqeNqn4VcYLJNEVwfqcH0FgKSh9zHRil8lgzUqSohSY7JIFb9qugsD1jWUg1GrwVs48Svh1MX9WDynE1OFEu551krnkC+I3ZLefhy5QSEFJlt2j6JQMoW5GmGZ1C8mqO28F5IJQ5SkyoxJQ31MbOmjslb8CljPiBdjkkgYtoDRXi5c+f+SUsruFTgEhdwU7vBk3raoBmniR6iFMTFNs6Eak6YEJtVxvHtkWuj2ClJwF9aS3s/1Faj2y6myJuqcIKcXg9rRA5bGRD6XMAzGcg1zBFgeS27jY0Dq+8OpnBkIVWNSLpsuVTnhyoWt3ZmdMbFK8rwv/4Kqav3lw5SWiI4xOXZuFzLJBKYKJTz5uxEAtbm+AvKulKo6nAsxQZTpSgFBlBoTYhZGpgqYyBVdOzlrGROd+DUEY2IYBs4/cQgA8JvfHRG/11X7qJMNOb4Clg3/4zsr7q5DvVnbOZ20qA/dmSSOGuiMbKckPwNNSeVI36doM9uzf+bRg13IpBI4zceFVZ6YPRmTCHVNiYQhgsgnfzdi2717pXIMw1DKor3PhYJP2T9JDoKiuE9qINVo51egoo+iOWfXococVyjKgYm/Tqeg2Ux6iV8B4MSFlYBoidKyQW7NELSBH+AMXjKpRKjN1tJ5lcBk16FJ2++nQzAmMy0wmVmJqRqhBibjeSvwkAc5sSdBDdbEDjxhZ0wIXt2FAYu+fq7a5ClKxiSVTOC4+d14du8YHnqh0heoFn0J4OX86q4xkVMdUexeCb0dafRmUxjLFW05W6f41akxUQNJ9f8B/3tGJZBHJq1KDd3ONlF1aqS/LZIYEwquHt95pPqzfQLt70zj51e9KVIL6nQygelCJY0TpT+GG4ilKZuq2Z79et/4odfjyGQeQxJjpEOHK2PSuKocAHjrSQtwwwM7cOdTe3BqNXhKGJX764VU0hBBkt/z3C31YiHIC3IU98mZymnOQjfQlcahibxggwrSOPCqVBI+JjaNib/4FQC+dcUZ2D86jePm29ODdo1JtVw4AGNSqaqyTA/DVsjM6dKn6oJ2ngZmXrkwMybQBCbVVI3afK8ndCrHTk+ru1s/xkStcRd6iYgU85TOebTasLAWO3rAEq+qvXK8UjmTUkAQZSoHsNIiLx20diBuKRmZMVEDSaCyu5Unfr9JR0e7u10P+f7LGhLahdMOUBa+Eo6e2+XbbTkM5JL5ZmhMAHvJsFu1zGB3xrGA6CDvGLVVOarDbERB1+9XBc93Pb1P0P9BUisyo+L3PFvOota8IwcmUVrSA/X7JIXBgPK8eM0dMkTaRTZYC1AuDFR0JscPOVODGU0qJwgTkUgY6JLmhbDsxY
BLYCKqclzmHDmNy+XCMxBqVc64i4NgaIM1pSlc2MCEVOsvHhhHoVSOpE+ODHIdJIv8wRo8TIBwjAkZrNkYk4gDE2IYZMbErexXy5i4lBEC/tde59JZcEltyezLon5nKsf6PtEFIG6QhX950sQ02MtC9DsplaVnpbaxYE/lWMdQXYnFwhfRmDt9yRwM93dgPFcUuqIggYlciebHTpBeQe7fQnOVYUQT2Mslx9lUomn25hTIjyqBiR+LKoLaolNjEkQXooMtlRPyWDKzEpa9cHP29WriB8zsVA4HJrC7cAISJahE3mEDk4JCT6uBiN+OVG3yNB1QmxIUyxRB4bxaGRMSGKqW9JrJhR7+6QYyJsQwvHSgwpjoJlpiTKZlgzWNlwZgv09+OxMSpI1MWWPEuh7BGJOFStpikZLKaQTSGsakkeJX+TNt4tcax4K8GMj3K6Hon9xEtrVCbuJ46xMvBz62zMr5aUx6JMtzglyRE0UQYe9R1LxFTmUYRRDvx5homvgFZUzcIKdywviYAPbAJGyQ4FZ1lfMwWAPsVgGdXC4885BVGBMxwLN2dXdP9eeg5cL+jIn3AFabPAnn16gYE6XvSq3iV7VRWsEjFUATDu3+/HLJtYAW+e1VxkQXyImqnKK8C9XT/PL38FtE1B1g5bj6hV4+lhx8LOjrgHxJ5IqdRkH2l8kXnV16GwHZi0KtYAsLefL2cn4t1Pk5OpB/zZaXR6ufGSCVE4KFk51FCQWXYLdWeLkdNxLOwCRYUCwYk5CW9F6QUzlhfEwAezASNq2iC0xKZavqihmTWQqHxkRTKgyEd35VNSZu7qNekJs8Rc2YUMUDoeZyYZr8q9/XqzttWglMomZLAGuR33GgGphoHuysjjFxEUba/Efq0pjoTay6Mkn0SWZJmVTC1p59uAmpHJnGjtIfw/szLf2Hm/g1KOSdrb1Xjl0kKdJ1EY47auJICJKOSiXtqRMvyL1YCG5jqlaoqZxmQX1eRBDv42ar8zGJKpUj+5gE6ZWjvi4se0HXIF8sizleZpTdAh3Z9Zk1JjMQbuLXXmWAW86vwapyCoqY0uGlEUDESjqT5xrAmKSSCRw3r1v8XG9VTlH1MfHolUMPXtT6EsBiGPaPVfxZdBNth4Yxcdut2RmTYIHJeK4ojieO6yLAXdjf4WCNFtnEsI1P5ejKhRu9QNmCoXJ9olRbKseLMSlHz5jITRwrxw6byvGrynEyJm6OxrXC5krcxMBE1VdYgaNfKqcacEreIW4p+KCwxqPVKydId2HAzqx0hZyfe7IpsUGjtgNTUmDidj+4KmeGQzVYc8tV0s/ThbJNFe8Gy0m0WpXjUrLqBbmnTZTOrwTZuKrmqhyliZ9XuSlNpPTgRblAEFSGQTd56xgTVRMk3i9X5fhMAH3SmBGCPhcxKS1IOg0JBSMJo+Jj0mhkhN7D9EzFRQl511tvGW+Xi8bEzfk1yhJ1AEJnAvgvqoB9jAVlTKYL1nUKmvIIilZpTEiTdSR0KsfuY2KalveUuqEMCnk8inLhgOxHVx3iV8MwHMyRzI67lZ5zKmeGQ2VMxlxSOTJFGKRkWO2YqjImQSZ+uckTPXhRThyyzqR+g7UAVTnVh1/WmEQNlWHQXa8OXVVOWc9s2BgTn3uWSibEOPHLm9OCpKu6od8N9XY0JHhTYVWmlZri/ApYC3ihZNZtfGZzftX2ylF8TCI09QOA10jpnECMSYj0oLxrpzL7oCLRoJBTJ81o4EeoPZVjT9FN5kvCSbp2jYmUygmpMemWNSY1BAlugYnXsTgwmeGwApPKYBgXPRfsAzyVTIgJUK7M+c2uIzjvH+/Fhi17ba+3UjlujIn/5V8kNXl6dm9FXBclY0KMTG82VXPAk3QRGHoxJvTgNUJj0p1N2ZgLbSqHKrEkHxNiNtRFKwxjArhPtm6NBHU+JWS41gzhK2APzqNOE7iBFp9C2WICah0Pnb7Or1WmIeKqHILcxDG8+NX79ZlkQswhpDOJmj
GRx3ijy8RlqGLxoL2MSCNE14Hm42TCqHl+lFM5dJ2DGKwBdo1T2FQO4ExpCWsIjzm5I50Ucxv3ypmBoJtLEzLlKnVlZzr311/8di92HJzEL35rD0zUpnBu/Vq8IDd52rp3LPD7guLMY+agryOFs5YO1nyMlJLv9WZMqqmcBjImgJ010QUmtBMak22+XdqtezXf08ERmGi6FgOVXXbCAM4+bq7jGK9bOhephIFzNH9rBDLSpBy1sNINKUmYqqY9w8ItlePu/Br91PeuM5egN5vCmcfO8X2tPZXjX51HCyTt5IMyC0FhS+U0cZFzrcrxCe4Win5SIyiXTZvra61VfvTcT+aK4voGLRfuqaNcGHBeh6kAjAkAnHXsIAa7Mzhmbrfn6+KGmVX8XCPcqnJ0gUlvNoVXxnK2VA51gJXTAoDTLKgWxgSopFue2HVE0LdRMiZze7J45FMX1LU7VhmTnEcqwKrKqVy/RqUphgc6sLXarVM30VJn2EMTlnW8226Nrk3GI98rw22yVXeif/HmV+FPX3+Mdlf2miUD+M3nL2waRavzMWm0CFKmzut1ZHVr4pdUgmZd24GocPxQDx777AWBNg5hfEyASqqA+j8BVrlwVAFWug2qckxTCop9Aq5zXjUXvR0p7BvNYfPOwyLQq9XDBLDGo1xRF/T5k9NtHXUEJtTKImgF5v/7wOuQL5ZZ/DoTIey4Nc6vKnQma7tHKg2o1MCkqOSBa6nKAZydVaOm7TozybpSKmF8TGhBmGpgVQ5gF8DqqGkS+h4ctwITN6aAJv+gJXlON0t3etqLKu6uY/cXFrYgQXhkNHaykwWMJRdzu6CQSzS9q3Kia+KnQ1A2014u7P+eLmGyZteYNCKV08zAhOzYS+WKqVlQfVM2lcSFJ1Uqoe54ck/dpcKAFZyTEDeTSgS+vt11pnIGlDljyqeBHyGZMGZcUAJwYALAmoDpoRj1MOrp0XiZ7KkyJlNqYKKkBtSdUdBcrurQ2syJIwjIXrtMqZwABms0sTZCYwIoqRxNAEil0Ycn85I2Rj/ZW3ncYNfd2v2ogr72um8yyMfC5vwaUZrADbKAsVjneHBL5SRcUznNCfjcYBe/BmNMAItpjLrRor1cuHkLXUc6Ke7Xkcl8KBH0padWApM7n9ojWI56GBOhMamuA2GCHLvza/hzcIhfi/4ak5mM9p0pmwinj4l7l0r6HVXumKYpAhO59LRCS9ofMnWQBc3lqg6tzcwBB4GDMfHotaIGKw1L5UiMiS6Qm1MNTMqmRZ+6MSZ0zkEZE7kpl42ebvFi6AVRMl8sRV7x4f6ZFmNSr7+ImyW9k82LtolfrZCZoSDPc3fWztR66bhqgczgNNP5FbAvyhbb6v+svPH4+ejtSGH/WA73PvsKgNorcgAnexsmjRp5VU5AxmSmggMTWA9isWyiVDa9NSbVgU8ak0MTeTFJyKmckmT8QwuSunMPynws7Ouw1ea3HWOi+Jh4MSYqG9Ec8avz4U4nE2IyIJ2JsEVXdmvChyZgYCIr7EtlU7RDb2a1Q1jQvZqQ3EWbZklfNuvWfrhZ0qv6J8FiNmjcBUXSJn71v85qI798xMFuqwzWAPuiHKZUPZNK4KKTq+mcp3YDqC+Vo473oB4mgOJjUsPG0cmYlGo+1kxA+86UTYT8IOaLZc9mUKr7K7ElgOqJYQUmtDuTGZOEEXxyNAxDOMAC7Vca5laV4yV+JTQslTPgzZgAks6kGpiQ/sDNxyTodbfvAK1x0OydaBjQfZEbxTXax8RmsFan9qPLx/nVKX5tMWMSwscEkNxfc/ZUTkM0Jk30MQHsmqyw2hkq0Sa2ulbXV91nBnV9BeyMSS2CdbVcmDQmzb4X7YLZ+a0VyAvGVKEkdiVe4lcKXuTARNaYyM6wortwWt6VJEMJG5cNWTqT9mVMAmhMFIq2UekNv1QOYOlMSABrlSrqxa9hNSbyDlA+TjtCMCaS7XnjUzmW4DZa51frGG6uxFH7mIRFmF
45gJXKoblppmhMAPvzEnYcrHzVPJvRWK2ur7rPrFVjUk8q54haLtxmm9BmoX1nyiai0uG28v9y+agu+hZVOSIwmRJ/szeEk1M5TsYkbCTc3oyJ1YzNNE0r/91CxqQrkxIPu3tgQiXDOVvKxa27cNiqnJGpggjSgNanD7xA92U8Z1VLBSmNrgdWVU794ld7VY51n1JqE78GV+UERZheOYC1I7cYk2iZn7CBUpQY0GhMgn6vTCqBC09aIH6OQvxKCKcxqU/8Srq0UdVgrc3m+maBAxNUUiW0iFJgkkkltDsHcoMl8asrY1LdoRmGNdkmEtbnhN3pyCXD7TZYZcZETmEFCUwauUAQa+ImLhyspnIOjOdtDJejC7BgTMKLX+WdbbNKf2sBLUYk/G5G2iklMSb0vNS60Lr7mOhdiVstRA7TKwewFjthsCYaZUbUXbiFGpM+qYotX0PARekcoM5yYeUzw2lMJPFrnRoT0zQtS/o2m+ubBQ5MqqDJ7OB4pSOtGyVolQtXNSZHLMYkXyyLklmxo1EWXnrowzImcmDSrqmcYtm0pS6CiF8bxZgAwKJq7xK3IHBeNZVzaEINTFTxaz0ak/ZYCP2gil+bEZjQfZHLhWtNsXTZLOmtY6i9ctqlKkf+nmE0Jo2ypG8X8Wstzrwrj7fSOT11VOWoz2hQO3rAHsTUk8oplExMFUoNadgaJ8zOb60BPYwkhHQTUakGa7slxgSw1NRFl1w27d7D5nEX9GVx4UkLsPL4uUIb0S5ISXl8u6bCuciok14jF+y3n7YISwY78Ybj9bbu1LTw4EROm3ojnLt8PpYMdtooYy/QJDOZt/RK7exhAsipnKLt50aCxk0hAvFrRzqJi1csxLnL5tmaUTq7C9dn5BYVwotf9YxJI8qFW6kxqSWQTycTuPqCZTh+qAfnLptX83nUI37tSCfwtpMr429eDV3auzJJ8TyMTBUE+95u7HizwJb0VdDujYSQbpQgMSmkMdmrBiaFMroy0s5MmQAFYxJyQjEMA99ZdWao9zQL8uRPE0vC0O9Km8mYXH76Ubj89KNc/z63p6IxOTie16beCK9ZMoBf/e3vBf5c2UvhQJWBa2fhK2CNfwpMmlHaTMGa3EixHh3OdX/6WsfvHL1yym3CmIRM5dCOfCLXBPFri6pyRqYK4tkJ+7y8f+VSvH/l0rrOo55UjmEY+Lc/c46/MO/v70zj4EQeRyYLEmMyOwOT9p4tmwhiMg5OVFM5LoyJ7PxaLpuOwIQi3aJLzpyouXZLx9QD2uWWyqZvZ1p1J9RKEaKoypnIu6beakEyYYjxQ4FJO3uYAFaQIAKTJoxPYi2oNBKIvlrGTWPS+qqcyvVNJoxAizClqqwmfjPDkh5Qy4Ub12TRD8mEATkublafKoIcoE1Vxa+sMZnlEIwJpXKy+lyl7Px6aDKPfKkMw7AGMU2ybjlzioDb2dMiLJJSHt9vYlFTGq1cIIgxOTSRj9yqnCaZA2OWmLqdQeOfFvBmBFK0OE9KovGoFyRnKie6ALQeCAuBgONClAs3SmMiiWibPVZlsXirWwbI17MeIW0t6JeuAzMmDADWw3iomspxY0yIaswXy3jp4CQAYH5PVryeBpTYmbmJX2dQDwSZLvfrTKsueI1M5fhhUOqXQ+V5UVH8NNm+IlI57S1+Ve9XUxiT6rW2MSYRjwdZ/wQ4+1e1Clb/rGDzgGpJH7WoWp6nWqsxaa0Bnvy5Xc0OTDqdgUlnZnYu0bPzW2sgqnL8UjnSYH1u3xgAYHigU0ww0yKVo7fY7hDi15lz6XVVOa6MSZMs6YNgTjV4ME1ZCxItY/LKWDw0Js5KpMbfF/qMXNEKTKIOVJOKK3GhTuv7qGB5GwVkTNya+EU0j7RDKmdkqiDGQusCE2tcdLcolTPKjAkHJgTVx8SNxksmDJG22bZvHAAw3NchcoG087Zy2W7lwjNnwMnBBX1/twlTzeMmW0ipp5IJEZyQViiqCVGkcuIifm0BY0
LjhiqXKkaHjWJM7FU5rTdYIzfoYPNAl6jKqfbKKUbLLMipnGYzJuRjUmmoWbFhaIdUTi1GafVA7ko+26tyQo/qTZs24bLLLsOiRYtgGAZuvfVW3/fkcjl8+tOfxjHHHINsNotjjz0W119/fS3n2zCIVI5PuTBgsSnP7SfGpENMMEL86lKVM5MZE8Da0XlpFORFr9U7V0rn7B2tBCZRUfwqY9L24lflezcjkCK9EaVyGpFeUTUmhXJ9filRIRlSY9JTXSTzxXLFkK6RPiZNrsrpSCfFdWh1IN9SjYktlTO7xa+hr/zExAROO+00fOADH8Af/uEfBnrPu971Luzbtw/f+973cPzxx2PPnj0ol8v+b2wi6MEg41Kv9tk92RT2IYfnqozJov5OPJMeBSCncvQTB31Ou4shw0DefdL395pY0smEePBaqTEBKgLYF16ZwL5qYBKVKLJPMCaVQDcqh85GQR2PzQic6VpTMN8IQaobY9JqBos+PyhjIpt2TeZKkfuYyNe+FUH0QFca+0Zz1vPSBqmcMD4mUcAWmORnN2MSOjC5+OKLcfHFFwd+/YYNG3DffffhxRdfxODgIADg2GOPDfuxDYf6gHs1gyJ3QdplL+zvEANoSohfvatyZhJjIq8nRMt7TZi67q+tApUMR53KGegkV9l4pHJaIn5N2cuFG8mYlMomSmVTbDxaPe6E+DXgdc6kEsgkE8iXypjIFyUfk2i+hy2V0wK30f7OSmBiGfy1PpUTxsckCtjLhdmSvqH42c9+hjPPPBNf+9rXcNRRR2H58uX4+Mc/jqmpKdf35HI5jI6O2v41GupE7EXj9SlpnkUDlsYkp6Ry1F0g7XxmUiQsMyYiMPFhTMR7W7xgz626NO5rUCqHFsL2T+Wo4tdmaEwqn0FuyY3QG8lj0+ZK3OKNAY2HMPbltIOflAKTRqRyOlpQMSh3CAZad39sgUmLGJNDE3mRepytlvQNDwlffPFF3H///ejo6MAtt9yCAwcO4K/+6q9w8OBB3HDDDdr3rFu3Dl/84hcbfWo2qDtGry6VatCysL/TwZiIVI5C4f/RGYux48AE3vGaRXWfc7tA3nxSKsdrxy1fk1bvXAerHYaJ/Ypa/Epo9ULoB4f4tRkak6Rd/NqIXXJSOua07JfSYvHrm0+Yj/NPHMIVrz868Hu6MykcmSxgIleK3GAtnUxgzVuOx0S+iDktaHnheF5adH9sqZwWMSY0FwEzawMbBg2/8uVyGYZh4Ac/+AH6+/sBANdeey3e+c534tvf/jY6Ozsd71m7di0+9rGPiZ9HR0exZMmShp6nOhF7iV/lwCRhAAt6s1K5MFXl6Ht/rDiqHze8/3WRnHO7wDAMpBIGimVT0PKegYl0rVutMaG+FlZZb7SMCaHdGRP1/JrpY0JMRiNSObaKMaksudXi16HeDnzvyrNCvYd28BO56BkTAPj4RSdEdqyw6FMDkxancjrSiabPTWSwRgJgw5hZKf8waHhgMjw8jKOOOkoEJQDw6le/GqZp4ne/+x2WLVvmeE82m0U2m230qdng1Ji4i19lYexQbwdSyYSg3KyqnHh0lY0KyWpgMlnw3/22k8aEqnIs7UGDGJM2Hwcqo9OUVI6aPmrALjlhyIxJND15WgXawU/kS5KPSfy+hw7twjDSuG+2vgSwroFZnYs6UsnIy+fjgobf/ZUrV2L37t0YHx8Xv9u2bRsSiQQWL17c6I8PDEdg4sWYSH9b2N8BAJKPSVX8Wme31LiBJnqLMXGnIOVr3XKNSbc9AI5qQnQGJu09DlTGpDlVOfZJtxE7VLvHTuP8UpqBbllj4mNkGDeQWJzQslROddx3N7lUGHBegzD6o5mG0Hd/fHwcTzzxBJ544gkAwPbt2/HEE09g586dACppmFWrVonXv/e978XcuXPx/ve/H08//TQ2bdqET3ziE/jABz6gTeO0CqqpkNfAlCt2Fg1UAhOH82ubNAtrFpJqYBJU/NrqqpwedUKMKJ
XTFe/ApJk+JoRGBKmJhAGKQYjNjOszSbv48VxRtH5o93EVFP2d9vm2VeX19Pw3u4EfUEkfZWwi5Jlxb2tB6G/+2GOP4fTTT8fpp58OAPjYxz6G008/HZ/73OcAAHv27BFBCgD09PTgrrvuwpEjR3DmmWfiiiuuwGWXXYZ//ud/jugrRAN5F59NJTxz7DJjMtxfCa5U59dii3s+NBu0qEwJ8av7xCKnNVqtMZmrCP2iWrR6synIm/J2961JJAxbkNhM51dCo9Jd9DnTDfRLaQbkRn5R+5i0Gu0SyItUTgsYE8MwbFqbjlnMmIS++ueddx5MSoJpsH79esfvTjzxRNx1111hP6qpkCNVL3O1yt/lwKTKmCjdhQvULCyGuexaQAFG3MqFB7oyMAwrrxvVhJhIGOjrSGNkqmKx3e7iV6CyyBUDiJejQrP6JiUTBgolE7lCvFlM2sVXfEya1wW6GWgXsTixeK1gTIAKc0Ti11aUbbcLZsaojgDyROylLwHsVTnEmBDt5rCknyEThx+ExqRABknxMFhLJgwMdlmsSZQ7NXmyjQNzJp9jU1I5jsCkMZ8p/FJEKqf974UONO9MSuLXOIyrIFADk1YFj5TKabYdPUG+DqwxYdgCE79BaWNMqhoTGkRC/DrLqnKo+iFu5cKAVZkDRHu/bIFJDKon5HvWlFSOcq0btRjRGKNy4ah0RM0GVeWMTRddu5fHFf2q+LXFqZxme5gQBqRN0mw1VwM4MBHIhgpMrAVHpHJSamAyy6pyFLMsb4M1eWfe+olVFsBGuZsekPLmcaDc5XOMyuo86OcBjWMyiJWzUjntfy90oKqckam8+F27G/cFRbtUsdEGotmurwQbYzJLzdUADkwEsiFSObTgZFIJzO+plJtajAmJX2cXY5JUBYYBUzmNsCEPC7lkOMrddF/MUjmtZkwaxWSoYzO+GpPKvHRksiB+F4eANwjaRWNCoteBTm+dYaMgX4egDR5nIlrDV7UhbKkcn8BkqLcDn/39kzC3OyN2Xw6DtTZpr94spBTxq5cPRqaNLOkBO2MymzUmdsak8ZOiyiY26lkRVTnk/dEGwXAtoF28HJjEYVwFQSaVQGc62fKS7j89+xiUyyb+5HXBWwVEiT5mTABwYCIgT8RenYUJH3zjUtvPqo8JaUxmSyonqbawD1iV024akyhpfntg0vrv6QdZB9OM83Wkchr0rFC/nLgzJt2CMamkcpIJoy2en6jQ35m2ApMWfa8lg1349KUnteSzAfucwRoThlKVE57GczTxK80scZofVK+IoOLXdrg+c3ukVE6DxK9x8JuwMSYzSfxq0NiMt8aEugsfrjIm7fDsRAl6XjLJRCydeaMAa0wqiOcT2gCESeXoQIOIBHbCxySmk2BY0M4tiL9Cuu00Jo1J5ch56jhoAdK2VE4LApNGMSYOg7V4LnrEmARhJeMIMlmLK6MVBQY4MAHAgYmAPBHXUsNOjEm+VEapbFo+JjGdBMPC4eLppTFJtpnGxJbKaVC5cAwWkWaLX1WtR+OcXyufkyvGPJWjzEtxCHbDgJ6XODwrjYLsgDubxa+zdwQoCGOwpoMc3U4XSiiWZ5YBkh/UXHdceuUAdvFrlJO93cek/cdBtsmBSULRSDRKL2ExJvF+JtUS1rh+DzdwYMKpHMLsHQEKwpQL+71/qlCyfExiujsLC/V7evXKsXcXbv31kcuFowyU+uImfm2y8ytgv96N+syUKn5tg2C4FqimX3Ew7QsDS2Mys75XGNjFrxyYzHrYDdbCi18TCUMcY7pQsnxM2kBD0QyoWhGvctN205j0d6bFrrpRVTlxoN2bncoBmtOeIIzHTjujOzM7GJPZosvTwW5JP3uvw+z95grqTeUA9pLh2eZjon7NoM6v7bB7TSQMzKlaQUeayumKmcakyeJXwP58NNr5Ne6pnFQyYU+3xfR7uMFK5bR+TmgVOtJJMXdyEz9GqF45bqCc4FS+POOabPlBZT68Jheb+L
VNJiESwEZ5Pr3ZlNitx2EcpFvAmDRDb6T2ymmXMVcLZAFsHErQw4ActePwrDQSVJnTwU38GNlUEt2ZJDLJBOZ0Z/zfoIGwpS+WZq2PCSGoj0k7MCYAsGSw0iV6Tldt914HwzCwsK/SS2lOd2ssrsOgFYyJbSw0uCpH+Ji0QfqwVnRJi9VMW8CpU3uUz2AcMTxQuQ7zJX+l2QZ2fq0imTDwvSvPQqFUrpkxIZp1Kl9CoTy7mvglVfFrjHxMAODzl52Mt60YxrnL5kV63Ov+9AzsGZkWk247Qw4mm1VFJAcjjVpoiTHJCY1JewTDtUCem+L8PXQ469g5uPZdp+G0JQOtPpWW4uvvPBVP7xnFyYv6Wn0qLQMHJhJef9zcut5vNfKzxK9xpo3DIP6MSReWDHZFftxTFw/g1MWRH7YhaDljwk38fDGTGRPDMPCHZ8TkYWkgli3oxbIFva0+jZZiZo3sFoPESlMFOZUzOy6xw8ckaBO/GC8SMw02xqRJ90UORholfrU0JvFP5dg0JrNkbmHMPvDIjhDEmOQKZcuSvk0YgUbD4fzqMWnKpcQzqQlZ3EH3rJm9SprBmKh9nOKcApnJjAmDQeCRHSGoG6TMmMyWmnyHj4lnKke2pJ8d1ycOoHvWzGqPdBMqtJypnPiOOZkxiYObMINRC3hkRwibj4koF47v7iwMHBoTL/Frmzm/MiqgMu5mjlk5SGiUGSGNzaoePbZN/ACrkR8we+YWxuwDByYRggKTqUIJ+dIsq8oJEZg0w+2TER4zlzGxf584MyZdUr+cLDMmjBkKHtkRolMwJmWpid/sWHhtIsaEgYRHwGEvF54d1ycOaE1g0vixoAa/cWbpemyMCU/fjJkJHtkRgjQm07NSYxLcj0IO1mZLL6E4gO5bMxc8mVFsmI+JEojEecx1ZTkwYcx88MiOEJ2SxoQs6WdLqkIOTPx23DShGgY8mRVGc5GRqnKa9pkpO9PWCMwkxqSbq3IYswBssBYhZI0JNfGbLZNHKkRgMtidQWc6icEarf8ZjcHiOV3V/zbPpbYpjIkjMInvM9ll8zGJb4DFYHiBA5MIQYHJZL6E0mzrLiwtMH477u5sCnf89RuF7wujPXDSoj7895o34ugGOOC6QX4+mqUxiXNVTk+WGRPGzAcHJhGCUjnj00Xxuzjns8NAXmCCiCePm9/TyNNh1IhTFvc39fNsFVoNCuLVdGGsGZMM+5gwZj54ZEcIYkzGc1ZgMnsYE1n8Oju+M6N+NKOJn9OVOL7jU/YxYUt6xkwFj+wI0ZmpXM6x6YL43WwJTMJoTBgMgqwxaVwTP8XHJMYspuxjwowJY6aCR3aEoCZ+MmMyW1I5tqoc3skxAsLeOLA5jEmcNws9LH5lzALwChIhOqpiztGqxiQxi8phUyF8TBgMgjxuGiV+DeNK3O7gJn6M2YDQI3vTpk247LLLsGjRIhiGgVtvvTXwex944AGkUim85jWvCfuxsQAxJnlqrz6LJg5bVQ5TzIyAsPXKaRADMJMYky52fmXMAoQe2RMTEzjttNPwrW99K9T7jhw5glWrVuH8888P+5GxgVr+GueyxLCQ58g470gZzUWmCZ2mHT4mMU6vJhOGqP7jwIQxUxG6XPjiiy/GxRdfHPqDPvKRj+C9730vkslkKJYlTiBLesJsEqcxY8KoBakmlAvPpKocAOjOJjFVKNlccxmMmYSmrCA33HADXnzxRXz+858P9PpcLofR0VHbvziAdjKEOO/MwoKrchi1QN71N4wxUZiFuKdYKZ3DjAljpqLhI/u5557DNddcg+9///tIpYIRNOvWrUN/f7/4t2TJkgafZTToUAKTuO/MwiBMEz8GgyA/I81iTOLev+qUxf3IpBJ4FZsUMmYoGrqClEolvPe978UXv/hFLF++PPD71q5di5GREfFv165dDTzL6JBNqTuzeE+AYcCMCaMW2HrlNIoxMdRUTrzH5z//yen4309fgEUDzetpxGA0Ew21pB
8bG8Njjz2Gxx9/HGvWrAEAlMtlmKaJVCqF//mf/8Hv/d7vOd6XzWaRzWYbeWoNgWEY6EgnMF2oVOXMFg8TgH1MGLWhGYyJs4lfvDcMyYSB/s50q0+DwWgYGhqY9PX14amnnrL97tvf/jbuuece/PSnP8XSpUsb+fEtQWc6KQKTuE+AYRC2Vw6DAdjZi4Y18VOew9m0YWAw4ojQgcn4+Dief/558fP27dvxxBNPYHBwEEcffTTWrl2Ll19+Gf/5n/+JRCKBFStW2N4/NDSEjo4Ox+9nCjrTSRxGxZJ+Nolfw3QXZjAIzeiVM9MYEwZjpiN0YPLYY4/hLW95i/j5Yx/7GADgfe97H9avX489e/Zg586d0Z1hzCALYGeT+JWdXxm1gMaKYTSQMeHAhMGIFUIHJueddx5M03T9+/r16z3f/4UvfAFf+MIXwn5sbCAHJnEvSwyDJItfGTWAgvdGplfUJn6cymEw2hv8hEYM2WQt7mWJYcBVOYxaQOnORrIYzJgwGPECryARo3OWNtlK2KpyeOJnBAM9I41K4+iOPZueSwYjjuAnNGJQIz9gdu3MmDFh1AKRymlgsDDTDNYYjJkOXkEiRofEmMyuqhwWvzLCg8ZKI4MFlTFpJDvDYDDqR0N9TGYjZMZkdlXlcBM/RngsW9CD4+Z3Y+Wr5jXsM+wlyQYMY/Y8lwxGHMGBScTozMjdUmfPAs3Or4xa0JVJ4e6PvbmhwUJCOvZsYjEZjLiCn9KIYWNMZhFlbPMxYcaEEQKNZjDkYGQ26b4YjLiCV5CIIVflzKZJUGZMssyYMNoIzOYxGPECP6URY7YarNny+MyYMNoIqSY0CmQwGNGBV5CIYbOkn0WpHN6VMtoV8thkjQmD0f7gpzRidM5WxoSrchhtCnsfp9mzWWAw4gpeQSKGbEk/m/w8kgb7mDDaEzbGhMcmg9H24Kc0YnTO0u7CSem7ZpkxYbQRbFU5syi9ymDEFbyCRAyb+HUW5bNT7PzKaFOwKzGDES/wUxox7FU5s2d3ZisXZsaE0UZIJbgqh8GIE9j5NWLYNSazZxJMJxP40LlLMZEvYU53ptWnw2AIyJ2v07OIxWQw4goOTCJG5yxt4gcAn770pFafAoPhADMmDEa8MLtWziZgtjbxYzDaFVyVw2DEC/yURgy7JT1fXgaj1bAJs7kqh8Foe/DKGTFkxoRLExmM1iPJqRwGI1bgwCRidGRmp8Eag9GuMAxDBCfMYjIY7Q9+SiNGJpkAmaDy7ozBaA9QYMKpHAaj/cGBScQwDEO4v862qhwGo12RYsaEwYgN+CltAMhkjatyGIz2gGBM+JlkMNoeHJg0AIIx4d0Zg9EWEIwJs5gMRtuDn9IGoLej4lvXJZUOMxiM1iFZDUhY98VgtD/Y+bUB+OTFJ+KhFw7i9CUDrT4VBoMBgMjLDLOYDEbbgwOTBuAtJwzhLScMtfo0GAxGFSlmTBiM2IC3DwwGY8YjyRoTBiM24KeUwWDMeKS4KofBiA04MGEwGDMe7PzKYMQH/JQyGIwZDyuVw4wJg9HuCB2YbNq0CZdddhkWLVoEwzBw6623er7+5ptvxlvf+lbMnz8ffX19OOecc/CLX/yi1vNlMBiM0CDRK/evYjDaH6Gf0omJCZx22mn41re+Fej1mzZtwlvf+lbceeed2Lx5M97ylrfgsssuw+OPPx76ZBkMBqMWsI8JgxEfhC4Xvvjii3HxxRcHfv0//dM/2X7+yle+gttuuw3//d//jdNPPz3sxzMYDEZoCPErV+UwGG2PpvuYlMtljI2NYXBw0PU1uVwOuVxO/Dw6OtqMU2MwGDMUlviVGRMGo93R9O3D17/+dYyPj+Nd73qX62vWrVuH/v5+8W/JkiVNPEMGgzHT0N+ZBgAMdKVbfCYMBsMPTQ1MbrzxRnzxi1/Ej3/8YwwNuTujrl27FiMjI+Lfrl27mniWDAZjpuFTl7waf/8HK3DusvmtPhUGg+GDpqVybrrpJvz5n/
85fvKTn+CCCy7wfG02m0U2m23SmTEYjJmOpfO6sXRed6tPg8FgBEBTGJMf/vCHeP/7348f/vCHuPTSS5vxkQwGg8FgMGKI0IzJ+Pg4nn/+efHz9u3b8cQTT2BwcBBHH3001q5di5dffhn/+Z//CaCSvnnf+96Hb37zmzj77LOxd+9eAEBnZyf6+/sj+hoMBoPBYDBmAkIzJo899hhOP/10Uer7sY99DKeffjo+97nPAQD27NmDnTt3itd/5zvfQbFYxOrVqzE8PCz+XXXVVRF9BQaDwWAwGDMFhmmaZqtPwg+jo6Po7+/HyMgI+vr6Wn06DAaDwWAwAqCW9ZvdhhgMBoPBYLQNODBhMBgMBoPRNuDAhMFgMBgMRtuAAxMGg8FgMBhtAw5MGAwGg8FgtA04MGEwGAwGg9E24MCEwWAwGAxG24ADEwaDwWAwGG0DDkwYDAaDwWC0DZrWXbgekDnt6Ohoi8+EwWAwGAxGUNC6HcZkPhaBydjYGABgyZIlLT4TBoPBYDAYYTE2Nha4cW8seuWUy2Xs3r0bvb29MAwjsuOOjo5iyZIl2LVrF/fgaTD4WjcPfK2bB77WzQNf6+YhymttmibGxsawaNEiJBLB1COxYEwSiQQWL17csOP39fXxQG8S+Fo3D3ytmwe+1s0DX+vmIaprHZQpIbD4lcFgMBgMRtuAAxMGg8FgMBhtg1kdmGSzWXz+859HNptt9anMePC1bh74WjcPfK2bB77WzUOrr3UsxK8MBoPBYDBmB2Y1Y8JgMBgMBqO9wIEJg8FgMBiMtgEHJgwGg8FgMNoGHJgwGAwGg8FoG8zqwORb3/oWjj32WHR0dODss8/Go48+2upTaits2rQJl112GRYtWgTDMHDrrbfa/m6aJj73uc9heHgYnZ2duOCCC/Dcc8/ZXnPo0CFcccUV6Ovrw8DAAD74wQ9ifHzc9ponn3wS5557Ljo6OrBkyRJ87Wtfc5zLT37yE5x44ono6OjAKaecgjvvvDPy79sqrFu3DmeddRZ6e3sxNDSEyy+/HFu3brW9Znp6GqtXr8bcuXPR09ODP/qjP8K+fftsr9m5cycuvfRSdHV1YWhoCJ/4xCdQLBZtr9m4cSPOOOMMZLNZHH/88Vi/fr3jfGbyc3Hdddfh1FNPFcZR55xzDn7+85+Lv/N1bhy++tWvwjAMXH311eJ3fL2jwRe+8AUYhmH7d+KJJ4q/x+46m7MUN910k5nJZMzrr7/e/O1vf2t+6EMfMgcGBsx9+/a1+tTaBnfeeaf56U9/2rz55ptNAOYtt9xi+/tXv/pVs7+/37z11lvN3/zmN+bb3/52c+nSpebU1JR4zdve9jbztNNOMx9++GHzV7/6lXn88ceb73nPe8TfR0ZGzAULFphXXHGFuWXLFvOHP/yh2dnZaf77v/+7eM0DDzxgJpNJ82tf+5r59NNPm5/5zGfMdDptPvXUUw2/Bs3ARRddZN5www3mli1bzCeeeMK85JJLzKOPPtocHx8Xr/nIRz5iLlmyxLz77rvNxx57zHz9619vvuENbxB/LxaL5ooVK8wLLrjAfPzxx80777zTnDdvnrl27VrxmhdffNHs6uoyP/axj5lPP/20+S//8i9mMpk0N2zYIF4z05+Ln/3sZ+Ydd9xhbtu2zdy6dav5qU99ykyn0+aWLVtM0+Tr3Cg8+uij5rHHHmueeuqp5lVXXSV+z9c7Gnz+8583Tz75ZHPPnj3i3yuvvCL+HrfrPGsDk9e97nXm6tWrxc+lUslctGiRuW7duhaeVftCDUzK5bK5cOFC8x//8R/F744cOWJms1nzhz/8oWmapvn000+bAMz//d//Fa/5+c9/bhqGYb788sumaZrmt7/9bXPOnDlmLpcTr/nkJz9pnnDCCeLnd73rXeall15qO5+zzz7b/Iu/+ItIv2O7YP/+/SYA87777jNNs3Jd0+m0+ZOf/ES85plnnjEBmA899JBpmpUgMpFImHv37hWvue6668
y+vj5xbf/2b//WPPnkk22f9e53v9u86KKLxM+z8bmYM2eO+R//8R98nRuEsbExc9myZeZdd91lvvnNbxaBCV/v6PD5z3/ePO2007R/i+N1npWpnHw+j82bN+OCCy4Qv0skErjgggvw0EMPtfDM4oPt27dj7969tmvY39+Ps88+W1zDhx56CAMDAzjzzDPFay644AIkEgk88sgj4jVvetObkMlkxGsuuugibN26FYcPHxavkT+HXjNT79XIyAgAYHBwEACwefNmFAoF2zU48cQTcfTRR9uu9SmnnIIFCxaI11x00UUYHR3Fb3/7W/Ear+s4256LUqmEm266CRMTEzjnnHP4OjcIq1evxqWXXuq4Jny9o8Vzzz2HRYsW4bjjjsMVV1yBnTt3AojndZ6VgcmBAwdQKpVsNwEAFixYgL1797borOIFuk5e13Dv3r0YGhqy/T2VSmFwcND2Gt0x5M9we81MvFflchlXX301Vq5ciRUrVgCofP9MJoOBgQHba9VrXet1HB0dxdTU1Kx5Lp566in09PQgm83iIx/5CG655RacdNJJfJ0bgJtuugm//vWvsW7dOsff+HpHh7PPPhvr16/Hhg0bcN1112H79u0499xzMTY2FsvrHIvuwgzGbMHq1auxZcsW3H///a0+lRmLE044AU888QRGRkbw05/+FO973/tw3333tfq0Zhx27dqFq666CnfddRc6OjpafTozGhdffLH4/1NPPRVnn302jjnmGPz4xz9GZ2dnC8+sNsxKxmTevHlIJpMOVfK+ffuwcOHCFp1VvEDXyesaLly4EPv377f9vVgs4tChQ7bX6I4hf4bba2bavVqzZg1uv/123HvvvVi8eLH4/cKFC5HP53HkyBHb69VrXet17OvrQ2dn56x5LjKZDI4//ni89rWvxbp163Daaafhm9/8Jl/niLF582bs378fZ5xxBlKpFFKpFO677z788z//M1KpFBYsWMDXu0EYGBjA8uXL8fzzz8dyXM/KwCSTyeC1r30t7r77bvG7crmMu+++G+ecc04Lzyw+WLp0KRYuXGi7hqOjo3jkkUfENTznnHNw5MgRbN68WbzmnnvuQblcxtlnny1es2nTJhQKBfGau+66CyeccALmzJkjXiN/Dr1mptwr0zSxZs0a3HLLLbjnnnuwdOlS299f+9rXIp1O267B1q1bsXPnTtu1fuqpp2yB4F133YW+vj6cdNJJ4jVe13G2Phflchm5XI6vc8Q4//zz8dRTT+GJJ54Q/84880xcccUV4v/5ejcG4+PjeOGFFzA8PBzPcR1KKjuDcNNNN5nZbNZcv369+fTTT5sf/vCHzYGBAZsqebZjbGzMfPzxx83HH3/cBGBee+215uOPP26+9NJLpmlWyoUHBgbM2267zXzyySfNd7zjHdpy4dNPP9185JFHzPvvv99ctmyZrVz4yJEj5oIFC8w/+7M/M7ds2WLedNNNZldXl6NcOJVKmV//+tfNZ555xvz85z8/o8qF//Iv/9Ls7+83N27caCv3m5ycFK/5yEc+Yh599NHmPffcYz722GPmOeecY55zzjni71Tud+GFF5pPPPGEuWHDBnP+/Pnacr9PfOIT5jPPPGN+61vf0pb7zeTn4pprrjHvu+8+c/v27eaTTz5pXnPNNaZhGOb//M//mKbJ17nRkKtyTJOvd1T4m7/5G3Pjxo3m9u3bzQceeMC84IILzHnz5pn79+83TTN+13nWBiamaZr/8i//Yh599NFmJpMxX/e615kPP/xwq0+prXDvvfeaABz/3ve+95mmWSkZ/uxnP2suWLDAzGaz5vnnn29u3brVdoyDBw+a73nPe8yenh6zr6/PfP/732+OjY3ZXvOb3/zGfOMb32hms1nzqKOOMr/61a86zuXHP/6xuXz5cjOTyZgnn3yyeccddzTsezcbumsMwLzhhhvEa6ampsy/+qu/MufMmWN2dXWZf/AHf2Du2bPHdpwdO3aYF198sdnZ2W
nOmzfP/Ju/+RuzUCjYXnPvvfear3nNa8xMJmMed9xxts8gzOTn4gMf+IB5zDHHmJlMxpw/f755/vnni6DENPk6NxpqYMLXOxq8+93vNoeHh81MJmMeddRR5rvf/W7z+eefF3+P23U2TNM0w3EsDAaDwWAwGI3BrNSYMBgMBoPBaE9wYMJgMBgMBqNtwIEJg8FgMBiMtgEHJgwGg8FgMNoGHJgwGAwGg8FoG3BgwmAwGAwGo23AgQmDwWAwGIy2AQcmDAaDwWAw2gYcmDAYDAaDwWgbcGDCYDAYDAajbcCBCYPBYDAYjLYBByYMBoPBYDDaBv8/3mgFYsPJDP8AAAAASUVORK5CYII=\n"
          },
          "metadata": {}
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## DQN Agent with LSTM history summarization module\n",
        "\n",
        "Now the DQN agent can handle partially observable environments with history summarization."
      ],
      "metadata": {
        "id": "MHojaMMrqEru"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# Add a LSTM history summarization module\n",
        "\n",
        "# Build a DQN agent whose history summarization module is an LSTM, so the\n",
        "# Q-network conditions on a learned summary of past (observation, action)\n",
        "# pairs rather than on the current raw observation alone.\n",
        "agent = PearlAgent(\n",
        "    policy_learner=DeepQLearning(\n",
        "        # state_dim matches the LSTM's hidden_dim below: the Q-network's\n",
        "        # input is the summarizer's hidden state.\n",
        "        state_dim=128,\n",
        "        action_space=action_space,\n",
        "        hidden_dims=[64, 64],\n",
        "        training_rounds=50,\n",
        "        action_representation_module=action_representation_module,\n",
        "    ),\n",
        "    history_summarization_module=LSTMHistorySummarizationModule(\n",
        "        # The LSTM input size is observation_dim + action_dim = 101\n",
        "        # (visible in the printed module repr: LSTM(101, 128, ...)).\n",
        "        # observation_dim=1 assumes RecEnv emits scalar observations;\n",
        "        # action_dim=100 presumably matches the action representation\n",
        "        # dimension defined in an earlier cell — verify if changing either.\n",
        "        observation_dim=1,\n",
        "        action_dim=100,\n",
        "        hidden_dim=128,\n",
        "        history_length=history_length,\n",
        "    ),\n",
        "    # FIFO replay buffer holding up to 100k transitions.\n",
        "    replay_buffer=BasicReplayBuffer(100_000),\n",
        "    device_id=device_id,\n",
        ")\n",
        "\n",
        "# Train online; progress is printed every 100 steps. record_period is\n",
        "# clamped so we always record at least one return value even when the\n",
        "# total number of steps is smaller than the configured period.\n",
        "info = online_learning(\n",
        "    agent=agent,\n",
        "    env=env,\n",
        "    number_of_steps=number_of_steps,\n",
        "    print_every_x_steps=100,\n",
        "    record_period=min(record_period, number_of_steps),\n",
        "    learn_after_episode=True,\n",
        ")\n",
        "\n",
        "# Persist the learning-curve returns so this run can be compared against\n",
        "# the other agents' curves later, then plot return vs. environment steps.\n",
        "torch.save(info[\"return\"], \"DQN-LSTM-return.pt\")\n",
        "plt.plot(\n",
        "    record_period * np.arange(len(info[\"return\"])),\n",
        "    info[\"return\"],\n",
        "    label=\"DQN-LSTM\",\n",
        ")\n",
        "plt.legend()\n",
        "plt.show()"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        },
        "id": "hewvpLU_qHhO",
        "outputId": "1d64c41b-0adb-4aa9-a5ee-3f1e3140286e"
      },
      "execution_count": 6,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "episode 5, step 100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 10, step 200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 15, step 300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 20, step 400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 25, step 500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 30, step 600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 35, step 700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 40, step 800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 45, step 900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 50, step 1000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 55, step 1100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 60, step 1200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 65, step 1300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 70, step 1400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 75, step 1500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 80, step 1600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 85, step 1700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 90, step 1800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 95, step 1900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 100, step 2000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 105, step 2100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 110, step 2200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 115, step 2300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 120, step 2400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 125, step 2500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 130, step 2600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 135, step 2700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 140, step 2800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 145, step 2900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 150, step 3000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 155, step 3100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 160, step 3200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 165, step 3300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 170, step 3400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 175, step 3500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 180, step 3600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 185, step 3700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 190, step 3800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 195, step 3900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 200, step 4000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 205, step 4100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 210, step 4200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 215, step 4300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 220, step 4400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 225, step 4500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 230, step 4600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 235, step 4700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 240, step 4800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 245, step 4900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 250, step 5000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 255, step 5100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 260, step 5200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 265, step 5300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 270, step 5400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 275, step 5500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 280, step 5600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 285, step 5700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 290, step 5800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 295, step 5900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 300, step 6000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 305, step 6100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 310, step 6200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 315, step 6300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 320, step 6400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 325, step 6500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 330, step 6600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 335, step 6700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 340, step 6800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 345, step 6900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 350, step 7000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 355, step 7100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 360, step 7200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 365, step 7300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 370, step 7400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 375, step 7500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 380, step 7600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 385, step 7700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 390, step 7800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 395, step 7900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 400, step 8000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 405, step 8100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 410, step 8200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 415, step 8300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 420, step 8400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 425, step 8500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 430, step 8600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 435, step 8700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 440, step 8800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 445, step 8900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 450, step 9000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 455, step 9100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 460, step 9200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 465, step 9300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 470, step 9400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 475, step 9500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 480, step 9600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 485, step 9700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 490, step 9800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 495, step 9900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 500, step 10000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 505, step 10100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 510, step 10200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 515, step 10300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 520, step 10400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 525, step 10500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 530, step 10600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 535, step 10700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 540, step 10800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 545, step 10900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 550, step 11000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 555, step 11100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 560, step 11200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 565, step 11300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 570, step 11400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 575, step 11500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 580, step 11600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 585, step 11700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 590, step 11800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 595, step 11900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 600, step 12000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 605, step 12100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 610, step 12200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 615, step 12300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 620, step 12400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 625, step 12500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 630, step 12600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 635, step 12700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 640, step 12800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 645, step 12900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 650, step 13000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 655, step 13100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 660, step 13200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 665, step 13300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 670, step 13400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 675, step 13500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 680, step 13600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 685, step 13700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 690, step 13800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 695, step 13900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 700, step 14000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 705, step 14100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 710, step 14200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 715, step 14300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 720, step 14400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 725, step 14500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 730, step 14600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 735, step 14700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 740, step 14800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 745, step 14900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 750, step 15000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 755, step 15100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 760, step 15200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 765, step 15300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 770, step 15400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 775, step 15500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 780, step 15600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 785, step 15700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 790, step 15800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 795, step 15900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 800, step 16000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 805, step 16100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 810, step 16200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 815, step 16300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 820, step 16400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 825, step 16500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 830, step 16600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 835, step 16700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 840, step 16800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 845, step 16900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 850, step 17000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 855, step 17100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 860, step 17200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 865, step 17300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 870, step 17400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 875, step 17500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 880, step 17600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 885, step 17700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 890, step 17800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 895, step 17900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 900, step 18000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 905, step 18100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 910, step 18200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 915, step 18300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 920, step 18400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 925, step 18500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 930, step 18600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 935, step 18700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 940, step 18800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 945, step 18900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 950, step 19000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 955, step 19100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 960, step 19200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 965, step 19300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 970, step 19400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 975, step 19500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 980, step 19600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 985, step 19700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 990, step 19800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 995, step 19900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1000, step 20000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1005, step 20100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1010, step 20200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1015, step 20300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1020, step 20400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1025, step 20500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1030, step 20600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1035, step 20700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1040, step 20800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1045, step 20900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1050, step 21000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1055, step 21100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1060, step 21200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1065, step 21300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1070, step 21400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1075, step 21500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1080, step 21600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1085, step 21700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1090, step 21800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1095, step 21900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1100, step 22000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1105, step 22100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1110, step 22200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1115, step 22300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1120, step 22400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1125, step 22500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1130, step 22600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1135, step 22700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1140, step 22800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1145, step 22900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1150, step 23000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1155, step 23100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1160, step 23200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1165, step 23300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1170, step 23400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1175, step 23500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1180, step 23600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1185, step 23700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1190, step 23800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1195, step 23900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1200, step 24000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1205, step 24100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1210, step 24200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1215, step 24300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1220, step 24400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1225, step 24500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1230, step 24600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1235, step 24700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1240, step 24800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1245, step 24900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1250, step 25000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1255, step 25100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1260, step 25200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1265, step 25300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1270, step 25400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1275, step 25500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1280, step 25600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1285, step 25700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1290, step 25800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1295, step 25900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1300, step 26000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1305, step 26100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1310, step 26200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1315, step 26300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1320, step 26400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1325, step 26500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1330, step 26600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1335, step 26700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1340, step 26800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1345, step 26900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1350, step 27000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1355, step 27100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1360, step 27200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1365, step 27300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1370, step 27400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1375, step 27500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1380, step 27600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1385, step 27700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1390, step 27800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1395, step 27900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1400, step 28000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1405, step 28100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1410, step 28200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1415, step 28300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1420, step 28400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1425, step 28500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1430, step 28600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1435, step 28700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1440, step 28800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1445, step 28900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1450, step 29000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1455, step 29100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1460, step 29200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1465, step 29300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1470, step 29400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1475, step 29500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1480, step 29600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1485, step 29700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1490, step 29800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1495, step 29900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1500, step 30000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1505, step 30100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1510, step 30200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1515, step 30300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1520, step 30400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1525, step 30500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1530, step 30600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1535, step 30700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1540, step 30800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1545, step 30900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1550, step 31000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1555, step 31100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1560, step 31200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1565, step 31300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1570, step 31400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1575, step 31500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1580, step 31600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1585, step 31700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1590, step 31800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1595, step 31900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1600, step 32000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1605, step 32100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1610, step 32200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1615, step 32300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1620, step 32400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1625, step 32500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1630, step 32600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1635, step 32700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1640, step 32800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1645, step 32900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1650, step 33000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1655, step 33100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1660, step 33200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1665, step 33300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1670, step 33400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1675, step 33500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1680, step 33600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1685, step 33700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1690, step 33800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1695, step 33900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1700, step 34000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1705, step 34100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1710, step 34200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1715, step 34300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1720, step 34400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1725, step 34500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1730, step 34600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1735, step 34700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1740, step 34800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1745, step 34900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1750, step 35000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1755, step 35100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1760, step 35200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1765, step 35300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1770, step 35400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1775, step 35500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1780, step 35600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1785, step 35700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1790, step 35800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1795, step 35900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1800, step 36000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1805, step 36100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1810, step 36200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1815, step 36300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1820, step 36400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1825, step 36500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1830, step 36600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 1835, step 36700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1840, step 36800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1845, step 36900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1850, step 37000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1855, step 37100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1860, step 37200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1865, step 37300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1870, step 37400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1875, step 37500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1880, step 37600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1885, step 37700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1890, step 37800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1895, step 37900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1900, step 38000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1905, step 38100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1910, step 38200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 1915, step 38300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1920, step 38400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1925, step 38500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1930, step 38600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1935, step 38700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1940, step 38800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1945, step 38900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1950, step 39000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1955, step 39100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1960, step 39200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1965, step 39300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1970, step 39400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1975, step 39500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1980, step 39600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1985, step 39700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1990, step 39800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1995, step 39900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2000, step 40000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2005, step 40100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2010, step 40200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2015, step 40300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2020, step 40400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2025, step 40500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2030, step 40600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2035, step 40700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2040, step 40800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2045, step 40900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2050, step 41000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2055, step 41100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2060, step 41200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2065, step 41300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2070, step 41400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2075, step 41500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2080, step 41600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2085, step 41700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2090, step 41800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2095, step 41900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2100, step 42000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2105, step 42100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2110, step 42200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2115, step 42300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2120, step 42400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2125, step 42500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2130, step 42600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2135, step 42700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2140, step 42800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2145, step 42900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2150, step 43000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2155, step 43100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2160, step 43200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2165, step 43300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2170, step 43400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2175, step 43500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2180, step 43600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2185, step 43700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2190, step 43800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2195, step 43900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2200, step 44000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2205, step 44100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2210, step 44200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2215, step 44300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2220, step 44400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2225, step 44500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2230, step 44600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2235, step 44700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2240, step 44800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2245, step 44900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2250, step 45000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2255, step 45100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2260, step 45200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2265, step 45300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2270, step 45400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2275, step 45500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2280, step 45600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2285, step 45700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2290, step 45800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2295, step 45900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2300, step 46000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2305, step 46100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2310, step 46200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2315, step 46300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2320, step 46400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2325, step 46500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2330, step 46600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2335, step 46700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2340, step 46800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2345, step 46900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2350, step 47000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2355, step 47100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2360, step 47200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2365, step 47300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2370, step 47400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2375, step 47500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2380, step 47600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2385, step 47700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2390, step 47800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2395, step 47900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2400, step 48000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2405, step 48100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2410, step 48200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2415, step 48300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2420, step 48400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2425, step 48500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2430, step 48600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2435, step 48700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2440, step 48800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2445, step 48900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2450, step 49000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2455, step 49100, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2460, step 49200, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2465, step 49300, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2470, step 49400, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2475, step 49500, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2480, step 49600, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2485, step 49700, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2490, step 49800, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2495, step 49900, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2500, step 50000, agent=PearlAgent with DeepQLearning, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BasicReplayBuffer, env=RecEnv\n",
            "return: 3.0\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<Figure size 640x480 with 1 Axes>"
            ],
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiYAAAGhCAYAAABVk3+7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACdb0lEQVR4nO39ebgdVZX/j7/rzHcektzcm3kkjAmQIAREUFFEVPDbjUrTgq1i0w2fB37a2mD3p526O6itNto2YtsabT+IQwt2I4MIJMhMQiIJgQCZSe7NeOfhjPv3R529a9c+VXWqzqkz1Lnr9Tx54N57hjp1qna9a633WktjjDEQBEEQBEHUAaFabwBBEARBEASHhAlBEARBEHUDCROCIAiCIOoGEiYEQRAEQdQNJEwIgiAIgqgbSJgQBEEQBFE3kDAhCIIgCKJuIGFCEARBEETdQMKEIAiCIIi6gYQJQRAEQRB1gydhcuedd2LlypVob29He3s71q5diwcffND28evXr4emaaZ/iUSi7I0mCIIgCKIxiXh58Lx583D77bdj+fLlYIzhxz/+Ma644gps2bIFp512muVz2tvbsXPnTvGzpmnlbTFBEARBEA2LJ2Hy/ve/3/TzP/3TP+HOO+/Es88+aytMNE1Db29v6VsIIJfL4dChQ2hrayNhQxAEQRABgTGG0dFRzJkzB6GQuySNJ2Eik81m8ctf/hLj4+NYu3at7ePGxsawcOFC5HI5nH322fjnf/5nWxHDSSaTSCaT4ueDBw/i1FNPLXVTCYIgCIKoIQcOHMC8efNcPdazMNm2bRvWrl2LqakptLa24t5777UVDStWrMAPf/hDrFy5EsPDw/iXf/kXnH/++Xj55ZcdN3DdunX40pe+VPD7AwcOoL293esmEwRBEARRA0ZGRjB//ny0tbW5fo7GGGNe3iSVSmH//v0YHh7Gr371K/zgBz/Axo0bXUU00uk0TjnlFFx99dX4yle+Yvs4NWLCP9jw8DAJE4IgCIIICCMjI+jo6PB0/fYcMYnFYli2bBkAYPXq1XjhhRdwxx134K677ir63Gg0irPOOgtvvPGG4+Pi8Tji8bjXTSMIgiAIIuCU3cckl8uZohtOZLNZbNu2DX19feW+LUEQBEEQDYiniMltt92Gyy67DAsWLMDo6CjuvvtubNiwAQ8//DAA4Nprr8XcuXOxbt06AMCXv/xlnHfeeVi2bBmGhobw9a9/Hfv27cMnP/lJ/z8JQRAEQRCBx5MwOXLkCK699lr09/ejo6MDK1euxMMPP4x3vetdAID9+/ebyoEGBwdx/fXXY2BgAF1dXVi9ejWefvppqrAhCIKYBjDGkMlkkM1ma70pRIUIh8OIRCK+tvLwbH6tBaWYZwiCIIjakUql0N/fj4mJiVpvClFhmpub0dfXh1gsVvC3qphfCYIgCMKJXC6HPXv2IBwOY86cOYjFYtQcswFhjCGVSuHo0aPYs2cPli9f7rqJmhMkTAiCIAhfSaVSyOVymD9/Ppqbm2u9OUQFaWpqQjQaxb59+5BKpXyZh0fThQmCIIiK4MfdM1H/+P0901FDEARBEETdQMKEIAiCIIi6gYQJQRAEQRB1AwkTgiAIgsjzsY99DJqmQdM0RKNRzJ49G+9617vwwx/+ELlczvTYp59+Gu9973vR1dWFRCKBM844A9/85jcL+rZomoZEIoF9+/aZfn/llVfiYx/7mOP2XHzxxbjlllts/75x40a84x3vQHd3N5qbm7F8+XJcd911SKVSps9i9W/RokXiPTRNw+23317w+pdffjk0TcMXv/hFx+30ExImBEE0HIPjKdy5YRcGhqdqvSlEAHnPe96D/v5+7N27Fw8++CDe/va34+abb8b73vc+ZDIZAMC9996Liy66CPPmzcPjjz+OV199FTfffDP+8R//ER/5yEegtgjTNA3/8A//4Ot27tixA+95z3uwZs0aPPHEE9i2bR
u+853vIBaLIZvN4o477kB/f7/4BwA/+tGPxM8vvPCCeK358+dj/fr1ptc/ePAgHn300aqPkaFyYYIgGo6fbzqArz70KoYmUrjtvafUenOmPYwxTKZr0/21KRr23EMlHo+jt7cXADB37lycffbZOO+88/DOd74T69evx9VXX43rr78eH/jAB/D9739fPO+Tn/wkZs+ejQ984AP4xS9+gQ9/+MPibzfddBO++c1v4rOf/SxOP/10Xz7b7373O/T29uJrX/ua+N3SpUvxnve8B4BeytvR0WF6Tmdnp/hsMu973/vwi1/8Ak899RQuuOACAMCPf/xjvPvd78b+/ft92V63kDAhCKLhGJlM6/+dStd4SwgAmExnceo/PFyT997x5UvRHCv/UveOd7wDq1atwq9//WvMmDEDx48fx9/8zd8UPO79738/TjrpJPzsZz8zCZMLLrgAr732Gm699Vbcf//9ZW8PAPT29qK/vx9PPPEE3va2t5X1WrFYDNdccw1+9KMfCWGyfv16fO1rX6tqGgegVA5BEA1IJqeH0TPZup+4QQSIk08+GXv37sVrr70GADjlFOto3MknnyweI7Nu3To89NBD+MMf/uDL9lx11VW4+uqrcdFFF6Gvrw8f/OAH8W//9m8YGRkp6fU+/vGP4xe/+AXGx8fxxBNPYHh4GO973/t82VYvUMSEIIiGI53VTYpcoBC1pSkaxo4vX1qz9/YLxpgpLeQ0as5qbsypp56Ka6+9Frfeeiueeuop09/+3//7f/jLv/xL8fODDz6ICy+80HF7wuEwfvSjH+Ef//Ef8dhjj+G5557DP//zP+OrX/0qnn/+ec/ekFWrVmH58uX41a9+hccffxwf/ehHEYlUXyaQMCEIouHgkRISJvWBpmm+pFNqzSuvvILFixdj+fLl4ufzzz/f8nFnnnmm5Wt86UtfwkknnYT77rvP9PsPfOADOPfcc8XPc+fOdb1dc+fOxUc/+lF89KMfxVe+8hWcdNJJ+N73vocvfelLrl+D8/GPfxzf/e53sWPHDjz//POen+8HlMohCKLhyOTLOjPZXJFHEoQ7HnvsMWzbtg1/8id/gksvvRTd3d34xje+UfC4//mf/8Hrr79uWwY8f/583HTTTfj85z9vKitua2vDsmXLxL+mpqaStrOrqwt9fX0YHx8v6fl/9md/hm3btuH000/HqaeeWtJrlEvwJSxBEIRCmiImRBkkk0kMDAwgm83i8OHDeOihh7Bu3Tq8733vw7XXXotwOIy77roLH/nIR/CpT30KN910E9rb2/Hoo4/is5/9LK6//nq8973vtX392267Df/xH/+BPXv2mAyydhw9ehRbt241/a6vrw/33Xcftm7dig9+8INYunQppqam8JOf/AQvv/wyvvOd75T02bu6utDf349oNFrS8/2AhAlBEA0Hj5RQxIQohYceegh9fX2IRCLo6urCqlWr8O1vfxvXXXedGFj3p3/6p3j88cfxT//0T7jwwguF4fSrX/0qPve5zzm+fnd3N/72b/8Wn//8511tz9133427777b9LuvfOUruPzyy/Hkk0/ihhtuwKFDh9Da2orTTjsN9913Hy666KISPrlOZ2dnyc/1A405uXfqhJGREXR0dGB4eBjt7e213hyCIOqcG+9+Eb99qR8XLp+J//rEucWfQPjK1NQU9uzZg8WLFyORSNR6c6rC1NQUrrjiChw4cAAbN27ErFmzar1JVcPp+y7l+k0eE4IgGg4jYlL3911Eg5BIJPCb3/wG1157LZ544olab06goVQOQRANh1GVQ6kconokEgnceuuttd6MwEMRE4IgGo50jsyvBBFUSJgQBNFwUCqHIIILCROCIBoOLkjSVJVTUwJQW0H4gN/fMwkTgiAajnTeW5KlVE5N4D0wJiYmarwlRDXg37NfvU/I/EoQRMNBLelrSzgcRmdnJ44cOQIAaG5uNs2YIRoDxhgmJiZw5MgRdHZ2Ihz2Zy4RCROCIB
oOY4gfpXJqRW9vLwAIcUI0Lp2dneL79gMSJgRBNBw8UkLm19qhaRr6+vrQ09ODdDpd680hKkQ0GvUtUsIhYUIQRMMhqnIolVNzwuGw7xcuorEh8ytBEA2HGOJHVTkEEThImBAE0XBkKZVDEIGFhAlBEA0HN71SKocgggcJE4IgGo40zcohiMBCwoQgiIaDzK8EEVxImBAE0XDwIX6MUfdXgggaJEwIgmg45GocSucQRLAgYUIQREORyzHIQRKqzCGIYEHChCCIhiKtREhImBBEsCBhQhBEQ6EKEUrlEESwIGFCEERDUShMKGJCEEGChAlBEA1FQSqHhAlBBAoSJgRBNBQFEROal0MQgYKECUEQDUVaESJpMr8SRKAgYUIQREOhpm6owRpBBAsSJgRBNBRq6kaNoBAEUd+QMCEIoqFQUzcUMSGIYEHChCCIhkLtW0J9TAgiWJAwIQiioVAjJtT5lSCCBQkTgiAaCtVjQn1MCCJYkDAhCKKhUIUImV8JIliQMCEIoqFQhQiZXwkiWJAwIQiioVA9JdRgjSCCBQkTgiAaCrUKhyImBBEsSJgQBNFQFFTlULkwQQQKEiYEQTQUqhChVA5BBAsSJgRBNBSFnV8pYkIQQYKECUEQDQWZXwki2JAwIQiioSDzK0EEGxImBEE0FGqEhBqsEUSwIGFCEERDoXpKKGJCEMGChAlBEA1FYbkwCROCCBIkTAiCaCgKza+UyiGIIEHChCCIhoLMrwQRbEiYEATRUBSaX0mYEESQIGFCEERDkSmYLkypHIIIEiRMCIJoKFSzK0VMCCJYkDAhCKKhUM2uNMSPIIIFCROCIBoKXpXTFA0DIPMrQQQNEiYEQTQU6XyEpCmmCxNK5RBEsPAkTO68806sXLkS7e3taG9vx9q1a/Hggw86PueXv/wlTj75ZCQSCZxxxhl44IEHytpggiAIJ3jEJBHRlzeKmBBEsPAkTObNm4fbb78dmzdvxqZNm/COd7wDV1xxBV5++WXLxz/99NO4+uqr8YlPfAJbtmzBlVdeiSuvvBLbt2/3ZeMJgiBUuKckISIm5DEhiCChMcbKup3o7u7G17/+dXziE58o+NuHP/xhjI+P4/777xe/O++883DmmWfie9/7nuv3GBkZQUdHB4aHh9He3l7O5hIE0eBc/5NNeGTHYZza144d/SO47PRe3Pnnq2u9WQQxLSnl+l2yxySbzeKee+7B+Pg41q5da/mYZ555Bpdcconpd5deeimeeeYZx9dOJpMYGRkx/SMIgnAD72OSiOrLG3lMCCJYeBYm27ZtQ2trK+LxOG644Qbce++9OPXUUy0fOzAwgNmzZ5t+N3v2bAwMDDi+x7p169DR0SH+zZ8/3+tmEgQxTeF9TLj5lcqFCSJYeBYmK1aswNatW/Hcc8/hr/7qr3Dddddhx44dvm7UbbfdhuHhYfHvwIEDvr4+QRCNC/eUJCJULkwQQSTi9QmxWAzLli0DAKxevRovvPAC7rjjDtx1110Fj+3t7cXhw4dNvzt8+DB6e3sd3yMejyMej3vdNIIgCKMqh8yvBBFIyu5jksvlkEwmLf+2du1aPProo6bfPfLII7aeFIIgiHJJ53i5MEVMCCKIeIqY3HbbbbjsssuwYMECjI6O4u6778aGDRvw8MMPAwCuvfZazJ07F+vWrQMA3HzzzbjooovwjW98A5dffjnuuecebNq0Cd///vf9/yQEQRAwzK9NMTK/EkQQ8SRMjhw5gmuvvRb9/f3o6OjAypUr8fDDD+Nd73oXAGD//v0IhYwgzPnnn4+7774bf//3f4/Pf/7zWL58Oe677z6cfvrp/n4KgiCIPEaDNYqYEEQQ8SRM/vM//9Px7xs2bCj43VVXXYWrrrrK00YRBEGUSmFLevKYEESQoFk5BEE0FCJiEuXlwhQxIYggQcKEIIiGwmiwRqkcgggiJEwIgmgoeFVOU5RSOQQRREiYEATRUKgt6SliQhDBgoQJQRANBfeYGB
ETEiYEESRImBAE0VDwqpwEzcohiEBCwoQgiIaioI8JRUwIIlCQMCEIomFgjBVMF05TxKRs9h0fx8bXjtZ6M4hpAgkTgiAaBtnoSuZX//g/P9uC6374PHYfHav1phDTABImBEE0DHIzNdn8yhiJk3I4NqoPah0YmarxlhDTARImBEE0DHLPEt5gDQAoaFIevDfMVDpb4y0hpgMkTAiCaBgyWTmVYwgTarJWHrw3zGSK9iNReUiYEATRMHCjq6YB8YixvNG8nPLIUMSEqCIkTAiCaBh4xCQaCiES0sTvqWS4PPh+nSRhQlQBEiYEQTQM/AIaCWsIS8KESobLgzepo4gJUQ1ImBAE0TBwARIJadA0TURNqGS4dBhjoq3/ZIqECVF5SJgQBNEwiFROWF/aeNSEzK+lI4s6SuUQ1YCECUEQDQMXIJGwLki4QMmQx6RkMiRMiCpDwoQgiIaBX0QjIX1p4wKFqnJKR9535DEhqgEJE4IgGgbebyOaFyTcY0IThksnI6XByGNCVAMSJgRBNAxpUZWTj5iEKJVTLukspXKI6kLChCCIhiEjVeUAhvmVUjmlYza/UuSJqDwkTAiCaBjUqhye0slSKqdk5IqmKUrlEFWAhAlBEA0Dv4iGlYhJmlI5JUNVOdXlzg278Pl7t03ridgkTAiCaBj4RTRK5cK+YTK/kjCpON9+9HXc/dx+vDk4WetNqRkkTAiCaBhEH5OCcmFK5ZQKlQtXD8YYpjL6Pp7OIpCECUEQDYM8KwcAwlSVUzbyviNhUlmyOQaewUlOY6MxCROCIBoGHhkR5leqyikbeQAi9TGpLLIXikdOpiMkTAiCaBhEH5OCcuHpe/dZLhmlj8l0NmVWmpTk56GICUEQRANgdH4Nmf5LqZzSkUVdjpkvnoS/pDJSafY0TpuRMCEIomEQs3LC1GDNL1RRN5UiYVIp5J4xycz03c8kTAgi4Dz88gDOX/coXth7otabUnOMVI65wZpc8jo6lcY7v7EB6x54pfobGEDUNJiXapFDQ5N429cex/c27vJ7sxoSUzM7HyMmqUwO7//Ok/j//Xyrb69ZSUiYEETAefzVIzg0PIU/vH6s1ptScwqH+OVTOVLE5OVDI9h1dBz3v9Rf/Q0MIGpzOi/CZNO+Qew/MYH//eMhvzerIZFTOX5GTPafmMC2g8O4d8tBHB9L+va6lYKECUEEHL6ApaZx6JeTVlM5FhETfic6lsxUeeuCiZrK8VKZMzal7+Ojo/V/MawHUhWKmMivtXnfoG+vWylImBBEwEmRMBFklAZrVuXCfJEeT2aowsQF5aRyxvPi7/h4yjQMkLBGjk75GTEhYUIQRFVJ5vsdpLLT18XPUVvShy1SOVP5MsxMjk1rg6FbCsyvHoQJj0plcwyDEylft6sRqZTHZEoqPQ6CF42ECUEEHErlGBidX+3Nr/KCP07pnKIUREw8pHLk/UvpnOJUymMiH/PbDg7XfSkyCROCCDgkTAxE59d8CseYlWPdVp18JsUpx/w6RsLEExXzmEhdZNNZhpfeHPbttSsBCROCCDjCY0KNr4xy4XzEJGIxK2dKEnAkTIqjekNKSeUAwLEAVIPUmnTFIibm19q0r77TOSRMCCLgkPnVQJhfRbmwc8RkPFnfIe16IK0IXi/ChFI53jCZXytUlQMAm/bWtwGWhAlBBBxufiUjp2R+zUdKrMqFJ8lj4gm1a663qhzjsSRMiiMb2CvhMVkyswUAsGnvCeTquEqKhAlBBByewqGIiXF3zyMmUYuqHHk42igJk6Jksqr51f1xJu/fo5TKKUo6Yx3ZKxf+Wmct6EJTNIyRqQzeODrm2+v7DQkTggg4/EJLHpPCqhyr6cJUleON8iImlMrxQqpCs3K4x6QtEcFZCzoB1HfZMAkTggg4FDExUKtyjHJhO48JCZNilNPHhISJNyrXx0R/rXg0hDULuwAAm+vYZxKp9QYQBFEeImJCwqSwKids32ANoKocN6TL6GNCVT
nekM9hucS3XPhrJSJhrM4LkxfquDKHIiYEEXBExIRSOUbERK3KsTG/8lkuhD08YhKL6JcLt6mcdDZnSkcMTqRJPBdBjpgk0/6nchLRMM5a0ImQBhw4MYnDI1O+vYefkDAhpg0vHxrGgRMTtd4MX8lkc6LPBC36UsQkxPuYFCkXTpEwKQYXde0JPcDuVpjIaRzu9Tk+HtyoSTbH8PSuYxWNsqVM/Xb8i5jw7ywRDaEtEcXJve0A6rdsmIQJMS04MZ7Cld99Ctf84Llab4qvyFESEiaFfUzC4WIN1qiPSTG4qGuN68LErfeBX8DjkRB62uIAgu0z+d3LA/iz/3gOX33w1Yq9h6klvY8Rk6QQJmEAwJpFejpny34SJgRRMw4OTiKdZdh/YqKhppzKixcJk8IhflGLqpwkmV89wUVdWyIKwL3HhPcwaY1HMLM1+MJk97FxAED/8GTF3qPSQ/ya8sJkQXczgPot4SZhQkwLhifT4v9Hp9IOjwwWpvJC8pgUpnIsza/kMfECN7+2eUzljCX186wlHsGsBoiYnBjXpyNXspFhumLlwkYqBwC6mmMAjM9Ub5AwIaYFsjCR/z/oyFGSVCYHxhonGlQKti3ps1SVUypGxMRrKkd/XEs8gln5iEmQK3MGayBM/DqfuV8lno+YdLfowmRwgoQJQdSMRhUmScUgp06CnW4YqRweMSlM5UyS+dUTWeEx0VM56kA4O3iarDUeboyISf4iXsmUqSp6/BJBoionoguTLi5MxutzLSRhQkwLGleYmBeu6V4yzO84eRWIdcSEPCZe4PvUeyqHCxMplUMRE0fUGwu/DLDcF8RTOd2UyiGI2jNthMk0N8ByARIV5cJmjwljzLTPKJVTHL7vRLmwa/Orvm8bxmMiIiaVq+RKF0RM/Hkv/jq8KqerJW9kTmc9NcyrFiRMiGlBowoTVYg0sjCZTGXxn0/ucexFw1M2RrmwOZWjCrmpdK5gSN10YefAKH789N6iVWo8YtIqRUzceB+4sbhRqnKG8mmPanlMAPdps2KoVTmt8YioXKtHnwkJE2JaMNKgwmQ6RUzuf+kQvnL/Dnzr96/ZPoaHwqPqdOH8762Mm+PTtJfJ//3Ndnzhf17Gk28cc3yc6jEB3F2cx1KNEzFJZXJiUnIlzzE1FetXxGRK6WOiaRo6m+vXAEvChJgWTJuISbZxL7K8ffbwhP33J6pyQqr5lQsT/ndNtFgfm6YG2L35vhzF2pKrVTmAu3TOuIXHZDyVxUQA9/eQdPGuZMREPZ/9iJhksjlx/HOPCWD4TOrRAEvChJgWyGJkpIGEiXpHVclFs9bw79DpM6ZzfIif9aycSenOkXcynY4G2GQmiyP56EWx84H3MYlHQoiF3c/LkRustcTCIo1wbLT+7tCLcUISJpWMmKipHD8iJnKnYx4xAQyfyQmKmBBEbZg2EZNpIEycPiMXIFGb6cJyoykuTEanYZO1w8NGSqXY+SAMxeGQuON2I0z4fm2JR6BpmlSZU5+D45yQq1f8Sq9YoVbl+BExkaNb8YgUMRElwyRMCKImkDAJPkbExPrCkMsxcB9nxKZcmAuTeCSMlmkcMTkktVUvKkykKFRTTL/j9pLKaYnrzwmyz0ROd+QYKmaYVs9fXyIm4pgPQdM08ft67v5KwoRoeHI5hpGpxhQm06mPSbFUTlpqohaxabBmjH8PoTV/wZyWwmTIgzCResPwdIyb7q+8eR33psxs1S+EQRQmarqjUucZT+Xk9bQvERMubrio5NRz91cSJkTDM5rMQK5ubCRhMr0iJs5VEXITtajwmCipHKmfA4+YTMdeJp6ESU5O5eQjJi6ECd+vLTF9Pwc5YjKkRBX8nPwrwwUPTzP6EzExd33lUMSEIGqIau5zquoIGurCFXRhsmnvCfzNL/9ouViOFImYyMJEVOUoqRw+WbhJMr9OR2FycMjweRQTJmlR6eQtlTMmeUwAYFZrAkAwu79WK2LCz18+ydmPiIk6wI
/Dza9DdbgekjAhGh6+8PK76NFkBrkiTaWCQmG5cLCFyfef2I1fbX4Tv93WX/A3L6kcETFRUjlUlaPjLZVjRExEKseFAJbLhQE5YlJ/d+jFUA2ilYqYqO3/fY2YRCliQhB1A19453Y2AQAYa5xKjEoN/aoV/Hs5rtxVZ7I5EdmwawnOG4GFQ5ow+UUKGqwZHhMjldO4vV/s6JfMr8XKhTPSfhXCpEjEJJdjGM8/hneMDfK8nBNKVKFS/YLSBZOc/YuYxKPkMSGIuoELk1ltcRHObBSfSaN1fuXNt9Q71BFJSNpFheSUA0dusMYYMy3S07UqhzGGg4PmiIlTi3kebYqGNSRi7jwmE9Lf1YjJsQB6TNTj0a9W8SqprDmV40fEREQJI0oqR4qYuBkxUE1ImBANDxchHU1RdDRFTb8LOo0mTPhd9qByh6p23rRaSOWUA0cWKdkcMxkBeVXOdPOYjExlxH4G9Lt0J6HB92skZKRyigkT7i8JhzTRO0Ouyqm3C2Ex1HRHJVKmjDHJY+J/xMSuKieZybmeGF0tPAmTdevW4ZxzzkFbWxt6enpw5ZVXYufOnY7PWb9+PTRNM/1LJBJlbTRBeIGLkPYGFCaN5jGZyIsENbwsf1+MGekFGXWAn/7/IenvTGmwph8L002YcH9JV3MU4bxwczofRCQqbKRyiplfjYqcsEir8UF+qWwOI5PB2uf8eOSdbyvhMZGPaV+rcjLWVTnNsbAYy1BvPhNPwmTjxo248cYb8eyzz+KRRx5BOp3Gu9/9boyPjzs+r729Hf39/eLfvn37ytpogvBCY0dMGqsqh9/Jqwul+n1ZeWnS0p09R46YZHJMlAs3RcOi8dd0S+Vwf8ncriZX5wP37kRCIXHXXayPCd+nPCUB6ObL9nwkIEjdX6fSWUzkj8uedkNc+Y3cjt7PqpykTVWOpml1Oy8nUvwhBg899JDp5/Xr16OnpwebN2/G2972NtvnaZqG3t7e0raQIMqkkYUJFyKxSAipTC7wwsTOY6J+X6lMDoibn5tRJgsDijDJ5oRpczpX5fBS4b6OJownszgxnrItoWeMmTq/cp9CsdC/2vWVM6stjpGpDI6OprCsp6yPUTV4OW04pGFmaxxvDk5W5DxLZ4yIib9VOebJwjKdzVEMjEzV3bycsjwmw8PDAIDu7m7Hx42NjWHhwoWYP38+rrjiCrz88suOj08mkxgZGTH9I4hSkYVJe6MJE26Wy19kg5zKSWVyIuqhLpRq5YjVgp22SOWE1YiJRVXOaMCEyRd+sx3X/fD5ktui81TO3M6moueDnF6IhkKG+VVK5WzYeQTv+dcn8McDQ+J3o0lzDxNOECtzePSuqzkm/DKVmJeTzFf6aJqeAgP8SRnZlQsDhs9kqFGESS6Xwy233IILLrgAp59+uu3jVqxYgR/+8If4zW9+g5/+9KfI5XI4//zz8eabb9o+Z926dejo6BD/5s+fX+pmEoS4qDVixIQvXPwOK8gREx4tAfTFVL74WUZMFETERErlaJpmarImd34NYsSEMYafPrcfG187itePjJX0GlyYzOlMFD0fTE3rZI+JFDG5d8tBvDowauo9o/Yw4fS06f7CAalcud7h/pLulqgoua1IxCRb2GHXz6qceLTwct/VUp+9TEoWJjfeeCO2b9+Oe+65x/Fxa9euxbXXXoszzzwTF110EX79619j1qxZuOuuu2yfc9ttt2F4eFj8O3DgQKmbSRCNncrhLaxF6DfIwsS8CMsGWDcek0y2MGIi/5zO5kzlwoYwqa+KBCfGkhnh+egv8eJuCJPiHpOM1LTOblYOfz25aZudMJmT7yV0aCg4HhM5YiLMrxVJ5eTTspIw8bUqxypi0lyfE4Y9eUw4N910E+6//3488cQTmDdvnqfnRqNRnHXWWXjjjTdsHxOPxxGPx23/ThBesBImxZpKBQV+R8UvAI0SMQH0CwK/kLmJmKRzheZXQI+gTCGnlAsbqZzxlN4JOBQyC5
p6RN4PB0u8uHNRoAsTfR/YnQ/m+UOG+XXSJEym8v81hAlvWqemcuZ2JgoeW+8YEZOYOEYqEzEx/GJ+poycUjkiYhLkVA5jDDfddBPuvfdePPbYY1i8eLHnN8xms9i2bRv6+vo8P5cgSqGhIybKbI0ge0zUyEWpEZOoEjEJS23p5Z4OXMwxZm4IVs/I+6GUi3s2xzAwkhcmHU3obIoVvK4M9+1omh4xSSjlwvLryVGQYhGT/uHgREx4xUpncwzxcAU9JhmpkZ2fEROevowUXu67m/V1o96qcjwJkxtvvBE//elPcffdd6OtrQ0DAwMYGBjA5KRxglx77bW47bbbxM9f/vKX8bvf/Q67d+/Giy++iD//8z/Hvn378MlPftK/T0EQNuRyrLE9JhnF/FqBBbNajFtETDiFwsTC/MrLhcPmZU2eMCwqFCJhJKIhYY4Nis9E3g/9JQiTI6NTyOYYIiENs9rirj0m3LdjeExyptfj/8/v+sdsqnL6OngqJ4gRk6jwaVQyYhIN+xsxSTpU5TSEx+TOO+/E8PAwLr74YvT19Yl/P//5z8Vj9u/fj/5+wwQ1ODiI66+/Hqeccgre+973YmRkBE8//TROPfVU/z4FQdgwlsqAFxY0coO1hjC/qhETkzAxCwdL8yuvylFSMibzqxTW1jRNVD8EpcnaiCli4j3qwAVBb0cC4ZBW9HzISqXCgNE9lF/sZIGRY8DhfPRkTERMjD4mgDGv6vh4qmgvlHqhah6TvAiMRULCZOuPx8QhldNcn/NyPHlM3LQR3rBhg+nnb33rW/jWt77laaMIwi94f4Z4RDeUVVuYGIbMykx/4AskN78GOpWjRkyk3hr8ghwJacjkmE0qp7AlPWCel2NU5eiPaY1H9BbtAREmZo+J96jDQclfAqBouTC/i+eRJbUqR/W5HBqawryuZimVY74YtjdF0BILYzyVxaGhSSyZ1er5M1Qb2WPChVclbgBSkvmVR0z8EG+TNg3WgPod5EezcoiGRvaXyP8dmUojZ9HW3E+yOYbLv/0k3vedJyv2XgUekyBHTJSqnCELjwnvg2FpfrWryhERk1xBsyljwnDwhMnAiJFGcUu/1MMEMM6HoSJ9TLjYSyjCRE3J8J/HbPqYaJoWuMocETFpiSEe4WW8lU3lGOXC/lXlOKVyBsedBzlWGxImREMzoggTfofIWOUbaw1PprHz8CheHRit2HvxC3QjVOWoUQt+Qchkc+JCx4WJZcTEpiqHR6vSWXODNcCINI1NBU+YZHMMR0a9XdzlHiYAilapqRObm5QGa6owOVhEmABAHxcmAellwlOK3c0xMVumEsIkJZm3/YyYOAkTXi6cyuZMgx1rDQkToqFRIyaJaFic9JUuGZYvtGoprB/kckwak944fUya8xc/Hl4ekUSDGATnoSqHX1SzOSaFtfX3aJVKhoOAmnLxGnWQ29EDQEezkcpxM7G5SbqTz+WYeH+eEuC9VcSsHAthErSS4RNSKoevHZUuF5YjJuVGMpw8Jk2xsBDp9dTLhIQJ0dCowkT+/0r7TOTURCWaeKVMQ78ax2Myr0u/aJ7IlzDy76k1HjHMlw5VOWHV/Co1WOMXFJHKifFUTv3cLTqhmoC9XtwP2aRy0llmOf+GR6FUjwmgl6Hy11uzsCv/+rpQGbfpYwLoZcqlbHstmExlxYW9szkqRUwqcD5njFSO3KW13JuNZMbeYwIYUZN6qswhYUI0NLUVJpWNmMgLFq9+CHIqh1fl8Ismv4OTv0OnO1ZelVNgfs2ndmQfieoxCZr5lQsFrxd3HtHgPo+WWFi8ltX5oHbTjUu9MCZTWZGOOWdRt2l7nFI5QfKY8KhdNKyhNR4RHpOKRkzCISQihgAsV5gYTQULIyaA3p8FqK8mayRMiIaGL7btNY6YqMZOP5AXR94vItDCJL+P5nU1A9AXSsaY6Tt0Eiaij4lNubAsPnizKV41EjSPyZKZLQC8CZOJVAaD+Uon7jHRNOeSYW
F+zYu7UMjwP5wYT4nJu6sX6RGTg0OTYIxJ5cIOwiQAHhO5VFjTtAp7TIy0WTSsQcsfxskyfSZq+lJFVOZQxIQgqkMtIyaV9pjwEG1camEd5FQO30dz86mcVCaHyXRW+g4jjlURGbsGa/m7fX6xjIQ08Rhhfg1IxIT7ok7pawfgrS09j1C0JSKiiguQzoeJwvPBqtKJp9N2HR3XXy8ewUmz2wAAo1MZHBtLiWohvn9l5kgek3qqBLFCLhUGUFmPScbwmGiaJiIc5YigdDYnvgu7VE49NlkjYRIgDpyYwPEAjQuvBxraYyItZLGwfcTktcOjpkm99QqvCpjZGhd3pifGU6bvMOYgwIxUjjliwlM7o/moiHzn6CaV89rh0bpJ9QwrwkQd5DeVzmLHoRHLC76oyMl7PDhOvUxEgzUpCsV9JruO6tON53Q2oTUeEefV60dGxWObLe7SezsS+W3NiQiOG/Yfn8CxKq9/csQEgKPHZP/xCTy+84j498yu40LYuSGVNaciuc+knMoc+bm2EZO8AXrIw3dRaUoa4kdUn+GJNC755kbM7WrCY5+5uNabExishEmxplJ+MV4lj0k8EjYu2Iow+eOBIVzx3afwgVVz8O2rz/J9G/xkQmrK1d0cw8DIFAbH06aSb9Gq22KxNlI55vst7qHgURF5gS5WlbP1wBCu/O5TuHxlH777Z2eX/Nn8QE5rndKnRyjUVM6X/ncHfvb8fvzwY2vwjpNnm/7GRUxfPmLBcRLqVm3+C4VJIv/fJgxPpvH6Yf33LbGw5WDEeCSMWW1xHB1N4tDQpIhGODE8kcYl39qIOR0JPP43F0PTCl+3EohS4RazMFGF8fBEGu/61saC6MYtlyzHLZec5Oq9jIiJ/tn0iEm6rIgJ95domtkfJFOPg/woYhIQBkamkMzksPvoeF3lAusdtY+J/P8Vj5gkqxMxiUdCYsHM5JipmdueY3q4/cDghO/v7zfjolw4YlosTRGTsEPExLZcOG9+FRETY9njVTmjNh6TPcf0i+yL+wZL+ET+Mp7KiggGj5gMTqSF6GWM4fFXjwAA9hwr/L75fuRVGBxnj0lhm/9E1JzK4X1J5uQjITsP6xETK+Mrh/tM3HavfXNoAqlMDnuPT2Dv8eody7z7cFeLvo8MYWw+/vj6HAlpOGNuBxbnPUAbdh51/V7pCkZM4vn0kBXkMSFKRr7j3p1fLIniiItacw08JlWLmBjCBDBftPk2eAkp1wq+j1riYXS38KmnKeF9kFM56oUBkBqs2fQxGUtZREwSzqkcLmb6h6dq7kPhx2ssHEJPW1xEe7h35ODQpJj0a2XmHbMp4e1o0n+26utj1eafe0x2H9HXIV5FxcXG63lhYuUv4XAR43YQoSwcX9h7wtVz/IB3H+Zizs7LxY/d3o4E/vf/vBU/+fhbAADbDw67TqNy8ysX3354TIxSYes0DmCkqchjQnhGPrh3HRmv4ZYECyePSaUbrJk8JhWsytE9JlLfA+mizaM26Ux9mwwBI6rUHIuYFkurcuGkhdAyupTamF8tIiYilWMT0ZL7m+w5Wtvzjgu09qZovrW7uVHZZimqY5WaGrcp4XVTlWMyv+YvcrybsZzKAYDX8qkcq4ocjlGZ4868KwuTzXurF73iF+tOIUysvVyTSnPAeV1N6G1PIJNj+OObQ67eS/QxifgXMZlMOZcKA/U5yI+ESUCQL3I8t0s4wxgTXUNrXpVTgbvtVFY/JmKRkCl9kczKgiiAERNJmAxJqZz2pihi/C7SKmIi7u6dy4XlRbrYrBz5O6z1eSdXJwHGxZ17R+RIgtXn4cJMHaznKEyy9qkcDjfTcoHCX4enyazwmsoZnTK27YV91YuYqFU5duXCfH3mok3TNFFCvcllhEdN5fgRMVGHVlrB01S8oWE9QMIkIEykZWFCERM3jCUzIidvEibNNajKqUDEhF+cef7YygDLt6Hey4hzOWa0pI+HbT0mTmXRae6HKCgXtq/KEX
1M7FI50u93140w0Y9f3laelwxvkiIJVqkpnspSIxmO5leL+UM8lcPhIoOndDhOHhOvbenN38N41aoT+cW6q0i5MF+f5X1zTr4b7iaX/iQuTOI+Rkyc5uRwuOgayvcNqgdImASEyVT9LJBBQeTkpdkTQOP0MUlJszUAIB4uXDT5NtR7xERuh94Si4gSxsHxtHW5sEW5ZsamwVo07FSVo7/PeDJjuSiPmSImtb0hUI3c8sWdD4zkWHlMSkrlWPUxke6+Nc0o/+1ThEmbk8ek01tbetWcvLlKZmR5gB9gXy7M1+dmKUq0Jt8Nd/O+QVdToI2W9LzLrg8RE4c5ORwenczkWMUHm7qFhElAkD0m+05M+NbgJ5dj+NXmN/Hdx98Q/372/P5AdxDlWPlL5J9HJtOmCha/kS+2lajKMSIm+qJjVcrIoxC87LNe4dupaXrYWW76ZOkxsTg++eKvtqQvLBeWqnLyEZNMjlm+ZjmpnH3Hx3HfloO+HWPq8Sxf3LfsH4Ssq6wiQONJ64iJmz4mUYtyYQDoaYuLv81ui0PWhC1x+4shj/YcGU0K0ZzNMfxm60HLickjU+ZtcxuFKAfGmCihNapy9M+Uzpqr3/j6LEdMTu5tQ0ssjNGpDF6TRKMdbvqYDAxP4Veb33S9PhsRE/tLfSIaNgZn1okBlvqYBAQ5lZPNMew/MYFlPa1lv+7Tu47jb375x4LfN8fCuOLMuWW/fi0pJkxyTA9vtyeiBc/1g4p3fpVmawCwSeXkIyZ1LjRlf4mmaSK8fHQsKS6y5oiJg/nVrlzYImIi+yDGk5mCO0tTCuHYOLI5VjAk0I5b/3sbntl9HDNaY7hw+SxXz3HCTpj0D0+JNM7M1jiOjSUtza886mAfMSl8jtVgxIR08Z0jRUki4RB62xPC0OqUypnREkMsEkIqk8PA8BTmdzfjzg1v4F9+9xo+vGY+vvqnKy23fUF3M/afmHDt2yiHyXRWHGdqgzVAFxKJkL4v+PosN5SLhEM4a0EXnnzjGDbtGxQl3na48Zjc/uAruG/rIYRDwAfPmlf0M7hJ5fDPN5GaxInxFBbOaCn6upWGIiYBQS0588uIx8sL+zoS+NCaeaL+fsClW76ekctMZRLRsLjztmrD7ReVnpXDm4zxO6sge0x4RInfcfILwf4TRs8KfVaOQ0t6Za4Lh6d2rFpzh0KauFt0ijIA+n51m3pgjOHlQ8MAdE+EH6jCZK5kIOXG14tOmpXf7sLjjYsVtYxXjiCq6Syr3jByxGSOkr6Rf251ML+GQpooGT40NInJVBY/fGovAOueO1yYvOPkHgDAtoPDZXkv3MDTYSHNqLYxVb9Jx6BalcNZ48EAy0VgzMFjsi9/PritzBTCxKEqBwDet6oP15y7wDRTrJaQMAkI6oXNL2HCF96zF3Tha3+6ChcunwnAvuFUkLCLmMi/q6TPxNzHpALlwmrEJMAeEyNioi+gPGLCP0tLLIxoOFRaxERJ7aiLtFNljvq7N1yed8fGUqIizOsEYDvUgZSz2xPQNH1f8NTGxSt0YWJ1/nKxYmd+TWVzwpPAsTS/SsJENbzKwsQpYiI/9tDwJH65+YAozbU6J3lVzqlz2jGrLY50luGlN4cdX79c5AnJvDmZabhepvDGI6EIEz51eZOLEmdR/u8QMTk6qpt+3R5ThsfE+VJ/22Wn4J8+eAaWzio/Cu8HJEwCAj/w+Z2+X71MjJNPPwnaAjbUzAk3wqSSvUzMnV8rkMrhHhMlYpK08JjkGFwZ8GqF3PUVMCImHP59xW3Mh4D9ED+1fFgNa7c59DLhv+MX4F1H3AkT2aDutiS2GOrxHIuEMKs1DkD/bruao1g5ryO/3fYiSxUMrfGISNWoosDS/CpdfPs6zO3t5Xb3Tg3W9Ofq+3T/8Ul8/4ndBZ9Thgut9kQEa/LVLpVutGY1IVnTNMsbAFFRFjV/5jPndyIc0nBwaLJgrpFKMY8JY8wQJi4nM09ZVAsFARImAYG7vn
me0q/ur+pixaeOqmazIDJ9IiaK+VWOmEjbUM9RkwlFIDfFwqa7vHZFmFhFTMQQP8UDonpC1EXaaZAfPz/4BX/3MXc3BHIFT79PaVGr41mOUKxe2CUuopPprEmIpjI5sc/UFIumaWjPiwj1fLAa4idHnNRUjhxBcWqwpj9WFzF3P78Pbw5OCgHpFDFpS0RN1S6VxE7IWRmwjaqcwmPr1PyaXSxqklbSZmrEZDSZEf9/yOVU6SkxgZyECVEBeIXHGXP1BXLXkTFfas75YtwmhInz7JAgoYa+ZSotTLI5ZgqLj6esy1HLgUdMuCCxvJOTogD17DNRIyaAeaaLHCUArD0mVgPngMIqHXWYWYtNLxPGmDg/Vs3vBOA+YiKnWv1K5VjNfZKFwJpF3aaLqCxKZdFlVS1jdz5Y7VM5XVGQyunwnso5PKJHAa45dyEAfe1Ro3vc79GWiOAcybdRyao6u9RXzKL766RDZGL1Qnc+k7Ra/q9ETI6NGr1b+ocnXX12N+XC9QgJk4DA77hPndMOTQNGpjI4NlZ+aZddxGSUIiZloVbhMIaC/H258M6v/EJbNGJSx5U58pwcTldLoTCxawkOSAPnws4RE3WRbrXxmCQzOWGo5RETt71MZGFyeGTKl2iV1dynOVLqZM3CLsSlLsByLxO5VFoVboD9+WAVhXJtfnUoF1Yf2xwL48a3LxM/q+vP6JSRVjmlrx1N0TBGpjJ43aVQLAW78mqrqN2EjfkVMHwmLxSJmBR6TMwi/KgkTNJZhmMumsy5KReuR4K1tQ3A9oPD+Mr9OzxfELnru6s5inld+Xy3DwZYtelSI0ZMrISJU+8GP+ALlXxNtCrhLAd5Vg5QOGBMjdpk6tljkrSImFgIE3mKsnpXLVrS21TlcOyEiZrKkYXK6flI5bGxpKtjRq7EyTFdnJQDY8zyeOY+jVgkhDPmdUDTNMvU1LhN11eO3flgFTHhwiQeCaGr2XxuyUKJN6+zQ37s1W9ZgFltcXFhl7cjl2Oia21bIopoOISzFnQCADZVsD296r/jWKdyzC3pZXhlzqsDI443fHxfG7Ny+PgF/bWPKkLEjXdp0mW5cL1BwqTK/OvvX8d/PrkHv32p39PzxCyGWEQ4p/0oQ1QNXu0NJEz4nAt18QSMxX2oQsJECL5YRCy2Ez43WZOnCwOFERM1alPPTfPUqhzAbIBVza9A4eex72OiChPzstdqc8zz77A5FkZ7Ioredv1CWqzz8lQ6K0peudB36wmwYyKVFcJSFibcc3bu4m4RTeK9WWRhZdf1lWMXMcnmIyZy1Km3QzfcnjS7TVSryK8zqy2OcEjDrLa442ea29mM1ngEsUgIn3jrYtvtGEtlRPM4vj95am3HoRHH9ygHO4+JU1m+VSpndnsCve0J5BgcIzxG59d8xCRqHzEB3HmXeLo3EQnWpZ4arFWZN/ML1oDHOyiRw4yGsWRmKzbsPOpLxGRMyaPyu5xGSOX0D/EeLU0Ff5PnQ1QCee5LNqf/XKmISdzGY6IabuvZ/CoiJnF3ERNA/5zyhUD0MfFYLswb7KkXZVW0L+1pwcDIFHYdHcdZC7psP8ve4+NgTL+InjanHc/uPlG0IqMYfNuiYc10V7526Qz8+ONvwSl9beJ3VhOTRXM1m94itqkci8GIy3rasP4vzhE9j2Q0TcNPP3EuBidSpu/PiqZYGPd86jxompHW6WiKon94yrQdfNtjYWO0xIz8a1fyBqpYKkeuDOPrc7PD/h0YmSroRyWTVsr/udDk6RhVmLjxLgW1KoeESZXh4Tf1ICuG3MBnaY++IFQ6lcMYK7gjCgpT6SyO5/siqAY9AKaW55WAi4KWWARZxnBszP/ur0bExLolvZqaqOe29JPpIhGTfNQrEtIQ0vT0iH5hMKIHxqwc53JhdZG2Kx1XzY9LZrbiqTeOFz3veCRz6axWz1N07ZDTOOo5yZuqcVpFyb/xecRnsSnhtdsHVn1MAODiFT2227qit832by
o8RcaxSikZFTnGthvrVOVuoKzKhQHnDstWHhPAMAw7CRNj9lW+KkeJmHBPSTikIZtjro4pY7pwsIRJsOI7AWdkKi0Uvldhwg/8pljY11SOelfAT/iM4k8IGjzM2RwLo72pcDHmFR+DFRr1zaMjzfGwuIvye16O6jFRK1YCGTGR7jj5fBLAuHDKU5TVyhy7VE5YuaiqqRy7aAG/sHPRvnRW/oagiOGS/33prFYhisutzHGqMFMxGsYV9tGx85jYR0wKO79WEqvtGJUqcjiGSb/yEZPCcuHCxmdWs3Jk+ODDSYdutWkllWMXMVkxWxd+7iIm5puXoEDCpIr0S3lm1chUDCNUaAiTA4MTZbdlHhPzM4z8NL8hG00GN53DT9o5nU2WUR9+0TtRqVSOdKHlUQD/IyZKVU7YXLGiRkzquVzY6o5TjpjIF2S7tvQZi4Fz+s/m719dpO2FST7qlT83luZnUxXrZcIjKkt7WqRBe+V5TJyM3Cq8Gkb+/u38Ehz7qhzrEuxKYS1MjB4mnGqY9L1ETOT12QqefnMUJlnz8VvgMclfM7i/xs0xRVU5RFFkhXvMQ8Qknc2Jg7Y5GsHM1hjaEhEwpuezS4UxVuDWD4U08f9BNsAelISJFTz/PTie8r2/CGBETFpiYeGb8LvJml3ERHhMlEWwnsuF3XpMAOsLAyBFTIo0WFPD2nYXZSPKoP+d3xDsOz7uGH3iJcVLZraaJgCXgxdhYmV+NS6y1hfOYhETt4MLy8V7xKSSqRwuTJ09JvL6bFWVAxiRFLsbScaYlMpxjpicOV9Pf7nxLbkd4ldvkDCpInIb4aOjSdcXRPmCloiFoGmaWCTLaU0/mc6CV1zKuef2KoRJKw2/EMztTFj+nd+NZ3IMoxVoF887mTbHjYjJuM/CJGknTPL9TdQqoHr2mLitygEMc6Dalj6TtY6YqP4I9e7RrlR2XLmY97Yn0BQNI51lOHCicNAcoF9geNXOsp4WcfyVK0ysmqvZ0WoxVqLkVI6NobhSWHldrIVJ5W+e7PaZU8rULpXDhYGdx0Q+N60iJrkcE32rVs7rBKDPYyoWMacGaw3GK/0j+Mj3n/F1vLa8OKWyOYxYjBm3gh/M4ZAxp8HwmZRugOULV0gzK30/jGWvHx7FNT94tirjya1wqsgB9BOVf+ahCvhMxoX51fCYTPgsgFKK+VVt/KRWAVXLYzKVzuIT61/A+qf2uH6OZedXm4gJ74ipRkzsGqwV62Nid1EWlSxSNHFJ3mdi5+86PJLEeCqLcEjDgu4WcfyNTGXKOp+8pXIK+5gUS+XY9zHhUahapnLMkStAmumVyhR0QC12/H3zdztx8z1binZOLdpgLb9vrNZnlWKpHPnctKrKGZpMi749y3paxesVKxmekqo5gwQJExse3D6AZ3efwH+/+KZvr6nmBN36TET+MhoWfom5+SZr/WU0bhqTSghlH4YfdyP/+8dDeOqN4/i7e7dXJFVSDB6dskvlAMaFrxI+E8MzYfQxqXjERC0XrpHHZPvBYTz66hH8xx/cCxN1Vg4AzGiNYVZbHD1tcXRaREzkz8MYM5qBqQ3W1KocVZjkK36SmZzpDtTqwrQoXyJrl0Ll/pKF3c2IRUJoiUfExbacmTmeUjkWnWyLRUzabVIjVuXClcTZY1IY1WUMovkaZ8v+ITz66hH8p40w+d4Tu/GbrYeKpsFHpwqPSUCKmKTN/YLk9VnFkzApaEmfw5FR/djpbokhGg6J5nTFInHkMWkweLe9ER9DhWp5l9vKHLkih8MP9HKaZo3b5FD9yN/y8ObOw6N4fOeRkl+nVAyPiXUqBzAMsIMVKBk2qkzCaI7zBmuVKhdWUzk8YlKbqhwePvZSim0VMYmGQ3j4lrfh4VveZjJfGh0xjc8jd4FVIyRqaqeg82ssIjr0yikEq26p84qYWXkEk0dWAPhSMlyKMPESMTE8EDnTjQRP5aiVTZXCKWLSLgkTu9b7+nNTlr8H9PQfXzOLdf
C165YrTOY8YuKiVwj/W9Km0pFvU0gz/DzyccoFyMxW/WbKrXdpKkOpnIZCTHT0UZhwsxJXr64jJhZzGKzaIntFGOKU3gZ+mF+npPz/nRt2lfw6pcAYkzwm9hET7mGoRC8TY/ZLRJgR/Y6YpPL72Nb8WqNUTlpasJ36NshYzcoB9DvELqVRV9wiYiK323ealRMNawVGzlBIs0xlWJkf+QXBzni4S+phwvHDZ+KlXLjVYiihXYUJR76oymuKXXqsUjj3MTE+u6ZptiXD/LlW5fny74oKE5v1kUcyuMiwWp9VinlMUqIsWxLgUjPBAyf0Y4d3053rotornc0Jwa42Fax3SJjYwA8Uv1zf2RzDQD6Ue/oc3VXtPmJitKPnGCdH6Rc7uzp9HjItJ1o0mTIWtxf2DmJzBWdaqAxOpMVde2+HfcREVOZUJJUjRUwqVi5s3fk1KcqF1aqc6qTU5Aubm32bykhVZzadM2XEsS+JX1l0OZUL2y3Qlq3Q8+e+fDHv63AWGaJUWBImQsyUUTLszWOiP8bcx8R6Ui5HblkuXzzt5g9VCvE9TDibXwH5Bsq8RvN9lcrmCgzSchTJSZgkM1lxTBa0pBfCOG8yTxWvfEkUTeUw02vz/+eZId4xfFarLky4d8lJ7MppyTilchqDlM8Rk2NjSaSzDOGQhtPm6PMtPAuTqKymrXs5eMGuhJDfiViFQt3CIyY85XTnht0lv5ZXjLBn3LGxUGUjJkbnVyO07l/ExKq8sOisnCpHTAB3+1a+EDrddXJULw1gXEABq3LhwjSQipUwsUp1GmkZa5HBm6vJqRw3F5FieEvlFPYxKTYrJxIOif0qXzztmtZVCv75RpOGqdUQJubPbueFs/oOOfJj1S63MvLap7bxVyMmTpOFOW49JvLIBU3TxE3Hm4PmiInwmDiUDPP30jRz9CUIBGtrq4ghTPyJmPD8cm97ArPzd11uhQlv1y3fTVrNa/CKyDvHrCMm5Xx2Hsn56NqF0DTg968cxuuHR0t+PS8UKxXmcGEyOOHPdywjBsDFKxMxSWeZGGxm25K+Rh4TWTAMudi3PJcfi4QKoh1WxJXGUwCQzhn/r6ZqZKFiZwK0jJhYhPJ5CP3YWLLg3JtIZXAoHxU1R0z047Acj4mncmFHj4nTXX2hMMmKlvTVFSaMGSJixML8Kv88YhMxAQqbDMqVak4REy5omqLhguNJNV9brc8qTTFuZLVJ5WQKUzmAEWlRhclcF74lY4CfvSm3XiFhYoPfEZNDkhmTh+OOufaY6NvS5LPHxC6H6seEYZ5KObWvHe8+dTYA4K4n/ImaFKvyOVSkuRqnu4LmV1PEpAIeEzn6YTtduGBWTuGxwhjzvWpK3jariif1Pa16mDjhFDGJhrWCRVhe7O3KJq28DYb50XhOZ3NUvMaAUmXDS4hVX4zwA5Q4yI8xVlpVzlShMLFL5QDWPghR6VSlzq+xSEjsX/6Z+bYXChM7j0nh57b62UmY2PnvgELztdX6rNLk1mMSUbsU6/udT6s2IiZGetDu/A1qRQ5AwsQWfqBMpLKi+2E58PzynM4mzMwfXF6rckzmV4vKBK/YlRCKE76MlvTySXHDRUsBAPdtOSjK3mQe3NaPs7/yCJ7bfbzo6/7oqT04+yuP4OVDw7aP4XetxYRJVwXLhcel76wSVTmyt0j0PVAu2FwI8QVdbbDGGMOH7noGH/n+s76KE1kAWYm+T/3XZlz6r0+IY8RqTo4TVrNy7Ab4AeYIip0PwNpjUpj+0DTNNgJi+EvMU3f5cTgwPFW0d4YVk2nD7+BGmLTxiElKH8SZyzEhlJ2EiVV3UmF+rVLEBCj8Luw8Ju5SOUrExKUwsavIAQrN18UG+AEuPCZFIiY88jgzf1PLvXOT6az42+Z9g1j9lUfwk2f2AghuczWAhIkt8t2YH94Avoj1dTSJiIk/VTmlb9toEfOrH1U58WgYZy3owsm9bcjkGP54oFBQ3Lf1IE6Mp/DzFw4Ufd
2Htg9gcCKN5/fYm2mNfe2cyjEG+VUgYiL5E3jExM+W9IaLX0Mof9FQUzl8wezM9+lQS8tHJjN4Ye8gnttzwt9tk95H9Zikszn8/pXDeO3wGF4d0FN74y4WdhnLIWoOJZsm82uRVI6cerJLddrNv9kttaKX6WmLIxzSkM4y11FSGX4BjYQ0V/uIn885pu8XOX1h5zEBjLt6eXinXTfdSiILE8aYFDExizK73iumih41YjLlMmJi08MEKGzw56Yqp7jHpND8ChR6Q3jEJBENi9Jhvt59/eFXcXw8hZ89r6+jQZ0sDJAwsUVeXNUcZinIvoee/MF1fCxp6r9gx0S68ADzNZVj28eknKqc/DbnLyIn5SdiWo2M5yWWm/YNFn1dHg532rZ+F6XCgBExqURVjnyx5Yubnx4THikzufgLUjn6d9DZpH9ONZWTzFpXtZS9bQ5VOSfGU8Ibw42iExZzcpywmpVj1YSL4yViwr0cmWxOfA71NefYmFnl4X0ykXAIs/PnfCk+EzmN48Yr0BwLi2qOsWRG3FhFQpqjCdLqrt7oY1KbiMlEKivWSLcRkxGHiInXVI4qSoHCkQhW67OKiEbZpnLMpf8c9TX5TS1g7mWyZf8gnt2t36ztHBjByFRaRL6CZnwFSJjYkszKi175FxS5E2l3Swyapt/ReKlaMEdMyq/KsSsh9MP8aoQRzS301ZHx6WwO+/IdGPefmMARh062Oank2mnbDg25S+UY5cLpkkLsdjBmhM5b4hGRoqiEx0SuMlEv2ONKxKRAmEh3xn5W7DhV5cjpS34hH/foMbGKFhptywsvJObeEO5SOXKUVI0y2PUysephoj6nlCnDvHTWTRoH0NNNYpDfVAZj+ZRsSzziKGys7uozUmSuWsh+H/69hkNagT+ozWImEH8epzCVI/cxsV/Xx218LUDheeZHxCSVsY5MyaIiHNJM86Nkgfy9jUavqBzTu99OuihjrldImNhgdTdWDvLFMhIOYUb+oujGZ2Ic+IV9TMrp/GrXDZIv7uX0MeEXDX6nwO8i1ZHxB05MmLwPTlETXnIN2IvFdDaHw6PuhAm/YGdzzNdGesmM0dioORYWFwm9X4c/AkBMFlb6Hsh/4+KIX9BUj4ksRso5juy2DSiMmFgJkwmL49sJq4iJXeUGoEZM3FXlcH+VVaWQ4TExREYux7DnWGEPE+M5pZcMe2muxmmVStTHivQw4Vjd1adz1TW/AubvQo6EqaLKqkO1bBQGzL1cAHNVjmO5sEN5tXpTKI+fsKN4HxNrASiLipmtMZG2BYxj6sk3juN3Ow4DAM5a0AkA2LT3hNT1NXiX+eBtcZVIWdyNlcpUOivuHLnKnenBZ8JDhfIdg92EVS/Y9THhudtUprBBkVumpFI1wMi7v3FkzGS0VIehveAw9E8Og9t9JwPDU2BMv6DMUDqGqsQjYXGX7mc6Z8LUlyNi8j345eVICg9PYSonmc2ZojZ8oVejIvKF3c/Jw2bzq3nxl4UJ/+7HXZSyylhFC+18CIC5OZhdVY5dxMTqYm4lMg4NT2IqnUM0rGFeV6EgLqctvZeKHE6L1P212JwcjmPEpEapnBEb4ytgfQM1LqV+gMJeTKMuPSZ24zoAi4hJfq1zGpQnt/y3is6KGw0loidHTLi/hMMF8u9fOQzGgEtOmY2rVs8HAGzaOxjYAX4ACRNb5EW8nOoUwFjAWmJhtDfpBzo/yI65iphYVeUYHpNSKyrsmi7JJXKlNlmbUvKui2e2QNP0xUAO7/O7Zi4QNu21j5jIYXA7388hyfgacrGYVqIyh+/XRDSkTxyV5nr45TNJWkVMpAVTjtqIiIkSFTH1AfExleMYMZGE+N7j48hkc75ETOwqNwBzczC3VTlO5bWyMOHnHk/jLJrRYhld4D117FrZO1GKMJF7mbjpYQIU3tXncgz8GlrziEm88LNbeUyGlONNnbCt9naxq7jk6S/LqhzF32e1PqvI4sAq/S4arCkRk7gpYm
IWJqqH7q8uXoJzFnUBALYcGBRrN6VyGgirRa9U5DQOD0d6qcwxWtIXekwYK/1u127xDYc0IRRK+ezpbE6Y5ngYsSkWFieSnM7hwuR9K+cAAHb0jxTkhTnyHaqaV+bwCa48MlUM4TPxsTJHTIOWLrTCZ+JT91d1sjAAxMPG8SHfDRqpHPuIia+pHMVjIgtnOWKSzjIcGJz0yWOST3dYRExKSeU4dUrl1V4Tqax4PPdOWaVx9Ofkj/2j49iyfxBb9g/i5UPDrszvpUVMDP+FVdmzFbwJGE8dy/OHqmt+zUdCJI+JleC0SuWoURB1nVDXFrt0tVP6Sy1Xt1qfVWRxYJXOMVI59h6TWYow6ZOEyVsWdWP1wm4sndWKzuYoptI5vLh/sOC9gwIJExv8FSaFDb9meehlYrSkL6zKAUpP5ziFeMupzJH7IMgnhZUBlt9pvnX5TMzpSCCbY9h6YMjydeUGVXbbddBlczVOJdrSi66v0kLFL7p+RUz48SkfB7JI4ZGKRDQkvoO0chGUjxs/za8paSZPMpMzLcSqEN91ZMzwUJVVlWN/AZMXe7tFmvs3kpkcptJZ2zQnfw1eqslvOnZZTBWW4cfj60fG8MF/fxof/Pencfm3n8Qdv3/N7mMKvHR95cjChAs/q30jY5QLc2Eizx+qojBpNkSiXXM1wLoRpCpMnKpyrB6vPs/aY6JU5Viszyo8cgpYd39N2ZRly8erXSoHAG64eAkAfSDl6gV61OSpN47lXyN4l/ngbXGV8LNc+KDU9ZXjRZhYml9NwqS0i4qTwaucyhy5D4K8nXzRlkuGd0tDz9Ys6gZgn845ZPKYOKdyirWj51RikJ/c9ZXTHK98xMQkTPLejpZYRCx2airH5DGpUMQEMIs+frzzbd11dMzI53uOmLgTJvLNvp0waYtHRIntiHRBtIsyqD6T3Q4VOQCworcNl542G/O7mzC/uwlz8lGXHz61t+iUW35Xz9PAbmizSuUUSZWpHpO0af5Qrc2vVqkcPqwwI6JyqqFVTUWrwkRN/XDGHYSpfVWOdw8PJ2VxPgPOHpNZrXFcu3Yhrn7LArx9RY/4PV9H+agNp3lh9Yr7I32aIS+u5QyzA4y8spxe8CRMLJpHaZquwLmfwCtO0zOB8iYMy11fZSe9iJjkF/ET4ylx8iye2YI1i7rwP388hE02k4hlj4ldxER4TDxHTPyblyPPyeFULmJivEc4pCEc0pDNMbHgNsfD4m63oFy4QubXlBLBGxxPY55+Eyc8VWcv6MSzu09g99Fxo6rBZcQkbhkxsb+AaZqGaFhvcGbX0yEU0tCeiGJ4Mo3hyXRRw2hfRwIvvTksonhGDxNrYRIOabjro2vEz7kcw6X/+gRePzKG//fcPvz1xctsP6/TZ7OjRRImWWZ/nsskYua26bL/ohadX4cmU0VSOfrvsvnOti3xiOdUjp0otGs+CZjN14wxTORn5TilcgB9PRyetG5Lb5fKcYqYaJqGL19xesFrrcn7TKxeIyhQxMSCXI6ZFmo/PSacUjwmqrlK3DnalKA5Id+5O6dyvF+wkzYdB7kw4VESvpjP7WxCUyyMNQt1pf/ivkFLU5ocMbEbFdDvsh09pxLzciwjJj73MuH7WL3D4mZYLvhaYhHxmIJyYdljkvVnu6zeR45GcSF+3pIZAPIRk5S3iIna4RaAY/UGYNzxO1085Dv1YsJErrIZnUrjSP5z2aVyVEIhTYxq+OGTe22HuwHFP5sVRionK26svFblcP9LSIMrI7lfiO9hwtlj0hwzBuxxAcKFBm8FoJpfuXeks9nsKVJx+v7lcy6dZWJWTrGuvGqqTMbW/OrgMbHjjLkdJlM8VeU0CGooutw+Jk4eEzctqu1c3+U0WeMnntX0TKC8tvRqqTCHzxDZf2ICyUzWMAzm7zJX9LahLR7BeCor2pUbr5nFcUU8WBlgD3pM5XQ2+5/KsWqx3uLzvBwrjwlgLJr88zTHwuIuTD2uTR6TjJ8RE/
P78G2ZSmfFnei5iw1hMmHhyXFCHPfpwlSO3cWX3/Grx6SMLEzGHMpFAaMion9oSqRxZrXFLc23dnzgzDmY05HAsbEkfv3iQdvHGRdnL1U5vFy4+GfhqBfOWvQwAQy/z2gyI4SDXSSMf998jeaP59+Pmjrl6x7/u10vEydhIp9zqWzOVVUO4NzLhJ+bTp1fZ7a5EyaJaBgr53VIPwfvMh+8La4C6oW+nIgJY8zSY8JLv4Ym0o7mVT1UaG2uKqct/WgRp355wsR6quWstjja4hHkGLDv+ISozlkyUxcs4ZCGsxbqYchNSj8THglpjoXF5y5oRT1l3GH1ea3K8dNjYnEh8DtiYpeT5j/zVE5LXPKYOPYx8d9jwsUA95jI/pJV8/WFc3AiLUa6ey4XzrpL5QBGybBTWNssTIxuqVbIHhO74X3FiIZD+MSFumnx+0/ssq3QcWq3b4fcYM1uirgKjybxC2ctepgAxvfAmHFTZ/fZ1ZQzFyb8+5FvXrI5Jj4b/7tdxMRJzMnRiGQ6a6zPxSImSqpMhp+LjlU5LoUJAKyW0jmUymkQ1Du+coTJ4ERaCIdeaahcR1NU5P6Pj9lfFPU8pv7/6oEvepmUksqxGOkuU04qZzJtncrRNA1Leox0jhoxAYBzuDBROsDKUSe+baopmU9w7myOFr075FSkKschYjLpcx8T1dimpnL0iIkbj4n/fUz4TCieJuNpy1mtcTTHIuKudSA/hsB9g7XC456fo+02F7BwPpXjdPdoTuXkJzM7eEwAVZhY+0uc+Mg589HRFMXe4xN4+OUBy8fwi6vdZ7PCqirH7lzn8GOJXzh5Sq7aEZN4JCy+Jy5a7QSnWj3I28zzY0v2B8oiZW5RYWLfxyQU0sQ5NTKVMdbnIgLAyfxazGMSj4Rsj0UrzsmnxfXXCN5lPnhbXAX8TOXwC+qstrjpIhIKaUb3VwcDrNpFVKacVE6xqoM2aWHjZLI5PLLjsK2TncNTOXGLE3WpqMwZt7zT5Ep/095BU/8LuQzYqkwQkJuruYuWAOZ5OX4hz8nhVCpioqZy4mrEJBYRYiWdUcuFK9uSvqddv3jz5nX8OOd3fqofo9SIidMEWk7URcSkvamwTLVYKmdgZAqvHealwt6FSUs8guvWLgQA3LlhV0GzRMaYlKYqrcGaiI4WqxoRERN9v/IITjWNrxwuErlotY2Y2KRyeHR6Mm10guWRo1g4JMq9rYRJJpsTa5jd98/XXnktdFuVY+kxyZ+bdlU5s9rirgY4clYvpIhJw+FnxERcUDsKPQ9uKnO4uo5FQgVekHJSOcXMfVapnPtf6sf1P9mEdQ+86vjaRivkwsOL31W+0j+C/ScmTL8DgDPndyIS0jAwMiXulgBzGbBdmslorubOXwIAXS183H3KVbMrNzj2MfHJY2JrfhXCJB8xiYcRjVQ3lcNfa3Y7j5jo26IKEzXCUOzCyVE9JvLFx+4Cxi8KTgZQK/OrXRRnZmsc0bCGHAOe2308/3m8pXI4152/CPFICNsODmPnYbO3ys1ns8IUMfHYkj4pyoXzKbkq9jDh8O+Cf3a7aJG6Fhgek2bxGB4xkr9TtaGejHzzYPf9q+eZ1fqsolY9yRjmV/P5zL+z3nb3axqgd7Reno9EF/ve6xESJhaowmQslSl5+qyV8ZXjpjLHyVhl1QHTLcWc+lbpkpfeHAYAvHZk1PI5HLUdvQxfvJ947ShyTH//Hil32hyL4LS5uv9gs5TO4Wmavo4mqX+BeVE5nt+PautmJ3gqJ8ech3p5YdKiKqepShET1fwq9zEpNL/KVTk+ml+FMMlHTBSPCf9+1At5s8tUjjwTCDAuSuGQZmtAvPWyk3HDRUtxxtwOy78D5koNp5b0gB7x5JE57m8oJZUDADNa41ic91kdHjGvBW4+mxXcT2ISJi4brAmPiYiYVP8yoTaTs4sWqf2W+Dk8szUmomT888tRsHYnYSJFVux6gIjI5KRhMi+GsX8LbwKSIpVjFj
cXLp+Fv7hgET7z7hVFX1/lix84DX9xwSKcv3Sm5+fWGhImFvBFv6vZMGGpZWducRQmLubliFJhi4t8PGq+c/RC0VSORVSCp16KTUgVUy0tTmq+eBuLeUtBiHKN8JkYBljeL0L3mFhHTHg6pqvI8D6ZaDgkXs+veTnCYyL3MeFVORWclSP/zO/kmhw8JhUzv2bMwoSLJF6BVn7ExOhjoqc6DD+AXbj73af14tbLTnYse+UXQzcN1gCzmT0eCRXMLvGC3R28m89mhfWsHJct6VXzaw0jJhx786vqMcl3yZV8ZvwmTBabxv4uPB/dzBYqiEy6SJc4eky4+VW50WiKhfGF95+GtUtnFH19lQuWzcQX3n9aUVNuPULCxALe06EtYRhUS03nHHLoq+FmwjAXJgnHiEkpqRznEkIr8ysXJkdGk44XsqRNVQ4ALJjRbOrEaZWXP0fymXDkyiZ7YaJfAHlvErfwqEkx74xbHPuY+NT5VURMojapnEmpj4moynFoSe+jx4QfG72KMCn0mBjfvaa5N+nJ6atUNldSnw8rvPQxAcwNExfPbCmr14edMCn1s5nNr/ZzX2REOasyK6fa5lfA8PtwilXljE7p3V/luUKtik9u3EKYWEVJ3Qi5uCJMrNZnFWPCsHvz63SF9oIFcrvvcmbGAM4t0l15TGyaqwHynWPpVTluTnhAP5m4OGAMGBiesnyevM1WqZx4JIwF3Ub+1yovvzrvKN95eBTDk2kwxqT9aF+Vw1MGXGi4RUwY9qn7q+gRU8FZObYRk/wxwXPzeudXFy3pfYyYJNWIybj+HcpVOfrf42K/tMTcRwTkz5zK5Erq82GFVVWOozCRbjbsOr56fW/1QlnqZ2vNC+F0loljwW0fk2Qmh1yOIZOtvfkV0EWrXTRNXp/HU1nTRG25ZBowlwA7ekxciFJ+nvHnu0nlqMJPhqc/7ToTTzdoL1iQkhZ9tYGPV9ykctyYX5ujzq2RvVLMqd+uCLI9x8YhFww4pXOmbDq/cuQQvlVeflZbHItmNIMx4MX9gxicSAuXfG+Hm4iJN2HS3exv99dqzspRK59UodISi4jwsLPHpAIRk464eO3xVLYgYqJpmrige/FPqHOiRJ+PMk1+ohX6hNtUjiRMSvSXqO9tl8rxHjFRS/WLpxtkIT2VySKdq735FdAFgl00SvaY8H0XDWtoioZNUSPAOmIylswUdJB2JUxEWX7eY2KxPqs4p3Ksh/hNV2gvWJAyRUxKbzSWyuREq2qrElYhTFykcqzyhKKPSRlVOXZ5VG6Um0xnkc7mTIP3APOkXxWjXNj68JLLRO3uNHnUZNPeE0IEzWzVS64NsWj+TkTExKMwERGTSnpMfI+Y5KtybCImHKc+JuYhfv53fm1vior0zImxlCFMJHMyb67ntu8MkJ8TxQ29poiJP8JEPh+dIyZGFLTUihz1vYcnrCMmXnqYAHr6RU6NtcTsL+4c2RM2mcpKEZPaml+duunK6zPfdx1NUWiaViBMZO+InCpS54G56ZRrlAsbXq5i8CpF6+nClMqRob1ggdwe2Ogs6D1icnhkCozprzPD4mI5y0Ufk0pV5bgtF+aP5W23OfJAPZUpm061HH53GdKAhTOaLR8j+0zUdFi7TfM3HvHo9pjK4Y/3LWKSdPCYuKzKOTaWxLceec02MlXMY8JpiRsekxyDqSTa5DGxmJXz0ptD+O7jb1jOJLIjk82Bv0UsHBJptQODE0JAz2wzvh9+LHid5yH7q0qNKqjwixWPDIZDmqPvxdeIic3sllIG+HHkc9tN87pQSBP7VS9Trg/zq9P3KtaCpBEx4d+jPGEZkM2vUUTDIXGzUDD4TzIc26F6uVxV5bjymFR/X9cjJEwskEsxy/GYHJJ6mFjdrczIN/mZSGVt76RFxMRi4RZlk2VU5diVEEalO67RqYyImPCT2TGVw2fl2FxsTulrB6Av5nbleHxC5tYDQ9h3XO93wi8EVlGsZCYrLvqle0x8jp
iUMSvn7uf2445HX8cdv3/d8u8pm74Hao5anpUDmKMmcvrGKmLyzw+8gq8/vBMP2XQktUI22MYihjDZmZ991BqPmBpRnTpHPxa8tNvmrw346zFpi0cg21xaYmFH38vczibEwiHEIiFR7lsqdqmcsTKiQS0mYeLu+fLFk3+XxfpzVAK3wsQUMZGMr4BxzhWmcsKmx6n73I1ZmJ9nw/koq5uIieOsHJsRE9OV4HVeqQJmYVJ6KkcucbWiNR5BSNPvZEenMpadAx1TORXs/AroC/1UOomRqbQQJmuXzsDvXznizmNic5Ktmt+Jf/3wmThpdpvtayyd1Yqu5igGJ9J4ZMdhAEY6zKpiiIdUwyHN8yLu57ycbI5Zdo3k3+1EOotcjhUNq/P9+7wyM4jDxajddGGOPCsH0MUIXyCLmV9H8qWUz+85gfetnOO4veL1pdeMhkNi376e732jCpC3r+jB1/5kJc5Z3A0vyNFCv1I5oZCG9kRUXKiKVbG0xCP4wXVrEA5pnlJRVtj11eBphlKaZMkRO7fPb4qGMYQ0JlM5ZHK1Sy+oHhM7WqX1eaRAmFhHTPjv25uiODQ8VSgGXayNRr+gfCrHRcTPyfxq12BtukJ7wQI5lWOXNnDDIakpmBVW0zFVhPm1yqkcQBqQNWmkct66TG/W45jKcajK4Vx51lxxt2yFpmnCZ8IvzjynbyUWjYqcqOeyTd6vxo+29HLkyypiwpgh3Jzg6b09x8YtU32Gi18xvzp4TABzZY4saJMWwoS/xwtS2XYxkvmUkKbp1Rw8GsUjJuro9lBIw4fOme854lCJiAlgviC6ERtvO2kWLlhWfgMre/Nr6Z9NjoZ6ESYA95bVR1WO02eXb1LUiIlaLsyjT/x7tY2YKJEVK/h5x1P85TZY4/uaPCY6tBcskKty+EVwzGUIXuagQ6kwp1iqaFJETKyqcko3v45Z+CDstu2NI6OYSGURCWk4L9/op5yqHLeskSZkAsZ8EvGdSPtssMRSYfk5fnhM+PcVlvL1gG4s5FmBCRc+k2OSAXPzvkJhUKwlPYeX4RoGWCPVYja/WnSjzL/HzoER1x4r/vqxcAiapomKJz5PxmvKxg45WuiXxwRQ7tR9eD2v71tYLlz6Z2stIZUjpxsyNRriB3hP5aSzDEdGp0zPLajKSZlvxoqlz9xETIwBqy6qchw8JvLNMOFRmKxbtw7nnHMO2tra0NPTgyuvvBI7d+4s+rxf/vKXOPnkk5FIJHDGGWfggQceKHmDq0HSp6ocp1JhTrHXn3DqY1JG51d+V+BsLNP/tuXAEAC9ORrvQTKazNherAyPSXkn2TmKMDE8JvlSP2lUAK+o8VqRAxipHD+qcsal70v2J4RCmijXnHBRMixHSTZZpHNsW9KHzccJrwzilRVyyqbYdGH+HjkGbNk/VHSb5efwkDT/PvjFgQ9PKxfriInPwqSKM0b4+44mMyaDcjmfTb6wuv0s/JydTBnm11pETNpdRkxaY4YviM/W6lQiJkYqx+wd4SMIVDFYzH8HWHu5itHk1MckU7u0WT3iaS9s3LgRN954I5599lk88sgjSKfTePe7343x8XHb5zz99NO4+uqr8YlPfAJbtmzBlVdeiSuvvBLbt28ve+MrhblcuPRUDp/v4iRM1H4hKpNp/6tyMtmcSBE5e0z0v23NC5Ols1rRHIuI1Ee/TTrHaVaOF06f22G6g+hTUjmM6eIEKL0iBzAunsOTaU8VKFaIMmyLOyhjXo6zyJWbkQHAJsuIifOsHEC/oHCBwCMmsuHV7DEpNL/KwmWzjddFJa3c+ak9ZfyLmEhVOfmZSU5lpW4xpXJctsj3A/l95QtlOZ+t1WIkQjGszK+1iJgkomHxHTuJslBIE83keIS6vUCYZPP/dZnKSRVPc5ciTJzMr+QxMeNpLzz00EP42Mc+htNOOw2rVq3C+vXrsX//fmzevNn2OXfccQfe85734LOf/SxOOeUUfOUrX8HZZ5+Nf/u3fy
t74yuFUfEQlsqFKx0xsRY+E45dVEtL5biZngkYJyb3l/D+I9wzY5fO8UuYxCNhrJqnD12LhUOY2RIXr8tPYC7oeNfWLo/t6AHjDosx606QXhARLov96nZezvBk2iQUth8cLrjLMiIm9h4TOWrDf2+OmDi3pJd/59Znot75dTZXRpjELMyvfqRe2j16TPzCrny1nIhJqyli4u684Hf1U+msYX6tQcQEMIRDsR4ufN/wiImayhlVPCYFqZwJNWJSPM2tplzcmF+56HMSJtEIlQsDZVblDA/r02a7u+0d9c888ww+/elPm3536aWX4r777rN9TjKZRDJp3DGOjIyUs5mekSMmVs28UpkcPverP2L/iQnxu5Z4BF/8wGnSkLq0OCHmOHpMykjllFiVw+8comHNtlxX3zbzYsY/25zOJuzoHxF3KCrFyoW9sHphN17YO4i+TnPJdWsighPjqbygaxIVNaV4TCLhEDqa9GqMwYkUZniYTqzC77asFjW383J4Gqcj36Ds8EgSWw8MmQZ5yelGGbMwMbaBC4VMVu5j4tz5VRYmWw4MIp3NFQ01q9ulRrD8FiaVTOX48Xpe33s8lbURJt4FtzmV4+5cTFiZX2vUW6OjKYojo8min70tEQWGp4QBXi0XHlfKhVuKlAvzPiZuZuVw3DVYM6rhsjkmyrAZYyZvFlGG+TWXy+GWW27BBRdcgNNPP932cQMDA5g9e7bpd7Nnz8bAgH1vhHXr1qGjo0P8mz9/fqmbWRLFUjnP7D6O+7Yewov7h8S/P7x+DD95eq94DI8mdDZHLcuAOcVSRY6zcqKlpXLcVOTo22b+Oxcm3MxbPGJS/kl2ySk90DTgzPmdltvGF+5S29FzuPdhYNi+2Z0buH/E6vty2/1Vbt2+ZpEu+jdLk5Yz2ZzwIagLZFxa2OSoDRcUKZcek2yOiSFusXAIU+kcdhwqfoOgpnLUCNasVnuR7gXrBmt+V+VUdyqrWjIsT06ulvlVrsrhx1i4Bp1fAWO9KVaxpe4b/h225aNE40l9wJ9I0SSMcmHAqiqHD3F1HzHx4jEBzAZY+ZxUpwtPV0reCzfeeCO2b9+Oe+65x8/tAQDcdtttGB4eFv8OHDjg+3s4IRsL2y0iGruO6BUG5yzqwvc/uho3vX0ZALMXQPhLbEqFOcVSRZOii6pDVY5H8+uoizp9fdvUiIm+QPDUVFFh4hCNccuaRd343S1vwz9eaRa/agqs1AF+nMUz9UVw97GxUjcVgHVzNY7beTnysLs1C3UDsJxKkRcyp4hJiylikq/KyR/bjDHHPiby31aLbSjuM1FTOapQlLu+lkNMKtfkd5t+R0yqmcqR35tfKJOZXFmfrawGa6ms8FvVqhvp169aifv/z1uxSrkpUSkQJs2FDdYm01nRkbhYVY7qRbFCjTRbrc+FzzHOTTmdY2pKSBETACUKk5tuugn3338/Hn/8ccybN8/xsb29vTh8+LDpd4cPH0Zvb6/tc+LxONrb203/qoncVVNUgORVNwDRbOwti7vx7tN68efnLQQAvNI/IhzdB134SwBzgyArnKcLl5fK8RIxmdESE34BIUxsJgxPZfxL5QDA8tltBSKJ3w35FTFZ2qOLLi46S4V3dm222LelREzOyUdMXtw/KO5gZdHgnMopjJjwRVA1u6oeEzkKd8EyPYVkVbasUhAxUYTijBZ/za/Hx/TvXdOMibrlUKuqHPm9+YWSV705Tdd1Qt5+twMOTX1McrWblQPoN0anz+1w9TgZtY/JeDIj1omQZnxGK2HCGBOGeqeIWSkRk5A04kD2jMml+lSVo+NpLzDGcNNNN+Hee+/FY489hsWLFxd9ztq1a/Hoo4+afvfII49g7dq13ra0ilgN8cvmmPB7CDNo/i67tyOBeV1N+bJKffFW57vYYQgfO/Or/1U5bu4IALPpTJ4FMschlZOW0gxe5594QY00DQrza4nCJP/5dikzgbwyLiYLW0RMXM7LERGTtjhO7m
1DSyyM0akMXjusNynjx2dIKyzllO+45O9XNb+qx4ydUAlpwLlLdGHywt5BIc7tENHG/HYkomFx7HY1R33r0xATwkTfV60uhtS5oZ6EiTBrlvjZyu9jEoz5LarpWTW/5pjRF4j39ZEfJ1dBTaSyojeJ31U5gLEmmudU6fs5HNJq0v6/HvG0Stx444346U9/irvvvhttbW0YGBjAwMAAJieNC9S1116L2267Tfx8880346GHHsI3vvENvPrqq/jiF7+ITZs24aabbvLvU/iMbOBrjoXFwcKjITxiIk/GVUPu/KLdVyRiYpUqknGqyuHq26qiwgm1nt8O+U6ERxQAI2IyMDxl6rkAmHOndtOF/UD15pwoo1wYkIVJmRETISQtIiYu5+XIEZNIOISzFuQHGuYjFvLxqc5yKRYx4Yugesyo5lejHDmMM+Z2IBYO4dhY0mT4tiJlUV3AoyZ+GV/17coLk/z37pdRtR5SOfxCWa6pt5xUjt6SvrbmV7fI+yca1sTFX69K039/ZCQvYKXHWvWO4TdtcmTFioKqHI/CZDJVWLZf7wKwmni6ctx5550YHh7GxRdfjL6+PvHv5z//uXjM/v370d/fL34+//zzcffdd+P73/8+Vq1ahV/96le47777HA2ztUZO5aht40em0jiSv3AskUadqybFQy56mADOVTnZHBMXCD9TOW6mZ8rbBhjRIQDoaUsgHNKQybGCduk8d6pphXcVfiJ3f51KZ8X7dpZQLgwY/pn+4SmxOJXCuBiZXkbEJL9PZ+arg9aIScv6sSWLBpWiHhMRMbH3lMh/j0VCSETDOCNftl2sbFhtsAYY6TU/hQn/nMfGuDAp3/gK1FfEpNxW+/Ix6LUl/ZTU+bVW5le3yD1eOpqiQqxrmtHjZGBEX49lgSaXhnMxKM/JcRrgqHpB3EaHExYlw8Zk4frez9XE05lXLIwLABs2bCj43VVXXYWrrrrKy1vVlJTS7rstEcHwZBojUxmMJfVQf09b3HRCcC/Alv1DyGRzYoCf21SOVVWOfPBa3YGX28ekWNWBXcQkHNLQ257AwaFJHByaRG+H8Rm5ETducTfvJ3KkiftLIiHNdS5dpbM5hhktMRwfT2HPsXFXuW0rDE9Q6R4TfrHlF3J+bG3ayyMm1u3o1d9ZVeWkbSImqvmVvwc/xtYs6sLmfYPYtPcE/nS1va8sZbHI8g6bM8sow1bhooyH6BsiYtKsCpPyWu2bUzke7+jroI+JW+T9I4sNQP8OR5MZDAwXChPeO4aXaHe1xMSNRTEhF1eEiFPlpQwvCLAyv5Lx1aC6Z15AUEdQ6xfoSYxOZUROW46WAMDynla0JyIYmcpg+6ERcSKUEzHhFzlNsy695YtzNseQyeZcd2h0Mz0TMJ+csscE0H0mB4cm0T88CcBoHe9Xc7ViyIJOVOS0xMoSQ0tnteL4+AnsOjpWsjAx+pjYV+UUm5UjUjn5C/mZ8zsRDmk4ODSJFX//IPjtgVVEyuQxkRZL/vt0Rn+2KmbtqnL4ObBmYTfuwm78fNMB3Lf1IAD9IvaND63CO06ebfs8QIqY+CpM9Nfn371fc23aEnqLc8bqKWJSvjBpc9lgTdzRp2rb+dULbRbpGQ4XZHyOjtrPRe0dwzvtFlsb5fPMbn22oknavxyrc2a6Q3vCAnWgklyaKvwlyoU6FNJEWeWD2/qRyTfQ6WlzGzGxFyZN0bDlBVf2cHiJmrityuluiWHJrBYsndWCeV3Npr/ZlQyL5mo+lAo7IVczceNrqf4Sjh+VOdwY3dNe+L3zKI/TFONsjuHEuOExAfRF8u0rZgHQv2e+kHHviUy8SMRE9ZgYplhmioiqLe/PXdKNma0xfTpyOoepdA6DE2k8ssNccadW5QB69VpI0//rF/zCwL0BfqVyQiENaxZ2YXZ7HPO6nG8q/EbtqzFSZn+WzuYYls5qwfKeVtfipimA5ldZdKnCpDW/7w5zj4my5qn73EjFFouYGMe33fpshZ
wq41hFGac7FDGxQK0saIsbF8FdR/QLjypMAN1n8vjOo/jfPx4CAPS2J4q6rPmCkcrmMJXOmiINE/k5OXb5S1m1JzM5uK3EVFsz2xEOaXjo5rdB01DwOQxhYi4Z5pOF3ZrBSkWONBkD/Mq7OHEfza5jpVXmjE6l8eqA3oSMi1SZmXmhcUzx5cgcH08ix3TznVz6/P2PrsGh4UlRMaBpxrRlGVuPiU1VTls8guMZff+lsjkRhTOEi/5zeyKKJ//2HSKa8/MXDuDfHn9DCFGOlcfkmnMX4oNnzXUd7naDaqz2s0vrPZ9ai3Q2V/Gon4rfEZNwSMOD+fPXbVWPuSU995jUuTBxiJjwCIlVKkd+vCFM8vvcQ8TEbUUOYD0vJx0QAVhNSJhYUJjKKYyYqKkcwKjMOSTSOMW7XMoXj9GpjFmYpJwv8qH8kLZUNuepZNhtKgewDy9yYaK2ped3ApU0vgLGXeTIVNoY4FdiqTCn3IjJlv1DyDFgfncTZltETHgqQx7Qp8Iv/N0tcdMFIRTSCqJWVthX5ZjNr/wYb01ERGVLOsvADwmr6cWJaBjz89OlZ+d9RQUzfHi+vKCc0t+lRs3H+ylM9LLN6ooSwH/zK+A9PSD6bEjm1/pP5dhHTPj6eniEp3KchcmY0rbeDjl14+UmzCqVQ+bXQmhPWGDtMQGGJtLYd1wvl7SKmKya32lSvcX8JYC+CMpVPzJOzdU4YpiZh+6vbqZnFsOuLf2kQ3mzn5giJvkLqzowziv8O91zbBy5XHGjtwqvmjlnoXXKoicfMTk+liwos+bIpcKlYL6Ts/CY5C82yfxiKAtjudGT3SweTpPFnR9QvfHtasTEj8nCtUaUr07p5avlml9LQdzRp4Jpfi2MmOh/4+LbbcSkuMfEWN+89GtqkoQfx+omYLpDe8ICO4/JK/0j+XB3yDKMnoiGTaZJN8JEfv0xpUxVeEwc7jZLqcwZc5lHdYJPGO4fVlM5vOtrZQ+tdimKNTRRXg8TzryuZsTCISQzOdsBhU7wUtrViwrTOIAe0dE0veETF1MqakWOV4pV5fBFMJk2vicemZENsKmsc+SrmDCptJFPvjAA1R+4Vwnki+roVNrX4YRuES3p08Exv6rlwjKqKdouYqKWCxevypEjJu6/HyuPCUVMCqE9YUFSyZPziMkf39SnKS+e2WKbs10jeQvmdLgbWGZXmTORP3ibHRR5Kd1f3fYxcYKLrhPjKVNYstpVOWPJjLgbKrXrKycc0rBopp6q8NpoLZ3NYeuBIQBGea9KJBzCjPw2HrNJ56gVOV6x72OilAtnjV4oPMoni1u57NuKppj++ylFmKSzhR6TSqBuVyMIk2g4JKKjw5NpcZGsqjARF06jg7PaXbjekNMuVuXCMm5TOcXWRlNk0sNal7CqyskLQBImBrQnLLDzmPC7XLnjq8oa6aLkPmJi3ctk0qEdPYfX03urynFXq+9EeyIins97tgBA0scBfk7w7yTHgDcH9ffvLtP8CrhrTZ/LsYIL8o5DI5hMZ9HRFMUyizQfh/fyUBvTcfxN5UgRk4jSYC1t9EKJKaIFKIwaqsghf5mqRUxUYeKyHLbekS+UIpVTxc/GhYnsW6v3zq8RSdDZpXI4BamcfO+Y/uEp7Dk2LrwoRVM5Nl6uYlhFGtNULlwA7QkFxphtKodj5S/hmCImHlM56oRh0Y7eSZh49JhksjlRhljOWHdN0yxn5vAqjUpX5TRFjVEBB/Jt0kudLCzjpjX9VXc9g4u/vsEU9eCTd1cv7HKsgOCCw1aYjJUnTDRNE8etvGCqHhMjYhIylQxzjIiJ9fdom8qhiElZmIVJ7VI5gBHBrdUQPy/wfVRofjUfv1Z9TABg42tH8fZ/2YAHtg3kH1cklSMdf57MrxbnDZULF0J7QiGTY6IkMx42SiVlllpU5HBmtMZx7dqFuOSUHix3iKzI2PUy4cLEz1TOb7f1YyKVRXdLDHPL7NPAL57yBdpI5VT20JJHBR
z3qSoHMKqtdtsIk8lUFpv3DWJgZArrn9orfs8n766x8ZdwilXmHM03gprZWvpnuWr1PFywbAYWzjCOU7WPCRcesUioIM0jP85OYBheBOuZO5UufVTvLv1qsFZr5L4aIz5U5XhFvuDy9SgIZawfWjMfq+Z1YOU8c2NENfKh/rx2yQwsy/d54f8WzWjGhctnOr5fJBwCv//wFDGJ2XtMYpH638/VojHOZh+xGinvJWICAF++wtscILkcWcZNVY6XeTmMMXxv424AwMfOX2R7N+wWHqHgDc4A406g3Nd2Ax8VoG5PORRL5chpq588sxc3XLwULbGwML6usanI4RSNmJSZygGAf/rgGQW/E+IjY+UxKTRQ81SP3SBGKxMfUNj/pFKox1cjVOUANqmcKoouTdOH4E2ms+L9672PCQB85t0r8Jl3ryj4vbrv1EhIT3sCv//0RSW9Zzyi7ycvVTmJaKGgr1YlW5CgPaFgLUzMi97imfYRk1KQG7jJ8Iu8Y1VO1H3EZONrR/FK/wiaY2Fcu3ZhqZsr4BEKPqsGkDq/VqE5lfq9lGt+BYyIydHRpEn0cOS01chUBvc8vx/7jk/g2FgSsXCo4I5NpZgw4VU5PT4OvAMshvhJERMjlSMJk2IREykkLXeMter8WgkKPCYNEjHhwuTYaEoIxWqLLn5Xz42gQb5gFjO/lgM/BkupyjGbX6uT/gwStCcU+EGiN1nSF3M5TDynI+H7cC+7iMmEq4iJe4/J9zbuAgB85JwFZff8AIwIhVz6yju/VjqVA5gvRrH8QK7yXzOK2e26KLBK53BhwheRH/xhD57ZfRwAcPrc9qKCzCr9xUlmjJkds1rdVXS5JVrgMTHKga1SOcJjYvM9ctN1NsdM3pRqjXBXPSbVnmtTKbgwOTg0IX5X7TQVv3jyVjv1XpXjRLFUTjnELbxcxbA2v+arcsj8KqA9oWDVUlu+AC4pksYpBbn0VYZX5TiFCkUL8ayzMNl6YAjP7j6BSEjDJy9cXM7mCqwjJtUpFwaMXiaA3o7er2nGvDX9bot0Dm/B//5Vc9DTFsfAyBS++chrAOzLhGWcqnJ4tCQWDqG9yd+LkZ3HJB4JIZYXEXK00LiLcza/ArVpFhVXqiLqvdeGW7gw4ZVmLbFw1VMp6k1FkPdtsaqccrAymRcj4dDHhCImBrQnFKw6XrbG9ImjgLPxtVRs+5gUaUkPuI+YfG+DHi254sy5rquFisFTJ3LEhG+Hl7xrqcipHD/8JRzRmt4hYrJ4ZjM+/lZd4HGRscaFMBGpHIuICX+dma3lTUm2oqAlfdY5lVOs7DcaNiKK1ka+6qVyGiWNAxQKk1qYetX1JgjmVztUYeJnZC0uUjlezK+FnV9pVk4hJEwUrBbkUEhDaz6P6NTDpFSMuS/WHhPnPibFPSa7jo7h4R16GdwNFy0pa1tlui3Mr9WqygHMFyQ/KnI4TiXD3Pw6p7MJ15y7wLQNVoP7VHhVztBEuuA788P4agc/nvn8EzmyYURT7KcLq3CTJGDOlyer1ZJeMr9Ws2ql0nBhwgVwLT6belMRBPOrHXKERG4x4Afc4F3SED+Lc4b6mBjQnlCwMyLxRjzFKnJKwc5jwuc2lFuV84sXDoAx6CXMs9vK3VwBn+Yrp3Imq5jKaTOlciohTOxTOX0dTWhLRPHn5+km4iWzWlyJo46mqLgzOj5mbkt/rMweJk4UpHKkxVCt2AGAVMZowGaH1aTUahn5Gj1iwif71uKzqeduoM2v0trptzeQr8stpZhf07Iw0f8/yPvZbxrnjPYJuxz5p991El7YewLnLi4erveKXSrn8EjxC5WbWTn78w3I3nbSrLK2U0WUC0+kwBiDpmnSdOFqp3L8u7PklTn7jo8jk82JHDtjTNzJ8llJN1y0FMfHknjP6b2uXjsU0jCzNY7+4SkcHU2a0mqVjJgUtKQXx3m4QLQAxSMmQJGwdIXv/sIhDZGQhkyONVTERG2pXg8RkyCbXyPhEB
LREKbSuYLmauXyVxctxQPb+vHWIj1PZKz6mBw4kY/CdviTYm8ESJgo2OXW/7+z5+H/O3teRd6z3aIlfTbHRHtkJ0+I4TGxT+UY3gV/L3hcmKSzDGPJDNoSUalcuPLqX84XlzvAT2ZOR5NYzA4MTory8BPjegmnpgGzO/R92dEUxdf+dJWn1+fCRK3MKXdOjhMF5cJSRCRegscEkHqZpArNr9Uw8sUiIWRSWVFu3wionUspYlI+rfEoptJJ3/06l5w6G5ecOtvTc/g5k84ypLM5RMMhUf3HvW0EpXIK4GWU1cz38cVnKp0TF4ejo0lkcgzhkIaeNvvSUTezciqVImiKhcWJxn0mRrlwcFM5oZAmKnN2HTF8JjyNM6s1XlZEyK6XiRCQlfCYiHSNniJImjwmhVU5SSmiYodl6WPep1KNEe78PRoxlcNpr4X5tYE8JoDRht5LyqVSyOviVDqL8WQGh/IT2vmaQ5AwKaCad3wcOffJ0zkH8ymD3vaE48LgJpVTyTtx7qs4kfeZVLMqR2485af5FZBa0x8zhAn/TvrKrGqaZVMyLObkVCJiokRFLD0mFv1InASGpcekil0s7TozB5nCiEkNUjmKp63eh/gVg6+v9dDrJh4JiQrPyXQWe47pPrbulpivN1dBh4SJQi0c0tFwSFzIeTpH9TLYUWxWzngyg/F8qL0S3gVhgM2XDFezj4kpYuJjKgeQDLBHDAOs8Z2U1/zMrmS4Gh6TlJXHJH8MmSMmRgM2O/gFbNIqlVOF88euM3OQiUVCJlFfizRVQSonAEP8nODCxG/zaynI1WxTqZyo/KtEG4ogE+wjrgJUc2GVUQ2w/CI4p8hFUFTl2PQx4Wmc5li4Iiem2v11sqrlwpWLmPCycLlkuH/YH5OaXSqnslU5qsdE6mNiNcTPi8ck/53Lk7mrETHhx34jRUwAc9SkFp+twPwa8IgJF3f1MuhRToHyyr9KVHsGGRImCrWaW2AnTIqlDWJFUjmVMr5y5O6vjLGaRUw6fazKAYw7mN3H5IhJcTOyG6yEyXgyIxrqVeK7iqkt6aWISCkN1gBZmORMr13seX7BP1MjRUwAVZjUIpWjdH5tkIhJPaRyAHMK1IiYkDCRCfYRVwFqFzExV+ZwQ1Sxi2CxVE4l0wOAuWQ4nWVivkaiCuXCnc1RREIaomENM1r8/XxyJQ6PBh0UUazyhAkXHnJVjuj0GY9UJLJlN104Jplfkx7Nr4mY2WMilxtXw/zKxWilju1aIQuTmnR+bbCICT/fZtSJh0MuGebm+iWUyjFRHxKyjqi3VE4xP4PR+dU5lVMJQyUgp3LSoiJH3q5K0hyL4NtXn4WQ5q0ttNvXntvZhINDk9h9dAzdLd2u02vFsIqYvLh/EIA+CLASCB+JOl04HBLzcEqNmHBhIjdoq0Yq5+8vPxVP7zqGC5bOqPh7VZP2Gqdy1GhnkPuYAMD1b1uMmW0xfGjN/FpvCgDjvJlIZYT5lSImZkiYKFRrCJmK2svkkMu782Iek0pHTLol8ytP42ha9fbfe8/oq9hrL5nVkhcm41g5r1OYVf1K5Yyn9HLBlngEm/bqwsTNIMBSsJuVk4iGEI2Y/wa4a7DGfUTc/Go1mbuSnDqnHafOqYyQqyVyxKS9xlU5kZDm+9ymatPX0YS/vnhZrTdDwIXJriPjSGZyiIVDmNdFzdVkKJWjUA8ek4lUBoMTukApO5VTQUMlIA3ym0gJcZSIhAO/mAHmmTmHR6bAmB5BKDck3CL1f+ERrU37TgBwN2+nFPjxnGN68z4jYhIu8J/IJlYv5tdalNo3IvVkfg16D5N6hEeTtx8aBgAsnNEc6AnOlYD2hkKtUjncmDWazAiTZWs8UvSOqdisnIpHTMQgv1RVK3KqATfA7jo6JvwlczubyhZdmqaZ0jlHRqew7/gENA04u0LCRE6tpLM5ITziUWmIX8ZcsQN462OSoimpviAbuWvdkj7oXV/rEb5/Xz40AoDSOFZQKk
ehVpMeZfOrKEt14WXg6jtVo6qcLlGVk65qRU414AvG7qPjvvlLODNbY9h/YgJHR5PiOzq5t71ioXv5AjOZyiKbdynHwkZVTkpJ8wBFIiZKHxND1DfG918rah0xScipHBKZvsPPm13Uit4WEiYKRiqnuournMpx6y8Bind+rbzHxKjK4ReohhEm+V4m+05MYO9xfRBin0+Dtvj3cWwsiT3H9NdeU6FoCWCOYowljWGRcsQkrRhjAee0TIH5VZw7dDErBy5MEtJ3U03kiEnQS4XrEb5/Wb6CkSImhdBRp1APVTkHPfTLMFI5hR4TxhiOjemlrpUSJjzsnM0x4WeptnG4UvS0xdESCyObY3h293EA5RtfOXIqZ3PeX7JmUeWEiaZpQpzIwiQWDtkaY2ORkGPaytZj0iDff63gwqRW/VnMqRwSmX6j3rgtIWFSAK0gCvXQx8RtO3oA0mRYJsLznJHJjLjIzGytTA1/PBJGSz402Z8XVI0SMdE0TURNXtynV82U246eM6tVf519JyawPZ9rXlOhihwOv/sezwuTcEhDJGx0fhUek7zQiBe5W7frY0LCpDzmdzfr/61RpUaCzK8VRW1tQD1MCqFUjkKthEm7ZSrHvccE0LddPuiPjulCoaMpWtY03GJ0tcQwnprEobw3phoD/KrF0lmteOnNYWTyos/viMmGnUeRzTHM6Ui4EqLloAuTLEbzwoQLEsNjku8KKxljnRCpHMVjQobJ8ljW04pf/OVazO+ujTAh82tlkfdvT1u8JiXh9Q4JEwWxKFe9XJhHTDIipO7GzyB7AJKZrEmYHBHG18p2POxuieHNwUkhqBqlKgcoHK7ll8eEfyfDk3pZeKWjJYARlucREy48CrrCuiz7VVvSUyrHP96yuPLHgx0JqSV90Jur1SOyMCF/iTUkTBRq7TEZmUrjxIR+5+rmDjoSDiES0pDJsQIDbKWNrxze/XVguLFSOUBh/tevqhz1O6mkv4SjpnK48Cgwv/LmakW+x6aYan5lptcjgkksHEJI03veUH8N/5GrniiNYw0ddQq1FiYTqSxSmRw0DZjd7u4iKCpz0nbCxJ+LqR28MudQAwoT+Y6mqzmK5pg/Wr5AmCysRsREP0742AN+jMfynV9FubDniAn3mBiDAYngomma+G4pYuI/FDEpDq0gCskadX5Vh3X1tMVdiyN+Z6tW5hyt8JwcDo+Y8C6mjZTKWTijGXxt9stfApj7yrTFI1jR2+bba9thpHLMAkLMyhEN1vJ/L/I9yg3WGGPU+bWB4NEw6mPiPyZh0kPCxApaQRRqFTGJR8Km9/RyEbTrZXJstLKlwhw+L4fX5VfSaFttEtEw5nXpVRJ+CpNENCwMz2ct7KpK9YNI5aR4xET/nqIiYpI3v7qNmMSMfgzJTE48n1I5wYeLzij1MfGdJsnDs2QmpXKsmNZH3Sd//AJWf+URPL3rmPhdKn+3WAsDX7sUNZnjwWRpNy+HR0wqbX7tbDa/vt+TfmsNN8DO6fA3JcYFYyUbq8nwY5r3MeHHjdGSXj9+DI9JkYiJdI5MpbNkfm0gRCqHIia+w0VfPBKqeCVeUJnWK8jIVAbHx1M4nm9CBtS2F4PcUMmLydJuwnC1zK/dylC7RANFTABg7dIZAPyfY7NqXic0DXjnKT2+vq4dXICMqR4TZYhf0mXEJCI1Z5tMZ43OryRMAg+/uaA+Jv6zZGYrYuEQzlsyAyHav5ZM66ocMYBuQhImNcyTy3MxPKVyotapnGpX5XAayWMCANdfuARXnDnXtRnZLV/705W49b0no6fC5mROQbmwML9aV+W4ERiJaBjpbAaTqSz1MWkgRCqHvkvf6e1I4NnPv7Mmc5CCwrQ+6sQAuvG0+B1fXGtRWVCqMOEiSk7lZHMMJ8ZrFDFpoKocQK9S8FuUAHrEoVqiBJCqcmxSOZkcQy7HpHOg+Pcoz8up5blD+AtV5VSW7pYYiT4HpvWe4aZNy4hJLYRJ3EjleMk9Wk
VMjo8nkWNASANmtFQ4YtJi7lzYaBGTRiGm9DHhwkOeh5LK5oTAdXMO8JD/lJTKofkqwaeJIiZEDZnWRx1PQZwYrxePiREx6fNgtLTymPA0TndLvOJ54sJUTmNFTBqFggZrSioH0NM5XiIfRlv6nKcUEFHfULkwUUum9QrCUxA8YpLLMWEArIXHhPcyiUdCBekRJ0RVTtYQJnyqcKUrcgD9gieLKhIm9UlUqcoRnV9DsjBhnqKGci8TIerD9P0HHf69kvmVqAXTWphwjwmPmKSkC3stq3LmdjY5jptXMTq/Gh6TahlfOXLUhIRJfcJTLGPKrJxQSBNegnTWiHx49ZjwBm28LwoRXHg6lvqYELVgWh91/GI6WCfChPcx8drIS6RyMoWpnKoJEynCk6BQfl3CIyR86J4cFTR6meQ8RUyExyQlR0zo+w861MeEqCXTegUxyoX1qpyUdGGvxeK6dukMzGyN47Izej09z8r8Wm1h0t1sGGApYlKfqEZGuYEaFyGy+dWTx4T6mDQUb10+EzNb47hw+axabwoxDZnWhdS8mmQynTX1YYiFQ55SKX5x2pwOvPB37/T83ladX6s1J4djipiQMKlLVGEie0HkCcNezK8JaZAfzcppHM5fOrOktYgg/GBaryCt8YjIuw9OpOqipXYpC4F1VY4+6bd6ERNZmEzrw6puUb0fpohJ/jxIZ5in6ho+92MynaWqnAaDRAlRK6b1CqJpmqlkuJalwuVgNcSPV+XUImLSRBGTukSNZJg8JiKV461RmlUqh3pfEARRDtN+BZFLhoMaijY8JrWrypHLm+MkTOoSR4+JML96jJhEJfMrRUwIgvCBab+CyBGToIai1aqcZCaL4Und0FubcuFg7b/pQqHHpLAqx+wxKS4wEzGLPiYBO38Igqgvpv0Kwg2wg+P14TEpBaOPib79PI0TDWvoaIraPs9PuvJVOZoWvIjTdEFtFS9HtkQqJyO1pHfxPfJJ0pPpHNKZ2jUnJAiicZj2K0iXVDIc1D4MaipnYFg3vs5sjVfNwDYj32G2ORom01ydogpu+TiPSxET0WDNReSL9zGZTFHEhCAIf5jW5cKAjcckYAurmsp56c0hAMBJs9uqtg1LZrbiT86eh8Uzm6v2noQ3IkoXT9ncyit2UtmcJ4HeZFEuTOZXgiDKYdoLE1NVTmCFibkqZ9O+QQDAOYu6qrYNoZCGb3xoVdXej/BOQSonYuUxYSIl6MbEbD0rJ1jnD0EQ9cW0X0FMEZOs+46X9YTRxyQLxhg27T0BAFi9sLuWm0XUGQWpHAthksp4jJjIqZyACnuCIOqLab+CGIP80oEvF05lcnhzcBKHR5KIhDScOb+zthtG1BUF5cJS1Q0XE+lsTgyD9FQuLLekD9j5QxBEfTHtV5BuaZBfUO/45FTOpn16tOS0uR3ibpYgAItyYek4j8nlwtnSGqwF9fwhCKK+mPYriCgXnghuH5OYLEz25v0lC6vnLyGCgbPHRP9bMiNV5XhoST+ezCCTY5bvQxAE4YVgXYErADe/JjM50ZQsaKFooyonK4TJmkXkLyHMFLSkt/CYTKayYLq+cNdgLR8xGU1mLF+XIAjCK9N+BWmOhcVCyvt/BG1h5Xe248kMdh4eBQCspogJoRCNqB4TKZWT//8xjwKDp3K4mHH7PIIgCDum/QqiaZrwmQyMBFuY5CPpWDyzpWqt6Ing4MZj4lmYWPiYoqFgnT8EQdQXtILAqMw5HFRhovSboGgJYUWhx0RqSc+FyZQuTCIhDeFQca9IQkn3RMMaQi6eRxAEYUewrsAVojtvgOWpnHjgPCbm7a1mYzUiOKgeE1moqKkct718QiHNnBIK2LlDEET9QasIDAPsSP5uMWgRk0hIg3yTSo3VCCvkVE4sEjLNNIoqqRwv54CczlF9LARBEF7xvIo88cQTeP/73485c+ZA0zTcd999jo/fsGEDNE0r+DcwMFDqNvsO7/7KCZow0TRNhOW7mqNYOq
ulxltE1COyaFAjIjx6UoowkdM5FDEhCKJcPK8i4+PjWLVqFb773e96et7OnTvR398v/vX09Hh964rR2awIkwAurrz765pF3TTdl7BETt2owkSkcqZ4Ksd9cz5TxCSA5w5BEPWF5yF+l112GS677DLPb9TT04POzk5Xj00mk0gmk+LnkZERz+/nhe7mqOnnmIdFuV7gF5o1ZHwlbJAFtyo81KocTxETyXwdtDlTBEHUH1VbRc4880z09fXhXe96F5566inHx65btw4dHR3i3/z58yu6bV0BT+UAwMxWvTz4/KUza7wlRL2iekys/jae8mZ+BYCmqP3rEgRBeKXiq0hfXx++973v4b//+7/x3//935g/fz4uvvhivPjii7bPue222zA8PCz+HThwoKLbGHSPCQDc8ZEz8R/XrsEZ8zpqvSlEnRINO3hM8j/zRmklm18plUMQRJl4TuV4ZcWKFVixYoX4+fzzz8euXbvwrW99C//1X/9l+Zx4PI54vHoNwroawGOyrKcNy3raar0ZRB1jVR4sfnaYo1OMpmjhlGKCIIhSqckq8pa3vAVvvPFGLd7aEjViQnlyohHRNE2IEzvzq/Gze5+V7DGhAX4EQZRLTa7AW7duRV9fXy3e2pKCiAkJE6JB4akWO48Jp/SISfCM4wRB1BeeUzljY2OmaMeePXuwdetWdHd3Y8GCBbjttttw8OBB/OQnPwEA/Ou//isWL16M0047DVNTU/jBD36Axx57DL/73e/8+xRl0hQLoykaxmQ6C4CECdG46AIka9EF1jmC4oTsMQliGpQgiPrCszDZtGkT3v72t4ufP/3pTwMArrvuOqxfvx79/f3Yv3+/+HsqlcJnPvMZHDx4EM3NzVi5ciV+//vfm16jHuhqjmJyOC9MaHElGhQuQNRy4YKIiYdzwBwxoVQOQRDl4VmYXHzxxWDyjHOF9evXm37+3Oc+h8997nOeN6zadLXEcGg4mEP8CMIt3OSqHuNq6iYedX8OyB4TEvUEQZQLrSJ5ZAMsCROiUYmIiEmRVI6XiAmVCxME4SO0iuSRDbB010c0KlGbiIlaTROPemhJT+XCBEH4CK0ieeSICZULE42KnceksK8J9TEhCKI20CqSxxQxocWVaFD4sV1MiHgR57IfhaKNBEGUC60iebpbjEF+JEyIRiXq1mNCnV8JgqgRtIrk6SSPCTENsPWYFKnScYL6mBAE4Se0iuThHpOQZlQuEESjYRcxUQWFlw6ucsREFTgEQRBeoVUkD/eYUCiaaGRitqkcc1WOl/OA+pgQBOEntIrkWTKrBfO6mnDekhm13hSCqBjnLZmB5lgYq+Z3mn4vD/gDSk/lUMSEIIhy8dz5tVFJRMPY8DcXIxyiltpE43L925bgLy5YZJmujIZDSGe9z4uSUzleWtkTBEFYQauIRCQcgqaRMCEaGzsPlSxGSp8uTEsKQRDlQasIQRAAzCXDpU4Xppb0BEGUC60iBEEAMBtX1c6wTsjRFYqYEARRLrSKEAQBoPRUjqZpIp2jVvcQBEF4hYQJQRAAUHJVDmCkcyhiQhBEudAqQhAEgNI9JoBhgKUBmARBlAutIgRBAChPmFxz3gKsXTIDp83p8HuzCIKYZlAfE4IgAKgeE/fmVwD464uX4a8vXub3JhEEMQ2hiAlBEADMVTnkFSEIolbQ6kMQBIDyzK8EQRB+QasPQRAADI+JpgERGs1AEESNIGFCEAQAI30Tj9BoBoIgagcJE4IgABgekxi1lScIoobQCkQQBAAjlROPeqvIIQiC8BMSJgRBAACiET19QxETgiBqCa1ABEEAAGJh6t5KEETtoRWIIAgAUsSEhAlBEDWEViCCIAAYKRyKmBAEUUtoBSIIAoAsTMj8ShBE7SBhQhAEACCaj5RQKocgiFpCKxBBEACkcmESJgRB1BBagQiCAAB0NUf1/7bEarwlBEFMZyK13gCCIOqD957Rh/FkBu84ZXatN4UgiGkMCROCIAAAiW
gYH127qNabQRDENIdSOQRBEARB1A0kTAiCIAiCqBtImBAEQRAEUTeQMCEIgiAIom4gYUIQBEEQRN1AwoQgCIIgiLqBhAlBEARBEHUDCROCIAiCIOoGEiYEQRAEQdQNJEwIgiAIgqgbSJgQBEEQBFE3kDAhCIIgCKJuIGFCEARBEETdEIjpwowxAMDIyEiNt4QgCIIgCLfw6za/jrshEMJkdHQUADB//vwabwlBEARBEF4ZHR1FR0eHq8dqzIuMqRG5XA6HDh1CW1sbNE3z7XVHRkYwf/58HDhwAO3t7b69LlEI7evqQfu6etC+rh60r6uHn/uaMYbR0VHMmTMHoZA790ggIiahUAjz5s2r2Ou3t7fTgV4laF9XD9rX1YP2dfWgfV09/NrXbiMlHDK/EgRBEARRN5AwIQiCIAiibpjWwiQej+MLX/gC4vF4rTel4aF9XT1oX1cP2tfVg/Z19aj1vg6E+ZUgCIIgiOnBtI6YEARBEARRX5AwIQiCIAiibiBhQhAEQRBE3UDChCAIgiCIuoGECUEQBEEQdcO0Fibf/e53sWjRIiQSCZx77rl4/vnna71JdcUTTzyB97///ZgzZw40TcN9991n+jtjDP/wD/+Avr4+NDU14ZJLLsHrr79uesyJEydwzTXXoL29HZ2dnfjEJz6BsbEx02NeeuklXHjhhUgkEpg/fz6+9rWvFWzLL3/5S5x88slIJBI444wz8MADD/j+eWvFunXrcM4556CtrQ09PT248sorsXPnTtNjpqamcOONN2LGjBlobW3Fn/zJn+Dw4cOmx+zfvx+XX345mpub0dPTg89+9rPIZDKmx2zYsAFnn3024vE4li1bhvXr1xdsTyOfF3feeSdWrlwpOlquXbsWDz74oPg77efKcfvtt0PTNNxyyy3id7S//eGLX/wiNE0z/Tv55JPF3wO3n9k05Z577mGxWIz98Ic/ZC+//DK7/vrrWWdnJzt8+HCtN61ueOCBB9jf/d3fsV//+tcMALv33ntNf7/99ttZR0cHu++++9gf//hH9oEPfIAtXryYTU5Oise85z3vYatWrWLPPvss+8Mf/sCWLVvGrr76avH34eFhNnv2bHbNNdew7du3s5/97GesqamJ3XXXXeIxTz31FAuHw+xrX/sa27FjB/v7v/97Fo1G2bZt2yq+D6rBpZdeyn70ox+x7du3s61bt7L3vve9bMGCBWxsbEw85oYbbmDz589njz76KNu0aRM777zz2Pnnny/+nslk2Omnn84uueQStmXLFvbAAw+wmTNnsttuu008Zvfu3ay5uZl9+tOfZjt27GDf+c53WDgcZg899JB4TKOfF//zP//Dfvvb37LXXnuN7dy5k33+859n0WiUbd++nTFG+7lSPP/882zRokVs5cqV7Oabbxa/p/3tD1/4whfYaaedxvr7+8W/o0ePir8HbT9PW2Hylre8hd14443i52w2y+bMmcPWrVtXw62qX1RhksvlWG9vL/v6178ufjc0NMTi8Tj72c9+xhhjbMeOHQwAe+GFF8RjHnzwQaZpGjt48CBjjLF///d/Z11dXSyZTIrH/O3f/i1bsWKF+PlDH/oQu/zyy03bc+6557K//Mu/9PUz1gtHjhxhANjGjRsZY/p+jUaj7Je//KV4zCuvvMIAsGeeeYYxpovIUCjEBgYGxGPuvPNO1t7eLvbt5z73OXbaaaeZ3uvDH/4wu/TSS8XP0/G86OrqYj/4wQ9oP1eI0dFRtnz5cvbII4+wiy66SAgT2t/+8YUvfIGtWrXK8m9B3M/TMpWTSqWwefNmXHLJJeJ3oVAIl1xyCZ555pkabllw2LNnDwYGBkz7sKOjA+eee67Yh8888ww6OzuxZs0a8ZhLLrkEoVAIzz33nHjM2972NsRiMfGYSy+9FDt37sTg4KB4jPw+/DGN+l0NDw8DALq7uwEAmzdvRjqdNu2Dk08+GQsWLDDt6zPOOAOzZ88Wj7n00ksxMjKCl19+WT
zGaT9Ot/Mim83innvuwfj4ONauXUv7uULceOONuPzyywv2Ce1vf3n99dcxZ84cLFmyBNdccw32798PIJj7eVoKk2PHjiGbzZq+BACYPXs2BgYGarRVwYLvJ6d9ODAwgJ6eHtPfI5EIuru7TY+xeg35Pewe04jfVS6Xwy233IILLrgAp59+OgD988diMXR2dpoeq+7rUvfjyMgIJicnp815sW3bNrS2tiIej+OGG27Avffei1NPPZX2cwW455578OKLL2LdunUFf6P97R/nnnsu1q9fj4ceegh33nkn9uzZgwsvvBCjo6OB3M8RT48mCKKi3Hjjjdi+fTuefPLJWm9Kw7JixQps3boVw8PD+NWvfoXrrrsOGzdurPVmNRwHDhzAzTffjEceeQSJRKLWm9PQXHbZZeL/V65ciXPPPRcLFy7EL37xCzQ1NdVwy0pjWkZMZs6ciXA4XOBKPnz4MHp7e2u0VcGC7yenfdjb24sjR46Y/p7JZHDixAnTY6xeQ34Pu8c02nd100034f7778fjjz+OefPmid/39vYilUphaGjI9Hh1X5e6H9vb29HU1DRtzotYLIZly5Zh9erVWLduHVatWoU77riD9rPPbN68GUeOHMHZZ5+NSCSCSCSCjRs34tvf/jYikQhmz55N+7tCdHZ24qSTTsIbb7wRyON6WgqTWCyG1atX49FHHxW/y+VyePTRR7F27doabllwWLx4MXp7e037cGRkBM8995zYh2vXrsXQ0BA2b94sHvPYY48hl8vh3HPPFY954oknkE6nxWMeeeQRrFixAl1dXeIx8vvwxzTKd8UYw0033YR7770Xjz32GBYvXmz6++rVqxGNRk37YOfOndi/f79pX2/bts0kBB955BG0t7fj1FNPFY9x2o/T9bzI5XJIJpO0n33mne98J7Zt24atW7eKf2vWrME111wj/p/2d2UYGxvDrl270NfXF8zj2pNVtoG45557WDweZ+vXr2c7duxgn/rUp1hnZ6fJlTzdGR0dZVu2bGFbtmxhANg3v/lNtmXLFrZv3z7GmF4u3NnZyX7zm9+wl156iV1xxRWW5cJnnXUWe+6559iTTz7Jli9fbioXHhoaYrNnz2Yf/ehH2fbt29k999zDmpubC8qFI5EI+5d/+Rf2yiuvsC984QsNVS78V3/1V6yjo4Nt2LDBVO43MTEhHnPDDTewBQsWsMcee4xt2rSJrV27lq1du1b8nZf7vfvd72Zbt25lDz30EJs1a5Zlud9nP/tZ9sorr7Dvfve7luV+jXxe3HrrrWzjxo1sz5497KWXXmK33nor0zSN/e53v2OM0X6uNHJVDmO0v/3iM5/5DNuwYQPbs2cPe+qpp9gll1zCZs6cyY4cOcIYC95+nrbChDHGvvOd77AFCxawWCzG3vKWt7Bnn3221ptUVzz++OMMQMG/6667jjGmlwz/3//7f9ns2bNZPB5n73znO9nOnTtNr3H8+HF29dVXs9bWVtbe3s7+4i/+go2Ojpoe88c//pG99a1vZfF4nM2dO5fdfvvtBdvyi1/8gp100kksFoux0047jf32t7+t2OeuNlb7GAD70Y9+JB4zOTnJ/vqv/5p1dXWx5uZm9sEPfpD19/ebXmfv3r3ssssuY01NTWzmzJnsM5/5DEun06bHPP744+zMM89ksViMLVmyxPQenEY+Lz7+8Y+zhQsXslgsxmbNmsXe+c53ClHCGO3nSqMKE9rf/vDhD3+Y9fX1sVgsxubOncs+/OEPszfeeEP8PWj7WWOMMW8xFoIgCIIgiMowLT0mBEEQBEHUJyRMCIIgCIKoG0iYEARBEARRN5AwIQiCIAiibiBhQhAEQRBE3UDChCAIgiCIuoGECUEQBEEQdQMJE4IgCIIg6gYSJgRBEARB1A0kTAiCIAiCqBtImBAEQRAEUTf8/wEysz3oFCx43wAAAABJRU5ErkJggg==\n"
          },
          "metadata": {}
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Bootstrapped DQN Agent with LSTM History Summarization\n",
        "\n",
        "Leveraging the deep exploration value-based algorithm, now the agent can achieve a better performance in a much faster way while being able to still leverage history summarization capability. Note how top average performance takes around 20,000 steps in the graph above, but only about 5,000 steps in the graph below."
      ],
      "metadata": {
        "id": "FnWvC2tGqKc2"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# Better exploration with BootstrappedDQN-LSTM\n",
        "\n",
        "agent = PearlAgent(\n",
        "    policy_learner=BootstrappedDQN(\n",
        "        q_ensemble_network=EnsembleQValueNetwork(\n",
        "            state_dim=128,\n",
        "            action_dim=100,\n",
        "            ensemble_size=10,\n",
        "            output_dim=1,\n",
        "            hidden_dims=[64, 64],\n",
        "            prior_scale=0.3,\n",
        "        ),\n",
        "        action_space=action_space,\n",
        "        training_rounds=50,\n",
        "        action_representation_module=action_representation_module,\n",
        "    ),\n",
        "    history_summarization_module=LSTMHistorySummarizationModule(\n",
        "        observation_dim=1,\n",
        "        action_dim=100,\n",
        "        hidden_dim=128,\n",
        "        history_length=history_length,\n",
        "    ),\n",
        "    replay_buffer=BootstrapReplayBuffer(100_000, 1.0, 10),\n",
        "    device_id=device_id,\n",
        ")\n",
        "\n",
        "info = online_learning(\n",
        "    agent=agent,\n",
        "    env=env,\n",
        "    number_of_steps=number_of_steps,\n",
        "    print_every_x_steps=100,\n",
        "    record_period=min(record_period, number_of_steps),\n",
        "    learn_after_episode=True,\n",
        ")\n",
        "\n",
        "torch.save(info[\"return\"], \"BootstrappedDQN-LSTM-return.pt\")\n",
        "plt.plot(\n",
        "    record_period * np.arange(len(info[\"return\"])),\n",
        "    info[\"return\"],\n",
        "    label=\"BootstrappedDQN-LSTM\",\n",
        ")\n",
        "plt.legend()\n",
        "plt.show()"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        },
        "id": "xuuCmTfoqMg9",
        "outputId": "5d909471-e9bb-4726-93ac-fa2e41e9de8c"
      },
      "execution_count": 7,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "episode 5, step 100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 10, step 200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 15, step 300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 20, step 400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 25, step 500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 30, step 600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 35, step 700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 40, step 800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 45, step 900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 50, step 1000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 55, step 1100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 60, step 1200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 65, step 1300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 70, step 1400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 75, step 1500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 80, step 1600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 85, step 1700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 90, step 1800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 95, step 1900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 100, step 2000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 105, step 2100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 110, step 2200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 115, step 2300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 120, step 2400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 125, step 2500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 130, step 2600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 135, step 2700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 140, step 2800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 145, step 2900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 150, step 3000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 155, step 3100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 160, step 3200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 165, step 3300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 170, step 3400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 175, step 3500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 180, step 3600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 185, step 3700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 190, step 3800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 195, step 3900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 200, step 4000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 205, step 4100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 210, step 4200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 215, step 4300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 220, step 4400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 225, step 4500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 230, step 4600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 235, step 4700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 240, step 4800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 245, step 4900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 250, step 5000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 255, step 5100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 260, step 5200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 265, step 5300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 270, step 5400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 275, step 5500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 280, step 5600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 285, step 5700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 290, step 5800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 295, step 5900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 300, step 6000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 305, step 6100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 310, step 6200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 315, step 6300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 320, step 6400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 325, step 6500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 330, step 6600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 335, step 6700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 340, step 6800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 345, step 6900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 350, step 7000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 355, step 7100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 360, step 7200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 365, step 7300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 370, step 7400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 375, step 7500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 380, step 7600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 385, step 7700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 390, step 7800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 395, step 7900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 400, step 8000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 405, step 8100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 410, step 8200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 415, step 8300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 420, step 8400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 425, step 8500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 430, step 8600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 435, step 8700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 440, step 8800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 445, step 8900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 450, step 9000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 455, step 9100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 460, step 9200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 465, step 9300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 470, step 9400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 475, step 9500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 480, step 9600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 485, step 9700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 490, step 9800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 495, step 9900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 500, step 10000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 505, step 10100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 510, step 10200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 515, step 10300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 520, step 10400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 525, step 10500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 530, step 10600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 535, step 10700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 540, step 10800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 545, step 10900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 550, step 11000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 555, step 11100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 560, step 11200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 565, step 11300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 570, step 11400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 575, step 11500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 580, step 11600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 585, step 11700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 590, step 11800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 595, step 11900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 600, step 12000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 605, step 12100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 610, step 12200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 615, step 12300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 620, step 12400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 625, step 12500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 630, step 12600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 635, step 12700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 640, step 12800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 645, step 12900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 650, step 13000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 655, step 13100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 660, step 13200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 665, step 13300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 670, step 13400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 675, step 13500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 680, step 13600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 685, step 13700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 690, step 13800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 695, step 13900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 700, step 14000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 705, step 14100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 710, step 14200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 715, step 14300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 720, step 14400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 725, step 14500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 730, step 14600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 735, step 14700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 740, step 14800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 745, step 14900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 750, step 15000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 755, step 15100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 760, step 15200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 765, step 15300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 770, step 15400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 775, step 15500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 780, step 15600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 785, step 15700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 790, step 15800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 795, step 15900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 800, step 16000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 805, step 16100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 810, step 16200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 815, step 16300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 820, step 16400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 825, step 16500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 830, step 16600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 835, step 16700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 840, step 16800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 845, step 16900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 850, step 17000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 855, step 17100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 860, step 17200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 865, step 17300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 870, step 17400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 875, step 17500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 880, step 17600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 885, step 17700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 890, step 17800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 895, step 17900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 900, step 18000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 905, step 18100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 910, step 18200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 915, step 18300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 920, step 18400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 925, step 18500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 930, step 18600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 935, step 18700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 940, step 18800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 945, step 18900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 950, step 19000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 955, step 19100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 960, step 19200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 965, step 19300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 970, step 19400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 975, step 19500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 980, step 19600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 985, step 19700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 990, step 19800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 995, step 19900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1000, step 20000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1005, step 20100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1010, step 20200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1015, step 20300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1020, step 20400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1025, step 20500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1030, step 20600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1035, step 20700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1040, step 20800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1045, step 20900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1050, step 21000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1055, step 21100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1060, step 21200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1065, step 21300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1070, step 21400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1075, step 21500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1080, step 21600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1085, step 21700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1090, step 21800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1095, step 21900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1100, step 22000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1105, step 22100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1110, step 22200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1115, step 22300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1120, step 22400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1125, step 22500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1130, step 22600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1135, step 22700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1140, step 22800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1145, step 22900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1150, step 23000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1155, step 23100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1160, step 23200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1165, step 23300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1170, step 23400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1175, step 23500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1180, step 23600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1185, step 23700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1190, step 23800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1195, step 23900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1200, step 24000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1205, step 24100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1210, step 24200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1215, step 24300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1220, step 24400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1225, step 24500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1230, step 24600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1235, step 24700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1240, step 24800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1245, step 24900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1250, step 25000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1255, step 25100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1260, step 25200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1265, step 25300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1270, step 25400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1275, step 25500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1280, step 25600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1285, step 25700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1290, step 25800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1295, step 25900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1300, step 26000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1305, step 26100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1310, step 26200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1315, step 26300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1320, step 26400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1325, step 26500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1330, step 26600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1335, step 26700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1340, step 26800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1345, step 26900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1350, step 27000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1355, step 27100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1360, step 27200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1365, step 27300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1370, step 27400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1375, step 27500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1380, step 27600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1385, step 27700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1390, step 27800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1395, step 27900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1400, step 28000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1405, step 28100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1410, step 28200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1415, step 28300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1420, step 28400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1425, step 28500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1430, step 28600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1435, step 28700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1440, step 28800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1445, step 28900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1450, step 29000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1455, step 29100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1460, step 29200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1465, step 29300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1470, step 29400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1475, step 29500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1480, step 29600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1485, step 29700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1490, step 29800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1495, step 29900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1500, step 30000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1505, step 30100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1510, step 30200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1515, step 30300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 7.0\n",
            "episode 1520, step 30400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1525, step 30500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1530, step 30600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1535, step 30700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1540, step 30800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1545, step 30900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1550, step 31000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1555, step 31100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1560, step 31200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1565, step 31300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1570, step 31400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1575, step 31500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1580, step 31600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1585, step 31700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1590, step 31800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1595, step 31900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1600, step 32000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1605, step 32100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1610, step 32200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1615, step 32300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1620, step 32400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1625, step 32500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1630, step 32600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1635, step 32700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1640, step 32800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1645, step 32900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1650, step 33000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1655, step 33100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1660, step 33200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1665, step 33300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1670, step 33400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1675, step 33500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1680, step 33600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1685, step 33700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1690, step 33800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1695, step 33900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1700, step 34000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1705, step 34100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1710, step 34200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1715, step 34300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1720, step 34400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1725, step 34500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1730, step 34600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1735, step 34700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1740, step 34800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1745, step 34900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1750, step 35000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1755, step 35100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1760, step 35200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1765, step 35300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1770, step 35400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1775, step 35500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1780, step 35600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1785, step 35700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1790, step 35800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1795, step 35900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1800, step 36000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 6.0\n",
            "episode 1805, step 36100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1810, step 36200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1815, step 36300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1820, step 36400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1825, step 36500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1830, step 36600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1835, step 36700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1840, step 36800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1845, step 36900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1850, step 37000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1855, step 37100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1860, step 37200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1865, step 37300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1870, step 37400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1875, step 37500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1880, step 37600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1885, step 37700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1890, step 37800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1895, step 37900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1900, step 38000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1905, step 38100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1910, step 38200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1915, step 38300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 1920, step 38400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1925, step 38500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1930, step 38600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1935, step 38700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1940, step 38800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1945, step 38900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1950, step 39000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1955, step 39100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 1960, step 39200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 1965, step 39300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1970, step 39400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1975, step 39500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1980, step 39600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 1985, step 39700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 1990, step 39800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 1995, step 39900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2000, step 40000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2005, step 40100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2010, step 40200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2015, step 40300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2020, step 40400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2025, step 40500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2030, step 40600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2035, step 40700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2040, step 40800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2045, step 40900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2050, step 41000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2055, step 41100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2060, step 41200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2065, step 41300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2070, step 41400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2075, step 41500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2080, step 41600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2085, step 41700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2090, step 41800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2095, step 41900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2100, step 42000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2105, step 42100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2110, step 42200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2115, step 42300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2120, step 42400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2125, step 42500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2130, step 42600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2135, step 42700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2140, step 42800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2145, step 42900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2150, step 43000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2155, step 43100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2160, step 43200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2165, step 43300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2170, step 43400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2175, step 43500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 5.0\n",
            "episode 2180, step 43600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2185, step 43700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2190, step 43800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2195, step 43900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2200, step 44000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2205, step 44100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2210, step 44200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2215, step 44300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2220, step 44400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2225, step 44500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2230, step 44600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2235, step 44700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2240, step 44800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2245, step 44900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 0.0\n",
            "episode 2250, step 45000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2255, step 45100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2260, step 45200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2265, step 45300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2270, step 45400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2275, step 45500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2280, step 45600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2285, step 45700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2290, step 45800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2295, step 45900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2300, step 46000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2305, step 46100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2310, step 46200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2315, step 46300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2320, step 46400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2325, step 46500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2330, step 46600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2335, step 46700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2340, step 46800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2345, step 46900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2350, step 47000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2355, step 47100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2360, step 47200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2365, step 47300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2370, step 47400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2375, step 47500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2380, step 47600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2385, step 47700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2390, step 47800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2395, step 47900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2400, step 48000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2405, step 48100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2410, step 48200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2415, step 48300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2420, step 48400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2425, step 48500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2430, step 48600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2435, step 48700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2440, step 48800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2445, step 48900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2450, step 49000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2455, step 49100, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2460, step 49200, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2465, step 49300, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2470, step 49400, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2475, step 49500, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n",
            "episode 2480, step 49600, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2485, step 49700, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 4.0\n",
            "episode 2490, step 49800, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 1.0\n",
            "episode 2495, step 49900, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 3.0\n",
            "episode 2500, step 50000, agent=PearlAgent with BootstrappedDQN, LSTMHistorySummarizationModule(\n",
            "  (lstm): LSTM(101, 128, num_layers=2, batch_first=True)\n",
            "), BootstrapReplayBuffer, env=RecEnv\n",
            "return: 2.0\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<Figure size 640x480 with 1 Axes>"
            ],
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiYAAAGdCAYAAAAmK7htAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAC2MklEQVR4nO29eZgcZbn3/63eZ59MklmykARIwhIIkTVEBWQJUaPoUXmFIyC8etAE8XBEjcqqx7igR1TkHBfI8acswgvoQYhGIOSwCxKSEAhJCCSQmUzW2afX5/dH9fPUU9VV1VXV1dPV0/fnunLBzFR3V1dX13PX9/7e960wxhgIgiAIgiACQKjSO0AQBEEQBMGhwIQgCIIgiMBAgQlBEARBEIGBAhOCIAiCIAIDBSYEQRAEQQQGCkwIgiAIgggMFJgQBEEQBBEYKDAhCIIgCCIwRCq9A07I5XLYvXs3mpqaoChKpXeHIAiCIAgHMMYwMDCAKVOmIBRypoVURWCye/duTJ8+vdK7QRAEQRCEB3bt2oVp06Y52rYqApOmpiYA6htrbm6u8N4QBEEQBOGE/v5+TJ8+XazjTqiKwISnb5qbmykwIQiCIIgqw40Ng8yvBEEQBEEEBgpMCIIgCIIIDBSYEARBEAQRGKrCY+IExhgymQyy2Wyld4UgxhXhcBiRSIRK9QmCGBPGRWCSSqXQ3d2N4eHhSu8KQYxL6uvr0dXVhVgsVuldIQhinFP1gUkul8OOHTsQDocxZcoUxGIxurMjCJ9gjCGVSmHv3r3YsWMHZs+e7bhJEkEQhBeqPjBJpVLI5XKYPn066uvrK707BDHuqKurQzQaxdtvv41UKoVEIlHpXSIIYhwzbm596C6OIMoHfb8Ighgr6GpDEARBEERgoMCEGNesWrUKra2tld4NgiAIwiEUmFSQyy67DIqiiH8TJ07E+eefjw0bNvj2Gm+99RYURcH69esdP+bGG2/ECSec4Ns+BIlVq1aJ4x0OhzFhwgSceuqpuPnmm9HX11ew/a5du3D55ZcLY/WMGTNw9dVXY//+/brtzjzzTCiKgnvuuUf3+5/85CeYOXOm7T4VO947duzARRddhClTpiCRSGDatGn46Ec/itdff133fqz+vfXWW7jxxhuhKArOP//8guf/4Q9/CEVRcOaZZ9ruJ0EQxFhAgUmFOf/889Hd3Y3u7m489thjiEQi+PCHP1zp3XJEOp2u9C54orm5Gd3d3XjnnXfwzDPP4POf/zx++9vf4oQTTsDu3bvFdm+++SZOOukkbN26FXfffTe2bduG//zP/8Rjjz2GhQsX4sCBA7rnTSQS+Na3vuXrcUmn0zj33HPR19eHBx54AFu2bMG9996L4447DocOHcKFF14ozp/u7m4sXLgQn/vc53S/45O5u7q68MQTT+Cdd97RvcYdd9yBww47zLd9JgiCKAUKTCpMPB5HZ2cnOjs7ccIJJ+DrX/86du3ahb179wIANm7ciA984AOoq6vDxIkT8fnPfx6Dg4Pi8blcDjfffDOmTZuGeDyOE044AatXrxZ/nzVrFgBgwYIFurvitWvX4pRTTkFDQwNaW1uxaNEivP3221i1ahVuuukmvPLKK+KOe9WqVQDUIUy33347PvKRj6ChoQH//u//jmw2iyuuuAKzZs1CXV0d5s6di1tvvVX3Hi+77DJccMEFuOmmmzB58mQ0NzfjyiuvRCqVEtuceeaZWL58OZYvX46WlhZMmjQJ1113HRhjYptkMomvfOUrmDp1KhoaGnDqqadi7dq1utdatWoVDjvsMNTX1+NjH/tYgbLB30dnZye6urpw9NFH44orrsAzzzyDwcFBfPWrXxXbLVu2DLFYDH/9619xxhln4LDDDsOSJUvwt7/9De+++y6++c1v6p7305/+NA4dOoRf/epXxT52x7z66qvYvn07fvGLX+C0007DjBkzsGjRInznO9/Baaedhrq6On
H+dHZ2IhaLob6+Xve7cDgMAGhvb8d5552H//7v/xbP/8wzz2Dfvn340Ic+5Ns+E4RTntm2D3/4+65K70bZeWPPAH617k2kMrlK70pVMO4CE8YYhlOZivyTF1EvDA4O4ne/+x2OPPJITJw4EUNDQ1i8eDEmTJiAv//977jvvvvwt7/9DcuXLxePufXWW/GjH/0It9xyCzZs2IDFixfjIx/5CLZu3QoAeOGFFwAAf/vb39Dd3Y0HHngAmUwGF1xwAc444wxs2LABzz77LD7/+c9DURRceOGF+Ld/+zcce+yx4o77wgsvFK9344034mMf+xg2btyIyy+/HLlcDtOmTcN9992HzZs34/rrr8c3vvEN/OEPf9C9t8ceewyvvfYa1q5di7vvvhsPPPAAbrrpJt02//3f/41IJIIXXngBt956K3784x/j17/+tfj78uXL8eyzz+Kee+7Bhg0b8MlPfhLnn3++eK/PP/88rrjiCixfvhzr16/HWWedhe985zuOjn17ezsuvvhi/OlPf0I2m8WBAwfwl7/8BV/84hdRV1en27azsxMXX3wx7r33Xt1n3tzcjG9+85u4+eabMTQ05Oh1izF58mSEQiHcf//9vnQ1vvzyy0WgCahqycUXX0yN04iK8G/3vYKv/r8N2HVgfDfH/N6jr+PfH3kNa7f0VnpXqoKq72NiZCSdxTHX/6Uir7355sWoj7k7pA8//DAaGxsBAENDQ+jq6sLDDz+MUCiEu+66C6Ojo/jtb3+LhoYGAMDPf/5zLF26FN///vfR0dGBW265BV/72tfwf/7P/wEAfP/738cTTzyBn/zkJ7jtttswefJkAMDEiRPR2dkJADhw4AD6+vrw4Q9/GEcccQQA4Oijjxb71NjYiEgkIraXueiii/DZz35W9zs5wJg1axaeffZZ/OEPf8CnPvUp8ftYLIY77rgD9fX1OPbYY3HzzTfj2muvxbe//W1Rijp9+nT8x3/8BxRFwdy5c7Fx40b8x3/8Bz73uc9h586duPPOO7Fz505MmTIFAPCVr3wFq1evxp133onvfve7uPXWW3H++ecL1WPOnDl45plndAqSHUcddRQGBgawf/9+7NixA4wx3XGROfroo3Hw4EHs3bsX7e3t4vdf/OIXRVB13XXXOXpdO6ZOnYqf/vSn+OpXv4qbbroJJ510Es466yxcfPHFOPzww10/34c//GFceeWVWLduHU488UT84Q9/wFNPPYU77rij5H0lCLccGlbTnn0jaUyv8L6Uk0PDqjp8cDhVZEsCGIeKSbVx1llnYf369Vi/fj1eeOEFLF68GEuWLMHbb7+N1157DfPnzxdBCQAsWrQIuVwOW7ZsQX9/P3bv3o1FixbpnnPRokV47bXXLF+zra0Nl112GRYvXoylS5fi1ltvRXd3t6P9Pemkkwp+d9ttt+HEE0/E5MmT0djYiF/+8pfYuXOnbpv58+frGuAtXLgQg4OD2LVLk3FPO+00XdfehQsXYuvWrchms9i4cSOy2SzmzJmDxsZG8e/JJ5/E9u3bAQCvvfYaTj31VN3rLly40NH7AiDUD3kfiqlgRqUhHo/j5ptvxi233IJ9+/bp/rZz507dvn/3u991tF/Lli1DT08Pfv/732PhwoW47777cOyxx2LNmjWOHi8TjUbxz//8z7jzzjtx3333Yc6cOTj++ONdPw9BlApjDKMZVQVMZsb3jLNUVk3hjKTG9/v0i3GnmNRFw9h88+KKvbZbGhoacOSRR4qff/3rX6OlpcVXn4IZd955J770pS9h9erVuPfee/Gtb30La9aswWmnnVZ0f2XuuecefOUrX8GPfvQjLFy4EE1NTfjhD3+I559/3tf9HRwcRDgcxksvvSQ8ExyuOJXKa6+9hubmZkycOBGhUAiKouC1117Dxz72MdNtJ0+ebFqK/M///M+45ZZb8J3vfEdXkTNlyhRddVRbW5vjfWtqasLSpUuxdOlSfOc738HixYvxne98B+eee6
6btwhATeeceuqp2LRpEy6//HLXjycIP0hnGXjcn0yPb+8Ff3/DaQpMnDDuAhNFUVynU4KEoigIhUIYGRnB0UcfjVWrVmFoaEgEBE8//TRCoRDmzp2L5uZmTJkyBU8//TTOOOMM8RxPP/00TjnlFADaHb2ZP2HBggVYsGABVqxYgYULF+Kuu+7Caaedhlgs5tjP8PTTT+P000/HF7/4RfE7rmDIvPLKKxgZGRF+jeeeew6NjY2iYgRAQTDz3HPPYfbs2QiHw1iwYAGy2Sx6e3vxvve9z3Rfjj76aNPncEJvby/uuusuXHDBBQiFQpg4cSLOPfdc/OIXv8C//uu/6nwmXL1YtmyZ6XOFQiGsXLkSH//4x/GFL3xB/D4SieiCUK8oioKjjjoKzzzzjKfHH3vssTj22GOxYcMGXHTRRSXvD0F4QVZJkuPcFMoVk1FSTBxBqZwKk0wm0dPTg56eHrz22mu46qqrMDg4iKVLl+Liiy9GIpHApZdeik2bNuGJJ57AVVddhc985jPo6OgAAFx77bX4/ve/j3vvvRdbtmzB17/+daxfvx5XX301ANXUWVdXh9WrV2PPnj3o6+vDjh07sGLFCjz77LN4++238de//hVbt24VfoqZM2dix44dWL9+Pfbt24dkMmm5/7Nnz8aLL76Iv/zlL3jjjTdw3XXX4e9//3vBdqlUCldccQU2b96MRx55BDfccAOWL1+ua3W+c+dOXHPNNdiyZQvuvvtu/OxnPxPvY86cObj44otxySWX4IEHHsCOHTvwwgsvYOXKlfjzn/8MAEIBuuWWW7B161b8/Oc/N/WXMMbQ09OD7u5uvPbaa7jjjjtw+umno6WlBd/73vfEdj//+c+RTCaxePFirFu3Drt27cLq1atx7rnnYs6cObj++ustj8uHPvQhnHrqqfiv//ovy21kRkZGREqP/9u+fTvWr1+Pj370o7j//vuxefNmbNu2Db/5zW9wxx134KMf/aij5zbj8ccfR3d3NzWfIyrGqKSSjI5zJYErJiPj/H36BqsC+vr6GADW19dX8LeRkRG2efNmNjIyUoE9K41LL72UARD/mpqa2Mknn8zuv/9+sc2GDRvYWWedxRKJBGtra2Of+9zn2MDAgPh7NptlN954I5s6dSqLRqNs/vz57NFHH9W9zq9+9Ss2ffp0FgqF2BlnnMF6enrYBRdcwLq6ulgsFmMzZsxg119/Pctms4wxxkZHR9k//dM/sdbWVgaA3XnnnYwxxgCwBx98UPfco6Oj7LLLLmMtLS2stbWVfeELX2Bf//rX2fz583Xv86Mf/Si7/vrr2cSJE1ljYyP73Oc+x0ZHR8U2Z5xxBvviF7/IrrzyStbc3MwmTJjAvvGNb7BcLie2SaVS7Prrr2czZ85k0WiUdXV1sY997GNsw4YNYpvf/OY3bNq0aayuro4tXbqU3XLLLaylpUX8/c477xTHW1EU1tLSwk455RR28803m55fO3bsYJdeeinr6OhgiqIwAOzjH/84Gxoa0m13xhlnsKuvvlr3u2eeeYYBYDNmzCh4XpkbbrhBdx7wf2effTbbu3cv+9KXvsTmzZvHGhsbWVNTEzvuuOPYLbfcIj6vYvvBX0P+TIxcffXV7IwzzrD8ezV/z4hgsuvAEJvxtYfZjK89zB78xzuV3p2ysuDmv7IZX3uYfeOBDcU3HmfYrd9WKIyVWOM6BvT396OlpQV9fX1obm7W/W10dBQ7duzArFmzaOppQLnssstw6NAhPPTQQ5bbnHnmmTjhhBPwk5/8ZMz2yws33HADfvzjHzvy44wn6HtG+M32vYM4+0dPAgC+/0/H4cKTx2+Tv2OvX42hVBYff89U/PhTJ1R6d8YUu/XbClepnJUrV+Lkk09GU1MT2tvbccEFF2DLli1FH3fo0CEsW7YMXV1diMfjmDNnDh555BE3L00QgeCmm27CT3/6Uzz33HPI5cZ3Xpwgyomcvhkd7+bXvIdmvKes/MKVS/TJJ5
/EsmXLcPLJJyOTyeAb3/gGzjvvPGzevLmgWoOTSqVw7rnnor29Hffffz+mTp2Kt99+m3LbRNVi7ONCEIR7ZMPreC4XzuYYMjk1MTFM5ldHuApMjEbCVatWob29HS+99BLe//73mz7mjjvuwIEDB/DMM88gGo0CQNGhZsT4Qu40aoWxtTxBEOMbuUR4PJcLy23oqY+JM0qqyuHTWO36MfzpT3/CwoULsWzZMnR0dGDevHn47ne/a1uOmkwm0d/fr/tHEARBjB9GJZVkdBwrJrrAhFI5jvAcmORyOXz5y1/GokWLMG/ePMvt3nzzTTHn45FHHsF1112HH/3oR7YzTFauXImWlhbxT+51QRAEQVQ/taKYyGkqUkyc4TkwWbZsGTZt2oR77rnHdrtcLof29nb88pe/xIknnogLL7wQ3/zmN/Gf//mflo9ZsWIF+vr6xD+5bbkVVVBcRBBVC32/CL+plQZrSVJMXOOpRery5cvx8MMPY926dZg2bZrttl1dXYhGo7o24kcffTR6enqQSqVMp5rG43HE43FH+8J9K8PDwwVTYAmC8IfhYXX6K/++EUSpJGukwVqSPCaucRWYMMZw1VVX4cEHH8TatWsxa9asoo9ZtGgR7rrrLuRyOdHl84033kBXV5cvo9bD4TBaW1vR26uOk66vr9cNYSMIwjuMMQwPD6O3txetra0Fc4oIwiu1o5hIqZxxHID5iavAZNmyZbjrrrvwxz/+EU1NTejp6QEAtLS0CLXikksuwdSpU7Fy5UoAwBe+8AX8/Oc/x9VXX42rrroKW7duxXe/+1186Utf8u1NdHZ2AoAITgiC8JfW1lbxPSMIP6iVcmGj+ZUxRjfPRXAVmNx+++0A1C6dMnfeeScuu+wyAOq8E3n+yfTp0/GXv/wF//qv/4rjjz8eU6dOxdVXX42vfe1rpe25hKIo6OrqQnt7O9LptG/PSxAEClKxBOEH+sBkPCsm2ntjTP054WESfS3hOpVTDLN+FAsXLnQ85bUUwuEwXUAJgiCqAH3n19pQTADVZ0KBiT00XZggCIIYc2pRMQGA4XEchPkFBSYEQRDEmJOUFujx3MfETDEh7KHAhCAIghhz5MF9Y9n5NZtjGEpmxuz1jMZes7TVoeGU76/LGCvL844FFJgQBEEQY46uXHgMFZN/vXc9Tv3uY9ixb2hMXs+omBgH+f1t8x6ccPMa/Pp/3/T1dX+xdjsWfHsN/nfrXl+fdyygwIQgCIIYcyrlMXlm+34MJjN44B/vjMnrGd+bsZfJhnfVmXMb8//1i1d2HQJjwKu7q2/WHAUmBEEQxJgzqvOYjE0qJ5XJYd9gEgDw6KaeMXlNYypnJKVPIw2OZvK/9/cY8ACoGj0tFJgQBEEQY04lFJPegVHx/9t6B7Gtd6Dsr1lgfjUEYYPJtOnvS4X7aKqxFJsCE4IgCGLMkYORVDaHXK78gyJ7+kZ1Pz+6sfyqSUEqJ6X/eTBZHsWEe1mMnpZqgAITgiAIYswx3smPhWrSbQxMxiCdU2h+NaRykvmUi9+KSf51qnE+DwUmBEEQxJhjDETGYl4OV0zeN3sSwiEFm7v7sXP/cFlf0/g+jQHZ4Gh5UjnDSfKYEARBEIRjjIHIWCgmPf1qYHJMVzNOndUGAHh0U3dZX7NYVU65UjmkmBAEQRCEC0bT9kpCOeCKSUdzAkvmqdOyy53O4QFYJKROFDZ6PkRVjo/vP5tj4viSYkIQBEEQDjCWCI+Nx2QEANDVksDiYzuhKMD6XYfE78sB95i01kcBFAZgA3nFxE+TquxjqcbZPBSYEARBEGNOgcdkDLq/7ulXe5h0tiTQ3pzAiYdNAACsLqNqwt9nS50amMgBCGNMpHJSmRyyPlUmySrJqIOAx6/X9YtIpXeAIIjaZTSdxSf+8xmcNmsivvXhYyq9O8QYwRgTC3ZTPIKBZKbs83KyOYY9eY9JZ0sCAHD+vE68+PZBfPvhzfjeo6+bPu6ozibcd+XpiEW83c
cnhWISAzCkCxqGU1kwKSYYTWfREC99WR6SXsNJiujzv30Rz765Hys/fhw+esLUkl+/VEgxIQiiYmzpGcCmd/vx0Pp3K70rxBgiqyXNeSWh3IrJ/sEkMjmGkAJMbowDAJbOn4KmRAQ5pu6T2b9X3unDtt5Bz6+bygdcrfn3KQcKxmGCfqVz5Od18pz7h1IYTmWRiIZ9ef1SIcWEIIiKkcmpi1E1NoEivCMHJi11Ubx7aKTs5cK8h0l7UwKRsHpP3tGcwPPfOBsHhsyn8F74X8/h3UMjGEl7n0YsUjl5j4msmAwYAhO/DMDy98nJc/L339YQ8+X1S4UCE4IgKkYqo+rYw6kscjmGUL5ygRjf8CBEUYDGfOqi3ObXHkMah1Mfi6A+Zr4UNueDpqGk94BBmF/r1EVfVkx4RQ7HN8VENr+mMmCMQVGsv1sHAxaYUCqHIIiKwRUToDr7LRDe4GmbeCSEeFRdhspdLsxLhTubE0W21GiIqakNY7dWNxjNr7JiMmhQTPz6DgxLgVSOqS3/rfcvK5SbiRSYEARR62SymvNvqISLP1FdcMUkEQ0LX0O5FROeyjEqJnbU59UcXxST+kKPyYBBMfGr54jxuzSasj62B4fUzrPhkILmRNSX1y8VCkwIgqgYaelObriEiz9RXYzKikm+2sXY18RveEVOl5vAJB80ldILhAdhZoFJoWLiT3BuDHCGbZ53/5BaQj2hPhaYVCoFJgRBVIyM1D+BDLC1A1+s45Ew4hF18R8tu2KiNlFzp5jkA5Okf6kc+Tznc3I4xsnDXjEqJnZKjGZ8DYZaAlBgQhBEBdEpJpTKqRm4xyQR1Twm5S4X9uYxyadySgiaU7o+JvpGamPhMQHsg/6gVeQAFJgQBFFB0jqPCSkmtQJXEeKRMBIR7jEp3+fPGBMek66WOseP81Mx4X1MAM3oO2gIIEZ8Cs4LPCY2AQ8PTCY2xH15bT+gwIQgiIqR0XlMSDGpFfhCqa/KKZ9i0jeSFgFCe7PzBbhUxSST1dSRZikw4QrGYNKQyimTYmL3vKSYEARRUV7v6cey3/+jpE6WfpLOkWJSi/AgIRENa+bXMiomXC1pa4i56m5aX2K5sFymm4iGUJd/baGYFOljwhjDTf/zKv7w4i5Xr2tUTOxSOfspMCEIopLc/+I7+PPGbjz0cjBawGfIY1KTaObX0JiUC3vxlwAQc2u8lgunpPcUC4dQJwIdrpio53xT/nWMysaru/tx59Nv4VsPbSrwo9hhDETsUjlBa64GUGBCEDUFv4MLSjMzXR8TKheuGUS5cFQrFy5ng7UeD6XCQOmKCQ+2wiEFkbCmmPDvH+9jMrlJTS8ZJwH3jaipnlQmhyde73X8usYZPHZVOaSYEARRUXi+O23TCXIsSZFiUpOYlQuXUzHhqZwO14GJqmR4LWVPZbR+LQCEYjJiUEwm5QMT4+vIDdhWb+px/Lo88OHt/p1U5QSl6ytAgQlB1BQ5FqzARFZMqI9J7SCXCyd4uXBZUzlqD5Mut6mckhUT9ZyO8cBEKCbq8/HAhCsmRiVTTt88saXXsarEFZOJjYXzeYwI82sjBSYEQVQArpiUu/23U+RZOaSY1A6jZg3WyprKUbubummuBpTekl7ucAvIikku/7z5wKQxH5gYgnO5AdtwKosn39jr6HV5kD/J4nk52RzDweF8YFJPgQlBEBUgI1I5rMiWY0OaPCY1SdKsJf0YKCZuA5NSFROeqixUTMw9JnaKCeA8nTNkGMpnpZj0jaSRF1ExgVI5BEFUglw+MEmVsTTTDVSVU5uIBmvyEL8yKiZaczWPionHNKMWgKnvsV4oJhmkMjmtt4pFYMKn/p4wvRUA8LfX9ugqfcxgjGmKicXzcg7k5+Q0JyKIhoMTDrjak5UrV+Lkk09GU1MT2tvbccEFF2DLli2OH3/PPfdAURRccMEFbveTIAgf4AJFcBQT7SJLikntIJcLx8vsMRlKZoQy0emi6yugKSapTM6TL4
srJnETxUSunBEBhCEA4tu8f/YktDfFMTCawdPb9xV9Ta6MCsXEIrDaP5g3vjYGp+sr4DIwefLJJ7Fs2TI899xzWLNmDdLpNM477zwMDQ0Vfexbb72Fr3zlK3jf+97neWcJgiiNXMCqctK6IX6kmNQKYzldmJcKN8UjokrFKbwqB/BmzubviadyElIfE56mqYuG0Zww72PCG7A110Wx+NhOAMDqjfbpHLnra7HAJIhdXwGXgcnq1atx2WWX4dhjj8X8+fOxatUq7Ny5Ey+99JLt47LZLC6++GLcdNNNOPzww0vaYYIgvBM486usmFBVTs3AFRO182t5y4V7PJYKA2pAEQ0rALwFzkbFpF5STLiK0xCPiHRWgfk1H7w0xiNYcpwamPx1c4/ue2OEd32NRUJoSuQnGlulcvLG1wkBMr4CJXpM+vr6AABtbW222918881ob2/HFVdc4eh5k8kk+vv7df8IolT++moPHt6wu9K7UVEyAVNMdOXCNCvHd57Ztg/3vLBzTF9z/a5DWPX0DjBmnS5MSv09yl0u7NVfwuHpF2+KCTe/qs/Bq3JGJcWkKRHRUjwWfUwaExGcMrMNbQ0xHBxO44UdByxfkz9HQyysez0zDgwGr4cJUEJgksvl8OUvfxmLFi3CvHnzLLd76qmn8Jvf/Aa/+tWvHD/3ypUr0dLSIv5Nnz7d624SBABVKbjq7pdx9T3r0T+aLv6AcQrvY1LMQDdW6FI5AelGO564+t71+PoDG7HrwPCYveb1f9yEG/9nM17edchyGzHEL1r+cuHeATUwaW/yFpjwtvTGwXhOKPCYSKmcIUkN4Skjq6qcxngEkXAIZx/VDgB4apu1z4Qrj/WxSEEVkJH9AexhApQQmCxbtgybNm3CPffcY7nNwMAAPvOZz+BXv/oVJk2a5Pi5V6xYgb6+PvFv1y53A4wIwkg6qzrgszmGvuHaDUyC1vk1LQVIXi78hDXDqQz2DqhVF9xLMBbwNur78q9thhjiJ3lMMjlmm6LwypCkTHiBV9IYB+M5wegxkQOFASno4L/P5JjupmHQsO8zJtYDAPYNWh9brjw2xMNSIGS+70Hs+goAnj6p5cuX4+GHH8a6deswbdo0y+22b9+Ot956C0uXLhW/y+UbKkUiEWzZsgVHHHFEwePi8Tji8WC5hInqRm597mYY1nhD6/wajKocucFaKptDKpMTF3GiNLi3AvC2qHqFp+fsXlOU0UrlwoB6DkR8Llvl1V48wHCLUEy8BCbGlvRSymZQStPUSfs2ks6K78Cg5EMBgLYGdV20CzTNFBNuNjYSVPOrq8CEMYarrroKDz74INauXYtZs2bZbn/UUUdh48aNut9961vfwsDAAG699VZK0RBjhuxlqOXAJJsLWCrHECCNpLIUmPgEr0YBxlaN4jcBg6PW37NRqVxY/rxH0zn47cMUnguXFTkcoZh4SeVYzcpJZzGYVJWlpngE0bCCcEhBNscwms6ipU41rcqpHEALIOwCEx5ANcTDWt8Uyz4mefNrNQcmy5Ytw1133YU//vGPaGpqQk+PWrbU0tKCujq1PvySSy7B1KlTsXLlSiQSiQL/SWtrKwDY+lIIwm9kidjugjne4ebXVEBSObJiAqh32S310QrtzfiicoqJ+pkO2NwAaLNywgiHFETDCtJZJqp1/IS/d8+KScwPxSRvfpVSObJioigK6qJhDCYzwmQrN2BriqvfCSeBiaYQadU+1ZbKcXVrcvvtt6Ovrw9nnnkmurq6xL97771XbLNz5050d3f7vqMEUQqyydLugjneyQVcMaFeJv7RLQUmYzkgkX+mdjcAxhRHgpcMW6QcSmFYVKl4VExKmJdTUC7MTa4pvccEKJw8LDdga4irf+OByX4Hikm9XJWTzonvPocxNn5SOcVYu3at7d9XrVrl5iUJwhdIMVHJBmy6sHE/qPurf+gUkzEMxvlnapcy5aZQvmDHoyEMJLUUj5/w914f96aY1BdRHewoML
/G1P/KiglPMRkraOQGbNx3w5WNgdGMpR9LVkxklWg0k9U1jBtMZkTgNLEhWJ5OSuYSNUFaZ36t3aqcoHV+zRgUk7FMOYx3ZI+JVefPcpB24DERVTn5xTgeaMXEex8To2IiN1Lj5zqvuKk3KCZyDxNOS10UIbXfm5gKbGQ4nQ94YmGhRMnPyzk4lM7vU0hnvg0CFJgQNYGcMiDFRD0eRmm3EhgDpLFcQMc7eo/J2BzXbI6Bn1ZWKdNcjhUs2OWcl+Ofx6SUBmsmqZxRfSonYaGYNEmm3VBIEV1arXwm3OhcH48gFFJEAzujAXZ/foBf0NQSgAITokbQV+XU7uInH4d0rvKqCTfj8rtAakvvH3qPydgE4/qhjOavKQcfcYNiUo4ma3yh9lyVE+dVOT6bX40eE0PKiCu7xv0uZoDlgRgfQGjVVTao/hKAAhOiRpAXYbNUzqHhFJ7dvt+Rj6oS+LV/OenxQTDAcu9Pc748ktrSu+flnQex+9CI7nepTE7XhMutdyeXY3hm2z7RLM0pmVzxsny58ibBFZNIaYrJwaEUntm+z/T7wRd6r+mKkhSTjF4x4fuQyTEczAcGjYZUDg/OjIoKp5gBVlZMgELvCmc/BSYEUVmK9TH51kOb8OlfPYdnt+8fy91yzDf5/r1Z2v5lpYUjCE3W+D7wvg2kmLjjnYPD+NgvnsHlq/6u+z1vw85xq5isfaMXF/36eXz74c2uHid38rVKmfLFOhxShKlTC0y8ff7feHAjLvrV83jG8P1ljJXuMSml86vUrwXQggQAoisvLwVOFFTlqP9tNHSsnZhvH3/AovtrgWISM/fIBLVUGKDAhKgR5KqcAZML5lv7hwAA7xruPIPC9t5BAMDuQ6NFtrRHtpUEwQDL96GFFBNP8HTN6z0DuuBD9pcA7gO+HfvU2TpGJaYYsjJp5TEZNVTkAJq/wqpDaTFe3nkIAApmAqWyOaHieK3KKWlWjkEx4Y3UAO0zEYoJT+UIj4nWgE1GpHIsRmsMS51fAX1TN5mDAW2uBlBgQtQIxVrS94+ovxsNQHrDDO7AL7UBlayYBCKVkyPFpBRk38Cbe4fE/3cbAhO3AV9/PoXj1vPhxGRurMgBSlNMBkbTogLJ+N2Wg4n6qLfApDTFRO8xURSlYD94jxLjJOBBk6ocAGgT5lcLxSSpN/uKtvQpSuUQRKDIFLlg8lx6MoATbuVGSKWWU+oCkyAqJlQu7Ar5Lnhr74D4/z35hbq13lvA1ycCE3fniJzKGUlnTYfyiTk5kmLCTbBezu9teTURKAxMeDARj4Q8z+CpL8FjYlRMAC1lw+GpHM38mveYJO09JpZVOaIFPw94zPefUjkEUWHk1udGd30uxzAwql6Ig1iuOpDMiDvRUhtQBU4xMXpMarhiygvy+bp1j7ZAc8XkiMmN+e1cKiaj3hSTghEDJp+n0XcBaCZYL+f3VjkwMdx0aIu0N38JoCkP3lrSF75XuWw5LJXzGlMulopJo1reu3/QPjARqRzLcmFSTAiiosgSszH3PZjKCO9FOTpPlsoB6QJUsmLCZPNrAAKTnL4qZyRNiokb5MVGVg56RGDSAMC9YuI1lZPK6A3VgyaL+Whan94ApD4mHs7v7XaKiSGt4YWSPCbZQnVINsA2xtU5OfLv7fqYAJrCYa2YcPMrr/ZR/2v8LA9SYEIQlSWT03tM5LLCPslE5tV8V07kssBSG1DlcsEJTBhjhVU5pJi4Qpbn5cCku081rR6eV0zcekxEKsfl+WZUTMzSplxF4EoBIHV+9XB+y4qJ8aZDUw9KCEwkj4nbcn1jgzVA762R0zTGzq88MDGqPbzBmlnn11xOq0LiZt9E1L4qhwITgqgQaelOjjH9l5TL1kB5GjyVykEpMCl1/2TFpBxdNt0g97xoJY+JJ+Tz4e0Dw2LR39OvGiN5Kmc4nXXV6VeYwV2bXw2BiUnPIKMhFNCCFC/m120OUjn1HkuFAa0fSI65/86YvVc5SGqS0j
RWnV+NHhNeLnxwOF3wmcoKWoNI5RRW5SQzWoM36vxKEBXC2OVUlnzlJlJBVEwO+KiYZLPB6WMiG5JJMfGG7DHJ5hje2jeMbI4J8ytP5TDmLk3ZJ6Vy3KgExnPKrDRflAubKCZuv3+j6Sx2HdRKhAuqcnhaw2OpMKBPvbjt/prKFE/liN8bFRMLjwlXTLI5VtAAj5t9FUUL9oxKDKBdUyIhBc113oO2ckGBCVETGIfFyRdMfncIBFMx0ady/FNMKm1+lYNFqsrxhlGe39Y7iP2DSWRyDCEFmN5WD4W3+3cR9PEFL8fcVW8VKiZmqRwTj4nHcuHtewchx01GxUSetOuVcEgpqJhxipn5Ve5A22CWyinwmER1zxmLhITSYuz+KtrvxyTvik1gMqEhJrYLEhSYEDWB3QWzfyTYqRy5X4Gf5cKV9pjIwWKzRR+TdDYnKqaIQoyVFlt7B0RPj/amBKLhkNa4y2HQl8rkdM9rpmIMpzKmQYTxBsDUY2KimPA0hltFkKdxeOBgqZiUOD2XKy5ueplksjlhqo9ZKSZmqZwiigmg+UKMPpMhk/b7Zqkc4S+pD56/BKDAhKgRMoZc7JBVKieAVTl+Kia5AFXl8NcPKVqu3WjSvPhXz2PR9x7X+YAIDR5IT25SfQJbewdFqXBnSwKA1sfCqWJiPNbG3j6j6SzO+OFafPwXzxQ81qiumCkmoybpDaGYuLwx4CXSx01tMX29IcPcGK/whd6N6qQbViipQ3LQ0KRTTLR+I7kcExVNRo8JIM3LMZQMa+33C19v2EQxCaLxFaDAhKgR0oY7MV0qZzTYHpODfnpMcsExv/LAJBoOaRdPydOQyebw0s6D6B/NYOf+YcvnqWW4InB8fmHe3jsoSoU7m9XAhN/tOy3F7jf4Fozfid7+JPYOJPHq7v4C/0mBYmKWykmbdH4V5ldvismCw1rF68n75Jtikg8a3PQ5klOlMYtUjs5jItryZ/PfA/X3TSaKiVXJsJnZV35eDvcgtTcHz/gKUGBC1AjpnPUFU1ZMgthg7YBPVTmMsUDNyuGLWDQcEhd+xrSFcN9gSgRSxpQFoTKSP1bHTVMDkzf3Dol5T1wxqXepmBgNlcZjL/9sVCILUqY25cJ6xcRb51fe7faE6a0A1MBbDqR4aqMUj4n6ePepHB5kRULafBzAOpUjp1z4cQuHFN1x4mjdX/Vt6YdFibGZYiLPUlIfx8+RoEGBCVETGFtjD0oqSX9VpXK8BxNZwyJSafMr73kRCSv6yof8BZR7JQD31RC1Au/oOru9CYloCKlsDi/sOAAA6MovOg0uO5caAxNjMCwHJsZzyIn51azBGq8gcfP9S2VyeDuvpB0/vVWYfAekEmU/+pgAUpM1F4GJWUWOcV9Mq3LSWVFmLTdgk2nLl/gaza9DNorJiBSw9fTng9dmCkwIomIY7+ysFJNSzaXlQFcuXML+ZZn93e1Yw0tLI6EQQiFFa/2dv7Pv6dMm23qZU1IL8CChPh4WPUs2vHMIgKSYxN16TPSLrzEwkRdn4zlUUC5sWpVj02DNxfn99v4hZHIMDbEwprQk0JhfjGWVZtgnj0m9J4+Jum3MEJjIQbicpuGBCWOqWgiY+0sAoK1BNYsfLEjlFComfN/lz5H7kLpIMSGIymG8gA5Y9jEJ1gI4ms7qFuVSzK+GVi4V72OieUzUO0KRcshfXOUJuaSYmMNTj3XRMGa3q4EJj8GFx6RUxcSgisjfEaPZ1Vkqx59yYe4vObK9EYqiCFVDDh6GfPaYuFFMzN4noPfWNJh4TACgd0BNtZj5SwAbxcSkPDphUpUlfEgtdU7eyphDgQlRE/ALJldFB3Xm1+D2MTGa20ox5xoVk8qbXzWPCaDd5fELaI8UmJBiYs6IlKo4Mh+YcLryi44W8DlUTIwek5RRMSmeyuGBhnkqx2SInzBoOj8nt4rApAmA5tcwT+WUqJjEvSgmhe3ojfsiKyLhkCK23ZsPTKwUE2vza2EgZu
xjks0xEfhQKocgKgg3WjYneIdRC/Ory06X5cZ44SlFMclmg5XK4b6fiFEx4akc2WNCjddM4amcumhYLNAcXnEhAj6HqpMxMDGec3KgYlTd+M/cnGmnmOiqcjwoJlslxQTQFnH5NYdMzKBe8KaYFAZgAFAX0342KiJcNRGBiaViYh6Y8O9OnRT8aKkcbipPIptjCIcUUWYeNCgwIWoCfsGcUK8GJlYekxyrfIpDhku1/OJSkvmVBc38mldMQnnFxJBykFM5Xia71gJcEUhE9YpJW0NMLPyiB4dDxaSY+XXUxvzKg83WfOMu+86vpQ3x46kcnsLii7z8mn4pJma9QIqRslBM6qKyYqLv6sq/570Do/m/Fw9MipVH82Anlc0hk82J71V7U1xXLRQkKDAhagJeATIh/4XmfUxG09mCi2uQKnN4OeCUVlWWdzu7RMZYlVNpxSRtUEyMTazkVA4pJoXkckws5PWxMGZMrBd+HVmib5AadznB2GDNmF4Z1ikm5qkcbs4072Ni1vk1X5Xj8PzO5hi277VQTHSBid+KiftUTqFiYl4uDJgoJkUCk2Qmp9snbbJwoccEUBU2/r3qCGgaB6DAhKgRMkIx0d/Jcdk6pGj+kyD5TA4MqfvH3fM5Vlhh5JRcUKtyuMeEX/zzi5Ocyglif5lKIwfQdbEwouEQZk1Sh/bJ1Rb1Hs2vPMixLRc2Bib5c1NWTIyBBjfTJnTmV/X/nZ7f7xwcRiqTQywSwvS2egDaIj6gS+XkF+qoXx4T9+XCdlU5xsCDBy3FqnLqY2ER8MjpHDPFJB4JgQsjI6msqHYLakUOQIEJUeV866GN+OhtTxdNS6SExKy/k+N3h811UXGhHE0Fp2RYKCaSe95rOseomFTa/Mpl/2j+qlkveSEODqd1n6mTNMQXf/8SLrnjhcB4hP64/l287weP49XdfWV5fvlOmZ+7XD3okBYds2oVO3hgMrlR9R8UNFizM7/mf26TJuAaFRczxUT+fyfn5Rv5VvSHT2oQ6YhGQyonl2O6cupSMFNM/ueV3fjAj9Zi8+5+08dYVeVY9TEBZMUkn8qx8JgoiiIMsHJljlkLfkVRdM3buvv1IwuCCAUmRFXz0Mu78cquQ3hz36DtdnwRFIpJ/q6KX4SbE1FPTZ7KDb8bki8iXhWdwlROhcuFc4aqHKl6pFvqYQIUN26OprN4ZGMP1r2xF3sHk7bbjhV/3tCNXQdG8PhrvWV5/hHhL1H7wADAWXPbEVKA0w6fKLZzq5jwadvteanfGFjIgYpRdeNqR3NdxLThGSA3HtMW6FhYW4qcnN9Pb9sHQJuRA2iLPFc15P1sKEPn13v+vhNv7h3Cb57aYfoYK/Nre3McU1vrsOCw1gKPB1dMeLBhpZgAWlpa7v5q1YKfm2HlVE6QFZPSPi2CqCCMMXGhKNaYiV8wW/NTbHkfEx6YtNRF8xfZdKBSOXxI16TGGGKREFKZnG+KSarCAVhBVY6kmMj+EqC4x0S+iw9K2oefW939o0W29MaoVJHD+eRJ0/Hh46fofAwNLsuF+X635ys2ClI5NooJVyZj4TAaYxEMJDMYHM1ALhjizyc3WAvlS2WdnN+5HMOjm7oBAOfP6xS/N1bl8HNGUfSv5QXR+VVSnfgAwb+9tgfpbE4E2ByrVE48EsbjXzkDkVDhPvHP0m5ODkczwGqBn1nnV0CrBBpOkceEIMrKaDonvsDFLmb8zo7fZaQyOaQyOXF32FIXlYZdBSeVw8eatzXEPU9g5RR2fq2sYpIx9jHRKSbqxZPfSRczHcp3x256TZQT3h/HGGT5hVXFSZ3hbrneRblwLsdEepMvXMYS3mE7xUQKNo2pFY5VisPp+f3yrkPY059EYzyC986eJH6v9TFRX48HEQ0x87bubjAqJn0jadELpG8kjWe37y94jNX75L8zq4gxfnbGqh2ZiWaKiUV5tLi2pbLCu9UV0OZqAAUmRBUj30UX63/AF0HuMQFUyVekcuoiiE
t52KCwXxpPzi9wXgOnXMCqcvjddYR7TKSUA59+OnOSamwsZjrUVyYEo4KHG6u7yxSYjJgoD2a4qSgZTGVEsM/Th0YFalRWTCz6mMTCIdO+IoB1tYrTkuHVebXkA0e16xZ9K8Wk1Dk5gDwrR33vvFSZ8+imnoLHWCkmdhj31cpjAph3f7VWTAqD/iCncigwIaoWWVYttljzRTgRCYu7h8FkRiweLXVRXcliUOAek4mNMbF/XpusBa3zqzC/5lURucEav3gePkk1cxZbVOXPzGnKotzwc2tPmVI5IxaLkBE3HpO+YXWf45EQmvOLop3HxKrzq51iMmpifgXg6PvHGBNBwBIpjQMU9jHh50xDiXNyAE1x4Mdwez4w4a+5ZnOPibnc3GNih1zaCwCNNqbdiY15xWRQC0zkTsD6/Vf3obtvRHxmvAFfEKHAhKhahtPOFROtNFW7YA6MZvTmV6FIBGNhy2RzOJRfKFTFhAcm3gKKTNA6vwrzq6qYyC3pefrj8MkN+d85T+WMBEAxyeaYSCkcGEqV5ZwaMfGYmCGG+DkI2OQqNa4gGs3gTvqYRGXFRApMMtmc+NwTVqkcm/P71d39eOfgCBLREM6YO1n3N5724K/HVTY/FZPRdA7ZHMPW3gEAwAUnTEVLXRT7BlP4+1sHdI+xmi5sh/GztEvlcCM/v3lJZXJChTSafXnwumPfEAA1DWSWYgoKFJgQVYvsJShufjW/YGqpnKjI7wZlwvDBYc3U1loXlVI53hY5Yx+TSnd+NfYxqZdSDjwPzifmFrvblxfLIHhMjG3dy6GaiKqcIgsvr9BIZXJFg1HZDK55rqw7v1pNF46GFdNOrHLfE6Ni4iSVw02vZ85pL1CKjH1MrNQDL8jPMZzKiHb4R3U14dxjOgAAqw3pHKuUldPXAYqlcvKBSd6HJqfcjF4V/lm+uVcNTIJcKgy4DExWrlyJk08+GU1NTWhvb8cFF1yALVu22D7mV7/6Fd73vvdhwoQJmDBhAs455xy88MILJe00QQD6xapYia9stNQCk7S4Q9SlcgJSLsyNr631UUTCISmV409VTsUVE8N0YbklPVdMjsj35UhnmW0gNRIwj4mxe2o5DLDchFpfTDGRFvBiylO/UBAjIq0wYtP51SqVI3/PBnRDMqXApGDqrn0qR5fGOa6z4O/GcmErv4UX4pGQMKsOp7LaZOPJjSKl9Oimbp2Py4vHpDCVY73vIpWTV0y4pyYWDhW8Jn9erpgEdXgfx1Vg8uSTT2LZsmV47rnnsGbNGqTTaZx33nkYGhqyfMzatWvx6U9/Gk888QSeffZZTJ8+Heeddx7efffdkneeqG3cKCbpnJT7jpukcqQGa0EpN+WlwvzOyMs8EZnCzq/B6GPCyyZ5ymFPf1LcZfNUDmAfcIxIab0geEyM82Z6yqCYcBOq8e7YSCwSEsFfsaBNrlITgbCbzq+6wKSwLT1PuUbDSkFVSrHze2vvIN7cO4RYOIQPHNVe8HeuLoyks8hkc761owfUJmVczdg7kMQ7B9U+O7M7mvDe2ZPQGI9gT38SL+86JB5jV5VjhZUKZIZQTPLXCf5+zZrJ8X1/5+AwgOArJq5CydWrV+t+XrVqFdrb2/HSSy/h/e9/v+ljfv/73+t+/vWvf43/9//+Hx577DFccsklLneXIDSGdVU5RQKTjDYwjl/AhpJZ3YU4HrByYWF85YGJxULhFKNAUvlUjr6PCVdM+qS79uZEVPS3GEpl0Vpv/lwjUrdep1N0ywk/rzjlqMzRqnKKL3z1sQj6RtJF01x9OjO4RSrHRjHJSKkcbtyUq3L4DYRpCW2R8/vRjapa8r7Zk9CUKPReyAHIUDKrdUH1QTEBVN/GwGgGm95VO/m2NcREcPCBo9rxp1d2Y/Wmbpw4Y4L6PvJBmBvFRJ48XB8zLynm8O66A8kMkhnt/Zo1k+PBKxd0glyRA5ToMenry39AbW2OHzM8PIx0Om37mGQyif7+ft0/gjAi3x
kX811kJMWkSUrlmObUA5LK4f0JuMlNmHO9ml9z5s2wKkVGNOPSKyYc3mdBpHhsAo6RgFXlFCgm5UjluPBQGCc3W6HzXJkE6owx2z4mOsXExGMyalOpUuz8NmuqJhOPhEUQMJBMW3ZB9QpXIl555xAA6KY5a+mcHjESoVTzq51aAqjXLB64fOq/nsNX79+g7qfJ+zUGr0FurgaUEJjkcjl8+ctfxqJFizBv3jzHj/va176GKVOm4JxzzrHcZuXKlWhpaRH/pk+f7nU3iXHMcNKFYiLfyfEL5mjGkFMPVrkw7+jIc8mlKiY8LuGyfuUVE61SCihcQPi8l3oHnUvlSpwgeEzGIjAx6/xqRb3DeTlmnit9C3qm8yoZ04Gyodk0lZMPcsxUHrvz+619Q3i9ZwDhkCLMpmY0ScZ2s7kxpcCViFd2qTfkcmBy5tx2RMMK3jk4gncPqWkekcpx0XW2TlI7igUmoZCCI/Kpzld2HcKWPWql0IyJhbKiMVgJcnM1oISW9MuWLcOmTZvw1FNPOX7M9773Pdxzzz1Yu3YtEgnriG3FihW45pprxM/9/f0UnBAFyAtV8QZrhaa8vpG0KOm0k64rBVdMNI9JieZXppVpprOZyptfuYqV95gYvRJdzTwwcamYBKEqJ7/ANyVU+b8cbel5AFbMYwJox1D24pghl8+bVYEZ/VfW5lf9DQDHrlLF7vzmptfTj5goJheb0ZiIYP9QCoOjGd8VE36ceQAwWwpM6mJhzJzYgK29g9jWO4hpE+rF+4iFnb++TjGxqcjh/O7/nop/vH1Q/BxSFJwqzUkye15gnHlMOMuXL8fDDz+MdevWYdq0aY4ec8stt+B73/se/va3v+H444+33TYejyMeD27zFyIYyHfJRRus5bQ7Od6TYLd0F9usa7AWDI+J1vVV/S4Ic6BnxUQ9BvFoGAPJAAQmWX0fk1g4hEhIEX0u+MWz3tB10wx959fKByZ8gZ/b0YQX3z6IPWXxmKifnyPFhLdUL6aYmHhMkpkcGGNQFKWgK7IxHZiRBjM2mfQx4UGOme/C7vxeXSSNw5FbAQwJc7Bfiom6f1wxkhUT/jMPTM6c215yuXAxxQQA2psSOH9eV9HtjMFr0AMTV6kcxhiWL1+OBx98EI8//jhmzZrl6HE/+MEP8O1vfxurV6/GSSed5GlHCcKIG8VE3MmFtP4Ku/OSa30sjGg4FEDFRG9+9atcmBvs0llW0KZ+LBEt6fMeE7nyAdAunnxBsBvkJ39mQUjl8AV+Tqc6va53YFSodn4x4rAqB5Db0rvwmEjPy8+5gsDEOMQvY+8x4c9jlsqxOr/fPTSCV97pg6IA5x3jPDAZ8d1jog8UZsuTCaEpKLyUuNRyYSeBiVPk4LUpHvH1ucuBq8Bk2bJl+N3vfoe77roLTU1N6OnpQU9PD0ZGtBHll1xyCVasWCF+/v73v4/rrrsOd9xxB2bOnCkeMzhoP6aeIIrh1GOSzTEx/yMipXJ4YNKSnzgc1MBkgk/lwvxuVr5IpXOVU02MQ/wAffvwToPHxE4JkVMMQTK/Hj6pAZGQghwD9g4mizzKHTwt48T86tRjIs+OSkgLKv9OGAObgiF+Ij2nmPYxsWvTbnV+88ZlJ89sw+QmeyVdnpfjv8dEv7h3GFq6H9mhBiq8+ZqXlvRyMOgkleP4eaOFAX+QcRWY3H777ejr68OZZ56Jrq4u8e/ee+8V2+zcuRPd3d26x6RSKXziE5/QPeaWW27x710QNYlOMbEJJuSLp9zHhHdWbU7oA5OgDPErKBeOlGbO5X1M5LuyShpgtW68WkmkvMjykkZeBmo3yE+XyglAuTBf4FvrY6ICwm8DrOj86iCV47Qqh09EbqlTm/rxAYv8O2E896w6v8Yi+kaGHFEubGZ+tTi/H92orifG2ThmyCqN71U5UkroiPbGgonFR07WFBPGmLchfgZlwy/MlMgg4+qdM1Zc9l27dq3u57feesvNSxCEY5z2MclI6Y
qYJDFzhGISCY7HhDEmOr+2iVROaYpJ1mRGSSWbrIkKjpCFYtLsQjHRpXIqH1jKC3xHcxzvHhrxPTBxUy7spLIJ0JtfAfWcG0xmxHfCeGwth/hJisloOodMNodIOCTKhRNm5cIm53dv/yhe2qmaO4v5SwB9W3o/O78C+j4psw3+EkBtBhhS1GO4dzDpqcFauRQTeWxB0Lu+AjQrh6hinHZ+lXP7kZCCJsNgrOYApnL6RzNi4W4zNljz2GeFKyaRsCLuhCtpgM0YGqwB2iKbiIZEwOjEY6JP5VReMZFNpLw00+8ma27KhcWARBs1aTSdFYFGSz3/TuhVjIKqHIvBkFHJZA5o31VbxcTk/P7Lqz1gDFhwWKujEledYpL0r/MroA9wjMZXQL1+TG9TS3W37RkUKq4bxSQeCYELMXYD/NxipkQGGQpMiKpFr5hYL9Zy5UA4pBRcqJrr1AuOGOJX4f4egJbGqY+FRcAUL1HR4YpJOKQIX0clUzlpQ1UOoJk0u1rqhFQuqnJs/BE6xSQI5cKSV4NL5363pXfb+RWwV0z4PocUoDG/vTFYLzS/GlM7mm8oFgmJc3Ygn85xUi4sn99iNo4DtQSQ+piUQTGRF/fZHYWBCSAZYPcOiuuOG4+Joigi0Gz0KaACjB6TYPcwASgwIaqYYV3nVzvFRFsAFUWxSeUERzE5MKRP4wCy1O1t/zJSYMLv4irZ/VWT/bXLEA8OZWNhvRPFxDC/pZJKEGNM11GY36FWNpVTfA4U3+emRBShvKJm9F0ZHy+nAnM5rfkaDzaNE4ZHRTBll8pRtzkwlMLzOw4AAJY4KIkFJPNrKuPrrBxA3+r9yMlNptscma/U2bpn0FODNUALIspnfg1+K45g1wwRZWH9rkNYv/MgLj19ZoGBa6x46OV30VIXxVkmw7icMuywXDhj8DIYUzlaVU5hp8tSeWbbPuw6OIwLTz6s6LZrNu/BM9v3AYAYEjZRCkxEAyqPigkvDQ4r5opJNsew6pm3cOqsNsyb2uLpNdwg97zgyIqJ9rvii6rxb8OpLFrqrBeEh15+F811EXzgKOsuol4ZTmXFe2tORMtmfhWpHBeBiV1wJ3d95WiD/PTlwo3xCAYNvXDkCq9o/lxtjEewbzAlmqzZ+S74+b2tdxA3/c+r2HVgBNkcw7FTmkWKpBiNeW/MwaGUCJrqoz4pJnEtzTh1grnqwFM8r/f0i0rAuIsGa0D+8xzyN5VTp/OYBF8xocCkBrnuoU3Y+G4f5k9vxYLDJoz56/cOjOJf/7AejbEINt602PPzDDksF04bqj8SUXWEeVZaPNTf+6+YXH3veuwdSOL0IybZXlyzOYbld/2j4H1MadUuIqITp0fFhHd+DYUUxMKFHpPnd+zHtx/ejBNnTMD/+8Lpnl7DDZms/nMBgElNaiAmt9V21JI+bQxMMroFVoaff3XRMF69abHvwTlf4CMhtS8LV0y6+0fsHuaKdDbnauFtcJAOk0uFOUYVkd8MNCfUwEQObDOSehLN3wTwu37eYXlPPp3VYFJxwhsJ7ulP4s6n3xK//+BxztQSQEt/9A5opdlOAjcnTGpU9++ozmbL4Xo8lbN5tzbfza1iMrExjncOjhQtjXZDXTSMumgY6WzOMqgKEhSY1CCHRtQ0gXGex1ix68AIGFMvVrkcE7KxGzLZnG4RtwsmjP0yFEWtGJDldkAOTLROl6UwnMpgb/4CeWg4jek2sy6Tmax4P//y/sPzBtUQ/uk9WmfleNQ/xYSncuTAhKePegf871JqhjxXhXPFew9He1MCHz1hividE+OmsVrErl8HP/+GU+p0aW709Av5vFIURXhM9vQlfTmvAH0glogVX/icKCbG7wOgLeo8GObfs+a6KHb3jepSgfK5xINNua9IOpvD46/3AlBbyxs5acYEfOeCeeju0wK4pkQUly6cWfT9cbjKwAOgWN7r4genzGzDyo8fh/fY3MwdkQ9M5CA6Fnb3+is/dhw2vduH+d
P8Uy0j4RD+6zMnIp3NWQbsQYICkxqE+zEqZXyUJe1UNodEyP0dzbAhELFVTEyqP+TApNmQyuHP58RUaIf8PoupHPJnce3iubrFmlNqgzVhfg1rqRz5ufjddN/w2ASscjdeTltDDJeePlO3XTHFJJtjukmuyUzOtl+H/LnsH0r6H5gM68+r9iY1MEllczgwlMLExtLvhEfzxyKkOFv4hGJia37VSpw5/JwbSeVTOfnH823k81b2m3BFQe7E+uz2/egbSWNSYwwnzyyM0kMhBf982oyi78UOodDkU0f1PhpIQyEFnz7FPiXbGI9gSktCjLuIhhXXN17HTGnGMVOaPe+nFe+fM9n35ywXZH6tQXgZW6WMj3J1gtdF1ihJOwpMJJOl3JLZqJgA/qRzdIFJkefjF/iQAtOgBCi9wRpfN/SKibaY8LtprmSVm0yuUDExQygmFsGGfDy43G6nmMjnH+8V4ye8hwkPTGKRkNgvv0qGh6WKEycKjDYrp7hiwlObQGG5ML8h4N+ZtIliEguHxD41io6zGVFhc+4xnZapkFIxtlpv8Kkixw1HSKXEbtUSQoWOWg3CF/HKKSaaVOt1H/giyq/J2RyzrMTI5ArLUmXHO7/IRsMhccH0o8mavAAWez4xidRGdi69wZr6OKtyYb7Y8TRbuUmbeEzMEIqJRbAhqwC8isleMdHOv/2D/gcmZikR7jPZ41PJsJtSYUCelVO8XFhvfjWkclLGwEQLYIXJ3OR71jeSxprN7kp/vdBkqGRxUrHkN/IMHbN+LURxKDCpMRhjFQ9Mug2pHC8YJWXAesGWmz5x5Dsr2exX56MBVn6fxSp9UtLdphXaWHiPikn+8IQURbyOHMzJd9P9Y+A/MpuVY0axAXRyozHRvt5mAZY/F+6r8RPRw0RaJLnPxC/FZERU5Di7hNdLqpNVB29jahOQvw/6zq/NJqmclOn3TN3uiS292DeYQktdFAtN/CV+YVRM/JqT4wa5xwkpJt6go1ZjJE0uJGONzmPiVTHJL6Jt9Vo5rdW8nIyJydJMMQEk6drj4i/jJZUTs2lfHZemrzoZD2GEd34Nh2BqfpXvpsfCGC0GvhVRTOrEnJesaYppRCqbFUGMjeKj95iMjWLS6XPJMA/MnZbCctUpx6wDeLPApKDzqyGVI19DzGYfcQVj07tqlco5R3cUDURLoT4WhpzZ8mtOjhvkrrBuK3IIFTpqNYYuMKlUKqe/9MCEL6IN8YhYZEeLKibSBTN/JxUNK7rmQ5rZz4fARPbSOA1MbBZpLqsz5i2o1Dq/hsSx0JlfU2OrmPD3LHt/zJAbZJkFjPxcqIuGtS6xNp+f/LmUQzExW+D97v4qBvg5XHjlc9zKZ2Lex8S886vsMeFBcjpTqIAZFYxypnGAfMWd5CupRCqHD/MD3HV9JTToqNUY8gJZidbruRzT5dlL9ZjUx8JS4zHzxUgbFqevygFUo59sHtTuEH3wmPQ595iIVI7NhUy+yHn57LTAxFwxkdMffJEqJ2beHzMSEe0u2MxnwhdpVTGxN8oaz7+D5UjlmCzwfnd/5SbUeocehnBIC8Ctgra+fFWOnIIqCExEKkfdhjHtc0ybKGByYNIQC+O9syc52t9SkNVQv9rRu2FCQwyTGlUl169S5VqDjlqNUWnFZL/UkREAUllvygSvymmIR4qaQrWUQeEUW2NNv9HsVwquPCYOzK9yvtpLL5Nskc6vcvpjTFI5Dj0moZAiFmCzgGMkrQWpxUqLjedfOVI5ZiZSnsqRe3SUwqgUjDlF899YKCZ25ldD51d5Gx7cpjOFHhO5kdoHju4ouQTfCbpgyMdyYTfwdI6bycKEBgUmNYZsnKyEx8RYleBVteEX1zpJMbHycfAFUF7Yee67yRCY8LvKYqmXYqQyOewb1LpPOveYWH8lFUUpqWRY3/nVXjEZi8DE7A7bivq4dWUO77GRkMyvVh4T4/lXHvMrVx4KUzl7+pOmj3HLiGT4dUqx6ibzwM
R8urC8DT93xQC/UOH3DCh/GodTacUE0AITMr96g45ajSGnFNIVUEyMVQnprLd+GcJjIqdyinhM5AWQX1jb6i0UkxJTOcbuqY5TOUUuZKWUDJt3ftWOv95jUt5y4WyOiVki0SIeEwC2KRp5kS6mmPDzj6f1yukxaTHxmAwmMxjwIU027EExqbc5hqlMTpSI68yv3HNl8Jg0xqMivcbPXTH6IaJ9z1rrteaFZ84dmwZfxvRRJZjToZYMV8LjMh6gzq81RqUVkx6DlF1qVU59rHgqJ20Y4gcA5x7TgQtPmo6Pv2eqblu/BvkZvQROO78Wy0mXUjLstPMrUH7FRFZqHCkmNgHHiOQ3KtaMjZ9/R7Y34vWegTELTOpjEdGV9tBwGk2JqNXDHeFFMWmwUZ3e3j+kbhML6wZHiu8WT+VIE41jYfX98O+XWSrnmK5mLD/rSMztbBoz9UJWaSpRLgwAH50/FRvf6cMnTpxWfGOiAApMagzZm1AJj4lRMSm9Kqe4+dWsjLG1Pobvf+L4gm3jPvUxMb7P0SJVPk7KhQGt/NCLosNTObLHRJ/KkRSTMptf9XNVnLRUt07RyIs0X6it0hW8KuaYKc14vWcAI+ksRlJZ3wa9Adqxk/vjAGqg0juQRN9IGtNLfA1+frq5I+fbck+OzLbeQQBqwCabwXmflNGMWqotN3bjgQk/d4WZWboBUBQFX1k81/E++kEQFJOW+ih++Mn5FXnt8QClcmqMSptfjeWSns2vKU0x0abuFlFMHCyAxoZSXnGtmDhN5Yh5OSUoJiEtlWPW+RUov2Kim0Tr5HOx6VwqpzW0mTDmigkPGI9sbxTHev+QP74PQA24hk18GPLPfgR9/P25MZNqbekLj+FWEZg06X4vTxeWrx31sTCihsouUZYfKU+7eafwpm6Adt4Q1QUFJjWGrAQkK5LK8UcxGZI8JoloEcXEYetzoNDs5xUegE3I59iLekykIXR2yE3W3MIDE7Xzq3os+GLCGBvTwIT7ERQFjuamOPeY2JfE8vOvqyUh2tf7mc6R+78Y0zXcu+FHjxhu+HVVlWPTQVdWTGS4gjiSzuonGucVE6DQ/FqsL025aZQqcSqlmBClQYFJjVFxxaRPv2B7TuVwj0k8UnTqrpnEbIV8h1gK/H3OmNjg6Pmce0y8Vw1pnV+Vgj4myUxOBC5A+RusZUwqOOyw85iMmiom9oFJZ3MdJpQhMOEBXVM8UhBwCcXEB2OxXCLtFN6W3k4xmW0ITOS+PjygiUfUmVJcGUkZFZMKV6I0BsBjQpQGBSY1hs78OsaBCWNMSOmHtdXn96dUxSQi+S7MFyPRYdSRYuKXx0Q1WR4+yWFg4rgqp3TFRB7ix5/HuJD3lbkqx2zgmx12HhM5lWM3RZcxJpSsrpaEMHmWIzBpris0tzZLA+1KRXR+dWN+tVBMsjmG7XvNFRN5dpSYSZQ/xqLknHtMXCiT5URO5ZBiUp1QYFJjjFbQ/No/mhFy8PR8YOK1Mkh4TOLFy4U186sTL4O/HpOZ+cBkxIfpwoCsmHgJTNT/yoEJl9+NC3n/SNrTPB6n8M894iCNAxSpypGH+Nl4UfpHM+L3neVK5YwWltxyuGLiS2DiyfxqfgzfOTiMVCaHWCQkvpccuSpHbv0PaN8n/lmmHDbMKzdB6GNClAYFJjVGJcuF+WLdWh81nU7qBl7aWh8NF+/8atKS3grRwKyEzq/ZHEPvgGqonDExrwz5lsrxvn/ZfIAm9zFJ5Z+HLzri99lcWUcWuAkWgSIeE92sHK27qTGwks+/RDQsAhM/u79qpcKFC6Kf5tcRQ5DgBCvVaeseVS05fFJDQfqJf7dS2RwGk1pTQ6BwrEHGpF9QJWgKQOdXojQoMKkxKlkuzNMbnc2JAuOcW+Qhfk5n5UQdzK3gF+JShvjtH0wik2MIKVrKyi+PibGvhBt4IYy+82teMckv+JMb4+BrUzkNsE7b0XNsO7+aTBdmrFD14mkc3h5epHIGy5
DKMelT0lwGxcRT51fDub0tn8aZ3dFU8JiENB330HBa95qF5ldn6chyQ4pJ9UOBicRovqfBeGa0jB6T0XRW15/CyB4pvx83KVd1g36In7NZOVEHiok2K8f7seE+mvamhDBjFns+XjZdbKEupcGa1vm18G6XK1CN8Yiv1SNWmHXjtcNpVY5uiq5hW95cjXdhFebXYe+BSSabg9lUZmOpMFCmwMTDrBzjNY4rJkbjK6CZwQHgYP44FaZy8g3WXPqGykUQZuUQpUGBSR7GGD78s6dw9o/W2i6u1Y5OMfHxfaazOXzk50/h7B89aXn8+ILd2ZLQpQzcIpe2qkP87BdrbRF008fEe4DK78w7WhLi+YoFvI7LhYv4aeywN79qnh0/vRBWaJOFnV2C6mzKgLVupBGE5Cm6BnWlWyoVBuCL+fWSO17A+3/wBPbmU3e2gUnCz3JhLy3p1QXbmEraZmF8BfLqWv6c45OY+WuKPiYGxaTiHhMpMEnQEL2qhAKTPKlsDtt6B7G7bxT7fZR2g0a5yoWf3b4fb+wZxM4Dw5YXerlUs5RUjlzaKismVoZVN3dyxXqiOEH0ymhOaBVDmaytmVSkchzOyilliF84FBKVE0Ixkaqc+AJazsCEL2ZOza8NTsyv+S6lwkth6HC6p187/wCUbH7NZHN4fscB7BtM4dFN3QDkrq9lNr968JjM6VADj827+3Eor34wxrDdolSYk8gHIAeGLFI5ASsXntpahw8c1Y6LTj0MIYfnFxEsKDDJI3eiHEyWf7JqpZBVBT/NjY9u6hH/b9VDQr5jjZagmMjPXy+VC1spJqKM0UkfEx86v8rKEH8+xuzfqygXHhPFBAWdX+XUmJ8mTSvSOefdeAGtB4dpS3pD6azVFF3tc4kDACY25s2vg946v+4bTIlj+uhG9fw3m5PD0Y5raaXYjDEMe0jlzJjYgKM6m5DJMazZvAeAqu4NJjMIhxTRc8cIP648lVMvzK/64FZ4uSqcygmFFNxx2cn47seOq+h+EN6hwCSPnH4YKPHCEWR004V9SuVkcwxrNmuBiVkPCUC6Y20pzfzKnz8RVRs9JYqU0KZzzi+Yfgzx416GrpaETkq2C3ZSGXUfiwYmpZhfdZ1fzT0mDfGImPHSN1xO8ytXiNwpJsVSOep/zf0oQrFr4YqJGqD0j2Y8fRfk8QrP79iP/YNJ28BEHNcSFZNkJicmM7tRTABgybwuANqNBPeXzJxYb3nuGQMTYx8To/m10ooJUf3QGZQnrVNMxm9gUo4Ga39/S5WzOcUUE53HxMM+yGkHALp0iRkZFx4TPxqs9UgBWDSsiBJMu+d02mCtlHJh886v+qocvWJSvu+Bm/lFgJaeMRpagcIKFaspukaPSUtdFHxe3UEPBlh5UnaOAWs27xFdXY0D/PjrAeo5X8r5JT/WdWByXCcA4Kmt+zAwmrZsRS9TJwITNaBKWPQxcVtpRRBW0BmUh1duANZ3/OMBncfEJ8VktZTGASwWj1RW3CmWan7lz8/v3LRyYXuPiTPFxIfApE8rS1UUReTobQOTjL6PiBV+KCay+TUlzK+yYjIW5ldvDdaMhtZUJieMtPx8MFNM5POvI18uHA4pmFDv3WdinCD96KYeW8WkMR4RpdilGGBFz5lwyHFgx5nd3ogjJjcglc3h8dd7pVb0haXCHK4icvNrfYH5lVflBKPzK1H9UGCSR/aYjO9UjnZhz+aYbj6KF3I5JgITfkEyLh6ApiI0xMJoknqPeFFMRoyKSZFyYVGV48Zj4lFNktvud+VTBk58K24brJU6Xdh4t8uD8fpYeGzMry5lf/5Zp7I53TkzYqIemBll+fmnvj9NzWgroZcJD0DPnDsZAPD0tn2iOsesj4miKL4Effw9yz1GnKIoipbO2diDbb0DAOwVEx4Mi6qcAvOruj/CN1ThIX5E9UNnUB45xzy+Uzn6xbHUdM76dw6hp38UjfEITp7ZBsBcMeHN1TpaVBXBD48JN0QWm5
XjZi5LQgqYvARth4bT4hi3N6seBicqjFvzqxdzrghMdJ1f1efRfBpSKqesgYm7nheyyVMuvebHVA221OcSion0Pe6WepgoivaapXR/5cHOoiMmYU5HIzI5JoIGM8VE/n1JgYnBU+OW8+ep6Zy1b/Ti9Z7igQk/fwdE51f1dY3pQF5p5aSRIUHY4eoMWrlyJU4++WQ0NTWhvb0dF1xwAbZs2VL0cffddx+OOuooJBIJHHfccXjkkUc873C50HlMxrFiYkwBlBqYcLXkA0e1i4ZVZpUTPYb8fimpnAKPicMGa046UspD0byoEnyxamuIieeKOzDUij4mDsuFPSkmeY+JvvOrsSonMjZ9TFz6EWIRrcRZLgPm50J9NCwCDlHBIwUwewxdXzlt+VSOF48JV8Y6WhI4P69CcMzKhQGpl0kJFU9emqvJHDulGdPb6jCazmFgNANFAY6YbOcxCRl+Nje/umlkSBB2uApMnnzySSxbtgzPPfcc1qxZg3Q6jfPOOw9DQ0OWj3nmmWfw6U9/GldccQVefvllXHDBBbjggguwadOmknfeT2pFMTGaJpNZ714Kxpjo37BkXifqo9ygWPic3VIPE6CwXNUNslETKJ7ecGO0lAMTL6qE7C/hOGna5j6V437fcpJiEo1Y9DGJh8fUY+LGj2BWBixKhaVF2myKrmy8lmkTJcPeUzldLQksyasQgPoZWk399VMxcWt85cjpHACYNqHONsgxvhfeLyaoQ/yI6sfVGbR69WpcdtllOPbYYzF//nysWrUKO3fuxEsvvWT5mFtvvRXnn38+rr32Whx99NH49re/jfe85z34+c9/XvLO+0lGku0HxnFg4qdi8urufuw6MIJENIQz5k4W1RBmXU61UmE1vVFKKkcubQVkFaGIx8TBIhiW1AQvBlhj5Ye8f/54TPxosKaflZPLMcljoikm5fRaiWDRhR/BrC39SFofpKr/b+IxMflcAO/dXxljutk7R3U2YWZ+YKNVGkf+G6/e8cKwh66vRs6XAik74ytQ2D21LqoeXxHccsUkIEP8iOqnpNC2r68PANDW1ma5zbPPPotzzjlH97vFixfj2WeftXxMMplEf3+/7l+5yciKyXhO5fjoMeFpnDPntKM+FhGLg7nHRN9DIlbCnb+VYlLMY+KkwRrgLPViRY/kpeEUa5kPOPeYaM/lxWOi/jccUnQ+gHQuZ+j86k+/Dc76XYfwvh88jkc3dmuv6WERMxvkN5JSn0dWD8ym6HabKFmA9+6vB4fT4rvTka++4ukc2VxrxI9eJqMeBvgZOWFaqzgWdv4SoNBkW9DHxND5tdJD/Ijqx/MZlMvl8OUvfxmLFi3CvHnzLLfr6elBR0eH7ncdHR3o6emxeITqZWlpaRH/pk+f7nU3HSN7TMZ1ubBhsS2lZPiNPapx7vQjJwLQFAyzqhx+4Z+UXwh88ZjE9X1M1MZThYbVdM7dIlhKyfDbB4YBqPK4eL5I8Xk5PNAoPsTPXh2yI5s/DrJiAqjB6ZDJrJzBZEYXsHtlzeYe7Dowgr+8qn3n3XTj5ZgrJvqur4C5YrJzP/9c6nXP6TUw4WbaSY0xcS5/6qRpaEpEsOjISZaP87MqpxTFJBRS8NlFM6EowDlHd9humzC8jtb5Ve9TctubhiCs8DwTetmyZdi0aROeeuopP/cHALBixQpcc8014uf+/v6yBye6zq/jOTDJL2iKkm+TXoJiwi+uvBeEnWLCg72mvPmvlHJhvjDxO0a57Xs6y0SrbI5bo2Wdg9SLFdqkVk0e5xd2Rx6TYoFJkQokO+TOr/KxSGeZlh6LRXTGzYHRjDA1e4WrFYNSwCp6y0Q8eEykgGM4VZjKEYpJ/m+ZbA5v7jNvJOY1MJG7GHMOn9yIf1x3ru155scgv+ESPSacz7//cFz+3llFvxeFqRyj+ZX6mBD+4ikwWb58OR5++GGsW7cO06ZNs922s7MTe/bs0f1uz5496OzstHgEEI/HEY/HveyaZ+
QGa+M1lZPLMaFQNMYiGEhmSgpMeGdQvpDZtQ3nfoXGvMwdC6sXt9I8JvpUDqCae43pkIzLC6bXQX7ZHMN2k0mt/MJu1RuFMe1zKTZduJifxg5uowqHFPEvm2NIZ7VUTn0sjGg4hPpYGMP5pmSlBiY9IjDRFmOtwZoLxcQkRWOW1jCaZHceGEY6y1AXDWNqq6ZkAd7LhY1mbk6xRd4P86sfqRxANcG6aTpo/NlofqWW9IRfuDqDGGNYvnw5HnzwQTz++OOYNWtW0ccsXLgQjz32mO53a9aswcKFC93taZmphZb0ctqkKR8glBSYGLpc8jJNs1QYP6Z8JLlRBnaDXNoK6FUGs46obhs/aU3W3AUm7x4cQTKTQywSwnQ5lVOsz0qOidknTqtyUhZpKzvkIX4AdCZfnh7gyoOfg/y0wEQ7LzIeBr6ZKyaFVTnGzq+8u+kR7Q0F02Yn5uflHBxOiaolJ/QYBgI6xY/AZNjQ+bjcGD0mhZ1fqSU94S+uzqBly5bhd7/7He666y40NTWhp6cHPT09GBnRZkZccsklWLFihfj56quvxurVq/GjH/0Ir7/+Om688Ua8+OKLWL58uX/vwgcyNRCYyAsjVzlK8ZjwwISb/awUE8aYOKY8IOILcCbHXC0I8vPzO2hFUWxLht1KzJonxN2x2bZX9dwcPqlBl2fngY6VmVYOzpwGJoB71UQLTHi5p3o85EWS+3b86mUiV6/ISqQXPwJfEEdMPCb1UbPARP2bmAdj0qtjQoP6PrM55ioIM3b3dUqzD3OIhOF3jAITozJT2Pk1p/svpXKIUnEVmNx+++3o6+vDmWeeia6uLvHv3nvvFdvs3LkT3d2a+/7000/HXXfdhV/+8peYP38+7r//fjz00EO2htlKUAupHL6QhUOKuKh5VUyyOSa8OHwRq7PwmIymtS6qfOGTL15ugyO5tJVjV5KbcbkIOvGEmMH9JUYfg5YaMn+f8mdQzGOiawDn0gMjd34FgFhEP5wtpGiBj19t6ftHMyJAkAN+ESy6aMZlppiMmpTOChO2ITCZ3VFYFhuPqCMSAHc+E6uGbcXwo6uuWTBWTgr7mHDzq74XDikmhF+48pg4kY7Xrl1b8LtPfvKT+OQnP+nmpcYceXFIZXNIZrKiAmK8wBeyeCRUcLfjFvnCavSYGKtPBvLeAkXRLqYxw52/VUMqM4ydXwH7JmvuFZN8isNlKsdqUmuxBmv83AspxYOnSEhBSFH9Iup7te6ZYUTr/Kr+HMsfj0P5rqcNsYjontrsQ78NQEt5AIZUjqiUKs1jYtbTQ5iw89vxz8Wqu+mEhhgGkhkcGErh8MnO9sWqYVsx/GmwVrlUjiIFr8ZBkOQxIfyCzqA8GUM6wTgyfTzAF9p4JFRS51VA8x5wsyRg7THhx7IxFhE5fmO5qhvk0lZO3Ka/B/9snV4wnTREM8NqUmux8mO+z8XSOABPW3kzwOakIX6A5hHgi6R8PP3otwFoLfoB9XgaS0vdLGJmismIiRGUB6zJjPp6mmJiHph4McD2eAxMeNqzlFJssxLpchKXXqdOav1v1ceEGqwRpUKBSR7jRWI8pnO4YpKIhksq1wVgOt5d9pjI6tqgoSIHgH6Qn8sLtFzayklYdETNSROUIw7TBsXMqmYwxrDdQjGJFwl0RHM1x4GTt5Jh0fnVsLAcyqdy5OPpl/mVN5zj8KDVbaUUUFgGDJi3Z5cDrK17BjGSziIaVjCjTd/DhOO2++vAaFqoP25TOcZSbC/IFVRjQZ2JfweQzK+GYJMarBGlQmdQHrkqB9DSD+OJpKSYGEv93MIlfnm8O18QMlJZMqAdS16Rw/Gq2phdmK0Uk7TkHXKaNnAy28bInv4kBpIZhEMKZk7SL4A8NWRlftXa0TtbaLwqJlmjYmIITOQF3S/za7eUygG0xTjtMlgELGblpAvPhVg4JJ53wzuHAACzDIZkGbe9TLi/pDkREX4Wp/BSbMB70OdXubBTZGVG/n+rIX
6kmBClQoFJHtn8CoxPxWRUeEzCJadyzBQT2Ywnd381U0wAb4FJVhotrwtM+GJtUCXkaivnfUzsA5NHNnbjmw9u1P19a69akTNjYn2BN6muiJlWTBZ2OC4+7qDFvRnGwIQff+4xkc3EfplfewyBCVcaeImpK4+Jw86viqKIc+OVfGBiNw/GbWDi1V/CMQv6Hn99D5bf9Q/0DRc/3n50fnWD7DGRgyGt5J+BMeYpPUcQZtAZlMeomIzHkmG+kCWimvnVS6MuQLuoytJ0JBwSi6tcmWPsYcLxMshPVh3ku1WreTT6wMTpwm+fernlL1vw++d34k+v7Ba/sytJLdZgzemcnILn81iVEzKmckY0vxDHj+oRQO8xAbRzQfP9OL+75vskTwLW1DP9ucV/fmWXOs/rCJt5MLyB3MFht4GJu1Jhjllg8pO/bcXDG7rxt9f2WD1MwAN9t2qNV+TOr7pUDh8EmcnpPHpuxgwQhBl0BuUxNvoan4GJf4oJl6G5SZJj7CEB2AQmYl6O8zt/XpEhl7YC1oqJLpXjk8dk72ASgDbEEJCMryYGS9GttVgqx3Hg5E0xyTGj+bWwKofT7FdgYlRMeCrHQwXH4fmg7+0Dw+K9i7RGzNAELJ+W2pKf5zTbJjBpFMMBnX3n9/AeJi79JRytLb36erkcE4Ftt8GTYwY36fJREOVGVmYSJopJMpvTXT/djBkgCDMoMMmTqQHFhF/E49FQSUP0APNUDiD7ALTjJ9rRWwQmblSbIalUmFcHANblwqKHSUjRbW9HnU1DtFQmJ97PU1v3YSAfoFmVCgNaoFPcY+IwMOHv1Wsfk5C5+dVMMfHLY8INpkIx8dBgraM5jqZ4BNkcw1v71KF83PxqrFDhQRZ/z3YTdJukShkndOdVoA6PqRzjIL/u/lERyBsVJiPprHb+TSxxVIBTrBUTrY9JOqNdP92MGSAIM+gMypOuAY+Jn4qJSOUk9IEJr5yQe5nwIKXAY+IhlSOaq8X1C5FVia+XEka7cmFZ7k9lc3j89V4AUhMvEy+D03Jhp2mNuEgNuasakmflqK+nT+XIqYEWXzqUZsV5wgMDLZXjvsGaoigiJcOPt3UqRzs/QopqfrWCB8xOv/M9ouur18BEX4q9Na/qyM9txcG8WhIOKQU3BeUiLntMZF9XWBueKZ+L1PmVKBUKTPLUgmKSlBSTuIegQMY4J4dj1mtCtKP3oSrHrLkaYK2YaB1GnZ/qVn4VoNAg+ejGHuwfTIrfHz65cAEs1hfFtcekSCdZM7KSB4CXC0cLzK/mfUzczuTh8Lv/hlhYLOJ88U95UEwALSXDzcZWFSpykHVYW71tzw8emDidKu61hwnHWIrNgyygsIrJiJbGiRbM/SkX8UgIXGyUj6OcsuE3DG6USYKwggKTPMY+Jl57DASZpFT9UY6qHMC810Sxqhyj8diOYZPmaoAcmBiqcrjJ0uGiD1j3RAG0wIQHB2vf6MXGd1WD5bQJdQV37vK2xapyylkunJWCC76g8eCUxyxmikk2x0ynRTuB+yU6WhJoyitrA4Y+Jm5LS4+0UEyMgYkcZB1pU5EDaOelY8WkvzTFxJgmkwOTPUVSOQfG2F8CqEoV/06YmV8B7XOgihzCD+gsysPv4PgFbjwqJmblwl6m+wKaxN9s6THRFjO+GBmrCOJezK8W0j2vpCnoY8IXQBd3l3ZD9/gd6/xprZjeVofRdA6/eWoHAGsfQ0LaN7OBha7NrxH7QMcMOVNpTOVw5EWnLho2HfLnBjnl0WBIl2Q8NuPi5uJtvYNgjFmWzsqKmp2/BACa4uo57OQ7P5rOiuDAbXM1jmZ+LQxM9g2mbE3N/PxrGyN/CYcH13IAqKoj6v8LxYTSOIQPUGCSh9/BTajPX6TGpWJiUi5c4qycAsXEpNfEoJX5tRSPiWEhSlgs1l4Gi9mlXg7kK3ImNsawZF4XAOB/t+4DYF35oRu8Z/JeefDkvI9JaYqJNsRP/3ryYq4oSsm9TH
rEoLs6yWCqPhf3dLkJGAHNw/Pm3iFdutAYmMiKml1FDqApJsOprC7lZQZXNBLRkGePh6yYMMZERRentz9p+VjuMZnYONaBiXo86wznCP9e8RsG6vpK+AGdRXm45N9ar68eGE/oza+lTRfW+pgYTIfxQsVEeEx8aLBm6TERJbl+mF+tFYkD+QqWCfUxnD+vU/c3qyZeCSkAMHtOz1U5LsyvOo+JhWJiXNxL7WWieTHimsG0hKocAJjaWodENIRUNoctPZpp1C6VYzUjh9MgBTHGydhGNBWozrOXQj6u+wZT6BtJI6RoCoydz6Ryikk+MDEcZx6I8ONGqRzCD+gsysMXsAkNzmXdakOUC5foMWGMWZtfo4WKiajKieu39VYubK6YWJtf3bc+t1VMhvKKSUMMJ0xr1cn5Vk28IuGQSIuYVdK4nZUTF4GTR/OroY8Jp8Hg22kqsWRYbkQmDKYFfUzcLe6hkCKmBG/Md3WNRULiPXHkVJ/VVGFOPBIWx76YUspVoI7muKv9lmmWKp64iXd6Wz1mTKzXvYYZ/PxrG0OPCaB9v+qi+nOUf4d5p2dK5RB+QIFJHn4HV27FpLd/FLev3Y79g9ZybbngAUAiWlq58HAqKxSmgsAkrsninAGLBmte5vWIAX5Gv4pFMCHKUj2lcqzNr20NMYRCik41sfMycPPgiImR1M10Yfm5vComfA2PF3hM9Me01F4mPVIjMp4u4UGqFjC6vwTx47whbzo2mxnDU4pTW+scdUhtdNjLpFtSTLwiH9ftosy8UVT5GAcfyhyokGLC1TTjOcIDS37DQKkcwg/oLMqTHiOPyZ3PvIXvr34d/99zb5fl+e3QVeXkLyheGqzxMsdISCks08xfwIZMPCZ+pHIGLTwmxRqsuQlMZJ+M0azK26G3Nap3zB86XvWZTG+rs/Uc2LW5d53K8VAuzLu+hhSIFITxmBjTY835z8trLxPhMWlJiFJxYx+TmIcuodwzsvEdNTAxm7I7qUn9fI7uanb0nEZFx4o38j1HprR6M74C+lLsN/aogckRUmBil8oRgUmjd8XGCxMb1NczeluEYpIixYTwj7EZtlAF8GmnE8qsmHDzXO/A2Csmpp1fPSgmcqmwMc8uFJO8spHJ5kTlhB/mV6s7RqtyYS8ek7aGGEKKWka7byiJ9iZtEeKvz7tunjyzDbdf/B5Mb6s3fS4Ob5lumspx7THxYH41dH01ez1jCTb/vEaK+C7MSGVy2JdXBTtbEiIwMlbllKKYbN+rLupmism5x3Tg2xfMwxmzJzt6TqMHxox0NofHXlMb6r3P4fOaIZdib5CGDA7mA367kmHj+TdWfPNDR+PMuZPx/jn6982DW66EkceE8AMKTPJoVTlaYJLLMd+bGHFvRqmtvr0gUjmRMGJh7+ZXPgHVTCEwKiayCdayXNjFPvDOq8bAxCr9IiaeulgAI+EQJjfFsac/iZ6+UV1gwl9f7iOx5Liuos9p1xuFl0s7lcGL9UUxwywwKaaYmDXLc0rvwCgYU99TW31MnO8DSb3HxMsdNu9LwsUssym78UgYnzlthuPndNLL5Lk396NvJI1JjTGcPLPNxR7r4aXY6SzDq7v7AajBFk/hOFFMxrKPCaB2zjXrnhsLGxUTCkyI0qGzKA+/g+PmV6C4Q98LfHBXqcPRvJD0aVYOl/abTAITvpjxC9VAvjw0JhluOV72QaRSHComPGXgdgHkk2PlFuG5HMPBfFDmtlzTzrcypoqJpHAZm84ZFRPRLM+Desjv+tub4wiFFF0qhzEmTRd2fwmaMbFeZ2Y2U0zcou2f9ffykY3q0MZzj+ksMNu6QS7FzkizfMzOOZlSzr9yoaVyuMeEUjlE6VBgkof3VWiIRcRFrxzpHH7nWJHAxKdZOVZdXwFtMePSLldMjO3oAb9TOVYN1rwtgHxyrFwh0TeSFgu82zvWhE0lTUry/jjBU7kw95hIC2qB+bWg5Na7YtJtmCfDFQnG1O8VP45u+5
gA6mcp372bKSZuEQ3gkubvNZtjWLNZDUyWGMrEvSB/d7paEmiMR8Sx6h1ImvZTKeX8KxcilcMVExrgR/gAnUV55DHsxgoCPxGBSQUauPG7dV2DNQ+BCQ+qmhOFwYZRMeF3oMZ29IB78ytjzDowsUhvZDyWpZoZEXkPiaZExLG6wbFVTFzPyrGfvWNGziyVE9H7TYwyfL1Jszyn9EilwoCqavCX5tOM1X3wdgmSe5P4oZgUS+X8/a0D2DeYQktdFAuPmFjy68lqI/fMTGqMIxxSkM0x4c+ROTDs/fwrFzGjxyQg+0VUN3QW5dEaPimOHfpe4BUtlfCYpEwVE/d3w3aKiXExG7Do+gpIfUwcpnKGUlmxiPMqAU7CSjHJeTNZaqWbWmBi5W9xgl0wwT8Xp6qOVdrKDq6YyKkc7jMCNG+QDP8shyxUBDtED5N8vw9F0b5XusDE4x32kVJvEj8Uk2KpnNWbeBqnwxeDZ4tJYBIOKWjPVxOZ+UwqZXy1gwci3OzuZlo0QVhBgUkenWLiwKHv9TW4klDK1FavyOXC8RI8JrapnALFpHhgkna4wB7I+0vqouGCxUgroTWYXzPePCZdJoGJlb/FCXbzd0QfE9eBSanmV+3/zYYPNoieNB4Uk369YgJADPLjAR7gvbz0yA6ty64vionNdz6XYyIw8SONA+i/O3LHYLteJvz8mxCgwIR7SobTVJVD+AedRXk0M56izfXwWTGRfSWlTG31ipbKKc1jwlUf4wA/QDNQDqeyyOWY5ZwcQPKYOAyO9vOulyYXZjHYzsL86vaC2WniMSnljtVqlg/g3vyasGi/b4dpYCK9nrHrK1CaYtJj8JgA2jmgC0w83mH7rZjwVI6ZSrr+nUPo6R9FYzyC986eVPJrAfo0qNyYzywg5gRRMaHOr0Q5oMAkj9xXQaRyfFZMjOmbsU7n6BusqR99jqHo4DIjVu3oAX3J6Ug6qykmPnhM7Lpe8sU6lcnplCjN/OrVYzIinu+ATWBUDC2YKFzk0y49JkId8qCYhBRz86udYmKm8hRD85hIgUn+HOCD6NTptN4WssMnNwjPSrkVk0c3dgMAPnBUuzBZl4peMdECkw4+L8ekl0kp51+50Myv1PmV8A/qY5InJfVVMI5oL0Y6m8O6N/bqAo2prXU49XC9Sc4sMJnS6r21tVv4QiaXCwPqYs7vOkfTWazd0qtTcw6f3IgTpreKn8UAv0RhYJKIhqAoavXFUCpj6zFx28fEboCZXNGSzOREIOB1WBxfIEbTOfSPZNBSH5Ve333XTXF8Td6rW/OrqEDy0PnVm2LiLkDP5ZgoF5ZnCTUIxSTfObiEu+tENIzpbfV4e/+waedXt1ippIwxPOpzGgfQ1MaJDTFdasZeMVGPm5fzr1yI6cLcY0KBCeEDFJjkyUgekyaXVTkPvvwuvnr/hoLfr/7y+3BUp9YS21iJM5Ylw9kcE+pBQjK/AvrA5DdP7cAP/7JF99hwSMH/fvUsEUTxXixmiomiKGiIRTCYzGA4mdUG+NkpJg5TOQdtpGz5TjaZlgITnspxmTJIRMNoa4jhwFAK3f0jaKmPitdva7BuPW/5fA5SOcbyXet9U7cbSWfBGHOkOvBDbNVgzVQxMfiFnLJ/KIVMjkFRgMlN2iLaJMyvqYLX98Ls9ia8vX/Y0SycYjRYKCbb9w7hnYMjSERDOGOu926vRvgQPuPkY7teJppi4v78Kxf8O8wVE0rlEH5A4W0eeaaKW/Nr9yEtn/7+OZPFgr1j75Buu0qmcmTZPx4N6XL7yaz2t10HhgGonR75e8nmGDblB6YB9uZXQLrTTmXEMTTvY+Ku+6zoemkSmETDipD25feqKWHuT/UOwxj6UhQTPivHbIifW48JPz8zOea4Mse0Jb10TEyrcuLa5+jGqM0Dj+ZEVBd8NBoUk1IDk+UfOBKfPHEaPuSg824xrL7z3XkT6oy2BtPgzSvnHduBT500Df96zhzd74ViYpLKKeX8Kx
dyShggxYTwBzqL8vAGa5GQgsa4uuA69ZjwluKLj+3Eby8/RbSrPiCZ/IDCQGQse5nIsn88EoaiKKYeD/6eL104A7+9/BSclb9L3JafSwLI5lfzC7VWzZG1nCwMuPeY2KVyFEUxbbIml4G7xSirl2R+jdqkcjwEJjy+cBrcmnV+lQfo1Zt8PnwhZsxdzxSrwFV4TIY1j0kpnDC9FT/85Hy0S+kirzRZTBcu1zTf1voYfvCJ+QXp3k4pGDYGg0E2v3LcerkIwgwKTPLoFBOXVTk8RcK/pPzCwctbOcbUzVgqJnx4XDSsiLtmnjqQAwMt9aIuKrxiYFt+Cqpc8mylmHAz4lAyo1XlmPhR+EXM6V1/sQuzmSmUp+i8mPKMvUxKWaTs5tu49ZgoiiI8Ck7TgWadX+U+Jsaur4DeVOpmPAMPXAsCE0MfkyDdXfObkcFRvTpUrsDEivZ835dUJieUpUrtixOMgUiQPlOieqGzCNDN7oiElYIR7cXQGmSpX9K2/BwLfofPqWRgwhUT2Yth5vEwlvfygWlcMZH3uckk2AA0I+VISqrKMTFXuvWYFLswi5Jh6e7ea4M1QGpLn797LWVhqLPp/Oq2jwmgLfpOzyGt86v2u2gRxSQcUsR+D7soGRbmaIOi1mRUTAJ0d81vRozpsbEOBuKRMCblrx+yz6TU869cGAMRGuJH+AGdRdAUD0DtROm2KiclGWcBSTEZMk/l8JvWsTS/yqXCHLNUivCEJHhgkldMegeRyzGxz03xiOUgM3nGihboFAYxbqtyhGJiMcBMlOSaKCZeFsGOFq10cziVFcewtM6vpXtMAPeBiekQvyIeE0CafeRGMbEwRwdZMamPhsEPjdzLxC59WC6EUtevNVkr9fwrFwWDOQMUbBLVS3CuDBWEV24A6l2kaLbkUDFJGxYWPmDLGJhwiburhVe3jL35NRE1UUxkj4lBMZkxsR7RsILhVBa7+0aku2HrygAxlVYyv5pW5Xg1v1oMMBMdUWXFxGMfE0DzmOzpGxWvHY+EPJWnWg3xY4y5TuUAWqk2P6eKYZrKkV7PTDEBCmcfOcGqnNxY+VKqx8RPQiEFjTyglr73PB07ltN8Ow2mawAln3/lwqjykWJC+AGdRdArJnKDNaflwmmDj4GncqwUk+ltdbqfx4LRtIliYuIx4YsGX0Si4RBmTlQnuW7rHRSGXbvARCgmyayjlvROUjnJjPZcxjk5HJ6mGpUUE/7ZeErlSE3W9kv+Fi9Nwfgsn4IhgzkGbmmIh50vOEIxGXanmERcVOUA3gb5FTO/coKkmADmJcMHSpiP5JVOKSDmlHr+lYtC82uwPlOiOnF9Fq1btw5Lly7FlClToCgKHnrooaKP+f3vf4/58+ejvr4eXV1duPzyy7F//34v+1sWMtLCqGtJ77gqR3/Ha5XK4RL39An16s8O73b9gCsmMZNUDh+ixxgrSOUAWq+Fbb2D0qJjXTrZIDXmMns+4+tnc6xo91l+LCMhxbIaKBEtVEyEqdnD1FNeLtw/msE7B9Uy6jaPd86JWGHQBOiDQleKSf4Y9I04O0fNOr8W62MCaIu1m7b0/RaqmrFkPEgeE8C8Lb3wdViodOWAK6qyYiJ66IyhcuMEYyBCVTmEH7i+Wg8NDWH+/Pm47bbbHG3/9NNP45JLLsEVV1yBV199Fffddx9eeOEFfO5zn3O9s+UiI/V4kKegOvaYZLSKHkC7uzowlNI5/DXFpF7381ggzK82qZzRdE4sYLLCweeS6AMTG8Uk/9j9QynT5zO+PqApG1bIPUys7hhNy4U9NlgDVHMv3+/Nu/vV1/e4QHHFZCSlf59yYOLmoi6qchwGt2adX8MhrULLrPMrUJpiYgxMChQTj5OFy4VZL5MDFQgIOkzmNO0vksasFIWBSbA+U6I6cd0xaMmSJViyZInj7Z999lnMnDkTX/rSlwAAs2bNwr/8y7/g+9//vtuXLht8ceAyN7+AprI5JDPZovMxCs2vcf
H7wWRGVK8YUzn9Du92OaPpLHKMeWr0xO/UEzapHH5BVhTo8th8kuvW3kERVJm1o+dwxaQ3f2E1Pp/x9QF9G3kznNy5alU5cirHW0t6TmdLAtt6B7G5Ww1MvPaQSFhMP+bnTkhxt4+eza8hY3mngmzO+pzSmuW5UEyKlAuL144E6+5aU0rV/c/mmKggGstUjpZClD0matfXIPUwAQpVviD5hojqpezh7cKFC7Fr1y488sgjYIxhz549uP/++/HBD37Q8jHJZBL9/f26f+VEmyysHg55EJ0T1cRofq2LhcVCdDA/3yKXYxjIX7AP86CY5HIMH7z1f3H2j570NBHYTjHhaoXsB5FVCVkxsRvgx6nLH7/eAfVi2hiLmKocskJQ7D05KZXUqnJk86u+lNst3Ij4al4x8dp1U2uwZp7KcZPGASTzq8vAJGT4HHhwaGWoFG3pXczL6bOoymkyVGZ58f2UE6NSemg4Jfw/Y6lUiAGSh0ZEmXcQu74ChVU4bs9jgjCj7GfRokWL8Pvf/x4XXnghYrEYOjs70dLSYpsKWrlyJVpaWsS/6dOnl3UfM4bFKxxSxIXaic9EM79qX1KumuzP3+kMpjKibTP3mIyks666nr65bwjdfaPYN5h09BgZs3JhY7kuvyAbvQB8kmvfSBrbetV+JrZVOVwxGVDv+MwqcgDou88WSeXsHywuqYuqHF25sPc+JoC2SOzNB1leqzN4P5B0luk8TSmPDeC8KibGO9r/c8phOP2IiaIs3IjWlt6Dx8TwuRvTRUHzIxinih8UrfUjY5qiOKytHs2JCIZSWby08yCAylQHOYHMr0Q5KPtZtHnzZlx99dW4/vrr8dJLL2H16tV46623cOWVV1o+ZsWKFejr6xP/du3aVdZ9NJP7xUXKgWJiVu7ZZjDA8uqJWCSEiY3aXY9Tj4DcbMmLN4WnN0zLhfP7P5CXsI2BRCIaFirPP/IXSiceE76Ym/lLOGbdZ83gi4SdlB03Kcnlowa8LoJcVud49phIx11uS68pJu5KQJvdBiYm5cIA8I0PHo27Pnea5YLCFZMRV31MzFW1SDik6yYbOMXEMLxzvwgGxlaliIZDOOfoDgDAoxvVycb8/A+6x4RSOYQflP3KsHLlSixatAjXXnstjj/+eCxevBi/+MUvcMcdd6C7u9v0MfF4HM3Nzbp/5cTMIGm8SNmhdX4tDEy4BCvn3cMhrfLH6cIiG+G8BCZmiknU6DEZ1ZcKy/A7at4m2y4w4YoJV4isFBPA+bwcJ42uNPNroWLi9U6u0xCYePUayMdd9sCkTD4XJ/Dj7yRwBqTOry5LTeVmeU7I5phQHMzOEfncClxVjiGVU8lOq+fP6wQA/OXVHjDGKtLozQkF5ldK5RA+UPazaHh4GCHDnVE436/BzcTScpI2mUDrpi290fwKaBcQXuZnrGZxK8X39GldIL00ZuOLtVkfk2Sm0GNihLem51iV7AKFpad2ionTwIRL2baBiUm5cLqEzq+A5jHheJXSQyEtbaULTDw0VwP8M78WQzTLc+gxGZAUQLN0n1w27mV+UTkxpnIqGQy8f85k1MfCePfQCDa801e063GlKEjlBEwFI6oT12fR4OAg1q9fj/Xr1wMAduzYgfXr12Pnzp0A1DTMJZdcIrZfunQpHnjgAdx+++1488038fTTT+NLX/oSTjnlFEyZMsWfd1EiaZMJtI0uepmkHaRyjHl3t+bF7pJTOeo+2nV+tes5YvQg2ComBi+Bo8Aka39H7uTu1e8Ga4B/igmgVUTJqSaRynG5SDdL52fGQYM63kPQmMophlvFhJ+b9bGwqUrVGGTFxDC882AFephwEtEwzjqqHQDw6KYeR4F5JTCet0HzDRHVieur9YsvvogFCxZgwYIFAIBrrrkGCxYswPXXXw8A6O7uFkEKAFx22WX48Y9/jJ///OeYN28ePvnJT2Lu3Ll44IEHfHoLpcPl/phHj0k6U/j4glSOoVLBtWIipXL6Hc
r3MqaKicFjYqeYzDYEJnblwq4UE4NqYwU3EdsHJiYN1kTFlVePSZ3u51LKNet4kzWTVI7rqhwpMHRyjmZz+pJ4p8jjBZxg1Y6eow9MgnV3bexjsr/CTc2W5NM5//PKbqHiBK1cmIb4EeXAdUOMM8880zYFs2rVqoLfXXXVVbjqqqvcvtSYwQ2S8h2cWXtqK8zkeGP3V2PTKZ4KcRpklGp+1TwmNoqJzcC9I0pRTGw8JkafixXc22LVjh4wLxcu1WMyoT6KWCSEVCaHcEixDciKYTbIL+kxMImGQ2iIhTGUyqJvJI0JRRYsLqoYy4WLwc2qTju/Wg3w48jngpemd+XE2PH5wFBxw3U5OWtuO+KREN49pKZxSz3/ykHhED8KTIjSobMI5iWlTS66v6ZtzK8HTMyv8n+dpnLkwMSTx0SkcqRyYYsGa2aBRGM8gilSWsN2Vk7UMO6+RI+J3OhqQoP165o3WCvNY6IoivCZTKiPuk6FyGjzckovFwbcdX/VOr+6ew0eoLtVTKwCk6ZAKybqPgfB/Aqox/79cyaLn0s9/8qBUYkMWnqOqE6CdWWoEGZNuNx4TJImjy8oFy7B/MoY03lMvAQmo3apnCJ9TDhcNYlFQrZdWutizhUTJ31MnDa60vqYmKVyvJ/q3GdS6gKlTRguPZUDuDuHvJpfRedXp4rJKFcGzT9z+VwI2iJmmcqpYPqEp3MqvR9WUB8TohzQWQRzg6S4eyoSmDDGHJlfjbl3N+bX/tEMRqTFzFMqx25WjuhjYl0uDACz85U5dmkc/rx6v05xlcNOMeHHsKUuanvh01I5kmKS8eatkOGKSemBSX5ejk+BiXYOOfGYmHd+LQY/F0bS7syvVoqafG4FTfY3BiYHAxCYnH10h7jhCWRgQuZXogwE68pQIcwm0Bod+lZkpbH18peUeyEGkxkkM9mCplMt9c7vduU0DuBtKjFfrOVUTsGsnFHrVA6gVeYYO3qaUS/5TGwVk7C+Lb4ZTnP9ZoqJ1mDN+6ne5ZtiYmZ+zU99LiGV4+Qc4qkctwFavTQp2gnFUjk682vASkv5eTqcyiKTzVU8lQOox/H0IyZVfD+soCF+RDmgswjmDdYaxPAy+wuynIKQ73qb6yJCNj84lC64k3Qjw3dLPUycPsaIUEx05le9WVSUC1soJouOnIhENIRTZrUVfT153lCjxeRadR+cKybFDJ58WOKhYe34lGp+BYDTDp+IkAKcOmui5+cApFROxsRjUuZUDk9pufUo8M8xmck5KkvuL1KV0xTgVI5s2t4zkBSfjZ3heiy4bNFMhEMKFh05qaL7YUbBEL+AfaZEdeJ+TO04xKyPiUgLpO0vxrxUGNAvfoqiYEJ9DPsGk9g/lJQCE0MfEwfqx558qfCkxjj2DSY9VuVYe0y4WjFkY34FgBkTG/DydefpVBcr5KFwdqmcmInKYcRprp8rGz19o6JyjC/IpVwwzzqqHa/edH6Bd8Yt2jllksrxpJjwyi4HionXzq/SYj2czqK5yH66UUyCJvvHI2FRgfX2/iEAalVSqZ97qZw1tx2vf/v8QKoRRgUuaOk5ojqhswjmnV/54lssty4rJsYvqVwyzMuCW0S5sBvFRA1M5naqqRQnngIjfOG3a7A2YNPHhFMXC5tOCjZSH5cVk+KpHDvzq9NUDjepjqSz6B/JiKAEKL0jpR+Lk1aVI3d+zffACaj5NRYOifN62IEB1nieGwlyKgfQ1MJdB4YBBCd9EsSgBNAP4gSCu59EdUFnESS5X7pomy0iZshSvHHBlg2wRvNrS/5ut2/YucdkTodqPnUzlZjD34e+Jb2iew9aH5PShbQGaSE36yQr9sFFKqfYIpGIhtGa9+709I/qfCtBkJh5cOOX+dVVYGIxxK8YiqKI/S6W1pT3xcr8qutjEsC5Knz/dgYsMAkyskoShO8ZUf0E78pQAcwMkgnepTNjH5ikbaR43jFy96FRsQBx0yu/cA8kM0Jmt4J3feWBCe
DeAFuswVommxMLph+BiT6VMzaBCaBV0HT3jYgUHRCMC6bZ9GN/qnKcp3K8VCdxn4kjxaRoHxPt90FrsAZo5+rb+ykwcYqckqNZOYQf0FkEqcGaHJiYNMMyI2XSw4TDUw9v7VPz1SEFaIzpPSaMaSkUK7hiMqW1zvVUYo6WypEVE/U9pjI5XZ8Kq3JhN8ht6e2ez+/ARPaZyGbNIFwwzVM53qty3DTpy3gsFwY0n4kTxaR/xE0fk8p/Jkb4ucoVk6C1gA8i/DscDimBawBHVCfBuzJUgIxJcCEqKIoML7O74+XNwHbkjXRNCa1zYyIaFmmVYgsL95h0tSREQOM2MNFSOeZ9TAaS6fzfQ57u3o3wCodYkeeLO/CYuGl0xX0mPf2jYjEOygVTKxf2STERnV+LBwxa51fvislIke8CY6yo+VWufAma+RXQPCZcMSlWCUZoSnMQP0+iOqHABJoBUTbjiUWkWConW5gG4vAR5TvyionxYu3EIzCSn4UCqIuu21b2HJHKkRUTSa2wmyzsBa6Y2LWjN+6DFQfyA/yclG12NqtD93r6RqXGecG4YNbZdH6NB9T8Cki9TIooJiPprAgGnaRygmh+bTQokpTKKQ5X+4KgShLjAzqTYKWYqBfjdJaJi7oZdne8/KK2d0BdWK0CE7sgg/tL6mNhNMUjrqcSA+r74+9Bb37VSnWHHFTkuIGbX+2aqwHFAxPGGA4O5RcJB1NeeSqnu29UeEyCUilg2mDNpz4mdoM1Ae9D/ABpXk4Rjwk/JyMhRQz/M5KIhkRwFMQ7bOP5T6mc4vBzN4hmZqI6oTMJ5vNU5AurXWUOX/xMza+Gi5ox7+5kCBtvrtbZkoCiKK6nEgP6HiHm5cJZDBTp+uoWXi5cLNARwZFFKmcwmRGLd5vNnBxOh4nHJAjGV8BchfOjj0k2xzBcJM3itfMr4FwxkdM4ViXliqKIcyIoAaOM8fwnxaQ4/HMMijJJVD/BuzJUALMJtLKyYNfLhJsXzS6yxoual1ROj+QvkR/jJpUjB1byAhiXPCaD5VJMijxftIhiwo2vThtddUkek7RJiq6SJEyqcpIleEzqomGhOhRT0Lx2fgWkqhwp+Nk/mMSyu/6B/926V/yO99cpNkuJnxNBCRhljKnHiQ5UulpHKCYBDDSJ6oTOJJi3LQ+FtMZBdopJKmPdIKtAMTG06W52UGHDUzkd+TJYL+bXpHRXLi9MOo+Jjz1MAOCwiQ0AgJn5/1phnNdjZPPufgBAV2vC0ety82vfSBoDeSUqFpAFkKdE5KCyFPOroiiOOwhrnV9dv4zWx0SqHnt0Uw/+vKEbP3tsm/gdPyebigQmsyap58SU1jr3O1NmjOe/3TRrQoUHx0FMzRHVCbWkh9bHxChFJvLtqe1KhtM25cLGi5q1x8RaIvdDMTEzvgJaUJBjwKH88/kVmLx/9iQ8tGwR5nQ02m5nbItv5NFNPQCAs49qd/S6TfEI6mNhDKey2HVQTYMFpSyV91jhwSYgeUw87mNLXRT7h1JFG/WVYn7llTSyYsJTjFt7B8TvilXkcH5+0QLsPjSKIybbnxuVwFjaXuk5OdWAVpUTjO8ZUf3QmQR5Vo7+cPA7RXvFhN/xFqYZouGQ7iJt7IbpJJXDS4U7W9S7SzdTiTlmpcLqPmvvl6dM/PKYKIqCE6a36vqZmBG3SeUkM1k8/novAOD8eV2OX5erJryteFBSBny/BkYzQn3g79urcdDpaAOvnV8BrcJKVkx6+lRD98HhNPYPqv+vDfCz/8xb62M4Zkqz6/0YC3RDBkOKZT8WQoN/h4NyA0BUP3QmQavKMUr+YuiaTclw2uKxHNnVbwxMnJhf+QC/Lh9SOcaSVNPAxGbgXjmQe6kYeWrrPgwmM+hojmPB9FbHz8nVpV0H1cAkKGWMTYmoUKS4aiLKhT1e1J32Mimt82teMZEC9J5+beL11t5BAM4VkyAjn/8TGmKO5kLVOlwpCUrKlKh+gnHFrjBWig
nv1DmSsk7lpGz6mAD6Bk3GC7aTu11NMTGkcly0pOfTbI1TgSMhBfy6ezAfmPjVx8QpcvdZIzyNs2Rel6s7fd7L5B2RygnOBbOjWU0N8BRduoRyYcB5L5PSOr/ycmEt+OHnJQBsywcm/Jys6sBEOv+dVIERZH4l/IfOJAAZK4+JSUMsI8XMi202gUmxRSWVyWFfXibngQmXll2lckzm5ABq2oNfTPYLxWSMAxOLVE46m8OazXsAAOfP63T1nJ0t6uL/Tj6VE6QLZleL1gAOKM38CmjDIIt5jvzo/DqU95gwxsT+A1pgUmyAXzUgn/9UKuwMUS4coBsAoroJzhW7gphV5QDOur8Wa+KlS+UY1IhiQ9h6B0bBmGqM5HdvTgyzRrhiYjS/AloK4UCFA5OkITB5dvt+9I2kMakxhpNntrl6Tu7H6c6nS4JULSC3zAdKa7AGOE/tldT5VZhf1XNuIJnRGWGFYjIOUjmyYuikoR9B5lfCf+hMgrY4GCN+s9kmBY/1RTExDzK4v6S9OS5SGbIvpdhUYo4Y4Gdi0OX7zQMTPwb4uUGUCxs8JjyNc+4xna4XU+7H4c1Qg9LHBNBPPwb0pdxecFqllS0hlWOcLiyrJYBWmeO0j0mQkQNz6vrqjDilcgifoTMJckt6o2Ki/mzXYC1dpNzTLjBplmR4s5bi3YZSYcDdVGLOqI1iwgMTv2flOMUslZPNMazZzP0l7tI4gKZKcIIkMXdKnWmB0lM5TgzUgL+dX8W06/x72dOfRP9oWkvlJKo3MKmPhYXvilI5zqA+JoTfUGACuSW9RVWOg8DE6kvZZlOVwwOVVDZXkMoAtAWApyb4PjmdSsyxqsoBChfEsU7lmJUL//2tA9g3mEJLXRQLj5jo+jmNgUmQ7uTkzrSMsZJTOa7NryUEJkbFZHZHkzDzbusdHBfmV0VR0JhXiCgwcUaMyoUJn6EzCbBsXV5nMnTNSLGW4vziVh8LFyyQjfEI+DphtrAYm6tx3A7yE6kck8FqRqXHrz4mTjErF16dT+Occ3SHp6CirT6me19BupOTFZNMjol0UzxcvN2+GU7PBa3zq/chfkOpDBhjOiXvyHa1Sdq23sFxUS4MaN8BCkycoZUL03JC+AOdSYDlsDcnHpN0kXLhaRNUtYN7C2QURREX8UMmnTu5QbK9Sd990ql8z+FlnnYeE45xVki54RezbE6b4rwuP39l8bEdnp4zFFLQ0aIdsyDdyfHzYN9gStewrFTzazEzdLaEqhyumOSYGuTy87KzJYHZ7U0AgNe6+4UhttqbkvHv29QAtswPIhMMxnyCKJXqvoL4hFVwEXfgMRGdOy0WvyPbm/AfF87HrEnm7bcnNMRwcDgtzKcy/HeTGvWBidu29HsG+LydwvbaBamcCikmgHosQyHgrX1DAIATXDRVM9LZnMCuA6rBNBqgqadtDaqak8rmxP4B5U/lcEGqlM6vgNqWvidv3O1qSWBi/tz8x9sHxTZNVewxAYCVHz8em97tK+n8qyX+6cRpCCnA4mPd+8EIwgwKTGBd8ssVBrtUDg9qzPwbnI8tmGb5t4kNMby5dwgHh60Dk8JhgO56mfCUUEdLoWojy68hRUtfjRXGwKS7fwQ5pppwJzd5n1Oi+nLUxTJIiglvmb/zwDDePqAGYOGQ4knJALTAZCSdRSqTswxwSun8Gg4pSERDGE3nMJTMiFROR3NCqIqv5octNsUjnt9LUDhmSnNgW+YHkcZ4BJ9ZOLPSu0GMI4Jzxa4gVg3WtFk5dqkc+z4mxeAy6H4TxWS/RWDitpcJl96NXhVAHxg0xCNj3oJbPuapbE70xJjd3ljSvsjvNUgeE0BL5+zMN4ArJTcvK1x2qT1+jnspFwakkuFUVjqf6oTHhJtrq7m5GkEQwYACE9g0WMsv2nYN1oqZX4sxMd/E6cCgPjBhjIk28RMbzQMTt4oJb9UuIys9Y+0vAVQFQTbAbt2jBiZ8wfNKR7McmATrNOcG2J3784
GJx3MHUNWMJgcKGm9541XN4E3W9g8lhR+qsyWBiQ0xTKjXghHyGRAEUSrBumJXiHQR86uzcmFvh5KrIQeGkrrf949mxF3oBMPMDjfm12Qmi335oMdYRgvoF8Wx9pdwePfZVCaHbXu5YtJU0nPKikmQGqwB2r69vd+flvnFOggDcudXb69RH1XPjTf3qumnumgYzQlVYZM/q2o3vhIEUXmCdcWuECIdY1jAeGDizPzq7U60rUH1URhTOaITayxcUObrRjHp7VcDnlgkpLuz5ciL4lj3MOHITda2+aSYdAY4ldNhSOXY+ZOc4OR8KKXzK6ApJtvzgWNXS0Kk2o6QPitSTAiCKBUKTKDl36MRqyF+xcuFvS4ubQ3qhdxofuUKitm8DidTiTmyv8TMsyH7GxorVE3BA5PhVAZv7vMnMNEpJgELTPi+8bb0paRyAHnitLXnSOv86u21uMeEKyZy4DebAhOCIHzE9VVq3bp1WLp0KaZMmQJFUfDQQw8VfUwymcQ3v/lNzJgxA/F4HDNnzsQdd9zhZX99hzFm2WAt4aDBWumpnLxiYvCY8J/532WcSPecbuEvKUzjAPpFsRIeE3kftvUOIp1lqIuGS+4hMbkxLprXBS2Vwxd17vsotTGVk4nTWudXb6/Be5nwwFE+n+Qgsprb0RMEEQxcr0RDQ0OYP38+Lr/8cnz84x939JhPfepT2LNnD37zm9/gyCOPRHd3N3I5axViLMlKg/CsWtKX0vm1GBOFx0QfmHAFxWyQmJtUDu85YeYvAYxVOWNbKiz2Ib8wb+5WS06PaG/w1G9DJhIOYXJTHHv6kyUrEn7T1aIPunxTTOzMryVMFwa07q/vHCw8n2Z3kGJCEIR/uA5MlixZgiVLljjefvXq1XjyySfx5ptvoq1NHV8/c+ZMty9bNjJSYGLsd+FH59dicPPrweEUGGMi3cI9J0bjKyDfIRcvF+7pU1NCTgKTxnhlUzm8F0apxldOZ0sd9vQnPfXuKCeTGmMIKZJiUmJg4sj8yry3pAc0xYS30JdTZZ3NCTTGIxhMZqhcmCCIkin7reSf/vQnnHTSSfjBD36AqVOnYs6cOfjKV76CkZERy8ckk0n09/fr/pULeUaLcQHTPCY25lefApN0luk8Arx82FgqDMieAicek3yXTotUTlznMalsKue1fGBSqr+EM7VVfc9mM4IqSSQcQnuT9nmUmsppzZuazboHc7IlDPEDNMWEI5djK4oiPrMJNF+GIIgSKftK9Oabb+Kpp55CIpHAgw8+iH379uGLX/wi9u/fjzvvvNP0MStXrsRNN91U7l0DoPUwAQqDCydD/NIZ9fFeza+JaBj1sTCGU1kcHEqJoMOq6ysgTSXO5DCaztouvMJj4kAxqZjHJH/cB/KzY/wKTP7l/UegIRbBkuOC1yq7syUhjMmlKibT2+oBAG/tH7LcppTOr4CmmHCM6aivnDcX//PKbpw1d7Kn5ycIguCUfSXK5XJQFAW///3v0dLSAgD48Y9/jE984hP4xS9+gbq6QpPjihUrcM0114if+/v7MX369LLsHx/gF1IK8+8ilZPJ6dIsMqWmcgA1+BhOjWD/UAozJzUAAA4MWwcmDbGISAX0j6RtA5M9IjAxN5MGoY+JcWGe7VNgMn96K+YHdN6JbB4tNTDhgdzW3kHL8zRTarmwITAxBrrvnT0J7509ydNzEwRByJQ9ldPV1YWpU6eKoAQAjj76aDDG8M4775g+Jh6Po7m5WfevXKT5naRJYMFn5WRzWuWOkVSJ5lfA3ADL/9/M/BoKKY5KhrM5hj0DqsfErB09YCgXrpBiIqtNsXAIh+UVgPGMvLCXGpgcMbkRiqJOqDYbbQBo5cKeO79Kg/yiYcX0vCQIgvCDsgcmixYtwu7duzE4OCh+98YbbyAUCmHaNOvhdmMFV0zMJtDy6cKAdVt6zWPi3WBp1v2Vlwtb5ey54dEuMNk3mEQ2xxAOKQUTijmxiHYnXOkGawAwa1
JDoIbulQs5UIyX+H4T0TCmT1CDOd7S30i25Koc7Txpb0qUXDVFEARhhesr4uDgINavX4/169cDAHbs2IH169dj586dANQ0zCWXXCK2v+iiizBx4kR89rOfxebNm7Fu3Tpce+21uPzyy03TOGON1o6+8FDEIyFw5dvMZ8IYE4FJKQbGCSIw0YIMO8UEcGaA5f6S9qa45YIUiFSOdOz88pcEHT8VE0BLf/GW/jKMMVEB5D2Vo50bVuobQRCEH7i+Ir744otYsGABFixYAAC45pprsGDBAlx//fUAgO7ubhGkAEBjYyPWrFmDQ4cO4aSTTsLFF1+MpUuX4qc//alPb6E0tOnAhRdsRVFEOidpUjKczTFRPulPKkdVTEZSWdEG38xjAjjrZdJTxPgKGMuFK6+Y1Exg4qPHBNCO27Y9AwV/kyrivSsmUmDSQYEJQRBlxPVKdOaZZ4Ixc78FAKxatargd0cddRTWrFnj9qXGBKvJwpxENISRdNZUMUnbVPS4wTgvhxtfY+GQZbDAe5n02/Qy4c3V7O5wg+AxidagYiJXtZRaLgxIgYmJYpKRmhmWOl0YsC49JwiC8IPxn8wvQjpnPlmYYzfIjxtfAX/NrwcGtYocswoLwJli0p0vR+2wWUhi0nygpgBU5chdRMcz7c2a5yfqo2Ji5jGRmyz7oZjYKXAEQRClUvOBScZisjCnzqb7q11zNje0GQKT/fmUjl2zKidVObxU2F4x0e6EjU20xgoemIQU1fxaCySiYfG5+6mY9A4kC86JrKRwltr5FaDAhCCI8kKBSdZeMYnbNFlLS8ZXK2XDCRMMgYndnByOkzbk3UV6mABaUJCIhkpKR5UCr0qZMbEB8UiwurSWE+4z8cNj0pSIiufb1qtXTeR5UF6H+MlBK5lfCYIoJzUfmHDVw2oCrV1bej96mACFqZz9g9bN1TjGDrFm8M6iVpOFAa2HSKX8JYAW/NWKv4TDF3ivXYON8DTYdkNgksv5q5jYpQYJgiBKpXKrUUAQqRyLxYFX5Zh5TNI+9DABgLb8PJzhlGqytWtHzzm6Sx1098JbB5DK5AqCI8aYqMqxu8Od29mEU2a24ZRZbSW9h1I4c+5k/M8ru3HhSeXp7htU/unEaXj30AjOmONPG/cjJjfif7fuw9ZefWWOPKjSq8ckEQ3j4++ZiuFkFlNbK1/mTxDE+IUCk5x1gzVAU0zMyoWTmdLb0QPqjJpoWEE6y7B/KOUoMFkwfQLam+LoHUji6e37cNbcdt3fDw2nxf7JRksjiWgYf7hyYUn7XyrHTmnB6i+/v6L7UAk+eFwXPnhcl2/PxxWTrUbFhPF29Cgp5fjjT53g+bEEQRBOqflUDi/5tfKY1MX4vBwbj0mJUryiKJhQrwYhB4dSomzYLjAJhRQsPlYdTrd6Y0/B37m/ZFJjrKZ8G7XMkZPzJcMWHhOvaglBEMRYUvOBiVBMrPqYROzMr+oF34+qCh6E7B9K4WCRrq+cJfPUwOSvm3uEiZfT06/2MCE/QO0wu0NN771zcATDKa2/TbbEAX4EQRBjSc0HJumM/Th4bswcSZmUC/tkfgWAiY1a91cnqRwAOGVWG9oaYjg4nMbzOw7o/tbTZz+8jxh/tDXERDD75t4h8ftSB/gRBEGMJRSYFFNMeFWOTSrHjzJb0f11UEvl8GDFikg4hPOO6QAAPLqpW/c33vWVek7UFkfwRmuSATZDqRyCIKqImg9MirWkr7PpY6KZX0u/4PM73b1SgyzuO7Hj/Hw65y+v7tGVhYoeJpTKqSnEMD/JZ5KjwIQgiCqi5gOTdJEGawmbzq9+mV8BLQjZnpfgFQVodRCYnH7EJDQlItg7kMRLOw+K34seJjbN1Yjxh1lret751WsPE4IgiLGk5gMTLnMXa7CWtO1j4kMqp5EHJuqCMqE+5ugONxYJ4dyj8+kcqTrHSQ8TYvwxu101wMqKiTC/kmJCEEQVUPOBSbpIOsbJED8/OnfyVM7b+1XFpJ
jxVYanc1Zv6haTn3lgQlU5tQVXTN4+MIxk3hfFh/iRYkIQRDVQ8w3W0jl7j0nCwawcf8yvaiDCbSJuApP3z5mM+lgYu/tGMfe61VCg+V/I/FpbdDTH0RSPYCCZwVv7hjG3s0mUxJPHhCCIaqDmFZNiQ/zsPCZ+dX4FCgORNgf+Ek4iGsYnTpwGQFVx+H7Nn95a0Rk4xNijKAqmTlB9RXvyPiMqFyYIopqo+VUrU0wxidiVC+cbrPmQyikITIqUChu56SPH4gtnHqGbJNtFxteahE8CHk6p5yzvvUeBCUEQ1UDNByaiKsdyVg5vsFbeVM6E+hgUBcjf3Bbt+mpEURQKRAgA2iRg3v1V6/xasV0iCIJwTM2ncrRyYYs+JvmLPE+PyPhpfg2HFLTWRcXPbjwmBCHTEFPvN4bywTSlcgiCqCZqPjDJiHk3FoqJ7awc/xqsAfpghAITwiv18bxiklQVE63za81/3QmCqAJq/kqlTRcu0pLerFzYx1QOQIEJ4Q9cMeEeE63za8V2iSAIwjE1f6nipZRFPSY2fUz8ML8CFJgQ/mDlMaE+JgRBVAMUmBSZlSOXC/PmZRw/za+ANsgPACZK/08Qbqg3eEx4S3rq/EoQRDVQ84FJqmgfE+0QGQ2wfppfAX0lzoSGqM2WBGFNg8FjkiPFhCCIKqLmA5NMEdWDKyYAkDQ0WUsXUVvcMiEfmDTGI4hHwkW2JghzjIpJhqYLEwRRRVBgIhqsmV+0o+GQuKAbm6z5bX7lign5S4hSEIpJ3mNC5cIEQVQTNR+YaA3WrA8F7/5qbLLmt/mVD2Cbnf8vQXhBKCZJ3vmVAhOCIKqHmu/8qplfrS/adbEwhlLZAsXE7z4m86a24OGr3ovpbfW+PB9RmzRYdn6lwIQgiOBT84GJE8UkHjEf5CcUEx8bRMyb2uLbcxG1Sb1hVg6lcgiCqCYolcMVE5t0jFWTNR7U+JXKIQg/0BQTMr8SBFF91PyKyhusRW0u2lZN1lI+V+UQhB/w+U5DVC5MEEQVUvMraqZIS3oAqMsHJkljYJL3nJBiQgQJ3pI+mckhk82R+ZUgiKqi5lfUdM6+wRqg7/6qeywpJkQA4UP8AGA4nUX+NKXOrwRBVAWuV9R169Zh6dKlmDJlChRFwUMPPeT4sU8//TQikQhOOOEEty9bNkRVjl25sIXHpBzmV4IolVg4JGY/DSezUiqnkntFEAThDNcr6tDQEObPn4/bbrvN1eMOHTqESy65BGeffbbblywrouQ3Yn3Vjlt4TMj8SgQRRVHEIL+hVEYyv9J5ShBE8HFdLrxkyRIsWbLE9QtdeeWVuOiiixAOh12pLOWGp2PsyoXrLFI5KZ/7mBCEXzTEI+gfzaiKiSgXrvBOEQRBOGBMLlV33nkn3nzzTdxwww2Otk8mk+jv79f9KxcZB8FF0VQOKSZEwKiXmqyR+ZUgiGqi7Cvq1q1b8fWvfx2/+93vEIk4E2hWrlyJlpYW8W/69Oll2790rnhVToI3WLPo/EoeEyJoNEhN1qjzK0EQ1URZV9RsNouLLroIN910E+bMmeP4cStWrEBfX5/4t2vXrrLto1BMHPQxkacLZ3MM+es9VeUQgYOnH4dSGer8ShBEVVHWlvQDAwN48cUX8fLLL2P58uUAgFwuB8YYIpEI/vrXv+IDH/hAwePi8Tji8Xg5dw2A8+CCN6ySh/jxNA5AqRwieAjFJJmlzq8EQVQVZQ1MmpubsXHjRt3vfvGLX+Dxxx/H/fffj1mzZpXz5YvCUzGAfR+TeD7wkFM5KemxpJgQQUOuyqHOrwRBVBOuA5PBwUFs27ZN/Lxjxw6sX78ebW1tOOyww7BixQq8++67+O1vf4tQKIR58+bpHt/e3o5EIlHw+0rA7yQB++BCa7BmrphQVQ4RNHj3V9ljQooJQRDVgOvA5MUXX8RZZ50lfr7mmmsAAJdeeilWrVqF7u5u7Ny50789LCMZWTFx4DGRy4Vl46tCd6JEwODdX4
eSGWTzHhPq/EoQRDXgOjA588wzwRiz/PuqVatsH3/jjTfixhtvdPuyZYH3MAHs7ybrTBqspamHCRFgZMWEf18plUMQRDVQ0+YIObiwUz14H5OkSSqHjK9EEOGKybCu8ysFJgRBBJ+aXlUzDofwmaVytK6vNX0IiYDCFZOhVJbKhQmCqCpqelUVk4WLXLBF59dMoWJCgQkRRETn1yR1fiUIorqo6VXVqWISjxT2MeH+lDilcogAUi8pJtzjTZ1fCYKoBmp6VeUeE7seJoDWYG3U1Pxa04eQCCiyx4SG+BEEUU3U9KVKBCZFxsELj4nUu4TMr0SQEVU5SZqVQxBEdVHTqyqvVigWXCTyf09lcqKLZorKhYkAI3d+5YFJMS8VQRBEEKjpwERTTIqZX8Pi/7kBlsyvRJCRZ+WQ+ZUgiGqipldVbn6NOCwXBrSSYdH5lVI5RABp4FU56Sx1fiUIoqqo6VU1k3OWjgmHFMTywQs3wMot6QkiaNTnFZNsjolqMur8ShBENVDTq2oq4zz3Ho/qAxMyvxJBpk5S+QZG0wBIMSEIojqo6VVVU0yKHwZj99eUwx4oBFEJwiFFBCf9oxkAZH4lCKI6qOlV1WmDNaBwkB+ZX4mgwytzuGJC5leCIKqBml5VnTZYAwoH+ZH5lQg6vMkaV0yojwlBENVATa+qGdHfwUUqJ2M0v9LFnggmvMkaV/dIMSEIohqo7cDERZO0hJiXk/eYkPmVCDg8lcMhxYQgiGqgpldVNwbW9uY4AOCdg8P5x5LHhAg2vMkah8yvBEFUAzW9qmZceExmtzcBALb1DgKgIX5E8DEqJpTKIQiiGqjpVZV7TKIOPCZHtjcCALbmAxNK5RBBh3tMONTHhCCIaqCmV1U3VTmzO9TAZFvvIBhjSOfTQNT5lQgqvCqHQ51fCYKoBmp6VXXTx2TmxAaEQwoGkxns6U+SYkIEnvoCxaRCO0IQBOGCmr5UpV1U5cQiIcyYWA8A2No7QOZXIvAYPSZOyuIJgiAqTU1fqdIOpwtzZrdr6Rw3QQ1BVAKjx4RiaIIgqoGavlSJWTkOTYGyAZZSOUTQMXpMqI8JQRDVQKT4JuMX94qJVjLMGJlfiWBTqJhQYEIQRPCp6VXVTR8TQFNMtkmKCXlMiKBCnV8JgqhGalwx4fNunAUXR0xuhKIAB4ZSwltCqRwiqBR0fiU/FEEQVUBNr6ppMcTP2QW7LhbG1NY6AMCe/iQAUkyI4FLQ+ZUUE4IgqoCaVkz+/YJ5WLHkKDTXRR0/ZnZ7I945OCJ+JsWECCpGxYQ6vxIEUQ3U9KraWh/DtAn1aE64CEw6mnQ/k/mVCCp1UVJMCIKoPmhVdcmRkxt1P0cjdLEngolRMaGqHIIgqgEKTFxyZIc+MCHFhAgqNF2YIIhqhFZVl/CSYQ6ZX4mgEo+EdMEIBSYEQVQDtKq6pDkRRUdzXPwcJ/MrEVAURdGpJtTHhCCIasD1qrpu3TosXboUU6ZMgaIoeOihh2y3f+CBB3Duuedi8uTJaG5uxsKFC/GXv/zF6/4GAlk1IcWECDJy91dSTAiCqAZcr6pDQ0OYP38+brvtNkfbr1u3Dueeey4eeeQRvPTSSzjrrLOwdOlSvPzyy653Nijw1vQAECXFhAgw8rwcqsohCKIacN3HZMmSJViyZInj7X/yk5/ofv7ud7+LP/7xj/if//kfLFiwwO3LB4IjJMWEzK9EkNEpJtT5lSCIKmDMG6zlcjkMDAygra3NcptkMolkMil+7u/vH4tdc8xsXSqHLvZEcJE9JqSYEARRDYz57f4tt9yCwcFBfOpTn7LcZuXKlWhpaRH/pk+fPoZ7WJyjO5tRFw1jSksCCl3siQCjM7+SuEcQRBUwporJXXfdhZtuugl//OMf0d7ebrndihUrcM0114if+/v7AxWctNRH8ecvvRcJQ2dNggga9VKTNVJMCIKoBsYsMLnnnnvwf//v/8V9992Hc845x3bbeDyOeDxuu02lOd
zQAZYggkiDnMqhqhyCIKqAMRF37777bnz2s5/F3XffjQ996ENj8ZIEQQCoz5tfQwoo7UgQRFXgWjEZHBzEtm3bxM87duzA+vXr0dbWhsMOOwwrVqzAu+++i9/+9rcA1PTNpZdeiltvvRWnnnoqenp6AAB1dXVoaWnx6W0QBGFGQ75cmNQSgiCqBdeKyYsvvogFCxaIUt9rrrkGCxYswPXXXw8A6O7uxs6dO8X2v/zlL5HJZLBs2TJ0dXWJf1dffbVPb4EgCCs0xYQCE4IgqgPXismZZ54Jxpjl31etWqX7ee3atW5fgiAIn+AeE1JMCIKoFqiAkCDGMbwqhypyCIKoFigwIYhxDO9jQl1fCYKoFigwIYhxDG9JT4oJQRDVAgUmBDGO4YpJiDwmBEFUCRSYEMQ4ZnKT2qiwOTHmY7EIgiA8QVcrghjHHD65Ef9x4XwcPok6FRMEUR1QYEIQ45yPLZhW6V0gCIJwDKVyCIIgCIIIDBSYEARBEAQRGCgwIQiCIAgiMFBgQhAEQRBEYKDAhCAIgiCIwECBCUEQBEEQgYECE4IgCIIgAgMFJgRBEARBBAYKTAiCIAiCCAwUmBAEQRAEERgoMCEIgiAIIjBQYEIQBEEQRGCgwIQgCIIgiMBQFdOFGWMAgP7+/grvCUEQBEEQTuHrNl/HnVAVgcnAwAAAYPr06RXeE4IgCIIg3DIwMICWlhZH2yrMTRhTIXK5HHbv3o2mpiYoiuLb8/b392P69OnYtWsXmpubfXteohA61mMHHeuxg4712EHHeuzw81gzxjAwMIApU6YgFHLmHqkKxSQUCmHatGlle/7m5mY60ccIOtZjBx3rsYOO9dhBx3rs8OtYO1VKOGR+JQiCIAgiMFBgQhAEQRBEYKjpwCQej+OGG25APB6v9K6Me+hYjx10rMcOOtZjBx3rsaPSx7oqzK8EQRAEQdQGNa2YEARBEAQRLCgwIQiCIAgiMFBgQhAEQRBEYKDAhCAIgiCIwFDTgcltt92GmTNnIpFI4NRTT8ULL7xQ6V0KFOvWrcPSpUsxZcoUKIqChx56SPd3xhiuv/56dHV1oa6uDueccw62bt2q2+bAgQO4+OKL0dzcjNbWVlxxxRUYHBzUbbNhwwa8733vQyKRwPTp0/GDH/ygYF/uu+8+HHXUUUgkEjjuuOPwyCOP+P5+K8XKlStx8skno6mpCe3t7bjggguwZcsW3Tajo6NYtmwZJk6ciMbGRvzTP/0T9uzZo9tm586d+NCHPoT6+nq0t7fj2muvRSaT0W2zdu1avOc970E8HseRRx6JVatWFezPeP5e3H777Tj++ONF46iFCxfi0UcfFX+n41w+vve970FRFHz5y18Wv6Pj7Q833ngjFEXR/TvqqKPE36vuOLMa5Z577mGxWIzdcccd7NVXX2Wf+9znWGtrK9uzZ0+ldy0wPPLII+yb3/wme+CBBxgA9uCDD+r+/r3vfY+1tLSwhx56iL3yyivsIx/5CJs1axYbGRkR25x//vls/vz57LnnnmP/+7//y4488kj26U9/Wvy9r6+PdXR0sIsvvpht2rSJ3X333ayuro7913/9l9jm6aefZuFwmP3gBz9gmzdvZt/61rdYNBplGzduLPsxGAsWL17M7rzzTrZp0ya2fv169sEPfpAddthhbHBwUGxz5ZVXsunTp7PHHnuMvfjii+y0005jp59+uvh7JpNh8+bNY+eccw57+eWX2SOPPMImTZrEVqxYIbZ58803WX19PbvmmmvY5s2b2c9+9jMWDofZ6tWrxTbj/Xvxpz/9if35z39mb7zxBtuyZQv7xje+waLRKNu0aRNjjI5zuXjhhRfYzJkz2fHHH8+uvvpq8Xs63v5www03sGOPPZZ1d3eLf3v37hV/r7bjXLOBySmnnMKWLVsmfs5ms2zKlCls5cqVFdyr4GIMTHK5HOvs7GQ//OEPxe8OHTrE4vE4u/vuuxljjG3evJkBYH//+9/FNo8++ihTFI
W9++67jDHGfvGLX7AJEyawZDIptvna177G5s6dK37+1Kc+xT70oQ/p9ufUU09l//Iv/+LrewwKvb29DAB78sknGWPqcY1Go+y+++4T27z22msMAHv22WcZY2oQGQqFWE9Pj9jm9ttvZ83NzeLYfvWrX2XHHnus7rUuvPBCtnjxYvFzLX4vJkyYwH7961/TcS4TAwMDbPbs2WzNmjXsjDPOEIEJHW//uOGGG9j8+fNN/1aNx7kmUzmpVAovvfQSzjnnHPG7UCiEc845B88++2wF96x62LFjB3p6enTHsKWlBaeeeqo4hs8++yxaW1tx0kkniW3OOecchEIhPP/882Kb97///YjFYmKbxYsXY8uWLTh48KDYRn4dvs14/az6+voAAG1tbQCAl156Cel0WncMjjrqKBx22GG6Y33ccceho6NDbLN48WL09/fj1VdfFdvYHcda+15ks1ncc889GBoawsKFC+k4l4lly5bhQx/6UMExoePtL1u3bsWUKVNw+OGH4+KLL8bOnTsBVOdxrsnAZN++fchms7oPAQA6OjrQ09NTob2qLvhxsjuGPT09aG9v1/09Eomgra1Nt43Zc8ivYbXNePyscrkcvvzlL2PRokWYN28eAPX9x2IxtLa26rY1Hmuvx7G/vx8jIyM1873YuHEjGhsbEY/HceWVV+LBBx/EMcccQ8e5DNxzzz34xz/+gZUrVxb8jY63f5x66qlYtWoVVq9ejdtvvx07duzA+973PgwMDFTlca6K6cIEUSssW7YMmzZtwlNPPVXpXRm3zJ07F+vXr0dfXx/uv/9+XHrppXjyyScrvVvjjl27duHqq6/GmjVrkEgkKr0745olS5aI/z/++ONx6qmnYsaMGfjDH/6Aurq6Cu6ZN2pSMZk0aRLC4XCBK3nPnj3o7Oys0F5VF/w42R3Dzs5O9Pb26v6eyWRw4MAB3TZmzyG/htU24+2zWr58OR5++GE88cQTmDZtmvh9Z2cnUqkUDh06pNveeKy9Hsfm5mbU1dXVzPciFovhyCOPxIknnoiVK1di/vz5uPXWW+k4+8xLL72E3t5evOc970EkEkEkEsGTTz6Jn/70p4hEIujo6KDjXSZaW1sxZ84cbNu2rSrP65oMTGKxGE488UQ89thj4ne5XA6PPfYYFi5cWME9qx5mzZqFzs5O3THs7+/H888/L47hwoULcejQIbz00ktim8cffxy5XA6nnnqq2GbdunVIp9NimzVr1mDu3LmYMGGC2EZ+Hb7NePmsGGNYvnw5HnzwQTz++OOYNWuW7u8nnngiotGo7hhs2bIFO3fu1B3rjRs36gLBNWvWoLm5Gcccc4zYxu441ur3IpfLIZlM0nH2mbPPPhsbN27E+vXrxb+TTjoJF198sfh/Ot7lYXBwENu3b0dXV1d1nteurLLjiHvuuYfF43G2atUqtnnzZvb5z3+etba26lzJtc7AwAB7+eWX2csvv8wAsB//+Mfs5ZdfZm+//TZjTC0Xbm1tZX/84x/Zhg0b2Ec/+lHTcuEFCxaw559/nj311FNs9uzZunLhQ4cOsY6ODvaZz3yGbdq0id1zzz2svr6+oFw4EomwW265hb322mvshhtuGFflwl/4whdYS0sLW7t2ra7cb3h4WGxz5ZVXssMOO4w9/vjj7MUXX2QLFy5kCxcuFH/n5X7nnXceW79+PVu9ejWbPHmyabnftddey1577TV22223mZb7jefvxde//nX25JNPsh07drANGzawr3/960xRFPbXv/6VMUbHudzIVTmM0fH2i3/7t39ja9euZTt27GBPP/00O+ecc9ikSZNYb28vY6z6jnPNBiaMMfazn/2MHXbYYSwWi7FTTjmFPffcc5XepUDxxBNPMAAF/y699FLGmFoyfN1117GOjg4Wj8fZ2WefzbZs2aJ7jv3797NPf/rTrLGxkTU3N7PPfvazbGBgQLfNK6+8wt773veyeDzOpk6dyr73ve8V7Msf/vAHNmfOHB
aLxdixxx7L/vznP5ftfY81ZscYALvzzjvFNiMjI+yLX/wimzBhAquvr2cf+9jHWHd3t+553nrrLbZkyRJWV1fHJk2axP7t3/6NpdNp3TZPPPEEO+GEE1gsFmOHH3647jU44/l7cfnll7MZM2awWCzGJk+ezM4++2wRlDBGx7ncGAMTOt7+cOGFF7Kuri4Wi8XY1KlT2YUXXsi2bdsm/l5tx1lhjDF3GgtBEARBEER5qEmPCUEQBEEQwYQCE4IgCIIgAgMFJgRBEARBBAYKTAiCIAiCCAwUmBAEQRAEERgoMCEIgiAIIjBQYEIQBEEQRGCgwIQgCIIgiMBAgQlBEARBEIGBAhOCIAiCIAIDBSYEQRAEQQQGCkwIgiAIgggM/z+NABCO/D7moQAAAABJRU5ErkJggg==\n"
          },
          "metadata": {}
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Summary\n",
        "In this example, we illustrated Pearl's capability of dealing with dynamic action spaces, standard policy learning, history summarization and intelligent exploration, all in a single agent."
      ],
      "metadata": {
        "id": "kUDFd8HUqP20"
      }
    }
  ]
}