{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "SqdSHcZaE66V"
      },
      "source": [
        "\n",
        "Licensed under the Apache License, Version 2.0\n",
        "\n",
        "Enable TPUs for the notebook: Navigate to Edit→Notebook Settings and\n",
        "select TPU from the Hardware Accelerator drop-down.\n",
        "\n",
        "Replace the value for BUCKET and PROJECT_ID with your Google Cloud Bucket and project. This is necessary to write files. Make sure you have read and write access to this bucket. Then run the cells consecutively."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "5gPOJvQ3-V28"
      },
      "outputs": [],
      "source": [
        "BUCKET = ''\n",
        "PROJECT_ID = ''"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "oI1UdUNMNLuR"
      },
      "outputs": [],
      "source": [
        "\"\"\"Runs TPU Saint-Venant Simulations.\"\"\"\n",
        "\n",
        "TPU_WORKER = ''  # pylint: disable=g-statement-before-imports\n",
        "PUBLIC_COLAB = True  # pylint: disable=g-statement-before-imports\n",
        "\n",
        "if PUBLIC_COLAB:\n",
        "  # Authenticate to access Google Cloud Storage.\n",
        "  from google.colab import auth  # pylint: disable=g-import-not-at-top\n",
        "  auth.authenticate_user()\n",
        "# Access DEM from Google Cloud Storage and use GCS for runtime files.\n",
        "\n",
        "import abc\n",
        "import collections\n",
        "import enum\n",
        "import functools\n",
        "import io\n",
        "from matplotlib.colors import LightSource\n",
        "import matplotlib.pyplot as plt\n",
        "import os\n",
        "from skimage import transform\n",
        "import tempfile\n",
        "import time\n",
        "from typing import Any, Callable, Iterable, Sequence, List, Dict, Mapping, MutableMapping, MutableSequence, Optional, Text, Tuple, Union\n",
        "\n",
        "import attr\n",
        "import numpy as np\n",
        "import tensorflow.compat.v1 as tf\n",
        "tf.disable_v2_behavior()\n",
        "\n",
        "if PUBLIC_COLAB:\n",
        "  from google.cloud import storage  # pylint: disable=g-import-not-at-top\n",
        "  import ipywidgets as widgets  # pylint: disable=g-import-not-at-top\n",
        "  from osgeo import gdal  # pylint: disable=g-import-not-at-top\n",
        "\n",
        "if PUBLIC_COLAB:\n",
        "  from tensorflow.python.ops import gen_resource_variable_ops  # pylint: disable=g-import-not-at-top\n",
        "  from tensorflow.python.ops import inplace_ops  # pylint: disable=g-import-not-at-top\n",
        "  from tensorflow.python.tpu.ops import tpu_ops  # pylint: disable=g-import-not-at-top\n",
        "\n",
        "# Floating point tolerance for timesteps.\n",
        "TIMESTEP_EPS = 1e-5\n",
        "# Floating point tolerance used in Saint-Venant step function.\n",
        "SAINT_VENANT_EPS = 1e-15\n",
        "_G = 9.8\n",
        "# Manning coefficient defaults. NB: The simulation can be more accurate if a\n",
        "# river mask is provided which defines th river region of the DEM. In that case,\n",
        "# `MANNING_COEFF_FLOODPLAIN` is used in the non-river region (i.e., the\n",
        "# floodplain region). In the Conawy example, we take a simpler approach and use\n",
        "# `np.ones()` for the river mask, so `MANNING_COEFF_FLOODPLAIN` is unused.\n",
        "MANNING_COEFF_FLOODPLAIN = 0.05\n",
        "MANNING_COEFF_RIVER = 0.02\n",
        "\n",
        "# The dynamic states are:\n",
        "#   h: the absolute height\n",
        "#   q_x: The water flow in the x direction.\n",
        "#   q_y: The water flow in the y direction.\n",
        "#   t: The current simulation time.\n",
        "#   dt: The timestep size. Note that `dt` is held constant in this simulation.\n",
        "_H = 'h'\n",
        "_Q_X = 'q_x'\n",
        "_Q_Y = 'q_y'\n",
        "_T = 't'\n",
        "_DT = 'dt'\n",
        "\n",
        "INIT_STATE_KEYS = [_H, _Q_X, _Q_Y]\n",
        "STATE_KEYS = INIT_STATE_KEYS + [_T, _DT]\n",
        "\n",
        "# The static states are:\n",
        "#   m: The Manning coefficient matrix.\n",
        "#   e: The water bed elevation.\n",
        "# We also specify the boundaries using {L,R,T,B}_BOUNDARIES.\n",
        "_M = 'm'\n",
        "_E = 'e'\n",
        "\n",
        "_I_L_BOUNDARY = 'i_left_boundary'\n",
        "_I_R_BOUNDARY = 'i_right_boundary'\n",
        "_I_T_BOUNDARY = 'i_top_boundary'\n",
        "_I_B_BOUNDARY = 'i_bottom_boundary'\n",
        "\n",
        "_O_L_BOUNDARY = 'o_left_boundary'\n",
        "_O_R_BOUNDARY = 'o_right_boundary'\n",
        "_O_T_BOUNDARY = 'o_top_boundary'\n",
        "_O_B_BOUNDARY = 'o_bottom_boundary'\n",
        "\n",
        "_M_L_BOUNDARY = 'm_left_boundary'\n",
        "_M_R_BOUNDARY = 'm_right_boundary'\n",
        "_M_T_BOUNDARY = 'm_top_boundary'\n",
        "_M_B_BOUNDARY = 'm_bottom_boundary'\n",
        "\n",
        "L_BOUNDARIES = (_I_L_BOUNDARY, _M_L_BOUNDARY, _O_L_BOUNDARY)\n",
        "R_BOUNDARIES = (_I_R_BOUNDARY, _M_R_BOUNDARY, _O_R_BOUNDARY)\n",
        "T_BOUNDARIES = (_I_T_BOUNDARY, _M_T_BOUNDARY, _O_T_BOUNDARY)\n",
        "B_BOUNDARIES = (_I_B_BOUNDARY, _M_B_BOUNDARY, _O_B_BOUNDARY)\n",
        "\n",
        "ADDITIONAL_STATE_KEYS = [\n",
        "    _M, _E, *L_BOUNDARIES, *R_BOUNDARIES, *T_BOUNDARIES, *B_BOUNDARIES\n",
        "]\n",
        "SER_EXTENSION = 'ser'\n",
        "TensorMap = Mapping[Text, tf.Tensor]\n",
        "TensorOrArray = Union[tf.Tensor, np.ndarray]\n",
        "ThreeIntTuple = Tuple[int, int, int]\n",
        "ExtractSubgridFn = Callable[[TensorOrArray,\n",
        "                             'GridParametrization',\n",
        "                             ThreeIntTuple], TensorOrArray]\n",
        "MutableTensorMap = MutableMapping[Text, tf.Tensor]\n",
        "FnOutput = Tuple[List[tf.Tensor], MutableTensorMap]\n",
        "StepOutput = List[List[tf.Tensor]]\n",
        "StepBuilder = Callable[[], StepOutput]\n",
        "\n",
        "KeyedInitialValues = Mapping[Text, Union[int, float, complex, Text, bool,\n",
        "                                         np.ndarray, tf.Tensor]]\n",
        "MutableKeyedInitialValues = MutableMapping[Text,\n",
        "                                           Union[int, float, complex, Text,\n",
        "                                                 bool, np.ndarray, tf.Tensor]]\n",
        "InputFiles = collections.namedtuple('InputFiles', ['file_name', 'dtype'])\n",
        "\n",
        "\n",
        "class BoundarySide(enum.Enum):\n",
        "  \"\"\"A representation of boundary sides.\"\"\"\n",
        "  LEFT = 0\n",
        "  RIGHT = 1\n",
        "  TOP = 2\n",
        "  BOTTOM = 3\n",
        "\n",
        "\n",
        "class SideType(enum.Enum):\n",
        "  \"\"\"A class defining the type of axis side.\"\"\"\n",
        "  LOW = 1  # The low side of an axis.\n",
        "  HIGH = 2  # The high side of an axis.\n",
        "\n",
        "\n",
        "class BCType(enum.Enum):\n",
        "  \"\"\"A class defining the type of boundary conditions.\n",
        "\n",
        "  NO_TOUCH: Preserves the boundary at its current value: useful when the grid is\n",
        "    staggered. Namely, if certain state variables are defined and computed only\n",
        "    for interior grid points, their extremal values are already correct and do\n",
        "    not need to be re-calculated/have a boundary condition imposed outside of\n",
        "    the computation loop.\n",
        "  ADDITIVE: Similar to a Neumann condition, but adds the supplied boundary\n",
        "    values to the boundary plane itself, as opposed to the plane +/- 1.\n",
        "  \"\"\"\n",
        "  NO_TOUCH = 1  # Preserves the boundary at its current value.\n",
        "  ADDITIVE = 2  # Adds the given values at the boundary.\n",
        "\n",
        "\n",
        "class BoundaryCondition:\n",
        "  \"\"\"The base class for boundary conditions.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               boundary_side: BoundarySide,\n",
        "               fraction_start: float,\n",
        "               fraction_end: float,\n",
        "               left_padding: int,\n",
        "               top_padding: int,\n",
        "               unpadded_shape: Sequence[int],\n",
        "               slope: float):\n",
        "    self.boundary_side = boundary_side\n",
        "    self.slope = slope\n",
        "    self._top_bottom = boundary_side in [BoundarySide.TOP, BoundarySide.BOTTOM]\n",
        "    self.unpadded_side_length = unpadded_shape[int(self._top_bottom)]\n",
        "    self._unpadded_index_start = int(round(\n",
        "        fraction_start * (self.unpadded_side_length - 1)))\n",
        "    self._unpadded_index_end = 1 + int(round(\n",
        "        fraction_end * (self.unpadded_side_length - 1)))\n",
        "    self._unpadded_xsec_slice = self._boundary_slice()\n",
        "    self.padded_xsec_slice = self._boundary_slice(left_padding, top_padding)\n",
        "    self.padded_full_slice = self._padded_full_boundary_slice(left_padding,\n",
        "                                                              top_padding)\n",
        "\n",
        "    def add_padding_fn(xsec, ones=False):\n",
        "      return self._add_padding(xsec, ones, left_padding, top_padding)\n",
        "    self.add_padding = add_padding_fn\n",
        "\n",
        "  def _add_padding(self, xsec: np.ndarray, ones: bool,\n",
        "                   left_padding: int, top_padding: int) -\u003e np.ndarray:\n",
        "    padding = left_padding if self._top_bottom else top_padding\n",
        "    zeros_ones = np.ones if ones else np.zeros\n",
        "    one_d_padded = np.concatenate([\n",
        "        zeros_ones(padding + self._unpadded_index_start),\n",
        "        np.squeeze(xsec),\n",
        "        zeros_ones(self.unpadded_side_length - self._unpadded_index_end)])\n",
        "    axis = int(not self._top_bottom)\n",
        "    return np.expand_dims(one_d_padded, axis).astype(np.float32)\n",
        "\n",
        "  def _boundary_slice(self, left_padding: int = 0, top_padding: int = 0):\n",
        "    \"\"\"Returns a 1D boundary slice.\"\"\"\n",
        "    padding = padding = left_padding if self._top_bottom else top_padding\n",
        "    maybe_padded_xsec_slice = slice(padding + self._unpadded_index_start,\n",
        "                                    padding + self._unpadded_index_end)\n",
        "\n",
        "    if self.boundary_side == BoundarySide.LEFT:\n",
        "      return (maybe_padded_xsec_slice, left_padding)\n",
        "    elif self.boundary_side == BoundarySide.RIGHT:\n",
        "      return (maybe_padded_xsec_slice, -1)\n",
        "    elif self.boundary_side == BoundarySide.TOP:\n",
        "      return (top_padding, maybe_padded_xsec_slice)\n",
        "    elif self.boundary_side == BoundarySide.BOTTOM:\n",
        "      return (-1, maybe_padded_xsec_slice)\n",
        "\n",
        "  def _padded_full_boundary_slice(\n",
        "      self, left_padding: int = 0, top_padding: int = 0):\n",
        "    if self.boundary_side == BoundarySide.LEFT:\n",
        "      return (slice(None), slice(left_padding, left_padding + 1))\n",
        "    elif self.boundary_side == BoundarySide.RIGHT:\n",
        "      return (slice(None), slice(-1, None))\n",
        "    elif self.boundary_side == BoundarySide.TOP:\n",
        "      return (slice(top_padding, top_padding + 1), slice(None))\n",
        "    elif self.boundary_side == BoundarySide.BOTTOM:\n",
        "      return (slice(-1, None), slice(None))\n",
        "\n",
        "\n",
        "class InflowBoundaryCondition(BoundaryCondition):\n",
        "  \"\"\"An inflow boundary condition class.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               boundary_side: BoundarySide,\n",
        "               fraction_start: float,\n",
        "               fraction_end: float,\n",
        "               left_padding: int,\n",
        "               top_padding: int,\n",
        "               dt: float,\n",
        "               resolution: float,\n",
        "               slope: float,\n",
        "               flux: float,\n",
        "               unpadded_dem: np.ndarray,\n",
        "               unpadded_manning_matrix: np.ndarray):\n",
        "    super(InflowBoundaryCondition, self).__init__(boundary_side,\n",
        "                                                  fraction_start,\n",
        "                                                  fraction_end,\n",
        "                                                  left_padding,\n",
        "                                                  top_padding,\n",
        "                                                  unpadded_dem.shape,\n",
        "                                                  slope)\n",
        "    dem_xsec = unpadded_dem[self._unpadded_xsec_slice]\n",
        "    manning_xsec = unpadded_manning_matrix[self._unpadded_xsec_slice]\n",
        "    midpoint = (self._unpadded_index_end - 1 - self._unpadded_index_start) // 2\n",
        "    depths = approx_normal_depth_inverse(\n",
        "        flux, dem_xsec, resolution, slope, manning_xsec,\n",
        "        cross_section_source_location=midpoint)\n",
        "    flux_xsec = approx_normal_depth_flux(depths, resolution, slope,\n",
        "                                         manning_xsec)\n",
        "    # Static value for I_*_BOUNDARY fields.\n",
        "    self.padded_inflow_flux = self.add_padding(flux_xsec) * dt / resolution**2\n",
        "\n",
        "\n",
        "class OutflowBoundaryCondition(BoundaryCondition):\n",
        "  \"\"\"An outflow boundary condition class.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               boundary_side: BoundarySide,\n",
        "               fraction_start: float,\n",
        "               fraction_end: float,\n",
        "               left_padding: int,\n",
        "               top_padding: int,\n",
        "               unpadded_manning_matrix: np.ndarray,\n",
        "               slope: float):\n",
        "    super(OutflowBoundaryCondition, self).__init__(\n",
        "        boundary_side,\n",
        "        fraction_start,\n",
        "        fraction_end,\n",
        "        left_padding,\n",
        "        top_padding,\n",
        "        unpadded_manning_matrix.shape,\n",
        "        slope)\n",
        "    xsec_ones = np.ones(self._unpadded_index_end - self._unpadded_index_start,\n",
        "                        bool)\n",
        "    # Static value for O_*_BOUNDARY fields.\n",
        "    self.padded_outflow_mask = self.add_padding(xsec_ones)\n",
        "    # Static value for M_*_BOUNDARY fields.\n",
        "    # It is used as a denominator, so we set normally zero areas to ones.\n",
        "    self.full_manning_slice = self.add_padding(\n",
        "        unpadded_manning_matrix[self._unpadded_xsec_slice],\n",
        "        ones=True)\n",
        "\n",
        "\n",
        "def _do_exchange(replicas, replica_dim, high_halo_for_predecessor,\n",
        "                 low_halo_for_successor):\n",
        "  \"\"\"Does a halo exchange with predecessors/successors.\"\"\"\n",
        "  # Special case for single replica grid width along the replica_dim.\n",
        "  if replicas.shape[replica_dim] == 1:\n",
        "    return [tf.zeros_like(high_halo_for_predecessor)] * 2\n",
        "\n",
        "  # Compute the predecessor and successor replicas in `replica_dim`.\n",
        "  padded_replicas = pad_in_dim(\n",
        "      replicas, low_pad=1, high_pad=1, value=-1, axis=replica_dim)\n",
        "  predecessors = np.stack(\n",
        "      (replicas, slice_in_dim(\n",
        "          padded_replicas, start=0, end=-2, axis=replica_dim)),\n",
        "      axis=-1)\n",
        "  predecessors = [(a, b) for (a, b) in predecessors.reshape((-1, 2)) if b != -1]\n",
        "  high_halo = tpu_ops.collective_permute(high_halo_for_predecessor,\n",
        "                                         predecessors, 'high')\n",
        "  successors = np.stack(\n",
        "      (replicas,\n",
        "       slice_in_dim(padded_replicas, start=2, end=None, axis=replica_dim)),\n",
        "      axis=-1)\n",
        "  successors = [(a, b) for (a, b) in successors.reshape((-1, 2)) if b != -1]\n",
        "\n",
        "  low_halo = tpu_ops.collective_permute(low_halo_for_successor, successors,\n",
        "                                        'low')\n",
        "\n",
        "  return high_halo, low_halo\n",
        "\n",
        "\n",
        "def _replace_halo(plane, bc, dim):\n",
        "  \"\"\"Return a halo derived from boundary conditions.\n",
        "\n",
        "  This should only be called if all the following are true:\n",
        "    * `bc` is not `None`\n",
        "    * the replica is at the end of the dimension on the specified side in the\n",
        "      computational shape\n",
        "\n",
        "  Args:\n",
        "    plane: A 2D tensor. The plane from the subgrid relevant in applying boundary\n",
        "      conditions (and to get the shape for Dirichlet boundary conditions).\n",
        "    bc: The boundary conditions specification of the form [type, value]. See\n",
        "      `inplace_halo_exchange` for full details about boundary condition\n",
        "      specifications.\n",
        "    dim: The dimension (aka axis), 0, 1 or 2 for x, y or z, respectively.\n",
        "\n",
        "  Returns:\n",
        "    The border which is derived from the provided boundary conditions.\n",
        "\n",
        "  Raises:\n",
        "    ValueError if parameters have incorrect values.\n",
        "  \"\"\"\n",
        "  if not isinstance(bc, collections.abc.Sequence):\n",
        "    raise ValueError('`bc` must be a sequence `(type, value)`.')\n",
        "\n",
        "  bc_type, bc_value = bc\n",
        "\n",
        "  # bc_value could be a list of tensors of shape (1, ny) or (nx, 1). If so,\n",
        "  # convert to a tensor of shape (nz, ny) or (nx, nz). nx, ny, nz are the number\n",
        "  # of points along each axis of a (sub)grid. After this line, bc_value is\n",
        "  # either a float or a 2D tensor.\n",
        "  bc_value = (\n",
        "      tf.concat(bc_value, dim) if isinstance(bc_value, list) else bc_value)\n",
        "\n",
        "  def additive_value():\n",
        "    return plane + bc_value\n",
        "\n",
        "  if bc_type == BCType.ADDITIVE:\n",
        "    return additive_value()\n",
        "  else:\n",
        "    raise ValueError('Unknown boundary condition type: {}.'.format(bc_type))\n",
        "\n",
        "\n",
        "def _sliced_tensor_fn(tensor, slices):\n",
        "  return lambda: tensor[tuple(slices)]\n",
        "\n",
        "\n",
        "def _halo_from_self_dim_0_1(z_list, dim, plane_to_exchange, is_first,\n",
        "                            left_or_top_padding):\n",
        "  \"\"\"Returns halos from the z_list given the dimension and plane to exchange.\"\"\"\n",
        "  if dim not in [0, 1]:\n",
        "    raise ValueError('dim not in [0, 1]: {}'.format(dim))\n",
        "  low_slices, low_slices_padded, high_slices = ([slice(None)] * 2,\n",
        "                                                [slice(None)] * 2,\n",
        "                                                [slice(None)] * 2)\n",
        "  low_slices[dim] = slice(plane_to_exchange, plane_to_exchange + 1)\n",
        "  low_slices_padded[dim] = slice(plane_to_exchange + left_or_top_padding,\n",
        "                                 plane_to_exchange + left_or_top_padding + 1)\n",
        "  shape = z_list[0].shape.as_list()[dim]\n",
        "  high_slices[dim] = slice(shape - (plane_to_exchange + 1),\n",
        "                           shape - plane_to_exchange)\n",
        "\n",
        "  low_halo_from_self, high_halo_from_self = [], []\n",
        "  for tensor in z_list:\n",
        "    low_halo = tf.cond(is_first, _sliced_tensor_fn(tensor, low_slices_padded),\n",
        "                       _sliced_tensor_fn(tensor, low_slices))\n",
        "    low_halo_from_self.append(low_halo)\n",
        "    high_halo_from_self.append(tensor[high_slices])\n",
        "  # Convert to 2D tensor: a z-y or x-z plane.\n",
        "  low_halo_from_self = _convert_zlist_to_2d_tensor(low_halo_from_self, dim)\n",
        "  high_halo_from_self = _convert_zlist_to_2d_tensor(high_halo_from_self, dim)\n",
        "  return low_halo_from_self, high_halo_from_self\n",
        "\n",
        "\n",
        "def _convert_zlist_to_2d_tensor(list_of_tensors, dim):\n",
        "  return tf.concat(list_of_tensors, dim)\n",
        "\n",
        "\n",
        "def _convert_2d_tensor_to_zlist(tensor, dim):\n",
        "  nz = tensor.shape.as_list()[dim]\n",
        "  return tf.split(tensor, nz, dim)\n",
        "\n",
        "\n",
        "def _alias_inplace_update(x, plane, low):\n",
        "  return lambda: inplace_ops.alias_inplace_update(x, plane, tf.squeeze(low))\n",
        "\n",
        "\n",
        "def _inplace_halo_exchange_1d(z_list, dim, replica_id, replicas, replica_dim,\n",
        "                              bc_low, bc_high, left_or_top_padding):\n",
        "  \"\"\"Performs halo exchange and assigns values to points in a boundary plane.\n",
        "\n",
        "  This function exchanges and sets a single plane in the boundary or halo\n",
        "  region. It needs to be called for each plane in the boundary or halo region,\n",
        "  in order, from the innermost to outermost.\n",
        "\n",
        "  Args:\n",
        "    z_list: A list of length nz of tensors of shape (nx, ny), where nx, ny and\n",
        "      nz are the number of points along the axes of a (sub)grid.\n",
        "    dim: The dimension of `z_list` in which halo exchange will be performed.\n",
        "      Must be one of 0, 1 or 2 for x, y or z, respectively.\n",
        "    replica_id: The replica id.\n",
        "    replicas: A numpy array of replicas.\n",
        "    replica_dim: The dimension of `replicas` along which the halo exchange is to\n",
        "      be performed.\n",
        "    bc_low: The boundary condition for the low side of the axis. This is either\n",
        "      `None` or of the form `(bc_type, bc_value)` where `bc_value` represents a\n",
        "      single 2D plane and is either a 2D tensor of shape (nx, xy) or a sequence\n",
        "      of length nz of tensors of shape (1, ny) or (nx, 1). See\n",
        "      `inplace_halo_exchange` for more details about boundary condition\n",
        "      specifications.\n",
        "    bc_high: The boundary condition for the high side of the axis. See `bc_low`.\n",
        "    left_or_top_padding: The amount of left or top padding, where left and top\n",
        "      refer to the 2d plane formed by dims 0 and 1. This is used only if `dim`\n",
        "      is 0 or 1.\n",
        "\n",
        "  Returns:\n",
        "    The `z_list` with its `plane` boundary on the low side and corresponding\n",
        "    plane on the high side in the `dim` dimension modified by the halos of its\n",
        "    neighbors and/or boundary conditions.\n",
        "\n",
        "  Raises:\n",
        "    ValueError if parameters are incorrect.\n",
        "  \"\"\"\n",
        "  assert dim in (0, 1, 2)\n",
        "\n",
        "  tf.logging.debug('dim: %d, replica_dim: %d, bc_low: %s, bc_high: %s', dim,\n",
        "                   replica_dim, bc_low, bc_high)\n",
        "\n",
        "  is_first = is_first_replica(replica_id, replicas, replica_dim)\n",
        "  is_last = is_last_replica(replica_id, replicas, replica_dim)\n",
        "\n",
        "  def maybe_replace_halo_from_boundary_conditions(side):\n",
        "    \"\"\"Maybe return 2D plane from boundary conditions rather than neighbor.\"\"\"\n",
        "\n",
        "    def low_from_bc():\n",
        "      if bc_low[0] == BCType.NO_TOUCH:\n",
        "        return low_plane_for_outermost_slice\n",
        "      else:  # BCType.ADDITIVE\n",
        "        return _replace_halo(low_plane_for_outermost_slice, bc_low, dim)\n",
        "\n",
        "    def high_from_bc():\n",
        "      if bc_high[0] == BCType.NO_TOUCH:\n",
        "        return high_plane_for_outermost_slice\n",
        "      else:  # BCType.ADDITIVE\n",
        "        return _replace_halo(high_plane_for_outermost_slice, bc_high, dim)\n",
        "\n",
        "    if side == SideType.LOW:\n",
        "      # `tf.cond` is potentially expensive as it evaluates the input of both\n",
        "      # branches. The `if/else` statement can optimize performance by\n",
        "      # eliminating an unnecessary `tf.cond` from the graph.\n",
        "      return tf.cond(is_first, low_from_bc, lambda: low_halo_from_neighbor)\n",
        "    else:  # side = HIGH\n",
        "      return tf.cond(is_last, high_from_bc, lambda: high_halo_from_neighbor)\n",
        "\n",
        "  plane_to_exchange = 1\n",
        "\n",
        "  # dim in (0, 1).\n",
        "  low_halo_from_self, high_halo_from_self = _halo_from_self_dim_0_1(\n",
        "      z_list, dim, plane_to_exchange, is_first, left_or_top_padding)\n",
        "  high_halo_from_neighbor, low_halo_from_neighbor = _do_exchange(\n",
        "      replicas, replica_dim, low_halo_from_self, high_halo_from_self)\n",
        "\n",
        "  low_plane_for_outermost_slice, high_plane_for_outermost_slice = (\n",
        "      _halo_from_self_dim_0_1(z_list, dim, plane_to_exchange - 1, is_first,\n",
        "                              left_or_top_padding))\n",
        "\n",
        "  low_edge = maybe_replace_halo_from_boundary_conditions(SideType.LOW)\n",
        "  high_edge = maybe_replace_halo_from_boundary_conditions(SideType.HIGH)\n",
        "\n",
        "  high_edges = _convert_2d_tensor_to_zlist(high_edge, dim)\n",
        "  low_edges = _convert_2d_tensor_to_zlist(low_edge, dim)\n",
        "  result_list = []\n",
        "\n",
        "  plane_padded = left_or_top_padding\n",
        "  for x, high, low in zip(z_list, high_edges, low_edges):\n",
        "    if dim == 0:\n",
        "      x = inplace_ops.alias_inplace_update(\n",
        "          tf.cond(is_first, _alias_inplace_update(x, plane_padded, low),\n",
        "                  _alias_inplace_update(x, 0, low)),\n",
        "          x.shape.as_list()[0] - 1, tf.squeeze(high))\n",
        "    else:\n",
        "      x = tf.transpose(\n",
        "          inplace_ops.alias_inplace_update(\n",
        "              tf.cond(\n",
        "                  is_first,\n",
        "                  _alias_inplace_update(\n",
        "                      tf.transpose(x, [1, 0]), plane_padded, low),\n",
        "                  _alias_inplace_update(tf.transpose(x, [1, 0]), 0, low)),\n",
        "              x.shape.as_list()[1] - 1, tf.squeeze(high)), [1, 0])\n",
        "    result_list.append(x)\n",
        "\n",
        "  return result_list\n",
        "\n",
        "\n",
        "def inplace_halo_exchange(z_list: List[tf.Tensor],\n",
        "                          dims: Sequence[int],\n",
        "                          replica_id: tf.Tensor,\n",
        "                          replicas: np.ndarray,\n",
        "                          replica_dims: Sequence[int],\n",
        "                          boundary_conditions=None,\n",
        "                          left_padding: int = 0,\n",
        "                          top_padding: int = 0) -\u003e List[tf.Tensor]:\n",
        "  \"\"\"Performs a N-dimensional halo exchange.\n",
        "\n",
        "  Args:\n",
        "    z_list: A list of length nz of tensors of shape `(nx, ny)`, where `nx`,\n",
        "      `ny` and `nz` are the number of points along the axes of a (sub)grid.\n",
        "    dims: The dimensions or axes along which halo exchange will be performed.\n",
        "      This is a sequence containing some or all of 0, 1, 2 (corresponding to\n",
        "      `x`, `y`, `z`).\n",
        "    replica_id: The replica id.\n",
        "    replicas: A numpy array of replicas.\n",
        "    replica_dims: The dimensions of `replicas` along which halo exchange will be\n",
        "      performed.\n",
        "    boundary_conditions: The boundary conditions to apply. If `None`, the\n",
        "      boundary will be set to 0. See more info about boundary conditions below.\n",
        "    left_padding: The amount of left padding, referring the 2d plane formed by\n",
        "      dims 0 and 1 (left is dim 1).\n",
        "    top_padding: The amount of top padding, referring to the 2d plane formed by\n",
        "      dims 0 and 1 (top is dim 0).  If boundary_conditions is not `None` it must\n",
        "      have the form  [ [(`BCType` for dim 0 lower bound, value for dim 0 lower\n",
        "      bound), (`BCType` for dim 0 upper bound, value for dim 0 upper bound)],\n",
        "      [(`BCType` for dim1 lower bound, value for dim 1 lower bound), (`BCType`\n",
        "      for dim1 upper bound, value for dim 1 upper bound)], ... ].  Note that the\n",
        "      innermost sequence can be `None`, in which case the corresponding boundary\n",
        "      will be set to zero. The value can be a float, or can be a sequence of\n",
        "      planes of length 1. An element of this sequence is a tensor if\n",
        "      dim = 2 (z-axis) and a sequence if dim is 0 or 1. A z-axis boundary plane\n",
        "      is specified by a 2D tensor of shape `(nx, ny)`. A 2D x- or y-axis\n",
        "      boundary plane is specified by a list of length nz of tensors of shape\n",
        "      (1, `ny`) or (`nx`, 1), respectively. The order of planes in the sequence\n",
        "      is from low to high along the dimension `dim`. This means for a low\n",
        "      boundary the innermost plane is the last element in the\n",
        "      sequence. For a high boundary the innermost plane is the 0th element.\n",
        "      Halo exchange and applying boundary conditions is done one plane at a time\n",
        "      for performance reasons.\n",
        "\n",
        "  Returns:\n",
        "    The incoming `z_list` modified to include the result of halo exchange and\n",
        "      taking boundary conditions into account.\n",
        "  \"\"\"\n",
        "  boundary_conditions = boundary_conditions or [[None, None]] * len(dims)\n",
        "\n",
        "  assert len(dims) == len(replica_dims)\n",
        "  assert len(dims) == len(boundary_conditions)\n",
        "\n",
        "  for (dim, replica_dim, bc) in zip(dims, replica_dims, boundary_conditions):\n",
        "    bc_low, bc_high = bc if bc else (None, None)\n",
        "\n",
        "    left_or_top_padding = (top_padding, left_padding, 0)[dim]\n",
        "\n",
        "    # Select the relevant planes from the sequence of bc planes.\n",
        "    # Create a mutable copy of the bc passed in.\n",
        "    bc_low_plane = list(bc_low)\n",
        "    # If the boundary condition is a list of planes select the relevant one.\n",
        "    bc_low_plane[1] = (\n",
        "        bc_low_plane[1]\n",
        "        if isinstance(bc_low_plane[1], float) else bc_low_plane[1][0])\n",
        "    # Create a mutable copy of the bc passed in.\n",
        "    bc_high_plane = list(bc_high)\n",
        "    # If the boundary condition is a list of planes select the relevant one.\n",
        "    bc_high_plane[1] = (\n",
        "        bc_high_plane[1]\n",
        "        if isinstance(bc_high_plane[1], float) else bc_high_plane[1][0])\n",
        "\n",
        "    z_list = _inplace_halo_exchange_1d(z_list, dim, replica_id, replicas,\n",
        "                                       replica_dim, bc_low_plane, bc_high_plane,\n",
        "                                       left_or_top_padding)\n",
        "\n",
        "  return z_list\n",
        "\n",
        "\n",
        "def _get_core_n(n: int) -\u003e Optional[int]:\n",
        "  \"\"\"Returns dimension of grid per core not used for halo exchange.\"\"\"\n",
        "  core_n = n - 2\n",
        "  return core_n if core_n \u003e 0 else None\n",
        "\n",
        "\n",
        "def _get_full_grid(n: Optional[int], l: float) -\u003e tf.Tensor:\n",
        "  \"\"\"The full grid without halos.\n",
        "\n",
        "  Args:\n",
        "    n: The total number of grid points without halos.\n",
        "    l: The total length of the domain.\n",
        "\n",
        "  Returns:\n",
        "    A equidistant grid for the entire computational domain. The first grid point\n",
        "    is 0.\n",
        "  \"\"\"\n",
        "  n_effective = n if n is not None else 1\n",
        "  return tf.linspace(0.0, l, n_effective)\n",
        "\n",
        "\n",
        "def _get_full_grid_size(\n",
        "    n: int,\n",
        "    num_cores: int,\n",
        "    num_boundary_points: int = 1,\n",
        ") -\u003e int:\n",
        "  \"\"\"The full grid size (includes padding, if any).\"\"\"\n",
        "  core_n = _get_core_n(n)\n",
        "  if not core_n:\n",
        "    return 1\n",
        "  return num_cores * core_n + num_boundary_points * 2\n",
        "\n",
        "\n",
        "# An object to hold Grid Parametrization data.\n",
        "GridParametrizationData = collections.namedtuple(\n",
        "    'GridParametrizationData',\n",
        "    [\n",
        "        # Computation shape.\n",
        "        'cx',\n",
        "        'cy',\n",
        "        'cz',\n",
        "        # Length dims.\n",
        "        'lx',\n",
        "        'ly',\n",
        "        'lz',\n",
        "        # Grid size.\n",
        "        'nx',\n",
        "        'ny',\n",
        "        'nz',\n",
        "        # Physical grid size.\n",
        "        'fx_physical',\n",
        "        'fy_physical',\n",
        "        'fz_physical',\n",
        "        # Time delta.\n",
        "        'dt',\n",
        "    ])\n",
        "\n",
        "\n",
        "class GridParametrization:\n",
        "  \"\"\"An object to hold configuration parameters.\n",
        "\n",
        "  For computing dx, dy, dz below, we assume the 'box' boundaries coincide with\n",
        "  the outer most grid points on each end -- the 'halo' grid. This means there\n",
        "  are in total `core * c + 2` points, or `core * c + 1` spacings.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(self, params: GridParametrizationData = None):\n",
        "    \"\"\"Creates an object from `GridParametrizationData`.\"\"\"\n",
        "    self.cx = params.cx\n",
        "    self.cy = params.cy\n",
        "    self.cz = params.cz\n",
        "    self.lx = params.lx\n",
        "    self.ly = params.ly\n",
        "    self.lz = params.lz\n",
        "    self.nx = params.nx\n",
        "    self.ny = params.ny\n",
        "    self.nz = params.nz\n",
        "    self.fx_physical = params.fx_physical\n",
        "    self.fy_physical = params.fy_physical\n",
        "    self.fz_physical = params.fz_physical\n",
        "    self.dt = params.dt\n",
        "    self.num_boundary_points = 1\n",
        "\n",
        "  def __str__(self):\n",
        "    return ('fx_physical: {}, fy_physical: {}, fz_physical: {}, fx: {}, fy: {},'\n",
        "            'fz: {}, nx: {}, ny: {}, nz: {}, core_nx: {}, core_ny: {}, '\n",
        "            'core_nz: {}, lx: {}, ly: {}, lz: {}, dt: {}, dx: {}, dy: {}, '\n",
        "            'dz: {}, computation_shape: {}'.format(\n",
        "                self.fx_physical, self.fy_physical, self.fz_physical, self.fx,\n",
        "                self.fy, self.fz, self.nx, self.ny, self.nz, self.core_nx,\n",
        "                self.core_ny, self.core_nz, self.lx, self.ly, self.lz, self.dt,\n",
        "                self.dx, self.dy, self.dz, self.computation_shape))\n",
        "\n",
        "  @property\n",
        "  def computation_shape(self) -\u003e np.ndarray:\n",
        "    return np.array([self.cx, self.cy, self.cz])\n",
        "\n",
        "  @property\n",
        "  def core_nx(self) -\u003e Optional[int]:\n",
        "    return _get_core_n(self.nx)\n",
        "\n",
        "  @property\n",
        "  def core_ny(self) -\u003e Optional[int]:\n",
        "    return _get_core_n(self.ny)\n",
        "\n",
        "  @property\n",
        "  def core_nz(self) -\u003e Optional[int]:\n",
        "    return _get_core_n(self.nz)\n",
        "\n",
        "  def _get_grid_spacing(self, full_grid_size, length) -\u003e Optional[float]:\n",
        "    \"\"\"Get the grid spacing between nodes in a equidistant mesh.\n",
        "\n",
        "    Args:\n",
        "      full_grid_size: The total number of nodes in the mesh grid.\n",
        "      length: The size of the domain in a particular dimension.\n",
        "\n",
        "    Returns:\n",
        "      The distance between two adjacent nodes.\n",
        "    \"\"\"\n",
        "    full_grid_size -= 2 * self.num_boundary_points\n",
        "    return length / (full_grid_size - 1) if full_grid_size \u003e 1 else None\n",
        "\n",
        "  @property\n",
        "  def dx(self) -\u003e Optional[float]:\n",
        "    return self._get_grid_spacing(self.fx, self.lx)\n",
        "\n",
        "  @property\n",
        "  def dy(self) -\u003e Optional[float]:\n",
        "    return self._get_grid_spacing(self.fy, self.ly)\n",
        "\n",
        "  @property\n",
        "  def dz(self) -\u003e Optional[float]:\n",
        "    return self._get_grid_spacing(self.fz, self.lz)\n",
        "\n",
        "  @property\n",
        "  def fx(self):\n",
        "    \"\"\"The full grid size in dim 0.\"\"\"\n",
        "    return _get_full_grid_size(self.nx, self.cx, self.num_boundary_points)\n",
        "\n",
        "  @property\n",
        "  def fy(self):\n",
        "    \"\"\"The full grid size in dim 1.\"\"\"\n",
        "    return _get_full_grid_size(self.ny, self.cy, self.num_boundary_points)\n",
        "\n",
        "  @property\n",
        "  def fz(self):\n",
        "    \"\"\"The full grid size in dim 2.\"\"\"\n",
        "    return _get_full_grid_size(self.nz, self.cz, self.num_boundary_points)\n",
        "\n",
        "  @property\n",
        "  def x(self) -\u003e tf.Tensor:\n",
        "    \"\"\"The full grid in dim 0.\"\"\"\n",
        "    return _get_full_grid(self.fx, self.lx)\n",
        "\n",
        "  @property\n",
        "  def y(self) -\u003e tf.Tensor:\n",
        "    \"\"\"The full grid in dim 0.\"\"\"\n",
        "    return _get_full_grid(self.fy, self.ly)\n",
        "\n",
        "  @property\n",
        "  def z(self) -\u003e tf.Tensor:\n",
        "    \"\"\"The full grid in dim 0.\"\"\"\n",
        "    return _get_full_grid(self.fz, self.lz)\n",
        "\n",
        "  @property\n",
        "  def num_replicas(self):\n",
        "    return self.cx * self.cy * self.cz\n",
        "\n",
        "\n",
        "def _get_padding(num_cores, divisor, full_physical_size):\n",
        "  \"\"\"Returns amount of padding across all cores.\"\"\"\n",
        "  nc_div = num_cores * divisor\n",
        "  return int(\n",
        "      (nc_div - (full_physical_size + 2 * (num_cores - 1)) % nc_div) % nc_div)\n",
        "\n",
        "\n",
        "class SaintVenantParams(GridParametrization):\n",
        "  \"\"\"A configuration object for Saint-Venant simulation.\"\"\"\n",
        "\n",
        "  def __init__(self, grid_parametrization_data, grid_size_dim_0_divisor,\n",
        "               grid_size_dim_1_divisor, difference_method, num_secs,\n",
        "               num_secs_per_cycle):\n",
        "    super(SaintVenantParams, self).__init__(grid_parametrization_data)\n",
        "    self.manning_coeff_floodplain = MANNING_COEFF_FLOODPLAIN\n",
        "    self.manning_coeff_river = MANNING_COEFF_RIVER\n",
        "    self.nx_divisor = grid_size_dim_0_divisor\n",
        "    self.ny_divisor = grid_size_dim_1_divisor\n",
        "    self.difference_method = difference_method\n",
        "    self.num_secs = num_secs\n",
        "    self.num_secs_per_cycle = num_secs_per_cycle\n",
        "    self.warmup_seconds = 3600\n",
        "\n",
        "  @property\n",
        "  def left_padding(self):\n",
        "    return self.fy - self.fy_physical\n",
        "\n",
        "  @property\n",
        "  def top_padding(self):\n",
        "    return self.fx - self.fx_physical\n",
        "\n",
        "  def _get_grid_spacing(self, core_spacing, num_cores,\n",
        "                        length) -\u003e Optional[float]:\n",
        "    return (None if core_spacing is None\n",
        "            else length / (core_spacing * num_cores + 1))\n",
        "\n",
        "  @property\n",
        "  def dx(self) -\u003e Optional[float]:\n",
        "    return self._get_grid_spacing(self.core_nx, self.cx, self.lx)\n",
        "\n",
        "  @property\n",
        "  def dy(self) -\u003e Optional[float]:\n",
        "    return self._get_grid_spacing(self.core_ny, self.cy, self.ly)\n",
        "\n",
        "  @property\n",
        "  def dz(self) -\u003e None:\n",
        "    return None\n",
        "\n",
        "  @property\n",
        "  def num_steps_per_cycle(self) -\u003e int:\n",
        "    return int(round(self.num_secs_per_cycle / self.dt)) if self.dt else 1\n",
        "\n",
        "  @property\n",
        "  def num_cycles(self) -\u003e int:\n",
        "    return int(round(self.num_secs / self.num_secs_per_cycle))\n",
        "\n",
        "  @property\n",
        "  def num_steps(self) -\u003e int:\n",
        "    return self.num_cycles * self.num_steps_per_cycle\n",
        "\n",
        "\n",
        "def get_tile_name(base_name: Text, tile_id: int) -\u003e Text:\n",
        "  \"\"\"Returns TensorMap key used to store a given tile.\"\"\"\n",
        "  return '%s_tile_%d' % (base_name, tile_id)\n",
        "\n",
        "\n",
        "def gen_field(field_name: Text, nx: int, ny: int, nz: int,\n",
        "              dtype: tf.dtypes.DType = tf.float32) -\u003e TensorMap:\n",
        "  \"\"\"Returns a dict of zero initial values.\"\"\"\n",
        "  return {field_name: tf.zeros([nz, nx, ny], dtype=dtype)}\n",
        "\n",
        "\n",
        "def get_field(state: TensorMap, field_name: Text,\n",
        "              nz: int) -\u003e List[tf.Tensor]:\n",
        "  \"\"\"Returns list of tiles from `state`.\"\"\"\n",
        "  return [state[get_tile_name(field_name, i)] for i in range(nz)]\n",
        "\n",
        "\n",
        "def split_state_in_z(state: TensorMap,\n",
        "                     state_keys: Iterable[Text],\n",
        "                     nz: int) -\u003e MutableTensorMap:\n",
        "  \"\"\"Splits state in z, assuming that z is in the first dimension.\n",
        "\n",
        "  Args:\n",
        "    state: A dictionary of keyed tuples.\n",
        "    state_keys: A list of string keys (must be present in state dictionary).\n",
        "    nz: Z-dimension length/size.\n",
        "  Returns:\n",
        "    State split in the z dimension.\n",
        "  \"\"\"\n",
        "  out_dict = {}\n",
        "  for state_key in state_keys:\n",
        "    out_dict.update({\n",
        "        get_tile_name(state_key, i): state[state_key][i, :, :]\n",
        "        for i in range(nz)\n",
        "    })\n",
        "  return out_dict\n",
        "\n",
        "\n",
        "def merge_state_in_z(state: TensorMap,\n",
        "                     state_keys: Iterable[Text],\n",
        "                     nz: int) -\u003e MutableTensorMap:\n",
        "  \"\"\"Merges state in z, assuming that z is in the first dimension.\n",
        "\n",
        "  Args:\n",
        "    state: A dictionary of keyed tuples.\n",
        "    state_keys: A list of string keys (must be present in state dictionary).\n",
        "    nz: Z-dimension length/size.\n",
        "  Returns:\n",
        "    State stacked in the z dimension.\n",
        "  \"\"\"\n",
        "  out_dict = {}\n",
        "  for state_key in state_keys:\n",
        "    out_dict.update({\n",
        "        state_key:\n",
        "        tf.stack(\n",
        "            [state[get_tile_name(state_key, i)] for i in range(nz)],\n",
        "            axis=0)\n",
        "    })\n",
        "  return out_dict\n",
        "\n",
        "\n",
        "def get_haloless_slice(replica_idx: int, num_replicas: int) -\u003e slice:\n",
        "  \"\"\"Returns a slice to be used on a tensor tile.\n",
        "\n",
        "  In particular, the slice will conditionally remove the outermost indices\n",
        "  of a given tensor in a given dimension.\n",
        "\n",
        "  Args:\n",
        "    replica_idx: The replica index in the dimension for which the slice is\n",
        "      being determined.\n",
        "    num_replicas: The number of replicas in given dimension for which the\n",
        "      slice is being determined.\n",
        "  Returns:\n",
        "    A slice corresponding to the given input parameters.\n",
        "  \"\"\"\n",
        "  def _is_first_replica():\n",
        "    return replica_idx == 0\n",
        "\n",
        "  def _is_last_replica():\n",
        "    return replica_idx == num_replicas - 1\n",
        "\n",
        "  if num_replicas == 1:\n",
        "    return slice(None, None)\n",
        "  elif _is_first_replica():\n",
        "    return slice(0, -1)\n",
        "  elif _is_last_replica():\n",
        "    return slice(1, None)\n",
        "  else:  # Interior replica.\n",
        "    return slice(1, -1)\n",
        "\n",
        "\n",
        "def flatten_weights(\n",
        "    weights: Mapping[Text, Any]) -\u003e 'collections.OrderedDict[Text, Any]':\n",
        "  \"\"\"Flattens a nested weight dictionary a dictionary keyed on a/b/c paths.\"\"\"\n",
        "  flat = collections.OrderedDict()\n",
        "  for key, value in weights.items():\n",
        "    if isinstance(value, dict):\n",
        "      for subkey, subvalue in flatten_weights(value).items():\n",
        "        flat['{}/{}'.format(key, subkey)] = subvalue\n",
        "    else:\n",
        "      flat[key] = value\n",
        "  return flat\n",
        "\n",
        "\n",
        "def unflatten_weights(\n",
        "    flattened_weights: Mapping[Text,\n",
        "                               Any]) -\u003e 'collections.OrderedDict[Text, Any]':\n",
        "  \"\"\"Unflattens a dictionary keyed on a/b/c paths to nested dictionaries.\"\"\"\n",
        "  weights = collections.OrderedDict()\n",
        "  for flat_key, value in flattened_weights.items():\n",
        "    w = weights\n",
        "    flat_keys = flat_key.split('/')\n",
        "    for key in flat_keys[:-1]:\n",
        "      if key not in w:\n",
        "        w[key] = collections.OrderedDict()\n",
        "      w = w[key]\n",
        "    w[flat_keys[-1]] = value\n",
        "  return weights\n",
        "\n",
        "\n",
        "def grid_coordinates(computation_shape: np.ndarray) -\u003e np.ndarray:\n",
        "  \"\"\"Returns a numpy array containing all grid coordinates.\n",
        "\n",
        "  Args:\n",
        "    computation_shape: A sequence of integers giving the shape of the grid.\n",
        "\n",
        "  Returns:\n",
        "    A numpy array with shape\n",
        "    (np.prod(computation_shape), len(computation_shape)) and type np.int32.\n",
        "  \"\"\"\n",
        "  rank = len(computation_shape)\n",
        "  assert rank \u003e 0\n",
        "  coords = np.meshgrid(\n",
        "      *[np.arange(x, dtype=np.int32) for x in computation_shape], indexing='ij')\n",
        "  return np.stack(coords, axis=-1).reshape(-1, rank)\n",
        "\n",
        "\n",
        "def gen_computation_stride(computation_shape: np.ndarray,\n",
        "                           tpu_mesh_shape: np.ndarray) -\u003e np.ndarray:\n",
        "  \"\"\"Generates `computation_stride` for TPU `device_assignment`.\n",
        "\n",
        "  The user-defined `computation_shape` is recast into the format of\n",
        "  `computation_stride` for TPU `device_assignment`. The recasting is based on\n",
        "  `tpu_mesh_shape`, the TPU topology `mesh_shape`, describing the shape of TPU\n",
        "  topology, a rank-1 array of size 4, and in the format of\n",
        "  `[nx, ny, nz, num_cores]` with `ni (i = x, y, z)` denoting the number of TPU\n",
        "  chips along each dimension and `num_cores` denotes the number of cores per\n",
        "  requested chip. The recasting consists of two steps: first, it counts the\n",
        "  total number of TPU cores under request; second, recasting the number of\n",
        "  requested TPU cores per dimension in the sequence of\n",
        "  `num_cores -\u003e nz -\u003e ny -\u003e nx`. Note that the recasting is based on the\n",
        "  assumption that the number of TPU cores per replica is always `1`.\n",
        "\n",
        "  Args:\n",
        "    computation_shape: A rank 1 array of size 3 representing the shape of the\n",
        "      user-defined computational grid. Each element in the grid represents the\n",
        "      requested number of processors, to be precise, TPU cores.\n",
        "    tpu_mesh_shape: The TPU topology `mesh_shape`, a rank 1 array of size 4\n",
        "      describing the shape of the TPU topology, which is in the form of `[nx,\n",
        "      ny, nz, num_cores]` with `ni (i = x, y, z)` denoting the number of TPU\n",
        "      chips along each dimension and `num_cores` denoting the number of cores\n",
        "      per requested chip. Note that `num_cores` can be `1` or `2`.\n",
        "\n",
        "  Returns:\n",
        "    The `computation_stride` for TPU `device_assignment`, a rank 1 array of size\n",
        "    `topology_rank`, describing the inter-core spacing in the TPU topology. Note\n",
        "    that `topology_rank` is always `4`.\n",
        "  Raises:\n",
        "    ValueError: If `computation_shape` does not fit the TPU topology mesh\n",
        "    shape `tpu_mesh_shape`.\n",
        "  \"\"\"\n",
        "  computation_stride = np.ones_like(tpu_mesh_shape)\n",
        "  num_cores_requested = np.prod(computation_shape)\n",
        "\n",
        "  if num_cores_requested \u003e np.prod(tpu_mesh_shape):\n",
        "    raise ValueError('Requested {} cores, whereas only {} are available from '\n",
        "                     'the topology.'.format(num_cores_requested,\n",
        "                                            np.prod(tpu_mesh_shape)))\n",
        "\n",
        "  idx = 3\n",
        "  while idx \u003e= 0:\n",
        "    div, mod = np.divmod(num_cores_requested, tpu_mesh_shape[idx])\n",
        "    if mod == 0:\n",
        "      num_cores_requested = div\n",
        "      computation_stride[idx] = tpu_mesh_shape[idx]\n",
        "    if div == 0:\n",
        "      computation_stride[idx] = mod\n",
        "      break\n",
        "    idx -= 1\n",
        "\n",
        "  if np.prod(computation_stride) \u003c np.prod(computation_shape):\n",
        "    raise ValueError('Requested computation_shape ({}, {}, {}) does not fit '\n",
        "                     'into TPU topology mesh_shape ({}, {}, {}, {}).'.format(\n",
        "                         computation_shape[0], computation_shape[1],\n",
        "                         computation_shape[2], tpu_mesh_shape[0],\n",
        "                         tpu_mesh_shape[1], tpu_mesh_shape[2],\n",
        "                         tpu_mesh_shape[3]))\n",
        "\n",
        "  return computation_stride\n",
        "\n",
        "\n",
        "def get_tpu_device_assignment(\n",
        "    computation_shape: np.ndarray, tpu_topology: tf.tpu.experimental.Topology\n",
        ") -\u003e Tuple[tf.tpu.experimental.DeviceAssignment, np.ndarray]:\n",
        "  \"\"\"Builds a DeviceAssignment that maps grid coordinates to TPU cores.\"\"\"\n",
        "  compute_core_assignment = grid_coordinates(computation_shape)\n",
        "\n",
        "  computation_stride = gen_computation_stride(computation_shape,\n",
        "                                              tpu_topology.mesh_shape)\n",
        "\n",
        "  device_assignment = tf.tpu.experimental.DeviceAssignment.build(\n",
        "      tpu_topology,\n",
        "      computation_stride=computation_stride,\n",
        "      num_replicas=np.prod(computation_stride))\n",
        "\n",
        "  return device_assignment, compute_core_assignment\n",
        "\n",
        "\n",
        "def pad_in_dim(x: np.ndarray, low_pad: int, high_pad: int, value: float,\n",
        "               axis: int) -\u003e np.ndarray:\n",
        "  padding = [(0, 0)] * x.ndim\n",
        "  padding[axis] = (low_pad, high_pad)\n",
        "  return np.pad(x, padding, mode='constant', constant_values=value)\n",
        "\n",
        "\n",
        "def slice_in_dim(x: np.ndarray, start: int, end, axis: int) -\u003e np.ndarray:\n",
        "  slices = [slice(None)] * x.ndim\n",
        "  slices[axis] = slice(start, end)\n",
        "  return x[tuple(slices)]\n",
        "\n",
        "\n",
        "def is_first_replica(replica_id: tf.Tensor, replicas: np.ndarray,\n",
        "                     replica_dim: int) -\u003e tf.Tensor:\n",
        "  \"\"\"Returns whether the given replica id is the first replica.\"\"\"\n",
        "  first_replicas = slice_in_dim(replicas, start=0, end=1, axis=replica_dim)\n",
        "  return tf.reduce_any(tf.equal(replica_id, first_replicas))\n",
        "\n",
        "\n",
        "def is_last_replica(replica_id: tf.Tensor, replicas: np.ndarray,\n",
        "                    replica_dim: int) -\u003e tf.Tensor:\n",
        "  \"\"\"Returns whether the given replica id is the last replica.\"\"\"\n",
        "  last_replicas = slice_in_dim(replicas, start=-1, end=None, axis=replica_dim)\n",
        "  return tf.reduce_any(tf.equal(replica_id, last_replicas))\n",
        "\n",
        "\n",
        "def three_d_subgrid_of_2d_border_strip(\n",
        "    border_strip: np.ndarray, params: GridParametrization,\n",
        "    coordinates: ThreeIntTuple) -\u003e np.ndarray:\n",
        "  \"\"\"Returns the subgrid of `border_strip` corresponding to `coordinates`.\n",
        "\n",
        "  All the points in the subgrid come from the supplied tensor.\n",
        "\n",
        "  Args:\n",
        "    border_strip: A 2D tensor.\n",
        "    params: A GridParametrization instance.\n",
        "    coordinates: The core coordinates of the subgrid to return.\n",
        "\n",
        "  Returns:\n",
        "    The requested subgrid.\n",
        "  \"\"\"\n",
        "  fx, fy = border_strip.shape\n",
        "\n",
        "  cxi, cyi, _ = coordinates\n",
        "  core_nx, core_ny = params.core_nx, params.core_ny\n",
        "\n",
        "  if fx == 1:\n",
        "    x_slice = slice(None)\n",
        "  else:\n",
        "    nx_start = cxi * core_nx\n",
        "    x_slice = slice(nx_start, nx_start + core_nx + 2)\n",
        "\n",
        "  if fy == 1:\n",
        "    y_slice = slice(None)\n",
        "  else:\n",
        "    ny_start = cyi * core_ny\n",
        "    y_slice = slice(ny_start, ny_start + core_ny + 2)\n",
        "  return np.expand_dims(border_strip[x_slice, y_slice], axis=0)\n",
        "\n",
        "\n",
        "def subgrid_of_2d_grid(\n",
        "    full_2d_grid: TensorOrArray,\n",
        "    params: GridParametrization,\n",
        "    coordinates: ThreeIntTuple) -\u003e TensorOrArray:\n",
        "  \"\"\"Returns the subgrid of `full_2d_grid` corresponding to `coordinates`.\n",
        "\n",
        "  Args:\n",
        "    full_2d_grid: A 2D tensor or numpy array.\n",
        "    params: A GridParametrization instance.\n",
        "    coordinates: The core coordinates of the subgrid to return.\n",
        "\n",
        "  Returns:\n",
        "    The requested subgrid.\n",
        "  \"\"\"\n",
        "  cxi, cyi, _ = coordinates\n",
        "  core_nx, core_ny = params.core_nx, params.core_ny\n",
        "\n",
        "  nx_start = cxi * core_nx\n",
        "  x_slice = slice(nx_start, nx_start + core_nx + 2)\n",
        "\n",
        "  ny_start = cyi * core_ny\n",
        "  y_slice = slice(ny_start, ny_start + core_ny + 2)\n",
        "\n",
        "  return full_2d_grid[x_slice, y_slice]\n",
        "\n",
        "\n",
        "def three_d_subgrid_of_2d_grid(\n",
        "    full_2d_grid: TensorOrArray, params: GridParametrization,\n",
        "    coordinates: ThreeIntTuple) -\u003e TensorOrArray:\n",
        "  \"\"\"Same as `subgrid_of_2d_grid`, but with an added trivial third dimension.\"\"\"\n",
        "  sub_grid = subgrid_of_2d_grid(full_2d_grid, params, coordinates)\n",
        "  expand_dims = (tf.expand_dims if isinstance(full_2d_grid, tf.Tensor)\n",
        "                 else np.expand_dims)\n",
        "  return expand_dims(sub_grid, axis=0)\n",
        "\n",
        "\n",
        "class InitFilesManager():\n",
        "  \"\"\"Manages initial input temp files.\n",
        "\n",
        "  This class manages input files to address an initial state values scalability\n",
        "  issue. Large-sized initial values can create large nodes in the TF graph. The\n",
        "  issue is exacerbated if `n` TPU cores are used, as the large nodes are folded\n",
        "  into the graph `n` times. The size of the graph can readily exceed the\n",
        "  protobuffer 2 GB limit (the TF graph is sent via a protobuf to the TPUs).\n",
        "\n",
        "  When using this class, the states should be initialized with TF ops\n",
        "  (e.g. `tf.zeros`) as they do not require data space in the graph. A CPU host\n",
        "  should use this class to write initial numpy arrays to disk as `TensorProto`s.\n",
        "  The files can be read in an initial simulation `step` run on each host, and\n",
        "  the host and TPU stores are updated with the initial values. A function can be\n",
        "  provided on initialization of this class in case a per-TPU subgrid is required\n",
        "  rather than the full grid.\n",
        "\n",
        "  A directory that the program can access for writing must be specified.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               params: GridParametrization,\n",
        "               extract_subgrid_fn: ExtractSubgridFn,\n",
        "               directory: Optional[Text] = None,\n",
        "               full_grid_keys: Optional[List[Text]] = None,\n",
        "               keep_files: bool = False,\n",
        "               num_splits_dim_1: int = 1):\n",
        "    \"\"\"Initializes a temp init files manager.\n",
        "\n",
        "    Args:\n",
        "      params: The `GridParametrization` instance.\n",
        "      extract_subgrid_fn: A `Callable` that extracts a subgrid from the full\n",
        "        grid. The extracted subgrid is written to disk, then read from disk to\n",
        "        update a corresponding `state` on a TPU core.\n",
        "      directory: Optional directory specifying where to write the init files. If\n",
        "        not set, tempfile is used. This should be set to a remote directory\n",
        "        if running with a headless TPU worker.\n",
        "      full_grid_keys: A list of keys specifying the fields that should not be\n",
        "        split, so the full field is loaded into each TPU core. For example, the\n",
        "        sensitivity map of magnetic resonance imaging antennas/coils must not\n",
        "        have any partitioning because it is the same across all the TPU cores.\n",
        "      keep_files: If `True`, the written files are not deleted.\n",
        "      num_splits_dim_1: The number of splits of fields in dim 1. Useful e.g. in\n",
        "        the case\n",
        "        of e.g. 1 core where there is otherwise no splitting.\n",
        "    \"\"\"\n",
        "    self._params = params\n",
        "    self._extract_subgrid_fn = extract_subgrid_fn\n",
        "    self._full_grid_keys = tuple() if not full_grid_keys else full_grid_keys\n",
        "    self._keep_files = keep_files\n",
        "    self._num_splits_dim_1 = num_splits_dim_1\n",
        "\n",
        "    self._subgrid_format = '{}-split-{}-cxyz-{}-{}-{}'\n",
        "    self._files = {}\n",
        "    self._full_grids = {}\n",
        "    self._num_deleted = 0\n",
        "\n",
        "    if not directory:\n",
        "      self._directory = tempfile.TemporaryDirectory().name\n",
        "      tf.io.gfile.makedirs(self._directory)\n",
        "    else:\n",
        "      self._directory = directory\n",
        "\n",
        "  def write_file(self, key: Text, np_array: np.ndarray) -\u003e None:\n",
        "    \"\"\"Writes the arrays or subarrays to files on disk.\"\"\"\n",
        "    if key in self._full_grid_keys:\n",
        "      # Full grid.\n",
        "      file_name = os.path.join(self._directory, key + SER_EXTENSION)\n",
        "      with tf.io.gfile.GFile(file_name, 'wb') as f:\n",
        "        f.write(tf.make_tensor_proto(np_array).SerializeToString())\n",
        "      self._files[key] = InputFiles(file_name=file_name, dtype=np_array.dtype)\n",
        "    else:\n",
        "      # Subgrids.\n",
        "      for cxi in range(self._params.cx):\n",
        "        for cyi in range(self._params.cy):\n",
        "          for czi in range(self._params.cz):\n",
        "            np_subgrid = self._extract_subgrid_fn(np_array,\n",
        "                                                  self._params,\n",
        "                                                  (cxi, cyi, czi))\n",
        "            slices = [slice(None)] * np_subgrid.ndim\n",
        "            n_split_dim_1 = np_subgrid.shape[1] // self._num_splits_dim_1\n",
        "            for n in range(self._num_splits_dim_1):\n",
        "              subgrid_key = self._subgrid_format.format(key, n, cxi, cyi, czi)\n",
        "              file_name = os.path.join(self._directory,\n",
        "                                       subgrid_key + SER_EXTENSION)\n",
        "              slice_hi = (None if n == self._num_splits_dim_1 - 1\n",
        "                          else (n + 1) * n_split_dim_1)\n",
        "              slices[1] = slice(n * n_split_dim_1, slice_hi)\n",
        "              np_subgrid_split_in_dim_1 = np_subgrid[tuple(slices)]\n",
        "              with tf.io.gfile.GFile(file_name, 'wb') as f:\n",
        "                f.write(tf.make_tensor_proto(\n",
        "                    np_subgrid_split_in_dim_1).SerializeToString())\n",
        "              self._files[subgrid_key] = (\n",
        "                  InputFiles(file_name=file_name, dtype=np_array.dtype))\n",
        "\n",
        "  def write_all_files(self, inputs: Dict[Text, np.ndarray]) -\u003e None:\n",
        "    \"\"\"Writes all inputs to files on disk.\"\"\"\n",
        "    for key in inputs:\n",
        "      self.write_file(key, inputs[key])\n",
        "\n",
        "  def read_file(\n",
        "      self, key: Text, coordinates: Optional[ThreeIntTuple] = None,\n",
        "      dtype: Optional[tf.dtypes.DType] = None) -\u003e Optional[tf.Tensor]:\n",
        "    \"\"\"Reads a file with `key` from the disk.\n",
        "\n",
        "    Args:\n",
        "      key: The state name of type `Text` corresponding to a file that has been\n",
        "        written.\n",
        "      coordinates: The coordinates of the TPU core. Optional. Only needed if key\n",
        "        is not a full grid key.\n",
        "      dtype: Optional. If set, the returned tensor is cast to this type.\n",
        "    Returns:\n",
        "      The `Tensor` obtained from reading a file. Returns `None` if there was no\n",
        "      file written corresponding to `key`.\n",
        "    \"\"\"\n",
        "    if key in self._full_grid_keys:\n",
        "      # Full grid.\n",
        "      if key not in self._files:\n",
        "        return None\n",
        "      file_name = self._files[key].file_name\n",
        "      file_dtype = self._files[key].dtype\n",
        "      if key in self._full_grids:\n",
        "        field = self._full_grids[key]\n",
        "      else:\n",
        "        field = tf.parse_tensor(tf.read_file(file_name), file_dtype)\n",
        "        if (self._params.cx * self._params.cy * self._params.cz) \u003e 1:\n",
        "          self._full_grids[key] = field\n",
        "    else:\n",
        "      # Subgrids.\n",
        "      for n in range(self._num_splits_dim_1):\n",
        "        key_i = self._subgrid_format.format(key, n, *coordinates)\n",
        "\n",
        "        if key_i not in self._files:\n",
        "          return None\n",
        "\n",
        "        file_name = self._files[key_i].file_name\n",
        "        file_dtype = self._files[key_i].dtype\n",
        "\n",
        "        if n == 0:\n",
        "          field = tf.parse_tensor(tf.read_file(file_name), file_dtype)\n",
        "        else:\n",
        "          field = tf.concat([field, tf.parse_tensor(\n",
        "              tf.read_file(file_name), file_dtype)], axis=1)\n",
        "\n",
        "    if dtype is not None:\n",
        "      field = tf.cast(field, dtype)\n",
        "\n",
        "    return field\n",
        "\n",
        "  def __del__(self):\n",
        "    if not self._keep_files:\n",
        "      for key in self._files:\n",
        "        file_name = self._files[key].file_name\n",
        "        tf.io.gfile.remove(file_name)\n",
        "\n",
        "\n",
        "class VariableStore:\n",
        "  \"\"\"Class that stores a state snapshot in TF variables for persistence.\"\"\"\n",
        "\n",
        "  def __init__(self, weights: Mapping[Text, Union[tf.Tensor,\n",
        "                                                  Mapping[Text, tf.Tensor]]]):\n",
        "    self.variables = collections.OrderedDict()\n",
        "    flat_weights = flatten_weights(weights)\n",
        "    for key, initializer in flat_weights.items():\n",
        "      self.variables[key] = tf.get_variable(\n",
        "          key, initializer=initializer, use_resource=True)\n",
        "\n",
        "  def read(self) -\u003e Mapping[Text, Union[tf.Tensor, Mapping[Text, tf.Tensor]]]:\n",
        "    return unflatten_weights(\n",
        "        {key: var.read_value() for key, var in self.variables.items()})\n",
        "\n",
        "  def write(\n",
        "      self, weights: Mapping[Text, Union[tf.Tensor, Mapping[Text, tf.Tensor]]]\n",
        "  ) -\u003e List[tf.Tensor]:\n",
        "    updates = []\n",
        "    for key, value in flatten_weights(weights).items():\n",
        "      updates.append(self.variables[key].assign(value))\n",
        "    return updates\n",
        "\n",
        "\n",
        "class TPUDistributor:\n",
        "  \"\"\"TPU stateful function distributor.\n",
        "\n",
        "  Keeps a persistent copy of the variable state mirrored across each TPU device.\n",
        "  \"\"\"\n",
        "\n",
        "  WrappedInitFn = Callable[[int], TensorMap]  # pylint: disable=invalid-name\n",
        "\n",
        "  def __init__(self,\n",
        "               tpu_device_assignment: tf.tpu.experimental.DeviceAssignment,\n",
        "               init_fn: WrappedInitFn,\n",
        "               host_vars_filter: Optional[Sequence[Text]] = None) -\u003e None:\n",
        "    \"\"\"Constructor.\n",
        "\n",
        "    Args:\n",
        "      tpu_device_assignment: A TPU DeviceAssignment.\n",
        "      init_fn: A function with signature int -\u003e state initializer, that maps a\n",
        "        replica number to its state initializer.\n",
        "      host_vars_filter: A list of the TPU variable keys. Only variables in this\n",
        "        list will have a mirror host variable and corresponding ops generated\n",
        "        for syncing between the host and tpu. If None, every TPU variable will\n",
        "        get a mirror host variable. If it is an empty list, then no mirror host\n",
        "        variables will be created.\n",
        "    \"\"\"\n",
        "    self.device_assignment = tpu_device_assignment\n",
        "    self.tpu_variable_stores = []\n",
        "    self.host_variable_stores = []\n",
        "    tf.enable_resource_variables()\n",
        "\n",
        "    for replica_id in range(self.device_assignment.num_replicas):\n",
        "      tpu_device = self.device_assignment.tpu_device(replica=replica_id)\n",
        "      host_device = self.device_assignment.host_device(replica=replica_id)\n",
        "      init_params = {}\n",
        "      host_init_params = {}\n",
        "      # Set up initialzation for the host mirror variables.\n",
        "      with tf.device(host_device):\n",
        "        # Since we use the same variable names across different replicas and\n",
        "        # across TPU and host CPU, we need to put them under different\n",
        "        # variable_scope to distinguish them. Without it, they will be\n",
        "        # considered as duplicated variables and graph validation will fail.\n",
        "        with tf.variable_scope('replica_%d_host' % replica_id,\n",
        "                               use_resource=True):\n",
        "          # **** Important ******\n",
        "          # This call must be placed within the scope of host_device such that\n",
        "          # the ops related to initialzation are placed on that host device.\n",
        "          # Otherwise, they will all be defaulted to a single host which can\n",
        "          # cause the host to go OOM during initializtion.\n",
        "          init_params = init_fn(replica_id)\n",
        "          # Set up host vars for all variables.\n",
        "          if host_vars_filter is None:\n",
        "            host_init_params = init_params\n",
        "          else:\n",
        "            for k, v in init_params.items():\n",
        "              if k in host_vars_filter:\n",
        "                host_init_params.update({k: v})\n",
        "          self.host_variable_stores.append(VariableStore(host_init_params))\n",
        "\n",
        "      # Set up initialization for the TPU variables.\n",
        "      with tf.device(tpu_device):\n",
        "        with tf.variable_scope('replica_%d' % replica_id, use_resource=True):\n",
        "          self.tpu_variable_stores.append(VariableStore(init_params))\n",
        "\n",
        "  def get_step_builder(\n",
        "      self, fn: Callable[[Sequence[tf.Tensor], TensorMap, MutableTensorMap],\n",
        "                         FnOutput],\n",
        "      replica_inputs: Sequence[Sequence[tf.Tensor]]) -\u003e StepBuilder:\n",
        "    \"\"\"Returns a builder for running a replicated function.\n",
        "\n",
        "    Args:\n",
        "      fn: A function specifying the computations to be replicated across the\n",
        "        TPU cores. It must have the signature:\n",
        "\n",
        "          fn(inputs, params) -\u003e (output, params)\n",
        "\n",
        "        where\n",
        "          `inputs` is a sequence of `tf.Tensor` representing a set of inputs.\n",
        "          `params` is a dictionary of `tf.Tensor` that comes from the\n",
        "            variables used to represent the persistent state of the simulation.\n",
        "            It is required that the keys in this map are not changed during\n",
        "            the computation/transformation defined by `fn`. The change of the\n",
        "            values of `params` defined by `fn` represents the evolution of the\n",
        "            state of a step. Note that the shapes of these tensors also have to\n",
        "            remain unchanged.\n",
        "          `output` is a list of `tf.Tensor` that represents the output to be\n",
        "            consumed outside the TPU core at the end of a step.\n",
        "\n",
        "        Within the context of `fn` (which is distributed to a particular\n",
        "        `replica_id` TPU core) the values of `inputs` are\n",
        "        `replica_inputs[replica_id]`. The values of `params` are mapped to the\n",
        "        values of the variable placed on that core; and `output` from this\n",
        "        `fn` will be aggregated with the `output` from the other cores into a\n",
        "        list of list of `tf.Tensor`.\n",
        "      replica_inputs: A sequence of sequence of `tf.Tensor` representing the\n",
        "        inputs to be distributed to the TPU core replicas. More specifically,\n",
        "        it has `num_of_replicas` elements, where each element has\n",
        "        `num_of_inputs` `tf.Tensor`.\n",
        "    Returns:\n",
        "      A callable that takes no arguments and returns a `StepOutput`.\n",
        "    Raises:\n",
        "      ValueError: If the length of `replica_inputs` does not match the number of\n",
        "        replicas.\n",
        "    \"\"\"\n",
        "    def build_step(fn, replica_inputs):\n",
        "      \"\"\"Replicates the function `fn` to tpu cores.\"\"\"\n",
        "      if replica_inputs is None:\n",
        "        replica_inputs = [[]] * self.device_assignment.num_replicas\n",
        "      if len(replica_inputs) != self.device_assignment.num_replicas:\n",
        "        raise ValueError(\n",
        "            'The length of `replica_inputs`: {} does not match '\n",
        "            '`num_replicas`: {}.'.format(len(replica_inputs),\n",
        "                                         self.device_assignment.num_replica))\n",
        "      keyed_dtypes = [(key, var.dtype) for (key, var) in\n",
        "                      self.tpu_variable_stores[0].variables.items()]\n",
        "      outputs_prototype = []\n",
        "\n",
        "      def device_fn(*args):\n",
        "        \"\"\"Device-side step function.\"\"\"\n",
        "        handles = args[:len(keyed_dtypes)]\n",
        "        # Note that although the code here should be able to handle more\n",
        "        # complicated structures such as a list of list, for now we limit the\n",
        "        # use to just a list (through type annotation).\n",
        "        inputs = tf.nest.pack_sequence_as(\n",
        "            replica_inputs[0],\n",
        "            args[len(keyed_dtypes):(len(args))])\n",
        "        flat_params = collections.OrderedDict(\n",
        "            [(key, gen_resource_variable_ops.read_variable_op(handle, dtype))\n",
        "             for ((key, dtype), handle) in zip(keyed_dtypes, handles)])\n",
        "        param_keys = flat_params.keys()\n",
        "        params = unflatten_weights(flat_params)\n",
        "        outputs, params = fn(inputs, params)\n",
        "        flat_params = flatten_weights(params)\n",
        "        if flat_params.keys() != param_keys:\n",
        "          raise ValueError(\n",
        "              'Input parameters and output parameters have different keys ({} '\n",
        "              'vs {})'.format(flat_params.keys(), param_keys))\n",
        "\n",
        "        outputs_prototype.append(outputs)\n",
        "        updates = [\n",
        "            gen_resource_variable_ops.assign_variable_op(handle, param)\n",
        "            for (handle, param) in zip(handles, flat_params.values())\n",
        "        ]\n",
        "        with tf.control_dependencies(updates):\n",
        "          # Return an extra dummy output to make sure the updates happen.\n",
        "          return [tf.constant(0, dtype=tf.int32)] + tf.nest.flatten(outputs)\n",
        "\n",
        "      def get_variable_handles(variable_stores):\n",
        "        return [\n",
        "            variable.handle for variable in variable_stores.variables.values()]\n",
        "\n",
        "      replica_handles = [get_variable_handles(variable_stores)\n",
        "                         for variable_stores in self.tpu_variable_stores]\n",
        "      inputs = [\n",
        "          handles + tf.nest.flatten(inputs)\n",
        "          for (handles, inputs) in zip(replica_handles, replica_inputs)\n",
        "      ]\n",
        "      outputs = tf.tpu.replicate(\n",
        "          device_fn, inputs=inputs, device_assignment=self.device_assignment)\n",
        "      return [\n",
        "          tf.nest.pack_sequence_as(outputs_prototype[0], replica[1:])\n",
        "          for replica in outputs\n",
        "      ]\n",
        "\n",
        "    return functools.partial(build_step, fn, replica_inputs)\n",
        "\n",
        "\n",
        "def download_from_bucket(bucket_filename: Text) -\u003e Text:\n",
        "  \"\"\"Returns a local filename for the bucket file.\n",
        "\n",
        "  The bucket file is stored on Google Cloud and is copied to a temporary file\n",
        "  location.\n",
        "\n",
        "  Args:\n",
        "    bucket_filename: The name of the bucket file.\n",
        "  \"\"\"\n",
        "\n",
        "  client = storage.Client(project=PROJECT_ID)\n",
        "  bucket = client.get_bucket(BUCKET)\n",
        "  blob = bucket.get_blob(bucket_filename)\n",
        "  assert blob is not None, f'ERROR: Can\\'t load {bucket_filename}.'\n",
        "\n",
        "  tempf = tempfile.NamedTemporaryFile(delete=False)\n",
        "  blob.download_to_filename(tempf.name)\n",
        "  return tempf.name\n",
        "\n",
        "\n",
        "def get_dem_tiff_filename(resolution: int) -\u003e Text:\n",
        "  \"\"\"Returns the local filename of the DEM tiff.\n",
        "\n",
        "  The file is stored on Google Cloud and is copied to a temporary file location\n",
        "  for the tifffile reader.\n",
        "\n",
        "  Args:\n",
        "    resolution: resolution of the DEM.\n",
        "  Returns:\n",
        "    File path to local temporary DEM.\n",
        "  \"\"\"\n",
        "\n",
        "  dem_tiff_filename = f'gs://gresearch/flood-sim/{resolution}m.tiff'\n",
        "  tempf = tempfile.NamedTemporaryFile(delete=False)\n",
        "  with tf.io.gfile.GFile(dem_tiff_filename, 'rb') as f:\n",
        "    file = f.read()\n",
        "  with open(tempf.name, 'wb') as f:\n",
        "    f.write(file)\n",
        "  return tempf.name\n",
        "\n",
        "\n",
        "def load_dem_from_tiff_file(dem_tiff_filename: Text) -\u003e np.ndarray:\n",
        "  \"\"\"Loads DEM as a numpy array.\n",
        "\n",
        "  Args:\n",
        "    dem_tiff_filename: The name of the DEM tif file.\n",
        "\n",
        "  Returns:\n",
        "    A 2D np.ndarray digital elevation model.\n",
        "  \"\"\"\n",
        "  if PUBLIC_COLAB:\n",
        "    return gdal.Open(dem_tiff_filename).ReadAsArray()  # pytype: disable=name-error\n",
        "\n",
        "\n",
        "def add_padding(original_2d_field: np.ndarray,\n",
        "                left_padding: int,\n",
        "                top_padding: int,\n",
        "                constant: float = 0) -\u003e np.ndarray:\n",
        "  \"\"\"Adds padding on the left and/or top of a 2D `np.ndarray` field.\"\"\"\n",
        "  fx_orig, fy_orig = original_2d_field.shape\n",
        "  if left_padding + top_padding == 0:\n",
        "    return original_2d_field\n",
        "  left_pad = np.ones((fx_orig, left_padding), dtype=np.float32) * constant\n",
        "  padded_field = np.concatenate((left_pad, original_2d_field), axis=1)\n",
        "  top_pad = np.ones((top_padding, fy_orig + left_padding), dtype=np.float32)\n",
        "  top_pad *= constant\n",
        "  return np.concatenate((top_pad, padded_field), axis=0)\n",
        "\n",
        "\n",
        "def get_manning_matrix_from_river_mask(\n",
        "    river_mask: np.ndarray, manning_coeff_river: float,\n",
        "    manning_coeff_floodplain: float) -\u003e np.ndarray:\n",
        "  \"\"\"Gets a Manning matrix from a river mask.\"\"\"\n",
        "  manning_river = river_mask * np.float32(manning_coeff_river**2)\n",
        "  manning_floodplain = (\n",
        "      np.logical_not(river_mask) * np.float32(manning_coeff_floodplain**2))\n",
        "  return manning_river + manning_floodplain\n",
        "\n",
        "\n",
        "class InitFnBuilder(metaclass=abc.ABCMeta):\n",
        "  \"\"\"An abstract class to build initial values for a simulation.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               params: SaintVenantParams,\n",
        "               init_state_keys: Iterable[Text],\n",
        "               start_time_secs: float = 0):\n",
        "    self._params = params\n",
        "    self._init_state_keys = init_state_keys\n",
        "    self._start_time_secs = start_time_secs\n",
        "\n",
        "  def init_fn(self, replica_id: int,\n",
        "              coordinates: ThreeIntTuple) -\u003e KeyedInitialValues:\n",
        "    \"\"\"Initializes each state in a Saint-Venant simulation.\n",
        "\n",
        "    Initializes the following fields using `tf.zeros()`:\n",
        "      `init_state_keys`, `_T` and `_DT`.\n",
        "\n",
        "    `_fill_additional_fields()` is called to override these values and/or\n",
        "    initialize other fields.\n",
        "\n",
        "    Args:\n",
        "      replica_id: The TPU replica id.\n",
        "      coordinates: The corresponding TPU replica coordinates.\n",
        "\n",
        "    Returns:\n",
        "      A mapping of state key to a 3D `Tensor` of initial values.\n",
        "    \"\"\"\n",
        "    output = {'replica_id': replica_id}\n",
        "\n",
        "    for key in self._init_state_keys:\n",
        "      output.update(gen_field(key, self._params.nx, self._params.ny, nz=1))\n",
        "\n",
        "    output.update(gen_field(_T, nx=1, ny=1, nz=1))\n",
        "    output[_T] += self._start_time_secs\n",
        "    output.update(gen_field(_DT, nx=1, ny=1, nz=1))\n",
        "\n",
        "    self._fill_additional_fields(output, coordinates)\n",
        "    return output\n",
        "\n",
        "  @abc.abstractmethod\n",
        "  def _fill_additional_fields(self, output: MutableKeyedInitialValues,\n",
        "                              coordinates: ThreeIntTuple) -\u003e None:\n",
        "    raise NotImplementedError('Calling an abstract method.')\n",
        "\n",
        "\n",
        "class SaintVenantRealisticInitFnBuilder(InitFnBuilder):\n",
        "  \"\"\"An `InitFnBuilder` for a realistic Saint-Venant simulation.\"\"\"\n",
        "\n",
        "  def __init__(\n",
        "      self,\n",
        "      params: SaintVenantParams,\n",
        "      init_state_keys: Iterable[Text],\n",
        "      unpadded_dem: np.ndarray,\n",
        "      unpadded_manning_matrix: np.ndarray,\n",
        "      inflow_bcs: Sequence[InflowBoundaryCondition],\n",
        "      outflow_bcs: Sequence[OutflowBoundaryCondition],\n",
        "      init_files_manager: InitFilesManager,\n",
        "      input_file_format: Optional[Text] = None,\n",
        "      start_time_secs: float = 0):\n",
        "    super(SaintVenantRealisticInitFnBuilder, self).__init__(\n",
        "        params, init_state_keys, start_time_secs)\n",
        "    self._unpadded_dem = unpadded_dem\n",
        "    self._unpadded_manning_matrix = unpadded_manning_matrix\n",
        "    self._inflow_bcs = inflow_bcs\n",
        "    self._outflow_bcs = outflow_bcs\n",
        "    self._input_file_format = input_file_format\n",
        "\n",
        "    self._maybe_read_init_states_from_disk()\n",
        "    self._create_static_additional_states()\n",
        "\n",
        "    # Write the initial states and the static additional states to disk in\n",
        "    # serialized TensorProto format.\n",
        "    for key, state in self._init_states.items():\n",
        "      init_files_manager.write_file(key, state)\n",
        "    for key, state in self._additional_states.items():\n",
        "      init_files_manager.write_file(key, state)\n",
        "\n",
        "  def _fill_additional_fields(self, output: MutableKeyedInitialValues,\n",
        "                              coordinates: ThreeIntTuple) -\u003e None:\n",
        "    \"\"\"Initializes additional fields with `tf.zeros()`.\"\"\"\n",
        "\n",
        "    for key in self._additional_states:\n",
        "      output.update(gen_field(key, self._params.nx, self._params.ny, nz=1))\n",
        "\n",
        "    types = (tf.float32, tf.float32, bool)\n",
        "    for key, typ in zip(L_BOUNDARIES + R_BOUNDARIES, types * 2):\n",
        "      output.update(gen_field(key, self._params.nx, ny=1, nz=1, dtype=typ))\n",
        "    for key, typ in zip(T_BOUNDARIES + B_BOUNDARIES, types * 2):\n",
        "      output.update(gen_field(key, nx=1, ny=self._params.ny, nz=1, dtype=typ))\n",
        "\n",
        "    for key, state in self._boundary_states.items():\n",
        "      output[key] = three_d_subgrid_of_2d_border_strip(\n",
        "          state, self._params, coordinates)\n",
        "\n",
        "  def _maybe_read_init_states_from_disk(self):\n",
        "    \"\"\"If `input_file_format` is provided, use it to load the init files.\"\"\"\n",
        "    self._init_states = {}\n",
        "\n",
        "    if not self._input_file_format:\n",
        "      return\n",
        "\n",
        "    for key in self._init_state_keys:\n",
        "      if PUBLIC_COLAB:\n",
        "        file_name = download_from_bucket(self._input_file_format.format(key))\n",
        "\n",
        "      with tf.io.gfile.GFile(file_name, 'rb') as npy_file:\n",
        "        self._init_states[key] = add_padding(\n",
        "            np.load(npy_file), self._params.left_padding,\n",
        "            self._params.top_padding)\n",
        "\n",
        "  def _create_static_additional_states(self):\n",
        "    \"\"\"Creates the additional (static) fields from unpadded data.\"\"\"\n",
        "    dem = add_padding(self._unpadded_dem, self._params.left_padding,\n",
        "                      self._params.top_padding)\n",
        "\n",
        "    manning_matrix = add_padding(\n",
        "        self._unpadded_manning_matrix, self._params.left_padding,\n",
        "        self._params.top_padding)\n",
        "\n",
        "    self._additional_states = {\n",
        "        _E: dem,\n",
        "        _M: manning_matrix\n",
        "    }\n",
        "\n",
        "    side_to_keys = {\n",
        "        BoundarySide.LEFT: L_BOUNDARIES,\n",
        "        BoundarySide.RIGHT: R_BOUNDARIES,\n",
        "        BoundarySide.TOP: T_BOUNDARIES,\n",
        "        BoundarySide.BOTTOM: B_BOUNDARIES\n",
        "    }\n",
        "\n",
        "    self._boundary_states = {}\n",
        "    for i_bc in self._inflow_bcs:\n",
        "      i_key = side_to_keys[i_bc.boundary_side][0]\n",
        "      self._boundary_states[i_key] = i_bc.padded_inflow_flux\n",
        "\n",
        "    for o_bc in self._outflow_bcs:\n",
        "      m_key = side_to_keys[o_bc.boundary_side][1]\n",
        "      self._boundary_states[m_key] = o_bc.full_manning_slice\n",
        "      o_key = side_to_keys[o_bc.boundary_side][2]\n",
        "      self._boundary_states[o_key] = o_bc.padded_outflow_mask\n",
        "\n",
        "\n",
        "def _make_slice_kernel(u, stencil, axis, offset):\n",
        "  \"\"\"Creates a slice-based finite difference operator.\"\"\"\n",
        "  kernel = stencil[offset] * u\n",
        "  if axis == 'x':\n",
        "    for i in range(offset):\n",
        "      kernel += stencil[i] * tf.pad(u[:i - offset, :],\n",
        "                                    [[offset - i, 0], [0, 0]])\n",
        "    for i in range(offset + 1, len(stencil)):\n",
        "      kernel += stencil[i] * tf.pad(u[i - offset:, :],\n",
        "                                    [[0, i - offset], [0, 0]])\n",
        "  elif axis == 'y':\n",
        "    for i in range(offset):\n",
        "      kernel += stencil[i] * tf.pad(u[:, :i - offset],\n",
        "                                    [[0, 0], [offset - i, 0]])\n",
        "    for i in range(offset + 1, len(stencil)):\n",
        "      kernel += stencil[i] * tf.pad(u[:, i - offset:],\n",
        "                                    [[0, 0], [0, i - offset]])\n",
        "  else:\n",
        "    raise ValueError(\"axis must be either 'x' or 'y', not %d.\"% (axis,))\n",
        "  return kernel\n",
        "\n",
        "\n",
        "def _slice_kernel_dict() -\u003e Dict[Text, Callable[[tf.Tensor], tf.Tensor]]:\n",
        "  \"\"\"Generates a dictionary of slice-based finite-difference operators.\"\"\"\n",
        "  def kdx_plus(u):\n",
        "    return _make_slice_kernel(u, [-1., 1.], axis='x', offset=0)\n",
        "\n",
        "  def kdy_plus(u):\n",
        "    return _make_slice_kernel(u, [-1., 1.], axis='y', offset=0)\n",
        "\n",
        "  return {\n",
        "      'kdx+': kdx_plus,\n",
        "      'kdy+': kdy_plus,\n",
        "  }\n",
        "\n",
        "\n",
        "def _apply_slice_op(tiles: Iterable[tf.Tensor],\n",
        "                    op: tf.Operation) -\u003e List[tf.Tensor]:\n",
        "  \"\"\"Helper to apply a slice op.\"\"\"\n",
        "  return [op(tile) for tile in tiles]\n",
        "\n",
        "\n",
        "class ApplyKernelOp:\n",
        "  \"\"\"Applies a kernel op using slicing.\"\"\"\n",
        "\n",
        "  def _get_kernel(self, name: Text) -\u003e Callable[[tf.Tensor], tf.Tensor]:\n",
        "    if name not in self._kernels.keys():\n",
        "      raise ValueError('Invalid Kernel name requested.')\n",
        "    return self._kernels[name]\n",
        "\n",
        "  def __init__(self) -\u003e None:\n",
        "    \"\"\"Initializes kernels of slice-based finite-difference operators.\"\"\"\n",
        "    self._kernels = _slice_kernel_dict()\n",
        "\n",
        "  def apply_kernel_op_x(self, tiles: Iterable[tf.Tensor],\n",
        "                        name: Text) -\u003e List[tf.Tensor]:\n",
        "    return _apply_slice_op(tiles, self._get_kernel(name))\n",
        "\n",
        "  def apply_kernel_op_y(self, tiles: Iterable[tf.Tensor],\n",
        "                        name: Text) -\u003e List[tf.Tensor]:\n",
        "    return _apply_slice_op(tiles, self._get_kernel(name))\n",
        "\n",
        "\n",
        "def _saint_venant_step(kernel_op: ApplyKernelOp,\n",
        "                       dt: tf.Tensor,\n",
        "                       dx: float,\n",
        "                       dy: float,\n",
        "                       is_first,\n",
        "                       manning_matrix,\n",
        "                       bed_elevation,\n",
        "                       absolute_height,\n",
        "                       q_x,\n",
        "                       q_y,\n",
        "                       left_padding: int = 0,\n",
        "                       top_padding: int = 0):\n",
        "  r\"\"\"Saint-Venant update step.\n",
        "\n",
        "  For ease of notation, define:\n",
        "    h(t) := absolute_height\n",
        "    m := manning_matrix\n",
        "    e := bed_elevation\n",
        "  Performs the following finite difference update:\n",
        "    q_x(t + dt) =\n",
        "        (q_x(t) - _G * dt / dx * h(t) * \\partial(h(t) + e) / \\partial x) /\n",
        "        (1 + dt * m * q_x(t) / h(t)**(7/3))\n",
        "    q_y(t + dt) = [similar definition]\n",
        "\n",
        "  Args:\n",
        "    kernel_op: An ApplyKernelOp instance to use in computing the step update.\n",
        "    dt: Time step (in seconds). Scalar float.\n",
        "    dx: Grid spacing in x (in meters). Scalar float.\n",
        "    dy: Grid spacing in y (in meters). Scalar float.\n",
        "    is_first: A sequence of two boolean `tf.Tensor`s specifying whether dim 0\n",
        "      and dim 1 is first in the computational domain, respectively.\n",
        "    manning_matrix: [Static] Manning-friction coefficient matrix.\n",
        "      List of 2D tensors (float32).\n",
        "    bed_elevation: [Static] Elevation map (height) over [x, y] grid.\n",
        "      List of 2D tensors (float32).\n",
        "    absolute_height: [Dynamic] Absolute Water height over [x, y] grid.\n",
        "      List of 2D tensors (float32).\n",
        "    q_x: [Dynamic] Water flow in the x direction over [x, y] grid.\n",
        "      List of 2D tensors (float32).\n",
        "    q_y: [Dynamic] Water flow in the y direction over [x, y] grid.\n",
        "      List of 2D tensors (float32).\n",
        "    left_padding: The amount of left padding.\n",
        "    top_padding: The amount of top padding.\n",
        "\n",
        "  Returns:\n",
        "    (h, q_x, q_y): Tuple of updated states, each being a list of 2D\n",
        "      Tensors (float32).\n",
        "  \"\"\"\n",
        "  manning_matrix = manning_matrix[0]\n",
        "  e = bed_elevation[0]\n",
        "  h = absolute_height[0]\n",
        "  q_x = q_x[0]\n",
        "  q_y = q_y[0]\n",
        "\n",
        "  water_surface = h + e\n",
        "\n",
        "  water_depth = (\n",
        "      tf.maximum(water_surface[1:, :], water_surface[:-1, :]) -\n",
        "      tf.maximum(e[1:, :], e[:-1, :]))\n",
        "  water_depth = tf.clip_by_value(water_depth, SAINT_VENANT_EPS, water_depth)\n",
        "\n",
        "  q_y_flux = q_y[:, :-1]\n",
        "  q_y_flux = q_y_flux[:, 1:] + q_y_flux[:, :-1]\n",
        "  q_y_flux = q_y_flux[1:, :] + q_y_flux[:-1, :]\n",
        "\n",
        "  # If there is padding, the column (or, below, row) of the padded region\n",
        "  # adjacent to the physical region needs to be zeroed out to reproduce the\n",
        "  # no-padding behavior of the `tf.pad` that follows.\n",
        "  if left_padding \u003e 0:\n",
        "    width = q_y_flux.shape.as_list()[1]\n",
        "    maybe_zeros_column_vector = tf.concat([\n",
        "        tf.ones([left_padding]) *\n",
        "        tf.cast(tf.logical_not(is_first[1]), tf.float32),\n",
        "        tf.ones([width - left_padding])\n",
        "    ],\n",
        "                                          axis=0)\n",
        "    q_y_flux *= maybe_zeros_column_vector[tf.newaxis, :]\n",
        "\n",
        "  q_x_flux = q_x[:-1, :]\n",
        "  flux_norm = tf.sqrt(\n",
        "      tf.square(q_x_flux) + tf.square(tf.pad(q_y_flux, [[0, 0], [1, 1]]) * .25))\n",
        "\n",
        "  d_water_surface_x = kernel_op.apply_kernel_op_x([water_surface],\n",
        "                                                  'kdx+')[0][:-1, :]\n",
        "  mm = _G * manning_matrix[:-1, :]\n",
        "\n",
        "  q_x_next = ((q_x_flux - _G * dt *\n",
        "               (1 / dx) * water_depth * d_water_surface_x) /\n",
        "              (1 + dt * mm * flux_norm * tf.pow(water_depth, -7. / 3)))\n",
        "\n",
        "  q_x_next = tf.where(water_depth \u003c 2 * SAINT_VENANT_EPS,\n",
        "                      tf.zeros_like(q_x_next), q_x_next)\n",
        "\n",
        "  water_depth = (\n",
        "      tf.maximum(water_surface[:, 1:], water_surface[:, :-1]) -\n",
        "      tf.maximum(e[:, 1:], e[:, :-1]))\n",
        "  water_depth = tf.clip_by_value(water_depth, SAINT_VENANT_EPS, water_depth)\n",
        "\n",
        "  q_x_flux = q_x_flux[1:, :] + q_x_flux[:-1, :]\n",
        "  q_x_flux = q_x_flux[:, 1:] + q_x_flux[:, :-1]\n",
        "  q_y_flux = q_y[:, :-1]\n",
        "\n",
        "  if top_padding \u003e 0:\n",
        "    height = q_x_flux.shape.as_list()[0]\n",
        "    maybe_zeros_row_vector = tf.concat([\n",
        "        tf.ones([top_padding]) *\n",
        "        tf.cast(tf.logical_not(is_first[0]), tf.float32),\n",
        "        tf.ones([height - top_padding])\n",
        "    ],\n",
        "                                       axis=0)\n",
        "    q_x_flux *= maybe_zeros_row_vector[:, tf.newaxis]\n",
        "\n",
        "  flux_norm = tf.sqrt(\n",
        "      tf.square(q_y_flux) + tf.square(tf.pad(q_x_flux, [[1, 1], [0, 0]]) * .25))\n",
        "\n",
        "  d_water_surface_y = kernel_op.apply_kernel_op_y([water_surface],\n",
        "                                                  'kdy+')[0][:, :-1]\n",
        "\n",
        "  mm = _G * manning_matrix[:, :-1]\n",
        "\n",
        "  q_y_next = ((q_y_flux - _G * dt *\n",
        "               (1 / dy) * water_depth * d_water_surface_y) /\n",
        "              (1 + dt * mm * flux_norm * tf.pow(water_depth, -7. / 3)))\n",
        "\n",
        "  q_y_next = tf.where(water_depth \u003c 2 * SAINT_VENANT_EPS,\n",
        "                      tf.zeros_like(q_y_next), q_y_next)\n",
        "\n",
        "  if left_padding \u003e 0:\n",
        "    maybe_zeros_column_vector = tf.concat(\n",
        "        [maybe_zeros_column_vector, tf.ones([1])], axis=0)\n",
        "    q_y_next *= maybe_zeros_column_vector[tf.newaxis, :]\n",
        "\n",
        "  if top_padding \u003e 0:\n",
        "    maybe_zeros_row_vector = tf.concat(\n",
        "        [maybe_zeros_row_vector, tf.ones([1])], axis=0)\n",
        "    q_x_next *= maybe_zeros_row_vector[:, tf.newaxis]\n",
        "\n",
        "  flux = ((tf.pad(q_x_next, [[1, 0], [0, 0]]) -\n",
        "           tf.pad(q_x_next, [[0, 1], [0, 0]])) / dx +\n",
        "          (tf.pad(q_y_next, [[0, 0], [1, 0]]) -\n",
        "           tf.pad(q_y_next, [[0, 0], [0, 1]])) / dy)\n",
        "  h_next = h + dt * flux\n",
        "\n",
        "  q_x_next = tf.pad(q_x_next, [[0, 1], [0, 0]])\n",
        "  q_y_next = tf.pad(q_y_next, [[0, 0], [0, 1]])\n",
        "  return [h_next], [q_x_next], [q_y_next]\n",
        "\n",
        "\n",
        "def _do_halo_exchange(left_padding: int, top_padding: int,\n",
        "                      replica_id: tf.Tensor, replicas: np.ndarray, fields, bcs):\n",
        "  r\"\"\"Does halo exchange on a tuple of fields.\n",
        "\n",
        "  Args:\n",
        "    left_padding: The left padding.\n",
        "    top_padding: The top padding.\n",
        "    replica_id: The replica id as a `tf.Tensor`.\n",
        "    replicas: A numpy array that maps core coordinates to replica id numbers.\n",
        "    fields: A tuple of `tf.Tensor` fields.\n",
        "    bcs: A sequence of boundary condition specs, one corresponding to each\n",
        "      field. See `inplace_halo_exchange` for details.\n",
        "\n",
        "  Returns:\n",
        "    The fields after a halo exchange and/or applying boundary conditions.\n",
        "  \"\"\"\n",
        "  halo_dims = (0, 1)\n",
        "  replica_dims = (0, 1)\n",
        "\n",
        "  return_fields = []\n",
        "\n",
        "  for field, bc in zip(fields, bcs):\n",
        "    return_fields.append(\n",
        "        inplace_halo_exchange(field, halo_dims, replica_id, replicas,\n",
        "                              replica_dims, bc, left_padding, top_padding))\n",
        "\n",
        "  return tuple(return_fields)\n",
        "\n",
        "\n",
        "def approx_normal_depth_flux(cross_section_stage: TensorOrArray,\n",
        "                             resolution: float, slope: float,\n",
        "                             manning: TensorOrArray) -\u003e TensorOrArray:\n",
        "  \"\"\"Compute a local approximation of flux using Manning's equation.\n",
        "\n",
        "  To compute the flux locally at each cell, the hydraulic radius is approximated\n",
        "  by the water depth. This approximation is coherent with flux_with_inertia.py,\n",
        "  that approximates the hydraulic radius in the shallow water equation by water\n",
        "  depth.\n",
        "\n",
        "  Args:\n",
        "    cross_section_stage: Water depth at each cell of the cross section.\n",
        "    resolution: The size of each cell in meters.\n",
        "    slope: Slope of the channel.\n",
        "    manning: The Manning matrix.\n",
        "\n",
        "  Returns:\n",
        "    Flux at each cell of the cross section in cms (m**3 / s) at normal flow\n",
        "    conditions.\n",
        "  \"\"\"\n",
        "  use_tf = isinstance(cross_section_stage, tf.Tensor)\n",
        "  clip = tf.clip_by_value if use_tf else np.clip\n",
        "  power = tf.pow if use_tf else np.power\n",
        "  sqrt = tf.sqrt if use_tf else np.sqrt\n",
        "  cross_section_stage = clip(cross_section_stage, 0.0, np.inf)\n",
        "  return (resolution * power(cross_section_stage, 5 / 3) *\n",
        "          np.sqrt(abs(slope)) * np.sign(slope) / sqrt(manning))\n",
        "\n",
        "\n",
        "class SaintVenantRiverChannelStep:\n",
        "  \"\"\"Defines model function for the Saint Venant simulation.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               kernel_op,\n",
        "               params,\n",
        "               inflow_bcs,\n",
        "               outflow_bcs):\n",
        "    self._kernel_op = kernel_op\n",
        "    self._resolution = params.dx\n",
        "    self._inflow_bcs = inflow_bcs\n",
        "    self._outflow_bcs = outflow_bcs\n",
        "    self._dt = params.dt\n",
        "    self._top_padding = params.top_padding\n",
        "    self._left_padding = params.left_padding\n",
        "    self._params = params\n",
        "\n",
        "  def _add_inflow_bc_summands(self, t, i_lrtb, bc_summands):\n",
        "    \"\"\"Adds inflow `h` boundary conditions.\"\"\"\n",
        "    warmup_seconds = self._params.warmup_seconds\n",
        "    if warmup_seconds == 0:\n",
        "      warmup_fraction = 1\n",
        "    else:\n",
        "      warmup_fraction = tf.minimum(t / warmup_seconds, 1)\n",
        "    for inflow_bc in self._inflow_bcs:\n",
        "      bc_summands[inflow_bc.boundary_side] += (\n",
        "          warmup_fraction * i_lrtb[inflow_bc.boundary_side])\n",
        "\n",
        "  def _add_outflow_bc_summands(self, h, m_lrtb, o_lrtb, bc_summands):\n",
        "    \"\"\"Adds outflow `h` boundary conditions.\"\"\"\n",
        "    for outflow_bc in self._outflow_bcs:\n",
        "      side = outflow_bc.boundary_side\n",
        "      print('Calculating approximate normal depth flux for %s '\n",
        "            '(slope=%f, resolution=%f).' %\n",
        "            (side, outflow_bc.slope, self._resolution))\n",
        "      masked_height_slice = h[0][outflow_bc.padded_full_slice] * o_lrtb[side]\n",
        "      flux = approx_normal_depth_flux(\n",
        "          masked_height_slice, self._resolution, outflow_bc.slope, m_lrtb[side])\n",
        "      bc_summands[outflow_bc.boundary_side] -= (\n",
        "          flux  * self._dt / self._resolution**2)\n",
        "\n",
        "  def update_states(self, h, q_x, q_y, t, m, e, h_bcs, q_bcs, dt, replica_id,\n",
        "                    replicas, is_first):\n",
        "    \"\"\"Updates states by doing halo exchanges and a time step.\"\"\"\n",
        "    # Note that for the very first call to `update_states`, the halos are\n",
        "    # (assumed to be) correct due to initialization. In subsequent calls, the\n",
        "    # `h` halos are correct by virtue of the deferred `_do_halo_exchange` call\n",
        "    # following the step update.\n",
        "    dt_scalar = dt[0][0]\n",
        "    # pylint: disable=unbalanced-tuple-unpacking\n",
        "    q_x, q_y = _do_halo_exchange(self._params.left_padding,\n",
        "                                 self._params.top_padding, replica_id, replicas,\n",
        "                                 (q_x, q_y), (q_bcs, q_bcs))\n",
        "    h, q_x, q_y = _saint_venant_step(self._kernel_op, dt_scalar,\n",
        "                                     self._params.dx, self._params.dy, is_first,\n",
        "                                     m, e, h, q_x, q_y,\n",
        "                                     self._params.left_padding,\n",
        "                                     self._params.top_padding)\n",
        "    h, = _do_halo_exchange(self._params.left_padding, self._params.top_padding,\n",
        "                           replica_id, replicas, (h,), (h_bcs,))\n",
        "    return {\n",
        "        'h': h,\n",
        "        'q_x': q_x,\n",
        "        'q_y': q_y,\n",
        "        't': [ti + dt_scalar for ti in t],\n",
        "        'dt': dt\n",
        "    }\n",
        "\n",
        "  def _convert_bc_summands_to_bcs(self, bc_summands):\n",
        "    \"\"\"Converts `bc_summands` values into `BoundaryConditionSpec`.\"\"\"\n",
        "    def float_or_list(side):\n",
        "      val = bc_summands[side]\n",
        "      return [[val]] if isinstance(val, tf.Tensor) else float(val)\n",
        "\n",
        "    return [[(BCType.ADDITIVE, float_or_list(BoundarySide.TOP)),\n",
        "             (BCType.ADDITIVE, float_or_list(BoundarySide.BOTTOM))],\n",
        "            [(BCType.ADDITIVE, float_or_list(BoundarySide.LEFT)),\n",
        "             (BCType.ADDITIVE, float_or_list(BoundarySide.RIGHT))]]\n",
        "\n",
        "  def step(self, replica_id: tf.Tensor, replicas: np.ndarray,\n",
        "           step_id: tf.Tensor, states, additional_states):\n",
        "    \"\"\"The model time step function.\"\"\"\n",
        "    del step_id\n",
        "\n",
        "    h = states[_H]\n",
        "    q_x = states[_Q_X]\n",
        "    q_y = states[_Q_Y]\n",
        "    t = states[_T]\n",
        "    dt = states[_DT]\n",
        "    m = additional_states[_M]\n",
        "    e = additional_states[_E]\n",
        "\n",
        "    i_lrtb = {\n",
        "        BoundarySide.LEFT: additional_states[_I_L_BOUNDARY][0],\n",
        "        BoundarySide.RIGHT: additional_states[_I_R_BOUNDARY][0],\n",
        "        BoundarySide.TOP: additional_states[_I_T_BOUNDARY][0],\n",
        "        BoundarySide.BOTTOM: additional_states[_I_B_BOUNDARY][0]\n",
        "    }\n",
        "    m_lrtb = {\n",
        "        BoundarySide.LEFT: additional_states[_M_L_BOUNDARY][0],\n",
        "        BoundarySide.RIGHT: additional_states[_M_R_BOUNDARY][0],\n",
        "        BoundarySide.TOP: additional_states[_M_T_BOUNDARY][0],\n",
        "        BoundarySide.BOTTOM: additional_states[_M_B_BOUNDARY][0]\n",
        "    }\n",
        "    o_lrtb = {\n",
        "        BoundarySide.LEFT: additional_states[_O_L_BOUNDARY][0],\n",
        "        BoundarySide.RIGHT: additional_states[_O_R_BOUNDARY][0],\n",
        "        BoundarySide.TOP: additional_states[_O_T_BOUNDARY][0],\n",
        "        BoundarySide.BOTTOM: additional_states[_O_B_BOUNDARY][0]\n",
        "    }\n",
        "\n",
        "    bc_summands = {BoundarySide.LEFT: 0, BoundarySide.RIGHT: 0,\n",
        "                   BoundarySide.TOP: 0, BoundarySide.BOTTOM: 0}\n",
        "    self._add_inflow_bc_summands(t[0], i_lrtb, bc_summands)\n",
        "    self._add_outflow_bc_summands(h, m_lrtb, o_lrtb, bc_summands)\n",
        "\n",
        "    h_bcs = self._convert_bc_summands_to_bcs(bc_summands)\n",
        "    q_bcs = [[(BCType.NO_TOUCH, 0.0)] * 2] * 2\n",
        "    is_first = []\n",
        "    for dim in (0, 1):\n",
        "      is_first.append(is_first_replica(replica_id, replicas, dim))\n",
        "\n",
        "    return self.update_states(h, q_x, q_y, t, m, e, h_bcs, q_bcs, dt,\n",
        "                              replica_id, replicas, is_first)\n",
        "\n",
        "\n",
        "class TPUSimulation:\n",
        "  \"\"\"A class that manages a simulation distributed across TPU cores.\"\"\"\n",
        "\n",
        "  def __init__(self,\n",
        "               init_fn,\n",
        "               step_fn,\n",
        "               computation_shape: np.ndarray,\n",
        "               tpu_topology: tf.Tensor,\n",
        "               host_vars_filter=None) -\u003e None:\n",
        "    self._step_fn = step_fn\n",
        "    self._computation_shape = computation_shape\n",
        "    (self._device_assignment,\n",
        "     self._compute_core_assignment) = get_tpu_device_assignment(\n",
        "         computation_shape, tpu_topology)\n",
        "\n",
        "    def init_wrapper(replica_id: int):\n",
        "      coordinates = self._compute_core_assignment[replica_id, :]\n",
        "      return init_fn(replica_id, coordinates)\n",
        "\n",
        "    self._distributor = TPUDistributor(\n",
        "        self._device_assignment, init_wrapper, host_vars_filter)\n",
        "\n",
        "    # Numpy array mapping grid coordinates to replica numbers.\n",
        "    self._replicas = np.full(self._computation_shape, -1, dtype=np.int32)\n",
        "    for replica_id in range(self.num_replicas):\n",
        "      coordinates = self._compute_core_assignment[replica_id, :]\n",
        "      self._replicas[tuple(coordinates)] = replica_id\n",
        "\n",
        "  @property\n",
        "  def replicas(self) -\u003e np.ndarray:\n",
        "    return self._replicas\n",
        "\n",
        "  @property\n",
        "  def device_assignment(self) -\u003e tf.tpu.experimental.DeviceAssignment:\n",
        "    return self._device_assignment\n",
        "\n",
        "  def step_builder(self, replica_inputs=None):\n",
        "\n",
        "    def step_wrapper(inputs, state):\n",
        "      return self._step_fn(inputs, state, self.replicas)\n",
        "\n",
        "    return self._distributor.get_step_builder(\n",
        "        step_wrapper, replica_inputs=replica_inputs)\n",
        "\n",
        "  def tpu_variable_store(self, replica_id: int) -\u003e VariableStore:\n",
        "    return self._distributor.tpu_variable_stores[replica_id]\n",
        "\n",
        "  def host_variable_store(self, replica_id: int) -\u003e VariableStore:\n",
        "    return self._distributor.host_variable_stores[replica_id]\n",
        "\n",
        "  @property\n",
        "  def num_replicas(self) -\u003e int:\n",
        "    return self._device_assignment.num_replicas\n",
        "\n",
        "  def coordinates(self, replica_id: int) -\u003e np.ndarray:\n",
        "    return self._compute_core_assignment[replica_id, :]\n",
        "\n",
        "\n",
        "class DynamicStepUpdater:\n",
        "  \"\"\"Steps the simulation forward using a dynamically calculated dt.\"\"\"\n",
        "\n",
        "  def __init__(self, model_fn, num_secs_per_while_loop, dt):\n",
        "    self._model_fn = model_fn\n",
        "    self._num_secs_per_while_loop = tf.convert_to_tensor(\n",
        "        num_secs_per_while_loop, dtype=tf.float32)\n",
        "    self._dt = dt\n",
        "\n",
        "  def step(self, replica_id: tf.Tensor, replicas: np.ndarray,\n",
        "           step_id: tf.Tensor, state_keys, states, additional_states):\n",
        "    \"\"\"Advance simulation by a step.\"\"\"\n",
        "    del state_keys\n",
        "    elapsed_time_0 = states['t'][0][0][0]\n",
        "    stop_time = elapsed_time_0 + self._num_secs_per_while_loop\n",
        "    i0 = tf.constant(0)\n",
        "\n",
        "    def condition(elapsed_time, states, i):\n",
        "      del states, i\n",
        "      return tf.less(elapsed_time + TIMESTEP_EPS, stop_time)\n",
        "\n",
        "    def body(elapsed_time, states, i):\n",
        "      \"\"\"Advances the simulation by one step.\"\"\"\n",
        "      new_dt = tf.minimum(self._dt, stop_time - elapsed_time)\n",
        "      states['dt'] = [tf.reshape(new_dt, (1, 1))]\n",
        "\n",
        "      updated_states = self._model_fn.step(replica_id, replicas, step_id + i,\n",
        "                                           states, additional_states)\n",
        "\n",
        "      return (updated_states['t'][0][0][0], updated_states, i + 1)\n",
        "\n",
        "    _, (output_states), _ = tf.while_loop(\n",
        "        condition,\n",
        "        body,\n",
        "        loop_vars=(elapsed_time_0, states, i0),\n",
        "        return_same_structure=True,\n",
        "        back_prop=False)\n",
        "\n",
        "    return output_states\n",
        "\n",
        "\n",
        "def build_step_fn(params, updater, state_keys, additional_state_keys):\n",
        "  \"\"\"Returns a function that does a one-step update.\"\"\"\n",
        "\n",
        "  def step_fn(inputs, state, replicas: np.ndarray):\n",
        "    \"\"\"Wraps a method that calls a step fn update.\"\"\"\n",
        "    nz = params.nz\n",
        "\n",
        "    step_id = inputs[0]\n",
        "    replica_id = state['replica_id']\n",
        "\n",
        "    split_state = split_state_in_z(state, state_keys, nz)\n",
        "    keyed_split_states = collections.OrderedDict([\n",
        "        (key, get_field(split_state, key, nz)) for key in state_keys\n",
        "    ])\n",
        "\n",
        "    additional_split_state = split_state_in_z(\n",
        "        state, additional_state_keys, nz)\n",
        "    keyed_additional_split_states = collections.OrderedDict([\n",
        "        (key, get_field(additional_split_state, key, nz))\n",
        "        for key in additional_state_keys\n",
        "    ])\n",
        "\n",
        "    keyed_additional_vars = keyed_additional_split_states\n",
        "    output_states = updater.step(replica_id, replicas, step_id, state_keys,\n",
        "                                 keyed_split_states, keyed_additional_vars)\n",
        "\n",
        "    for key, states in output_states.items():\n",
        "      for i, enumerated_state in enumerate(states):\n",
        "        split_state[get_tile_name(key, i)] = enumerated_state\n",
        "\n",
        "    output_split_state_keys = list(output_states.keys())\n",
        "    state.update(\n",
        "        merge_state_in_z(split_state, output_split_state_keys, nz))\n",
        "    outputs = [tf.constant(0)]  # Dummy output to avoid pruning computation.\n",
        "    return outputs, state\n",
        "\n",
        "  return step_fn\n",
        "\n",
        "\n",
        "class Equalizer:\n",
        "  \"\"\"Logic for filling a cross section with water keeping constant surface.\n",
        "\n",
        "  The Equalizer that disregards the layout of the cross section -\n",
        "  conceptually re-arranging the cells of the original cross section to form\n",
        "  a bi-modal* array.\n",
        "\n",
        "  * A bi-modal array, in this context, is an array which satisfies the following\n",
        "  condition: There exists i, 0 \u003c= i \u003c len(array), such that array[:i] is sorted\n",
        "  in decreasing order and array[i:] is sorted in increasing order.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(self, heights: Sequence[float], cell_area: float = 1):\n",
        "    \"\"\"Initializer.\n",
        "\n",
        "    Args:\n",
        "      heights: Sequence of floats (or convertible to float).\n",
        "        Defines the elevation of each cell along the cross section.\n",
        "        NaN values are accepted and will be ignored by all the computations.\n",
        "      cell_area: A float. Optional. The horizontal area of the cell.\n",
        "    \"\"\"\n",
        "    self._heights_shape = len(heights)\n",
        "    self._non_nan_height_idxs = sorted(np.flatnonzero(~np.isnan(heights)))\n",
        "    self._non_nan_heights = np.array(heights)[self._non_nan_height_idxs]\n",
        "    self._cell_area = cell_area\n",
        "    self._compute_marks()\n",
        "\n",
        "  def _compute_marks(self):\n",
        "    \"\"\"Computes the amount of water needed to reach each cell.\n",
        "\n",
        "    The amount of water to reach a cell as function of its elevation is\n",
        "    monotonic. This method sorts the cells by elevation, and computes the\n",
        "    amounts of water needed to reach each cell in \"iterative\" fashion -- at each\n",
        "    step it computes the amount of water needed to reach from the current water\n",
        "    surface to the next lowest dry cell.\n",
        "\n",
        "    A by-product of such computation is that for any given volume it is easy to\n",
        "    know which cells exactly are going to be wet, by finding the index of the\n",
        "    volume in the self._marks sorted array. All cells below that index will be\n",
        "    wet.\n",
        "    \"\"\"\n",
        "    self._sorted_args = np.argsort(self._non_nan_heights)\n",
        "    self._sorted_heights = self._non_nan_heights[self._sorted_args]\n",
        "    self._marks = np.concatenate([\n",
        "        [0],\n",
        "        np.cumsum(np.diff(self._sorted_heights) *\n",
        "                  np.arange(1, len(self._non_nan_heights)))\n",
        "    ])\n",
        "\n",
        "  def fill(self, volume: float):\n",
        "    \"\"\"Returns the stage at each cell that will sum up to volume.\n",
        "\n",
        "    All wet pixels will have same water surface.\n",
        "    All dry pixels are higher than that surface.\n",
        "\n",
        "    Args:\n",
        "      volume: A float. The volume of water that the cross section should\n",
        "        contain.\n",
        "\n",
        "    Returns:\n",
        "      1-D np.array of same shape as self._heights. Each cell represents the\n",
        "      water stage at the corresponding cross section cell.\n",
        "    \"\"\"\n",
        "    normalized_volume = volume / self._cell_area\n",
        "    num_wet_cells = np.searchsorted(self._marks, normalized_volume)\n",
        "    if num_wet_cells == 0:\n",
        "      return np.zeros(self._heights_shape, dtype=float)\n",
        "    # The amount of water needed to raise all lower pixels to the height\n",
        "    # of base surface.\n",
        "    base_volume = self._marks[num_wet_cells - 1]\n",
        "    # The residue volume that will spread equally between all wet pixels.\n",
        "    residue_volume = normalized_volume - base_volume\n",
        "    residue_stage = residue_volume / num_wet_cells\n",
        "    # The elevation of the hightest pixel that is going to get wet.\n",
        "    # See _compute_marks for more info.\n",
        "    base_water_elevation = self._sorted_heights[num_wet_cells - 1]\n",
        "    water_elevation = base_water_elevation + residue_stage\n",
        "    return self.level_at(water_elevation)\n",
        "\n",
        "  def level_at(self, height: float):\n",
        "    \"\"\"Returns the stage at each cell, so that water levels up at [height].\n",
        "\n",
        "    Args:\n",
        "      height: A double. The height of the desired water surface.\n",
        "\n",
        "    Returns:\n",
        "      1-D np.array of same shape as self._heights. Each cell represents the\n",
        "      water stage at the corresponding cross section cell.\n",
        "\n",
        "    Raises:\n",
        "      RuntimeError: In case at least one of the boundary pixels get wet.\n",
        "    \"\"\"\n",
        "    stage = np.zeros(self._heights_shape, dtype=float)\n",
        "    stage[self._non_nan_height_idxs] = np.clip(height - self._non_nan_heights,\n",
        "                                               0, None)\n",
        "    if (stage[self._non_nan_height_idxs[0]] \u003e SAINT_VENANT_EPS or\n",
        "        stage[self._non_nan_height_idxs[-1]] \u003e SAINT_VENANT_EPS):\n",
        "      raise RuntimeError('Water reaches the boundaries of the cross section. '\n",
        "                         'This may lead to unexpected behavior.')\n",
        "    return stage\n",
        "\n",
        "\n",
        "class _Node(\n",
        "    attr.make_class(\n",
        "        '_Node', {\n",
        "            'left': attr.ib(default=None),\n",
        "            'right': attr.ib(default=None),\n",
        "            'value': attr.ib(default=None)\n",
        "        })):\n",
        "  \"\"\"A node in a binary tree.\"\"\"\n",
        "\n",
        "  def debug_str(self, transform=lambda x: x, prefix='', depth=0):\n",
        "    s = '{}{}\\n'.format('-\u003e' * depth + prefix, transform(self.value))\n",
        "    if self.left:\n",
        "      s += self.left.debug_str(transform, 'L: ', depth + 1)\n",
        "    if self.right:\n",
        "      s += self.right.debug_str(transform, 'R: ', depth + 1)\n",
        "    return s\n",
        "\n",
        "\n",
        "def _build_max_tree(arr: Sequence[float]) -\u003e Optional[_Node]:\n",
        "  r\"\"\"Returns a binary max tree representation of an array.\n",
        "\n",
        "  A binary max tree representation of an array is a tree of indices of the array\n",
        "  and is defined as follows:\n",
        "  1. Its root is the argmax of the array.\n",
        "  2. Its left node is the binary max tree representation of arr[:argmax]\n",
        "  3. Its right node is the binary max tree representation of arr[argmax + 1:]\n",
        "\n",
        "  For example, the binary tree represenatation of [3, 0, 7, 5, 6, 4] is:\n",
        "       2\n",
        "    /     \\\n",
        "   0      4\n",
        "   \\     / \\\n",
        "    1   3   5\n",
        "\n",
        "\n",
        "  Note, will return None in case arr is empty.\n",
        "\n",
        "  Args:\n",
        "    arr: Which will be represented as a binary max tree.\n",
        "  Returns:\n",
        "    `_Node` handle to the binary max tree representation.\n",
        "  \"\"\"\n",
        "  # The tree is built in an iterative fashion. At iteration i, we have a binary\n",
        "  # max tree representation of `arr[:i]`.\n",
        "  #\n",
        "  # To add a new element to the tree, we note that the rightmost element of\n",
        "  # the array is also the rightmost element in the tree, i.e. if we repeat the\n",
        "  # operation:\n",
        "  #   node = root; while node.right: node = node.right;\n",
        "  # we'll arrive at the rightmost element of the array.\n",
        "  #\n",
        "  # Hence appending a new element to the tree from the right is done by scanning\n",
        "  # along the right branch of the tree, and finding the corresponding place for\n",
        "  # the new element. Every node below that place becomes the left sub-tree of\n",
        "  # the new element.\n",
        "  if len(arr) == 0:  # pylint: disable=g-explicit-length-test\n",
        "    return None\n",
        "  right_branch = [_Node(left=None, right=None, value=0)]\n",
        "  for i in range(1, len(arr)):\n",
        "    v = arr[i]\n",
        "    last_node = None\n",
        "    while (right_branch and arr[right_branch[-1].value] \u003c v):\n",
        "      last_node = right_branch.pop()\n",
        "    cur = _Node(left=last_node, right=None, value=i)\n",
        "    if right_branch:\n",
        "      right_branch[-1].right = cur\n",
        "    right_branch.append(cur)\n",
        "  return right_branch[0]\n",
        "\n",
        "\n",
        "class _EqualizerEntry(\n",
        "    attr.make_class(\n",
        "        '_EqualizerEntry', {\n",
        "            'lbound': attr.ib(type=int),\n",
        "            'rbound': attr.ib(type=int),\n",
        "            'capacity': attr.ib(type=float),\n",
        "            'equalizer': attr.ib(type=Equalizer)\n",
        "        })):\n",
        "  \"\"\"An entry of equalizer in a binary tree.\n",
        "\n",
        "  Each entry is responsible of equalizing water within [lbound, rbound), and\n",
        "  corresponds to the case where there is enough water to fill all the sub-cross\n",
        "  sections (in other words, the volume of water is larger then the sum of the\n",
        "  capacities of the children nodes). In such case the water is at the same level\n",
        "  across [lbound, rbound). The capacity of each entry is the maximal amount of\n",
        "  water the cross section can contain without overflowing the left or right\n",
        "  boundaries.\n",
        "  \"\"\"\n",
        "\n",
        "\n",
        "class HierarchicalEqualizer(metaclass=abc.ABCMeta):\n",
        "  \"\"\"Equalizer that takes into account the layout of the cross section.\n",
        "\n",
        "  HierarchicalEqualizer separates the cross section into a hierarchy of cross\n",
        "  sections, so that water can't 'magically' travel over hills, but rather has to\n",
        "  overtop the next peak to reach the adjacent cross section.\n",
        "\n",
        "  Every cross section interacts with its adjacent cross section as follows.\n",
        "  Once the water at the current cross section reaches the lower\n",
        "  boundary, it spills over to the cross section beyond that boundary.\n",
        "  In case the adjacent cross section is full to capacity as well, they\n",
        "  continue to fill up jointly until one of the unshared peaks is reached.\n",
        "  \"\"\"\n",
        "\n",
        "  @abc.abstractmethod\n",
        "  def fill(self,\n",
        "           volume: float,\n",
        "           relative_location: int = 0,\n",
        "           stage: Sequence[float] = None) -\u003e Sequence[float]:\n",
        "    \"\"\"Fill water to the cross section at the given location.\n",
        "\n",
        "    Args:\n",
        "      volume: The volume of water to fill in.\n",
        "      relative_location: The index at which to start pouring water. The index\n",
        "        is relative to the cross-section: it must be in the range\n",
        "        [0, len(stage) - 1].\n",
        "      stage: The current amount of water along the cross section.\n",
        "\n",
        "    Returns:\n",
        "      The stage after distributing the volume of water along the cross section.\n",
        "    \"\"\"\n",
        "    raise NotImplementedError()\n",
        "\n",
        "\n",
        "class _HierarchicalEqualizer(HierarchicalEqualizer):\n",
        "  \"\"\"The implementation of HierarchicalEqualizer.\"\"\"\n",
        "\n",
        "  def __init__(self, heights: Sequence[float], cell_area: float,\n",
        "               equalizers_tree: _EqualizerEntry):\n",
        "    \"\"\"Initializer.\n",
        "\n",
        "    The object should be usually initialized by invoking\n",
        "    'hierarchical_equalizer`. This initializer should be used only in case of\n",
        "    initialization with custom `equalizers_tree`.\n",
        "\n",
        "    Args:\n",
        "      heights: Cell elevations along some cross sections.\n",
        "      cell_area: The area of each cell in the cross section.\n",
        "      equalizers_tree: The tree describing the separation of the original cross\n",
        "        section into sub-cross sections.\n",
        "    \"\"\"\n",
        "    super(_HierarchicalEqualizer, self).__init__()\n",
        "    self._heights = heights\n",
        "    self._cell_area = cell_area\n",
        "    self._equalizers_tree = equalizers_tree\n",
        "\n",
        "  def _fill_recursively(self, equalizers_subtree: _Node, volume: float,\n",
        "                        relative_location: int,\n",
        "                        stage: MutableSequence[float]) -\u003e float:\n",
        "    \"\"\"Recursively fill water into the cross section.\n",
        "\n",
        "    The operation will travel to the leaf of the equalizers tree that contains\n",
        "    the requested location, and will use the equalizer to fill water in that\n",
        "    cross section. In case capacity is reached, it will fill the residue water\n",
        "    into the adjacent cross section. In case capacity there is reached as well,\n",
        "    it will fill the residue into the parent cross section. It continues\n",
        "    recursively until all the requested volume is distributed along the cross\n",
        "    section.\n",
        "\n",
        "    Args:\n",
        "      equalizers_subtree: The root node of the equalizers sub-tree.\n",
        "      volume: The amount of volume to distribute.\n",
        "      relative_location: The location at which to pour water, in the range\n",
        "        [0, len(stage) - 1].\n",
        "      stage: The current stage along the cross section.\n",
        "\n",
        "    Returns:\n",
        "      The amount of water that was filled into the current sub-tree.\n",
        "    \"\"\"\n",
        "    if volume \u003c= SAINT_VENANT_EPS:\n",
        "      return 0\n",
        "    start_volume = volume\n",
        "    lnode = equalizers_subtree.left\n",
        "    rnode = equalizers_subtree.right\n",
        "    lbound = equalizers_subtree.value.lbound\n",
        "    rbound = equalizers_subtree.value.rbound\n",
        "    current_volume = np.sum(stage[lbound:rbound])\n",
        "    lnode_capacity = 0 if lnode is None else lnode.value.capacity\n",
        "    rnode_capacity = 0 if rnode is None else rnode.value.capacity\n",
        "\n",
        "    # Handle the case when both left and right subtrees are full to capacity.\n",
        "    if current_volume + volume \u003e lnode_capacity + rnode_capacity:\n",
        "      remaining_capacity = equalizers_subtree.value.capacity - current_volume\n",
        "      this_volume = min(remaining_capacity, volume)\n",
        "      stage[lbound:rbound] = equalizers_subtree.value.equalizer.fill(\n",
        "          current_volume + this_volume)\n",
        "      return this_volume\n",
        "\n",
        "    if lnode and lnode.value.lbound \u003c= relative_location \u003c lnode.value.rbound:\n",
        "      volume -= self._fill_recursively(lnode, volume, relative_location, stage)\n",
        "      # Spill over to the adjacent cross section on the right.\n",
        "      volume -= self._fill_recursively(rnode, volume, lnode.value.rbound, stage)\n",
        "    elif rnode and rnode.value.lbound \u003c= relative_location \u003c rnode.value.rbound:\n",
        "      volume -= self._fill_recursively(rnode, volume, relative_location, stage)\n",
        "      # Spill over to the adjacent cross section on the left.\n",
        "      volume -= self._fill_recursively(lnode, volume, rnode.value.lbound - 1,\n",
        "                                       stage)\n",
        "    return start_volume - volume\n",
        "\n",
        "  def fill(self,\n",
        "           volume: float,\n",
        "           relative_location: int = 0,\n",
        "           stage: Sequence[float] = None) -\u003e Sequence[float]:\n",
        "    \"\"\"See the documentation at `HierarchicalEqualizer.fill`.\"\"\"\n",
        "    if not 0 \u003c= relative_location \u003c= len(self._heights) - 1:\n",
        "      raise RuntimeError('Relative location is outside of the cross section.')\n",
        "    stage_copy = np.zeros_like(self._heights)\n",
        "    if stage is not None:\n",
        "      stage_copy[1:-1] = stage\n",
        "    # Increment location by 1, to take into account the one cell padding in\n",
        "    # self._heights.\n",
        "    self._fill_recursively(self._equalizers_tree, volume / self._cell_area,\n",
        "                           relative_location + 1, stage_copy)\n",
        "    return stage_copy[1:-1]\n",
        "\n",
        "\n",
        "def _local_maximums(arr: Sequence[float]) -\u003e np.ndarray:\n",
        "  \"\"\"Returns the position of all local maximums within an array.\"\"\"\n",
        "  clipped = np.concatenate([[arr[0]], arr, [arr[-1]]])\n",
        "  loose_cond = np.logical_and(clipped[1:-1] \u003e= clipped[:-2],\n",
        "                              clipped[1:-1] \u003e= clipped[2:])\n",
        "  eq = (clipped[:-1] == clipped[1:])\n",
        "  strict_cond = np.logical_or(~eq[:-1], ~eq[1:])\n",
        "  return np.flatnonzero(np.logical_and(loose_cond, strict_cond))\n",
        "\n",
        "\n",
        "def _build_equalizers_tree(heights: np.ndarray, maximums: Sequence[float],\n",
        "                           peak_node: Optional[_Node], lpeak: int,\n",
        "                           rpeak: int) -\u003e _Node:\n",
        "  \"\"\"Recursively builds a hierarchy of cross sections.\n",
        "\n",
        "  Each node in the tree corresponds to a peak. Each node has two child nodes\n",
        "  that together define the cross section to the left and right of the peak\n",
        "  respectively. The value of each node is an EqualizerEntry that corresponds to\n",
        "  the case when we know that both child cross sections are full, and hence the\n",
        "  water is level along the given interval. Note that this function assumes that\n",
        "  the area of each cell within the cross section equals 1. If that is not the\n",
        "  case, the volumes have to be normalized before usage.\n",
        "\n",
        "  Args:\n",
        "    heights: Cell elevations along some cross sections.\n",
        "    maximums: The indices within `heights` of local maximums.\n",
        "    peak_node: Corresponding node in the max tree representation of the peaks.\n",
        "    lpeak: The index of the closest higher point from the left.\n",
        "    rpeak: The index of the closest higher point from the right.\n",
        "\n",
        "  Returns:\n",
        "    The root of the equalizers tree corresponding to the subtree of peak_node.\n",
        "  \"\"\"\n",
        "  max_height = min(heights[lpeak], heights[rpeak])\n",
        "  if np.isinf(max_height):\n",
        "    max_volume = np.inf\n",
        "  else:\n",
        "    depths_at_capacity = np.clip(max_height - heights[lpeak:rpeak + 1], 0, None)\n",
        "    max_volume = np.sum(depths_at_capacity)\n",
        "\n",
        "  if peak_node is None:\n",
        "    equalizer = Equalizer(heights[lpeak:rpeak + 1])\n",
        "    return _Node(value=_EqualizerEntry(lpeak, rpeak + 1, max_volume, equalizer))\n",
        "\n",
        "  peak_index = maximums[peak_node.value]\n",
        "  equalizer = Equalizer(heights[lpeak:rpeak + 1])\n",
        "  return _Node(\n",
        "      left=_build_equalizers_tree(heights, maximums, peak_node.left, lpeak,\n",
        "                                  peak_index),\n",
        "      right=_build_equalizers_tree(heights, maximums, peak_node.right,\n",
        "                                   peak_index, rpeak),\n",
        "      value=_EqualizerEntry(lpeak, rpeak + 1, max_volume, equalizer))\n",
        "\n",
        "\n",
        "def hierarchical_equalizer(heights: Sequence[float],\n",
        "                           cell_area: float = 1) -\u003e HierarchicalEqualizer:\n",
        "  \"\"\"Returns an initialized HierarachicalEqualizer.\n",
        "\n",
        "  Builds an equalizers_tree from `heights` (see documentation of\n",
        "  _build_equalizer_tree), and initializes an instance of HierarchicalEqualizer\n",
        "  accordingly.\n",
        "\n",
        "  Args:\n",
        "    heights: Cell elevations along some cross sections.\n",
        "    cell_area: The area of each cell in the cross section.\n",
        "  \"\"\"\n",
        "  # We add inf on both bounds of the cross section to induce the behavior that\n",
        "  # once the cross section is full, water continues to rise evenly along all\n",
        "  # the pixels of the cross section.\n",
        "  heights = np.concatenate([[np.inf], heights, [np.inf]])\n",
        "  maximums = _local_maximums(heights)\n",
        "  maximums = maximums[maximums != 0]\n",
        "  maximums = maximums[maximums != (len(heights) - 1)]\n",
        "  peaks_max_tree = _build_max_tree(heights[maximums])\n",
        "  equalizers_tree = _build_equalizers_tree(heights, maximums, peaks_max_tree, 0,\n",
        "                                           len(heights) - 1)\n",
        "  return _HierarchicalEqualizer(heights, cell_area, equalizers_tree)\n",
        "\n",
        "\n",
        "def approx_normal_depth_inverse(flux: float,\n",
        "                                cross_section_elevation: np.ndarray,\n",
        "                                resolution: float,\n",
        "                                slope: float,\n",
        "                                manning: np.ndarray,\n",
        "                                tolerance: float = 1e-10,\n",
        "                                cross_section_source_location=0) -\u003e np.ndarray:\n",
        "  \"\"\"Deduces water level in Manning's equation, given all other parameters.\n",
        "\n",
        "  Given discharge, slope and roughness coefficient, finds what should be the\n",
        "  water level in manning's equation.\n",
        "\n",
        "  Args:\n",
        "    flux: The total flux in cms (m^3 / s) that should run through the cross\n",
        "      section.\n",
        "    cross_section_elevation: Bed elevation in meters along the cross section.\n",
        "    resolution: Cell size in meters.\n",
        "    slope: The slope of the water surface. i.e. the ratio between elevation\n",
        "    change to the advance in the axis perpendicular to the cross section.\n",
        "    manning: An array specifying he roughness coefficient at each cell along\n",
        "      the cross section.\n",
        "    tolerance: The relative tolerance of error. The algorithm will stop\n",
        "      once it finds a water level for which the total amount of flux differs\n",
        "      at most `tolerance` from `flux`.\n",
        "    cross_section_source_location: The location of the source within the\n",
        "      cross section. See `relative_location` at HierarchicalEqualizer.fill for\n",
        "      more details.\n",
        "      In non-bimodal cross sections, the results depends on the source cell\n",
        "      from which water starts to rise. The sub-cross section containing the\n",
        "      source will be filled in first, and only then water will start to pour\n",
        "      into adjacent sub-cross sections.\n",
        "\n",
        "  Raises:\n",
        "    RuntimeError: If couldn't find a solution with accuracy better than\n",
        "      `tolerance` due to float precision.\n",
        "\n",
        "  Returns:\n",
        "    The water level at which the cross section will yield the desired flux,\n",
        "    under the specified conditions - slope, manning, cross section profile.\n",
        "  \"\"\"\n",
        "  equalizer = hierarchical_equalizer(cross_section_elevation, resolution)\n",
        "  error = np.inf\n",
        "  allowed_error = flux * tolerance\n",
        "  lbound, ubound = 0, np.inf\n",
        "  volume_above_lbound = 1\n",
        "  volume = 1\n",
        "  while abs(error) \u003e allowed_error:\n",
        "    if np.isfinite(ubound):\n",
        "      new_volume = (lbound + ubound) / 2\n",
        "    else:\n",
        "      new_volume = volume + volume_above_lbound\n",
        "      volume_above_lbound *= 2\n",
        "    # If volume doesn't change it means that we can't refine the solution\n",
        "    # any further due to float precision.\n",
        "    if volume == new_volume:\n",
        "      raise RuntimeError(\n",
        "          \"Couldn't find solution that's accurate to {}%, due to float \"\n",
        "          'precision. Best solution is volume of {}, with error of {}%.'\n",
        "          .format(100 * tolerance, volume, 100 * error / flux))\n",
        "    volume = new_volume\n",
        "\n",
        "    water_depths = np.array(equalizer.fill(\n",
        "        volume, relative_location=cross_section_source_location))\n",
        "    current_flux = approx_normal_depth_flux(water_depths, resolution, slope,\n",
        "                                            manning)\n",
        "\n",
        "    error = flux - np.sum(current_flux)\n",
        "    if flux \u003e= np.sum(current_flux):\n",
        "      lbound = volume\n",
        "    else:\n",
        "      ubound = volume\n",
        "  return water_depths\n",
        "\n",
        "\n",
        "class TimedNestedLoopStepRunner:\n",
        "  \"\"\"Steps the simulation forward using a fixed number of steps.\n",
        "\n",
        "  In so doing, measures the elapsed time to run a given cycle (or number of\n",
        "  steps) of the simulation.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(self, step_count_ph: tf.Tensor, num_cycles: int,\n",
        "               num_steps_per_cycle: int, run_dir: Optional[Text],\n",
        "               params, sim) -\u003e None:\n",
        "    self._step_count_ph = step_count_ph\n",
        "    self._num_cycles = num_cycles\n",
        "    self._num_steps_per_cycle = num_steps_per_cycle\n",
        "    self._run_dir = run_dir\n",
        "    self._params = params\n",
        "    self._sim = sim\n",
        "\n",
        "  def run_step(self, sess, step, total_num_steps: int) -\u003e int:\n",
        "    \"\"\"Runs `_num_cycles * _num_steps_per_cycle` time steps.\"\"\"\n",
        "    for _ in range(self._num_cycles):\n",
        "      t0 = time.time()\n",
        "      sess.run(step, feed_dict={self._step_count_ph: total_num_steps})\n",
        "      run_time = time.time() - t0\n",
        "      print(f'Ran {self._num_steps_per_cycle} in {run_time}s '\n",
        "            f'({1e3 * run_time / self._num_steps_per_cycle} ms / step)')\n",
        "      total_num_steps += self._num_steps_per_cycle\n",
        "      self.save_output_files(sess, total_num_steps)\n",
        "    return total_num_steps\n",
        "\n",
        "  def remove_padding(self, field):\n",
        "    return field[self._params.top_padding:, self._params.left_padding:]\n",
        "\n",
        "  def get_concatenated_states(self, sess):\n",
        "    \"\"\"Returns concatenated `INIT_STATE_KEYS` fields.\"\"\"\n",
        "    concatenated_states = []\n",
        "    for key in INIT_STATE_KEYS:\n",
        "      my = []\n",
        "      for cy in range(self._params.cy):\n",
        "        mx = []\n",
        "        for cx in range(self._params.cx):\n",
        "          replica_id = self._sim.replicas[cx, cy, 0]\n",
        "          x_slice = get_haloless_slice(cx, self._params.cx)\n",
        "          y_slice = get_haloless_slice(cy, self._params.cy)\n",
        "          mx.append(\n",
        "              self._sim.tpu_variable_store(replica_id).variables[key]\n",
        "              [0, x_slice, y_slice])\n",
        "        my.append(tf.concat(mx, axis=0))\n",
        "      concatenated_states.append(self.remove_padding(tf.concat(my, axis=1)))\n",
        "    return sess.run(concatenated_states)\n",
        "\n",
        "  def save_output_files(self, sess, num_steps):\n",
        "    if self._run_dir:\n",
        "      fields = self.get_concatenated_states(sess)\n",
        "      num_secs = int(round(self._params.dt * num_steps))\n",
        "      for i, f in enumerate(INIT_STATE_KEYS):\n",
        "        filename = f'{f}-{num_secs}.np'\n",
        "        print(f'Saving {filename}.')\n",
        "        save_np_array(self._run_dir, filename, fields[i])\n",
        "\n",
        "\n",
        "class ParallelFullGridInputSerializer:\n",
        "  \"\"\"A class to handle full grid input for `TPUSimulation` in parallel.\"\"\"\n",
        "\n",
        "  def __init__(\n",
        "      self, sim: TPUSimulation, init_files_manager: InitFilesManager) -\u003e None:\n",
        "    \"\"\"Constructs parallel input ops for initializing TPU and host variables.\n",
        "\n",
        "    When executed, the ops read initial field values from their full grid\n",
        "    representations in serialized `TensorProto` format on disk. Subgrids are\n",
        "    selected from the full grids and loaded into each TPU. `init_files_manager`\n",
        "    provides a function that returns a file name for each relevant TPU variable\n",
        "    key.\n",
        "\n",
        "    All reads and writes happen in parallel from multiple host and TPU devices.\n",
        "    Operations are done for all variable keys in parallel.\n",
        "\n",
        "    Args:\n",
        "      sim: A `TPUSimulation` object.\n",
        "      init_files_manager: An instance of `InitFilesManager`.\n",
        "    \"\"\"\n",
        "    # This is made a class member to delay destruction (and consequent deletion\n",
        "    # of files).\n",
        "    self._init_files_manager = init_files_manager\n",
        "\n",
        "    assign_var_ops = []\n",
        "    host_keys = sim.host_variable_store(replica_id=0).variables.keys()\n",
        "    for key, var in sim.tpu_variable_store(replica_id=0).variables.items():\n",
        "      for replica_id in range(sim.num_replicas):\n",
        "        coordinates = tuple(sim.coordinates(replica_id).astype(int))\n",
        "        subgrid = self._init_files_manager.read_file(\n",
        "            key, coordinates, var.dtype)\n",
        "\n",
        "        if subgrid is None:\n",
        "          break\n",
        "\n",
        "        tpu_device = sim.device_assignment.tpu_device(replica=replica_id)\n",
        "        with tf.device(tpu_device):\n",
        "          replica_var = sim.tpu_variable_store(replica_id).variables[key]\n",
        "          assign_var_ops.append(tf.assign(replica_var, subgrid))\n",
        "\n",
        "        if key in host_keys:\n",
        "          host_device = sim.device_assignment.host_device(replica=replica_id)\n",
        "          with tf.device(host_device):\n",
        "            replica_var = sim.host_variable_store(replica_id).variables[key]\n",
        "            assign_var_ops.append(tf.assign(replica_var, subgrid))\n",
        "\n",
        "    self._grouped_assign_var_ops = tf.group(assign_var_ops)\n",
        "\n",
        "  def load_input_to_tpu_and_host_vars(self, sess: tf.Session) -\u003e None:\n",
        "    \"\"\"Loads values from files to tpu vars. And copies to host vars.\n",
        "\n",
        "    Args:\n",
        "      sess: The `tf.Session` used to execute this.\n",
        "    \"\"\"\n",
        "    sess.run(self._grouped_assign_var_ops)\n",
        "\n",
        "\n",
        "def get_session_runner_builder(params: SaintVenantParams,\n",
        "                               init_files_manager: InitFilesManager,\n",
        "                               run_dir: Optional[Text], sim):\n",
        "  \"\"\"Gets session runner builder.\"\"\"\n",
        "  step_count_ph = tf.placeholder(tf.int32, shape=[])\n",
        "  replica_inputs = [[step_count_ph] for _ in range(sim.num_replicas)]\n",
        "  step_builder = sim.step_builder(replica_inputs=replica_inputs)\n",
        "  steps = step_builder()\n",
        "  inner_runner = TimedNestedLoopStepRunner(step_count_ph, params.num_cycles,\n",
        "                                           params.num_steps_per_cycle,\n",
        "                                           run_dir, params, sim)\n",
        "  parallel_full_2d_grid_input_serializer = ParallelFullGridInputSerializer(\n",
        "      sim, init_files_manager)\n",
        "\n",
        "  def run_session(sess, start_step):\n",
        "    parallel_full_2d_grid_input_serializer.load_input_to_tpu_and_host_vars(sess)\n",
        "    return inner_runner.run_step(sess, steps, start_step)\n",
        "\n",
        "  return run_session\n",
        "\n",
        "\n",
        "def get_grid_parametrization_data(dt, cx, cy, resolution, dem_shape):\n",
        "  \"\"\"Creates a `GridParametrizationData`.\"\"\"\n",
        "  fx_physical = dem_shape[0]\n",
        "  fy_physical = dem_shape[1]\n",
        "  grid_size_dim_0_divisor = 1\n",
        "  grid_size_dim_1_divisor = 1\n",
        "  nx_divisor = grid_size_dim_0_divisor\n",
        "  ny_divisor = grid_size_dim_1_divisor\n",
        "  left_padding = _get_padding(cy, ny_divisor, fy_physical)\n",
        "  top_padding = _get_padding(cx, nx_divisor, fx_physical)\n",
        "  fx = fx_physical + top_padding\n",
        "  fy = fy_physical + left_padding\n",
        "  nx, rx = divmod(fx + 2 * (cx - 1), cx)\n",
        "  ny, ry = divmod(fy + 2 * (cy - 1), cy)\n",
        "  assert rx + ry == 0\n",
        "  if nx - top_padding \u003c 3:\n",
        "    raise ValueError('nx_divisor ({}) is incompatible with the number of '\n",
        "                     'points and cores on the x-axis ({}, {}).'.format(\n",
        "                         nx_divisor, fx_physical, cx))\n",
        "  if ny - left_padding \u003c 3:\n",
        "    raise ValueError('ny_divisor ({}) is incompatible with the number of '\n",
        "                     'points and cores on the y-axis ({}, {}).'.format(\n",
        "                         ny_divisor, fy_physical, cy))\n",
        "\n",
        "  return GridParametrizationData(\n",
        "      dt=dt,\n",
        "      cx=cx,\n",
        "      cy=cy,\n",
        "      cz=1,\n",
        "      lx=resolution * (fx - 1),\n",
        "      ly=resolution * (fy - 1),\n",
        "      lz=1,\n",
        "      nx=int(nx),\n",
        "      ny=int(ny),\n",
        "      nz=1,\n",
        "      fx_physical=fx_physical,\n",
        "      fy_physical=fy_physical,\n",
        "      fz_physical=1,\n",
        "  )\n",
        "\n",
        "\n",
        "def get_sv_params(dem_shape, resolution, num_secs,\n",
        "                  num_secs_per_cycle, cx, cy, dt):\n",
        "  \"\"\"Returns a `SaintVenantParams` instance.\"\"\"\n",
        "  params = SaintVenantParams(\n",
        "      grid_parametrization_data=get_grid_parametrization_data(\n",
        "          dt, cx, cy, resolution, dem_shape),\n",
        "      grid_size_dim_0_divisor=1,\n",
        "      grid_size_dim_1_divisor=1,\n",
        "      difference_method=1,  # DIFFERENCE_METHOD_MATMUL\n",
        "      num_secs=num_secs,\n",
        "      num_secs_per_cycle=num_secs_per_cycle)\n",
        "  return params\n",
        "\n",
        "\n",
        "def save_np_array(run_dir: Text, filename: Text, np_array: np.ndarray):\n",
        "  with tf.io.gfile.GFile(os.path.join(run_dir, filename), 'wb') as f:\n",
        "    np.save(f, np_array)\n",
        "\n",
        "\n",
        "class TPUSimulationManager:\n",
        "  \"\"\"A class that manages a simulation.\n",
        "\n",
        "  This concrete class is largely responsible for creating and running the\n",
        "  simulation. It requires a `GridParametrization`, which it uses to define\n",
        "  the computation grid. It also requires a `TPUSimulationBuilder` in order to\n",
        "  create the `TPUSimulation`. Next, it requires a `RunnerBuilder` to create the\n",
        "  runner that will actually perform the simulation.\n",
        "\n",
        "  Given these ingredients, the manager creates a `tf.Session`, and uses it\n",
        "  to initialize the `TPUSimulation`. It uses the `TPUSimulation` to initialize\n",
        "  the `TPUSimulationRunner` object. Having assembled the runner, the simulation\n",
        "  is started with a call to `run_simulation`. Finally, the TPU session is shut\n",
        "  down.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(\n",
        "      self,\n",
        "      params: GridParametrization,\n",
        "      simulation_builder,\n",
        "      session_runner_builder) -\u003e None:\n",
        "    \"\"\"Initializes a `TPUSimulationManager`.\n",
        "\n",
        "    Args:\n",
        "      params: An instance of `GridParametrization`.\n",
        "      simulation_builder: An instance of `TPUSimulationBuilder`.\n",
        "      session_runner_builder: An instance of `SessionRunnerBuilder`.\n",
        "    \"\"\"\n",
        "    self._params = params\n",
        "    config = tf.ConfigProto(isolate_session_state=True)\n",
        "    self._sess = tf.Session(config=config, target=TPU_WORKER)\n",
        "    self._sim = simulation_builder(self._sess)\n",
        "    self._session_runner = session_runner_builder(self._sim)\n",
        "    self._sess.run(tf.global_variables_initializer())\n",
        "\n",
        "  def run_simulation(self, start_time_secs: float = 0) -\u003e None:\n",
        "    \"\"\"Runs the simulation.\"\"\"\n",
        "    start_step = int(round(start_time_secs / self._params.dt))\n",
        "    self._session_runner(self._sess, start_step)\n",
        "\n",
        "  def __del__(self):\n",
        "    tf.tpu.shutdown_system()\n",
        "\n",
        "\n",
        "def get_sim_builder(\n",
        "    params: SaintVenantParams,\n",
        "    unpadded_dem: np.ndarray,\n",
        "    unpadded_manning_matrix: np.ndarray,\n",
        "    inflow_bcs: Sequence[InflowBoundaryCondition],\n",
        "    outflow_bcs: Sequence[OutflowBoundaryCondition],\n",
        "    init_files_manager: InitFilesManager,\n",
        "    input_file_format: Optional[Text] = None,\n",
        "    start_time_secs: float = 0) -\u003e Callable[[tf.Session], TPUSimulation]:\n",
        "  \"\"\"Returns a function that builds a TPUSimulation given a `tf.Session`.\"\"\"\n",
        "\n",
        "  init_fn_builder = (\n",
        "      SaintVenantRealisticInitFnBuilder(\n",
        "          params, INIT_STATE_KEYS, unpadded_dem, unpadded_manning_matrix,\n",
        "          inflow_bcs, outflow_bcs, init_files_manager, input_file_format,\n",
        "          start_time_secs))\n",
        "\n",
        "  step_fn = SaintVenantRiverChannelStep(\n",
        "      ApplyKernelOp(), params, inflow_bcs, outflow_bcs)\n",
        "\n",
        "  step_fn = DynamicStepUpdater(\n",
        "      step_fn, num_secs_per_while_loop=params.num_secs_per_cycle, dt=params.dt)\n",
        "\n",
        "  step_fn = build_step_fn(params, step_fn, STATE_KEYS,\n",
        "                          ADDITIONAL_STATE_KEYS)\n",
        "\n",
        "  def sim_builder(sess):\n",
        "    topology = tf.tpu.experimental.Topology(\n",
        "        sess.run(tf.tpu.initialize_system()))\n",
        "\n",
        "    return TPUSimulation(\n",
        "        init_fn=init_fn_builder.init_fn,\n",
        "        step_fn=step_fn,\n",
        "        computation_shape=params.computation_shape,\n",
        "        tpu_topology=topology,\n",
        "        host_vars_filter=INIT_STATE_KEYS)\n",
        "\n",
        "  return sim_builder\n",
        "\n",
        "\n",
        "def run_simulation(\n",
        "    dem_tiff_filename: Text, resolution: int, flux: float, num_secs: float,\n",
        "    num_secs_per_cycle: float, dt: float, cx: int, cy: int,\n",
        "    run_dir: Text, start_time_secs: float = 0) -\u003e None:\n",
        "  \"\"\"Runs the simulation.\"\"\"\n",
        "  slope = 1e-4\n",
        "  input_file_format = ('{}' + f'-{start_time_secs}.np' if start_time_secs \u003e 0\n",
        "                       else None)\n",
        "\n",
        "  unpadded_dem = load_dem_from_tiff_file(dem_tiff_filename)\n",
        "  unpadded_river_mask = np.ones_like(unpadded_dem, dtype=bool)\n",
        "  unpadded_manning_matrix = get_manning_matrix_from_river_mask(\n",
        "      unpadded_river_mask, MANNING_COEFF_RIVER, MANNING_COEFF_FLOODPLAIN)\n",
        "  params = get_sv_params(unpadded_dem.shape, resolution, num_secs,\n",
        "                         num_secs_per_cycle, cx, cy, dt)\n",
        "  print('params:', params)\n",
        "\n",
        "  inflow_bcs = [\n",
        "      InflowBoundaryCondition(\n",
        "          boundary_side=BoundarySide.LEFT,\n",
        "          fraction_start=0.23288305,\n",
        "          fraction_end=0.34899329,\n",
        "          left_padding=params.left_padding,\n",
        "          top_padding=params.top_padding,\n",
        "          dt=params.dt,\n",
        "          resolution=resolution,\n",
        "          slope=slope,\n",
        "          flux=flux,\n",
        "          unpadded_dem=unpadded_dem,\n",
        "          unpadded_manning_matrix=unpadded_manning_matrix)\n",
        "  ]\n",
        "\n",
        "  outflow_bcs = [\n",
        "      OutflowBoundaryCondition(\n",
        "          boundary_side=BoundarySide.RIGHT,\n",
        "          fraction_start=0.5123189,\n",
        "          fraction_end=0.6520423,\n",
        "          left_padding=params.left_padding,\n",
        "          top_padding=params.top_padding,\n",
        "          unpadded_manning_matrix=unpadded_manning_matrix,\n",
        "          slope=slope)\n",
        "  ]\n",
        "\n",
        "  init_files_manager = InitFilesManager(\n",
        "      params, three_d_subgrid_of_2d_grid, run_dir)\n",
        "\n",
        "  sim_builder = get_sim_builder(\n",
        "      params, unpadded_dem, unpadded_manning_matrix, inflow_bcs, outflow_bcs,\n",
        "      init_files_manager, input_file_format, start_time_secs)\n",
        "\n",
        "  def session_runner_builder(sim):\n",
        "    return get_session_runner_builder(params, init_files_manager, run_dir, sim)\n",
        "\n",
        "  manager = TPUSimulationManager(params, sim_builder, session_runner_builder)\n",
        "  manager.run_simulation(start_time_secs)\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "CxPstECog-RC"
      },
      "source": [
        "If you receive an error about the mesh_shape expected to be length 4 instead of length 3, reallocate your TPU."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "M_dSjxEy4mJP",
        "outputId": "b792a754-0123-4110-abcf-6ed28c6ba6f5"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "params: fx_physical: 2683, fy_physical: 5766, fz_physical: 1, fx: 2683, fy: 5766,fz: 1, nx: 2683, ny: 2884, nz: 1, core_nx: 2681, core_ny: 2882, core_nz: None, lx: 21456, ly: 46120, lz: 1, dt: 0.5, dx: 8.0, dy: 8.0, dz: None, computation_shape: [1 2 1]\n",
            "Calculating approximate normal depth flux for BoundarySide.RIGHT (slope=0.000100, resolution=8.000000).\n",
            "Ran 1000 in 6.2304463386535645s (6.2304463386535645 ms / step)\n",
            "Saving h-500.np.\n",
            "Saving q_x-500.np.\n",
            "Saving q_y-500.np.\n"
          ]
        }
      ],
      "source": [
        "resolution, flux, dt, cx = 8, 15000, 0.5, 1\n",
        "if PUBLIC_COLAB:\n",
        "  cy = 8\n",
        "  os.environ['GOOGLE_CLOUD_PROJECT'] = PROJECT_ID\n",
        "  TPU_WORKER = 'grpc://' + os.environ['COLAB_TPU_ADDR']\n",
        "  dem_tiff_filename = get_dem_tiff_filename(resolution)\n",
        "  run_dir = f'gs://{BUCKET}'\n",
        "\n",
        "start_time_secs, num_secs, num_secs_per_cycle = 0, 500, 500\n",
        "\n",
        "with tf.Graph().as_default():\n",
        "  run_simulation(dem_tiff_filename, resolution, flux, num_secs,\n",
        "                num_secs_per_cycle, dt, cx, cy, run_dir,\n",
        "                start_time_secs)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "LMlhkJOhn5sU"
      },
      "outputs": [],
      "source": [
        "expected = np.array([\n",
        "  ['h', 0.000000e+00, 1.910070e+00, 8.129882e+03],\n",
        "  ['q_x', -4.545712e+00, 5.435991e+00, 3.824785e+03],\n",
        "  ['q_y', -1.656595e+00, 3.350131e+00, 7.021423e+03]\n",
        "])\n",
        "final_num_secs = start_time_secs + num_secs\n",
        "outputs = []\n",
        "for x in INIT_STATE_KEYS:\n",
        "  filename = os.path.join(run_dir, f'{x}-{final_num_secs}.np')\n",
        "  with tf.io.gfile.GFile(filename, 'rb') as f:\n",
        "    field = np.load(f)\n",
        "    outputs.append([x, field.min(), field.max(), field.sum()])\n",
        "outputs = np.array(outputs)\n",
        "np.testing.assert_allclose(outputs[:, 1:].astype(np.float32),\n",
        "                           expected[:, 1:].astype(np.float32), rtol=1e-4)\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "H-3HsWRYIElL"
      },
      "source": [
        "\u003cpre\u003e\n",
        "1000 steps\n",
        "cx = cy = 1: 12.14ms per step\n",
        "cx = 1 cy = 2: 6.14ms per step\n",
        "cx = 2 cy = 1: 8.088ms per step\n",
        "cx = 1 cy = 8: 2.017ms per step\n",
        "cx = 8 cy = 1: 2.116ms per step\n",
        "\n",
        "500 secs; dt=0.1\n",
        "h 0.0 1.9129864 8136.6304\n",
        "q_x -4.5550237 5.438705 3830.6675\n",
        "q_y -1.6585451 3.3519006 7030.7686\n",
        "\n",
        "500 secs; dt=0.5\n",
        "EXPECTED_1 = ((0.000000e+00, 1.910070e+00, 8.129882e+03),\n",
        "              (-4.545712e+00, 5.435991e+00, 3.824785e+03),\n",
        "              (-1.656595e+00, 3.350131e+00, 7.021423e+03))\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "3TPvJ1pnnpgW"
      },
      "outputs": [],
      "source": [
        "# Run for a longer duration so we can get interesting results.\n",
        "if PUBLIC_COLAB:\n",
        "  start_time_secs, num_secs, num_secs_per_cycle = 0, 50000, 5000\n",
        "  with tf.Graph().as_default():\n",
        "    run_simulation(dem_tiff_filename, resolution, flux, num_secs,\n",
        "                  num_secs_per_cycle, dt, cx, cy, run_dir,\n",
        "                  start_time_secs)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "X8ZfxdOmofhW",
        "outputId": "59ee9759-fe6c-42e7-c461-8ea95c672cdd"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Loading heightmap: h-500.np\n"
          ]
        }
      ],
      "source": [
        "# Load the DEM and the heightmaps.\n",
        "dem = load_dem_from_tiff_file(dem_tiff_filename)\n",
        "dem_shape = dem.shape\n",
        "heightmaps = {}\n",
        "\n",
        "for t in range(start_time_secs + num_secs_per_cycle, \n",
        "               num_secs + num_secs_per_cycle, num_secs_per_cycle):\n",
        "  filename = os.path.join(run_dir, f'h-{t}.np')\n",
        "  print(f'Loading heightmap: h-{t}.np')\n",
        "  with tf.io.gfile.GFile(filename, 'rb') as f:\n",
        "    heightmaps[t] = np.load(f)\n",
        "\n",
        "# For plotting, scale down the DEM and the heightmaps by a factor of 10.\n",
        "resize_factor = 10\n",
        "resize_shape = [dem_shape[0] // resize_factor, dem_shape[1] // resize_factor]\n",
        "scaled_dem = transform.resize(dem, resize_shape)\n",
        "scaled_heightmaps = {}\n",
        "\n",
        "for ts, hmap in heightmaps.items():\n",
        "  scaled_heightmaps[ts] = transform.resize(hmap, resize_shape)"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "GGJE-8Bvyg06"
      },
      "source": [
        "Adjust the slider to plot the water level at the selected time."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "colab": {
          "height": 272
        },
        "id": "JCQKRyzOrGtm",
        "outputId": "ee0158c8-a2c3-4abe-af5e-3ad97b3a2125"
      },
      "outputs": [
        {
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeYAAAD/CAYAAAAkPXUoAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90\nbGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsT\nAAALEwEAmpwYAAEAAElEQVR4nOz9a5Bty3YWiH25VlWtVaue+3H2ufccXb1AV7Qu5qHulsMNCLWF\naSBQiH5ACAgQjdqibbBMQIQlYTugaRShJmg6CNoPZIOBBiFkaEA4wLwaEJhntw1BI6HLle4595yz\n99mP2vVcr3pN/1j1ZX3rq5FzzlW197l1rmpEVNRac82ZOTJz5PjGGDkyZ6qqCnd0R3d0R3d0R3d0\nO6jzxWbgju7oju7oju7oji7pDpjv6I7u6I7u6I5uEd0B8x3d0R3d0R3d0S2iO2C+ozu6ozu6ozu6\nRXQHzHd0R3d0R3d0R7eI7oD5ju7oju7oju7oFtEdMN/RT2tKKX1TSun9j6CeKqX0M193PVLfv0wp\nfdNHVd8dXY9SSr8npfSnvth83NHtojtgvqPXSiml700p/RW79q8L176toayPBERvE7Vpc0rpj6eU\nfp9eq6rqM1VV/Z3XytwN6AKQTlJKR/L31fL7V6aU/nZKaZRS+lcppV9iz/+6lNK7KaVhSukvppTu\nf/StuKM7ej10B8x39LrpRwH8gpRSFwBSSp8AsAzg6+3az7y497VRSmnpdZZ/RwvTn62qal3+fkp+\n+zMA/n8AHgD43wP4cymlNwAgpfQZAH8EwG8A8CaAEYD/80fL+h3d0eujO2C+o9dN/xQzIP55F9+/\nEcDfBvATdu0nq6p6nFL6j1NKP55SOkwp/VRK6bcAQEppDcBfBfCWeFhvpZQ6KaXvSSn9ZEppJ6X0\nw/SeLryuKqX0HSmlLwD475qYvSjzz6eUnqeUPp9S+i65PlbPLKX081NKL1JKyxfff/MF77sppb+W\nUvqKNh20aJvt2e8E8OsB/O8ufv/LF9ffoZd54Z3+P1NKf+qijn+RUvr0RTTjWUrpvZTSL5Uyt1JK\nfzSl9CSl9EFK6ffRiPooKKX0aQBfD+B3V1U1rqrqzwP4FwD+w4tbfj2Av1xV1Y9WVXUE4P8I4D9I\nKW0Uyvvui3YcppR+IqX0zRfXi7Jz8fsvTCn9g5TS3kUf/aaL61sppT95ISPvppT+DymlzsVvvyml\n9PdTSn/gQg4+n1L65VLmV6WU/u4FL38DwEP5rX8xRjsXdf7TlNKbr65n7+jjQnfAfEevlaqqOgbw\njzEDX1z8/3sA/r5do7f8DMCvBLAJ4D8G8F+llL6+qqohgF8O4LF4WI8BfBeAXwXgFwN4C8AugP+T\nsfGLAfwbAP69Ol4vlOtfBvDPAbwN4JsB/PaU0r93Udc/xCU4AMCvA/Dnqqo6SSn9KgC/C8B/AOCN\nizb+mab+uWabM1VV9QMA/jSA33/x+7cU6vgWAP8NgHuYeaJ/DbP5/zaA34uZB0r6EwBOMYti/HwA\nvxTAfxIVmmYh5b2avy+vafe3pJReptl6+P9Krn8GwE9VVXUo1/75xXX+/s+lD34SwDGATwf8fS2A\n3wbg366qagMzGXjn4uei7Fzw/VcB/GHMxvPnAfhnF8/9YQBbAL764tnfiNm4kf6nmBmeDwH8fgB/\nNKWULn77QQD/w8Vv/zmAb5fnvv2i3E9hFin4TwGMvU139NOAqqq6+7v7e61/AH4PgL9w8fmfA/ga\nAL/Mrn174dm/COB/e/H5mwC8b7//OIBvlu+fBHACYAnAVwKoAHx1DW+5TMwU6hfs9+8F8P+4+Pyf\nAPjvLj4nAO8B+MaL738VwHfIcx3MQqxfcfG9AvAzW/ZXbZuD+/84gN9n194B8Euk//+G/PYtAI4A\ndC++b1zwt41ZaHgKYFXu/7UA/vYrlomvwwwMuwD+HQBPAPzai99+A4B/ZPd/H4A/fvH5bwH4T+33\nDwB8U1DPz8TM8PklAJYXkJ3vpXzaM92L/vk6ufZbAPydi8+/CcDn5LfBRd9+AsCXY2bwrMnvPwjg\nT118/s0A/gGAn/Mq+/ru7+P3d+cx39FHQT8K4BemlO4BeKOqqn+NmQL6dy6u/eyLe5BS+uUppX90\n4UntAfgVkHBfQF8B4C/QQ8NM2Z5hBjCk91ry+RWYhY33pLzfJWX9OQD/s4tw8jdipnD/njz7h+S5\nl5iB99tNlV6jzdehp/J5DOBFVVVn8h0A1jFrxzKAJ9KWPwLg0atkpqqqH6uq6nFVVWdVVf0DAH8I\nwH908fMRZtEDpU0Ahy1/13o+B+C3Y2acPEsp/ZAsB9TJzqcA/GTA+kMAKwDelWvvYn6cP5T6Rxcf\n13HhlVezSIg+S/pvMItk/FBK6XFK6fdzmeSOfnrRHTDf0UdB/xCzEN13Avj/AEBVVQcAHl9ce1xV\n1edTSj0Afx7AHwDwZlVV2wD+CmYAB8yA0Ok9AL+8qqpt+etXVfWB3NP2FWrvAfi8lbVRVdWvuOB5\nD8BfB/BrMAtj/5mqqip59rfYs6sXoFOka7bZ6VW+Iu49zDzCh9KOzaqqPhPdnFL69Wk+s9r/6kLZ\nShUu2/wvAXy1rRn/3Ivr/P3nCg9fDaAH4LNhwVX1g1VV/ULMgLgC8F9IW0uy8x6AnxEU9wIzr1rz\nB74cM4+9iZ4AuJdmuQP6LPk8qarqP6uq6uswiyL8SszC5Hf004zugPmOXjtVVTUG8N8D+B249DCB\n2Trz78Dl+vIKZgr2OYDTi6SZXyr3PwXwIKW0Jdf+rwC+L10kWqWU3kgpfes1Wf0nAA4ukoVWU0rd\nlNLPTin923LPD2KmLP/Di8/Kx/emWcYwE4R+dYs6r9Nmp6eYrXfemKqqeoKZ8fFfppQ2LxKkfkZK\n6RcX7v/T1Xxmtf99IXoupfStKaV7aUbfgNl671+6KPOzmK3n/u6LhKh/H8DPwcyAAWZr6t+SUvpF\nFyD3ewH8t9X8mjTr+dqU0v/8wgCaYBYdYKSgTnb+NIBfklL6NSmlpZTSg5TSz7uIMvzwxXMbF8/+\nDgCNe5GrqnoXs3nwn6WUVlJKvxCzZQXy+u+mlP4naZZod4CZAXAWl3ZHX8p0B8x39FHR38UsHPr3\n5drfu7j2owBwoVi/CzPFt4uZV/ojvLmqqn+FWULVT12EH9/CLAT6IwD+ekrpEMA/wmyteGG6ULrf\nglmiz+cx847+75h5+6QfwWyN/GlVVZqA9Bcw88R+KKV0AOB/xCxxq6nO67TZ6Y8C+LqL3/9i2/bW\n0G/EzGD4sQue/hxm66+vkr4NwOcwCz//SQD/RVVVf8J+/7cu6v9+AP9RVVXPAaCqqn+JWWLUn8Zs\n/XgDwP+6UE/v4vkXmIWYH2G2PAHUyM6FQfErAPxOzJYl/hkuvfT/DYAhgJ/CTJ5/EMAfa9nuX3dR\nx0sAv/ui7aRPYNbXB5iF1f8uWgD+HX3pUbqMxN3RHd3RHd3RHd3RF5vuPOY7uqM7uqM7uqNbRHfA\nfEd3dEd3dEd3dIvotQFzSumXpdkpO59LKX3P66rnju7oju7oju7oS4leyxrzRVbhZwH8LwC8j9mx\njL+2qqofe+WV3dEd3dEd3dEdfQnR6/KYvwGz029+qpodyfhDAK67heWO7uiO7uiO7uinDb2ut+28\njfnTlt6HbWFJs8P3vxMA+v3+v/mpT30K9N4vj5V9NbRIuVVV5ftSSlee5f+qqnB2dhb+tkgddb/r\nPTyqjeS/NX2PyNunbWvi33m4zr1teGwqo65tXwqkY7QoleSsJEd1zzfJmJf9pdL/QCxjkY6I7o10\nyCJ1lp6JxjDSG36P8xS17aPcqRPpH36PPivfzucifDfp4Lr7/bmbyPy//tf/+kVVVW/49dcFzBGH\nc71WzQ7f/wEA+PSnP1394T/8h3F+fh4CoH7ntahzVDCjCdLpdFBV1ZV6zs/Pw3s7nU7+3u120e3O\nXrBzfn6OyWSC4XCIbreLTqeDpaUlLC0todPp4PT0NJcZAV5pMnQ6nfzcysrKFd5OT09xenqaeeP9\np6enWF5eRqfTwdnZWW4f6/X2Kh9sa0oJy8vLuR6fAK6kSewjnSh6zYl8sS/J3/n5eeZD26wG0NLS\nUuaf48j/rFPH7+NE3i8cM7aPpN/ZX/obn2WZ3W439yn7i7+pfHu/sn7ewz72+h0MXPau0w/aljZ9\n5u3y5yOlyrbqPI9A4uzsLM8pUrfbxdLSErrd7pU+c1n2drHPtC6dNzqndW5GgMR2dbvdzB/5Sinh\n7Oxsro2uN3QOKS8uK9qPUR+2IfZppLdTSplvyj/5Z1+enZ3h5OQEJycnuU+Oj49DLGD7/L9TdJ3X\nVI7ZJxz3aM5dV95/2S/7Ze9G118XML+P2VmzpC/D7PjFIrmyBsqgvKg1qcKtSgvAlUHwiU1BptAQ\ntM7OznLZS0tLWF5exvLycgZPPut8NwE1hZQAz4nlPKuypXHgCiRSNG60qKJgfayTk0PLqDOKSK6c\nHdhVmernSBFEwKvGQjSxPq7emvex94kq0jovoqqqrJy1HJdpfdafd3DS38mr169zKDKOXxVFsuU8\n1T1XB+JKCrRuxKpO8fbzt9KcW5RcDlRGHPz5Gw0KAo2CtD+vjkVVVTg5ObkCznX81/W7y4TWSeJn\n8jqdTjP/amySH+o71+Valn6PxpxlOrlDpfqQc6pt229CrwuY/ymAr0kpfRVmZ8h+G2Yn3oTEgYo6\nCih7mv7drWKdeDqwCkZapwu5esPdbndOUMbj2bn/3W4XvV4PvV5vTqiWli67tgQ6Ee8EeVW+3n5V\nqlSyypu3X+91xat9EE1+fo76VNvmz2q/O6k3FnkzUT9F/HqZbs1+3MjHy4GERlhkfOkY0LBTINd+\n1MhEnVxGcht5PRHvalC9qjGJANmNPuc7AgLltyRz1BfqtVEfaP/RCI8MElfuzltUr/MXGcJ6f8nQ\ncN2gEZiTk5MM2tRvAHJ7WZaOnRve7KMSULmBwugCryvwkp+zs7McbSyBqTsn3peuG8jrycnJnEeu\n/afYw88O9q9Dt9SV91qAuaqq05TSb8PsTSldAH+smh2jV7o/e2ttrN5oMvK30sT1+nwS+OCrl6yC\nm1LCwcEBRqMR1tbWsLKygtXVVSwtLWE0GuH4+PiKEoj4jQRehZaKQe9Ri9cVdjRJ/bsrrKgfVCGx\nHVT00TNallN0zb0IH0MHBioKfteJp5Pp4+wpA1cjJcDVsKcaZJG34KAb1UHFpMqaZfJ/NDd8jpQM\nKn32VYMygCuGhvLo9Ud883oTOHo7qAs0tE9gYR0K1Bw75VllvuRsROPqc5bXHDw4tnq/hrgpWz6n\nGCLWJSNg5nQsLy9nueJSFz1qDTs7T+SH9Z+fn+Pk5CTPaX6O+oDk41cywEpLoBwfX5bR8lVG3XOv\nqkvvvOSYlPiu+511qPPm9Lo8ZlRV9Vcwe0tOm3sbGwLMC3sJlNVLaLI01dN0BcfQjq7RnZ2dYTgc\nYnd3FynN1mS5FnxycoKDgwNMJhP0+/0rEzDiQf+TyI8DVNRfzrMrk8iS1nsJ9FEdwKWS4ITVkLqW\n73y6conAV40MBwXvO11b1n4iTxyjNjJ0G0kVEL2GSAFQ2VD5M6/APVN/xuWgtIboVALWyLOLvl8H\nlEvzNgLjus+ubKM6Soaj8kDZizxIVfYEnmjZhXxExo/qLS2TshD1iT9L8HBHxA06yo4DjnrKwGWO\niRoaS0tLc+vtGj1QIGO9jB4SgNXRiNrgYxONW0nW3AmJfgNwZanUHR8fO+2jyMBzajI0lJ86UAZe\nIzDfhEoKHSg3WoVSrVh/LgpF0SpknQRcBeXpdIq9vT3s7++jqiqsr6+j3+9nUB6PxxgOh1eSBiLe\nm9rg30sAXBIUVcB+X8n6jBQaJ5uuV+kfScFdAZflsQ91AkTjoxOYgkvFGE1q9WA+rqBMoiJm7gJw\n1Ttk/kJVVXP9QkXLMhR4XFmxj/kduOr9UjHq+qT3c5PsvQ5atOxo/vvnuuei/6W55BGIaM7WgZHX\nXzKcXL8pKPsuETcEOJY6xjpnPVFKeaCXq3kvJWOB3yMHyfuyBLbKDyn67mUzZF5aotRno8+qXxWU\nI0PB295EDsqlpVvglgCzWomlJB9V2i4UDlalyaEeGgXNQWRlZQW9Xm8u5Dcej7G3t4fDw0OklDAY\nDLC6ujqXlTmZTAAA/X5/bo3PKZqEJW8guq73R7/xs66nA2UvNuo3HQPtQ53YpOXl5RA0mUjiSsu9\nb/6u/3WMlFRG3Pr1tnxcyI0oyg29ZvYHPTLeo0pVcyBKk10NpLqx96gE54jz+7opMmyvQ5EHqXOg\n5GXV8eEOgHumnnjHZzz8W+exa116nXNa17w57s5Pycj3kLzy7n3Bz2qoUz7dAVL9HTlHUf+ybpVb\nznMPzfv3iO+I1CFw8uUwrcuNBzVINKdH21eSWe/36HmlWwPM2ukqAM58FO/n52ji8T8/0yvx+lO6\n9JTVAh2NRnlNeXl5Gb1eD8vLy1e2+6SU0O/3sby8PAdKJYGP2qf311n4qmTrrOpSqDiaGNE2HW2f\nl08iWOh1500tXf2sYKxtc6OmbTjp4wTITpR7XQdUGWNSDHMeNPKjnm0pIYdjUmelkw+/9zr9ehNg\njWT0uuX45wgsoogBPwO4Ysz43FUPTZd7fH5HIW7lI6rDDbYIlF2fefnaRg3Na9ml/nbDMfJQ/V7l\noQ6MHfC8nEina195pI6fNUTtRoPWyfnEfqE+8oiglkWefB55f/pvHvJvmoe3ApjVAitZkfqfnyMr\nn6QTBJjfjxiFYTVZgX+TyQRHR0eYTqdYWVnJ4W1de+aAEawd4PQet2QVOH3gldya1La5AuB3D9mr\nsEZAqIJFYlsUVCOBUkUSGRa6JKD8R4aF/l6nML5UST0fKmMNJ3NdWeWEQO6A5gouMrB8Hi2itD9O\nVAJkfgbmvSafLyXPWWXYFX9knNcBoM9BHwP+EVw1s1nLrQMzl4W2RhtBxQ0YrUPndem3qP1N10rg\nrm1lvUqR16vEfuSYM3ObIO3l87vKgxqyrFOdIradRlQJ467w1njHR0Qliyyynr2zHfT0fio1tyop\nPLo3TY2D09NTjEYjnJ6eYmVlJStE/VPLjaCui/oOyhEgKs/qKTqo8z7vK59w3keqOCKrnJ/ZT16G\n/tcM9chzqJsEQOxNKA8a6tF6r+t5fRzJAVknvxtQ7GvfswpcZtaWDD8f25JBXDeedVSaj4s8H4Fh\nWyq1k2W73HJOcx2fusGfjYC3ZKwCV/fRlnjVZ5Q3nRPUMxxzJmZ5ORFIe9u9n5oMCAd71QnehkVB\nuDTWbjB6ezy7XOeI6uY6Ut0PIGee+/q4G7HMT9LlC436+rhzvNrIA3BLgLkOjHwgozADy3DBdq+N\n5VG4O51OzqQEMPefIVp6yQRePq/gTL7W1tbQ6/VykgQ9xAi8lE+GVFSBktzoIH91Ho2W6yHJOiXj\n4+GHi7hHwN/arPNE1Ebpf6mDcsn6VsUereFTSfvBEQDmjE1XqKxD5c3l05XQTegmZdwElPnfjXfv\nS+2X4+PjrKA1iUjnuD5bAmXlweuJfvcyeb+GnVU3EhR0nJpATL9H874EQPqces2+pKh6yvmNxtH7\npHRPpJeAeVD2CFtkUJZkiUbO+fl5TgJW3a1r+BoRUKMoigL6WOp+9yjK4XQrgLmk3F1IHJR1PdTD\nopFiobBT4E9PT+cSwPQZVW66lud7elnuyspK/o1Z2t1uFysrK3NJa2yXt0EVooJ1NHhav/PB/2q9\n6QSJrFK2zcNZDtC6TuJh6TbkAKTXvc6Stf2lCNSufFw+fCkjym+oW+vzeqK1rtJ4vAq67rhd97lo\nPkQ6QilKLPWIRR0vGgaPeNA2OUVGlIOe3lOKgpXKj8qO1kj9e+RZuyNQMuAiUK5zChaROZXjyEDw\nunwMXS7YLg03+9Km1gvML/NFa8puRLDMyBiK6FYAMxCv+ahlVrLAqqq6EiZQ4v26NkxP4/j4GMfH\nx9krVl4U/NihesiACgeJYbCjoyNMJhMMBoM8sCVDIfJCHaz1N+8fL88tOzVuSkrD18u9/5QY8o6s\nUqeSMivx778vUubHnVQGfAIDlwcScGz1EBqeFEdLX5cbXLa0bA/X+TNfbHpV41wyQhzw/Dswf7iI\nluHzrAmA6/o0MpaVPwexKAIWgaiT64EStZmXdXrCHYFIV7hsR+2Iyud/Xz8u6ZQIU/x3d5qolzmn\nIocvGl8fN53DHl2taytwi4A5UkRqhUYDT8XE9bWSoDJphpPr+PgYk8kEh4eHqKoKKysrc8+UhJ1l\neAcztKHW1dra2ty6tFvd3i4X5CjUyOuRIlB+aaxElrVb3uTDt1xoP+gkoOIvecpujep1r9vvq5u8\nX8rUpMS9//WP29V4kAOfLYVYVSYiT73J2LoOfbHGMZpfESiXDNiULiNs1DMk1UFaH3/T+9qSOyTR\n+Gg7VME7/25guU5zg6Tk2Ub3tmlD9D0yQCIQ83Z736j+9TaWDJLIAKXei3QzgCt5SXUGB+em81VV\n1dwuHd7XNCduDTAD89aQAmCk1DVRKQr5RuFeesjT6RTHx8c4OTnB2tpaeAoL61Av28FIBYmgubS0\nhJWVlTyo3W4Xx8fHYVtdGDnZFLT559c9UUwpsg5dEPTZiBctS8uJALlk2etkUCVCI4aJFlHfvGpw\n+DiQKwiV3whEuCTDt+zoxNelEFUG7iGwXtb3pdzvTfMimkcM+0+nUwDzekUVMe+/qRHiRrnPTx8r\nV/IOLq5flEogVsdb6VrU9iZZ4v3qXGnb3AvlvdR/3q5S37teUxD1+eVGiLZL+z/ildc1eZP3uAGg\nhlVEtwaYtcHqkaky8YX1aKI5uOl5qZqUxT3JPFLTeUkp5RB3lPRBXlmnrukypL20tITj42OMx2P0\ner25ssmTA7IqVpIPtisW9cij/oiENzIE3CDwsYkUt5fhyTO6H/f8/BzHx8c4OzvL/V4yLr7Uqa3S\nKj3H3/0Vo8BlmFWXXbS+6HCWL3VQBuIwphtBETEaweQgnbeelVwCr8iL9ftVWUeA60ZEyfNyAzni\nKZp3CnglKgHwdQ0S6gjKq3vDWq4uC7ahCLAVlNnfDpTatzQa1FhS0v7S+ccoqkZbIrwo0a0BZjKr\n5/8yvNq0R9MHip3i2XbsDPWQo4V7D5G4x+oDrqFFChr/JpNJfpYvuNA6PYEhEj630H2iu5UX9a2S\nT0pfR9F+bQJl9Sp0ywENIYIDE+2o4CjozlPpe+na66A6RfYqKfKMIwXcxCPlyfMxVMmrTLmM141D\niZq8lNtGEXjUeVhq+FRVlY/fpSFEWY+8H5+70Vo+fyvNc/fynOcIJCK9FOkK54W/KxiWAN9l8ybG\nHOvRcyGUVO+7ftH2lvir8+A5ZpotTXL88Lbq/IrkivpQMcdzk5qMmVsBzCmlLPjqVQHzFlUdALmn\nF8X7oyPdnA9VZnzewVPrdDBOKc156cDsmE6G0fv9fs7UjpKo6ixcvy+ycJvK0fuU11I9dWvJWpe+\nmUZBmYl2zH73MOrHgW4CPq4Q6+65DihrYpL/5uvRemIYx0Kf0yhNCXg/7l51ZNyX7uPvzEGZTCZX\n9s6SPKwd1VWqrwSEdaTlefIS/0dAqp9Lxq8/48ZApLPq+I+cGX5mhDQqR3W5vkzDQdLLj/RplPjn\neKH9qVukorVsJlwqaRKtZmJ7VneTPgBuCTADCF/OrZ6AhnmBS0USTTQFWFdYTR3iQklrjoOooKsC\nW9rsv7S0hNPTU4zHY3Q6HQwGA6ysrGA0GuX79GQsF+BoMrmF6xZ50ySPFMYiioE8u8B5+FojH+wH\nbUcbajIwbgvVtalkaDWV1+Z6JCeRV6UeiSdNloC9Tp4+Lp6yUls5chDVFzioLtI8kqiOJp0TjVMT\neFdVNTe3fA6WPG3PII/A1p8reYO+zFjXRj4TAahGIevawDbqIR7aV5HOrGuj9innQNQHOtbaNs4Z\nOhtu3OpuH/JPw9j7uES3Apg1uQKI08lLAuPCX/LGVLB9/VZJBUgBU1+Z5sJIbzkKx6hXurq6ipWV\nFZycnOD4+DgD/8rKypzX6YkJvrZOvj2zO1LOiyijtverYqJS53N6KIkmauiywiLk1u8idB1QLwHf\nIs8sWv917qnjKyqPsurZ2mrla7luBOq1j5q+GPWqMlfFSnlUsFhk7kTUxllwbzXaRcF7vWw1yPx3\nb6c+ozJBZ6duLtYZk67L1XnS8iL+NGdC62nSdw7Ues0TiCPvXyNLWgb1Gd9MyPZUVZUTi7ksW9e2\nOroVwAxcxuWb9tRqJ5TCy34/MD+YCiZ63e8BLveLRoOoIRYtl4KkbdBs7dFohOPjYywtLaHX66Hf\n7+Pk5AQrKyuYTqc5ZBYpU/KpilOv63piyaK8CVFYHYDZHyRfD9f1Zm1LG89R26ntqaOP0tOOlAlw\nM6NCqTQPot+iZ5UH/azHO7riB65uI/L6Sp7Iq6YvhjEAzOsM37nhQMVrJV5LxhIQh8FVmUeA6TIW\njY8DLOtwwFWQjIBNHZ4I+JvGx+evtrXkfPEZerS+RTOaW9RFLu9sn+onPqM6KQJllqt163GoKv+M\nqvT7/TCJL3LaSnQrgJkNVw9Vf1MFoB2mnVVn7XlZbh25MHrSTOQx+PnbmpVMpae/Ly8vY3l5Ob88\nnNeWlpZygtjJyQmGwyEmk0m2uLwvVIjcqCiBcp0ALKL01PhQAykqywUWiLOBm+pbBJRvqsBvAjA+\nHm3Liyz16DmV50geF6mLn1NKc0c76hyLcjS0jFJG8Jcq1QFr6fcmKsm1esgKFNH4Rb+VQNDHTpMF\nI/Bs26ZS3ZF8NHndvE5dprsL6viigalOUVSmGlSa8+LyH7XPIwvaf+pBr6ys5JMfdctiybiJ6FYA\nM3AVQCLw9Pccu7KK1qE9zZ2/q2erZfG7vo2Kzykv9DL81XzAfJIZ20WQZQib14bDIU5PT9HtdjEe\nj7G/vw8A2NramlvHjTwfB0gXrNIkuY4Xp+WWQmLkQzMSaUX2er2Fkr4WVXqLetR19Xl5TfWVfmvb\nz5FS1Oe0v6PkOTc466jk8aoxofxHdbmxeh15el20iKHyqqjJ+G3zPBAbXw6qUV1N2dRR2Q6QrudK\nz7elaBwiD955Ay6dLOb3+Dpu5BCod+38avv0fs0ZiPiNdKte1zFRsD45OZl7j7nfr/WX6NYBsw4S\nG6bnU0dvQCIIAKgFU33GhZOkgxx5ywy38xo9YypM1q3nS/Pe6XSaf19eXs5JJd1uF5PJBJPJBMAM\nlLe3tzEej/NeOBcEbQv5piItTTQHz0gBl7wxTzTz/vLJwfYdHx/nLXA+3iVaVNHfVBkvAsrRb/58\nnfdS95w+40qW90aH4XhdixgylCtfe1bvwMHgpkbe66SPEpC9vlfRB5HBQ1J9pbotAh4v08dMr0Xz\n2dvWxGupnsjoiMpwHaURVO8D512T4CId6aCqOsxzLJRK8zoykhS/dBeR61Pez620JbpVwAxctZZ4\njQ3WgWNnsNF62IKXXbLglFTASwKqAEMh0W1PXq8LhK7PTqdTLC0tZa9yeXkZa2tr2NzcxPHxMQ4O\nDtDr9cI1cZ+ILhjAvNC5IGu7oiSHOhDW7/pZt0wByPu2NapQ59m5EbEIfVSg7J4My4ieiyILkbKK\nQFHvjZRNHa91irXOG3LPizKgxqVu81t0aeJLiV6XARCVSxnwaKH+tTWQmu6rm59NZdWBclRuZOBF\n+tmNEO+DiJ824Ww6PG10TWkuce7qO9GBeWfSia8U1oRnp1sDzGrJKCg7kLjVqBYIFUYTRSCqfPC/\nZz5HQl23buETSZWcGgC8vrS0hOXlZYzHY+zt7eH8/Byrq6vodrtzGYDRe0gdsB2UffIQPH3LGYCi\nUJf6yw2XlBImk0nmmff5vd7XUV1tFMWrUpLXAeWme+vAsc74K4GxUknRNcl2dI/Kj46Hy7qG7Dw6\nc0evnkoJZpwXTfNSHRn9rfTMdci9wTZRhKa55kZilIGuddXJvJaRUipuW4rKdv2qGOBOj+Y+efIY\niaDMZN8S3SpgTinlsKdmigJXwy8qlMyO9vW3SBE5EJUUv3p6Wm+UFBMdXOKKTbdCsU6GwXXgdb/k\nxsYG+v0+xuNxFig9d7sEylq373NkvSnNZ+P6ZHfwKIV8WIe2azqdzp0z7mPg5auH7vS6PBMvu05R\nROv8WkYd0EYTOvIsSp7SqyId3zoP3I1dfx6YnxsaxWpjQF1X+f90oMhApqGvc7NOhzlI1tXjBnBU\nnlPptzo5b2vkt+FRZc2dDu0r4OquAq5Xt5HDkgdfMkL4ogpe8zV86jcuTer9Ed0aYGbH+ZqtD4Av\nqCv4lDL+vEO9s0v8RCDlvyu5N9okxK7EFZiYLDUejzGdTrG2tjYXRtSwMQ2ZOqBxYSmFWZUPBxG9\nxn7XevUdpfxel1gR9d1HQYsAnhs7PnYlI8Pri4xEV6Yl8nJVQSkvTeRGQN3z6rX43OF3zlOfT1F5\ntwWUXwcf2ieLPheV42VGssbvpXL0tzby3iSzpfsiUp1c91yTPoj0kIKce9GekOvy2tYA9rkW9UWE\nIT4vIyJvGk2M6NYAs26Viia7/nfPzpWIPq/leAJZk5BROKJ7S8rNU+/rqLQViyCg3vHp6WlOHmNi\n1erqKgaDAc7OzjAej69MRlem3jbn138vCZp6yXyjFq1VN6p8zJwWEepXQVH5JYXa1qDQvvZlgCaF\n3cZw82slT6SOSsZAqYyozRxj3beu8lWSo9c9pm3pdfJxXbAvGa4sMwKEpjmlZTXJYlRnSSbbeJkR\nKEf1+DORZ8zPunareinilVGGurZG/yNnyY3fun7XdkSJrno/d+Hc+lB2SpdHPJbWk4HL01qA+c7R\nIwWjAXOrSs9dreOJ/yMh1wFY1LrUUHIJ8J00xM3/HNjxeIzxeDy3vUDLc/5LoBzdx7FR4jhxTZzX\nNPkuygVoWrt20HjVtKhijjxVLUflqiRv12lHSaHcdC3XeXSFWNc/rnQ5nnV8877bAsy3gSKHg58j\ng770OdJxvMfnbxvjsM77jr6XjNZF9GAEsqU5pjqEv/vcVActKgO4evZFEyi7znQM0Ht8yYH1qeev\nL+Komxe3ApgBXAlNlDpeOzPqrCiTWD87KEUUeQGlvbulje9uCXKAGK739fAoCUsHWZUyjRPugea5\nrFz7i4BZjRq9rnyXrE/lJwJlesmsI3qXdgQE2k9RHy5KiwJ63b2RomujtBbxEBe576aGSiRbi/Cg\nY+fzrw2gf7HpJobSq+TBdz/wejQH2xrtPg4qh6VyIwOgyZuOeHP97PWW+qGuHer1lhItozocM6Ly\nFUSpx/hsG9zwe7Xduo+aY8y36vEZvvK2Dn+AWwLMakk4WBEQFZzoeXnDFHwcVJyaFJKHvSNQ9sle\n8rDo0ZcOAkgpzXn9vhVKQVyfOzk5mUuyYh2aqEVvdzqdhtZzNHlL26ToFTN8DWAuzMQzYlXYPfzZ\nRNHkWASsHTAiRdyknEsWfB3I6Li24bcE6reZIg+tiW5Dm77YoKzeWgRcJTBRo5rf64zEJmPJHYmI\nx6ieOpknf6rrSs9F8qM6lt+j88e1rChSQ15814s/q+Q5SQRT9b5ZlgNp5FGzfp5PASCf5cA2dTqd\n/PbEuojtrQBm4Krl5Z95jw6kJ6coECkwq1XTNDnZeezg0jqw8hxRnZKOLFwlFy7fB6xCwfafnp4i\npVlGtKfrn56e5mQD7We1TEmqBNh2Aqu/ek3X6rn+zdd3an8tAljeR23uabLA/f+iQM//TYrGJ2lT\nmcpT3b1t5LaJFu3/On5Y3qsu+0uRmjzhEiByfvrccUPRt7cBV3eE+JzX6JzfE615l8bWjVHNIleQ\nVE+WXiPr0KON+UybueP9EiX+UkcRWPX+Un+6XtW2af8oj97m6XSK0WiUx+f4+Dgn8+ruoVuf/BVZ\nXkC8hlfaFhUBuHYs74usH+cFuBSYyINrAudIkSoPkQBouSVhUyvQJxfL0oiBeuB+SIn2K4FXs6y1\nDTxPGZi9m5aZ85zofPFGv98PD524juL2PonGuI7cEGsa7+h6E4BGstHEW5t+qPNSbkKlcbhOfS7P\ndUZL3fMs43UR64mWil4nueKOrus8j0jlV0HOy3NDXue06taSDvKyCZ5RIqvLuQMbnSNdUmM5fKte\n9IIePh/t/fWImz6nDoie+gjMYwXBU08hVB3o/epr1epFR9jC+k5PT3F4eIizszP0ej0Al+9EIO8a\njS3RjYA5pfQOgEMAZwBOq6r6t1JK9wH8WQBfCeAdAL+mqqrdunLUiipZMsDVPcRReFcVcnQiV+Ql\nOi+sK/KUI0UfAWSdEmhzT5Nl59EAFXS1FPmMnkzj4KzAzD5SA0YtPK6PU8AmkwnG43E+vD1qw3Uo\nEvyoXP+d4+6Z4RHQR2VE3n1b773tmDofdSBXMky8zJv09U2AsSSnTQaRG92vA5y9X/X6q6pPlXR0\nzX93PjiX3fN1LxiYBx4HbQWOCCzJC+e5PgPMv6dZd1x4W1kfv7ue0O8ERoZ2eQSlJz4pgLNtmuTL\nc6d9/ii/BEXtGzUuGFFkiBm43M7JCCB50IQt7oZhfTzkic+zfcfHx/n8Br5hyteSozB7iV6Fx/zv\nVlX1Qr5/D4C/VVXV96eUvufi+3c3FeITxRWWCgyAK8epuRDqc4uQWo2LeGleb6ktpTrr6lFB8Amu\n1qOvRXufOIhH/cS269utUkro9Xpzp4VV1ezdowRlDWE39dV1qAn02A7d311SmtFzXld0vS1/Lpcl\noI3qj2THr0dg3ASEbajOM2pLbe5VGXzVcuJ8uKHlMhotd7UpW/tbvUytw2Uiql/L0n3//v537soo\nzQO20V/0Q/4cCF1WI+MeiCNzCrjMOSGPup2T/Gt0TUG5lEHtuknXZ6N2O1CyHdHhRnwGQAZ88p3S\n5fo2QV4PS2Iomq/sPT8/x2QymQNa6kIepRxFS+pyn0ivI5T9rQC+6eLznwDwd9ACmGlxkc7Ozq4c\n56jgAcx7td4BOrBarmfNuUcVnfbVRpmWJrQCpD/nz9cp1yjEw++RBxCBbh3/FHxOFk6gyWSClZWV\nDNJqIZ6cnKDf74cvqair53WQjrMmj5RAt8mDVmW1KM91slDiJ6JIbus80OtQSW68rkixLVpPybuP\nwPM6dUQ86hjyT8+S13eo+64FLSe6RiBMKeH4+Dh7mXUgXMc7PTUuDUVgWVoq4r30HnVJKwJlzUNx\nHRjpV3rrLFsPl9GtrktLSxmQ1cjXeiO58jGK9Bx58nHh79E19gmf564RbZ/nEjGT2iOHrJefj4+P\nUVVV1n/6mkdts7ehDd0UmCsAfz2lVAH4I1VV/QCAN6uqenLRiCcppUetCjIgUetRresoEco/87sv\n2FPAOQjuVXEQHcydz5IyKwlH9L9UZp3QNpFmY0eeGoUsClm7YcPzuQHk8I2GjFJKWFtbm/Pi6+i6\nwNGGOGY68SMjoEkxKq91RlL0XNQ+Bfjo/qhsf8YV1U0Bsq4N/r2ubTetg1Q3nxYpNzJA2W/uiepa\nI+e8n2JWaruCIHUR106jFxfUzV0fV0ag+GpY6icqfaD+3cvqKUdnD7D9bTw3zwTX3THUrdqHKs8K\n7KX13Lq+0HZ5Xk2do+NzNhpL37GifEfGPdvqBldVVVhdXc3j4/U1JbHp/RHdFJh/QVVVjy/A92+k\nlP5V2wdTSt8J4DsB4NGjGXZHlqgrKe98ByEF9gjEIwGNrLCSYvbvJVDW66oAgn5o/NxEJUH0PiIf\nGhXQ9RXta7WAO53LDHVe02hGE7UFuesQy1bLn3WpElWl1VReW0+5NF4ui1F5kXKJvjswex1aXqmc\nqI2uTKO2tlkLexV0XbnweR+1RUGN2wtXVlbmknEcdAjkpTwTNeY1fBt5yyp7us/VaTKZYDQa5flV\nAmIt29usgOMy4PWqztP2e3n8XgoL10VydNuky7Anc/FaydGK6vbvroO1/Gj+Rc6eetT6rDo0fN7X\nxr1sN77b6hXghsBcVdXji//PUkp/AcA3AHiaUvpkNfOWPwngWeHZHwDwAwDw6U9/utKEA21YaXJ4\nQ33SRIlhwNVEA+MpVLDquSsfzledF6TllQSk9HyJtzZA7v1S4ivyNgBkz5kKiJZyW7ouKNeBqPe/\nj6V7EaUJWyp70THUsiPlU+eZ1IGpK7SSsmpLpTEoteV1GlTXocgYr1N2aozq3lF6SvpHr5kASyD1\nHSAsl/W5NxXdw7J4P410gsJ4PMZwOMyGskaw6hR5CZSi+eAA6vqxSZ+U5C4yNkv8ai5M6dm6srWd\nTTIZLQNFvKv8RB6+O35O0RpyXTtITfP42sCcUloD0Kmq6vDi8y8F8HsB/AiAbwfw/Rf//1JTWerB\nEgSjsJJnFOpv2kE62fid/1UAVHFGFjcFOlKa3rHOrwO1Zl06GEdll5Rl1J7Sc+SlrqyIXPg14a7t\nO3jbWoZOkbGlZfKeOiPHeagbtzYUjb9+175SZQ/UKwUnb5cr90ihRfzo86X21hkE0e9fTPK50sYg\njeatnrrn2c30ND1xyYk6yoHTjXS9rvNHdQ2N3qOjI5ycnOS9rossx0SOR0mvlICuzVyNZC/SmXp/\nncGv90dJYIsaCapzIyfJ55LyT1AuzQkdw4hnp1Kf1/HvdBOP+U0Af+Gi4iUAP1hV1f87pfRPAfxw\nSuk7AHwBwK9uU1hKKVurJycneaJocgaAnN7u52PzfxR+iyxW3qsn1fgznCTuqbhA1gko72d7KATA\n1dCNh1vbWKl+PZo4i4CRKhT2DfluUvbOn0+QNsq+TkmUJnndeJSebeLD+XHFG8mEfo4UXtN4lkBZ\n19aiNkVllcjbXzeWTUbfR0VRu5tkyMFNcyaicnWbDO9zI1QTfABc8aY9fyOl+b2q/s744+NjHBwc\nYDwe5/eul0LG0WeVC5e/yEgr9RWf0f9KkZzXGXzurdaBP/vUZdznm36OjCDWqzpH71N5KDk+dTIV\nvbHK+SvpBS+3zfrztYG5qqqfAvBzg+s7AL550fIib0wFUNc3fdsOqU5Qjcda70O9HR1w3l83qATe\nCJzcMnMhuylFinYRUI6eUUMiKq+kJEtj0NROV0B11nHpOU0+Yd3X6Qflm/9Vbq5rYCwCsBHvJVl0\n/urqXJReJygvajwCV+cVr/k9LFs94Eh2dH4CmANf/qbr1RGIeMLU2dkZhsNhGA08OTnJW6C466Gk\nsN04dACIeNFnm/ox+tyWIjkrtSPSlQqqfq1EHq0o6WM3kLWeiBctw/WQ6++mcYp0/CLG7a05+Us9\nU2+cW6IUeldCJQWmz+oas3Z2yarRCe2D59ZXCYyqqppLoHKLi//bKvISXRd8mqgphEqqm1BtlG8d\nYNUJdTSJ/NQ2t6CjOpr4cEAseRdtgLfO4FB5jowizpcSf8qHK/QmHl43lRRgE7CUvBz/7PerF6X5\nK/qc9qfepwcU6Y4GnfcOQufns72t3O86Ho/zlkLOf3rRKysr+bcoccx1XpvxqgOMUj+X+r5E1GHK\nly8vqg5XPV4H2Pps9CIcn8N+TfeRu3PF3zX6x7r1XuXD8YgRD83y199L7WafLdLHtwKYSZHiiLJp\noyzCSOnqAFH4+byX5c9Gv5U6vo3i9YEqCazX91FTqQ9cqbUB2pvUGylP/a2kgIDLF2u0UeQlKslE\n07hEFrj/XrKgfXJ7FmhJodTxUiejTc++DqozDko8Nt2j/QbEnovLL+/T/xqaZpl6aAXJdwBUVYXJ\nZILj4+P8R10zGAzyCVBKfNaX1rRMgoDKQqQnonBxyaAhRYeVlEDTDUVdAvQ+8/Z4myOHhP9Z18rK\nypV2ebmsS5/Va77MwENRouRi8qnbQulIsXxuE11ZWclHbZIPj9Rq9r3Kn+JG03y8FcBcVcB5BXQS\nv88LYjTIpDoFTVpeXs4HZPhLt8s8xWEXr8evO3C1MRx8oD4qQK7jjdfVwy/dF5Wp5Ua/la5FYxP1\nu3/nn3vKLEuVd8RjyWiK6onaVqI6ICp5AS7/UZbtIsbFbaFSn/pvdUaYzkf3WiJwKhlBvM+TNnW9\nU7cS6qETLJdeMQ8E4W/9fn9uD3JpyxPH1deeXd9F4OrlRvNIQUbL8eteR2T8sa/0OV0/5/O+5q6f\n1VvV6CWP7FSDWqOkOkZ6Apr2sS5BkGdd8mQZehoZf9PX5nrmuL5F7+TkBBsbG1hdXZ176yD7048C\n1oiHHmKiYF6iWwHMRyfA6LyD9e7VpAHg0hJTa6gE1MBVb06TOqJsb9bh17Q8vzcCXX52MC9ZpSXg\nfpXA3AZES0ClHr1THTiU+qWOp0XujX7jRHAl5mtXCn4OgE3eg/PG722eK/FfkiWXnbZgXFfPdX9/\nFRTJd8l7VX7UayrNUQWEJmNTn/E2K9Brgunx8TFGo9Hc/uazs7N8XCNw+aIC8qvHMZb4raoqA5Lr\nOyp4zeR2L5n3Roq+SQ6b9JL+18+e9a0GgibLarn+UgsSTxAcj8dZN/d6vbn1d56upTo86hdeJ/gC\nuHIWhraLfc75PxgMMJ1O545BJd8nJyc4PDzMnrK+iEPHTNtI75w8cZy1P+sw7FYA8/7hBO8fAp/e\nTljqXBUWt/j0N14vgQjvc4tKrZomkHflWMr8LilV5yX6/jrAWCeJT6xIsPishmg8JNNUJyma3P6b\nt790b0nZ6vcIuDxDVsvk5HQrv24cSvWXnm0Cw9I9kcxFXlNbWhR0b1JXXXl1hrD+1/npJ8uxHDXC\n6mRU+5FlKx8eBqZCXVpawtnZGY6OjjAcDgEA6+vrc3XwoB1tmx7LqDLt8690bKaGinXtmcZAybDR\nPvb+8vsiua3rO5YbrSMrKPM/9YaHko+Pj3NIWMGXhhCBlPfwN+WDSXLkReVFPVPg8kAZzcbX8SK/\nDMvrGeDUf5PJJL/4ot/vYzweYzqdZoOBfcrxosyyPeST/JWObHW6FcB8enaOn3o2xtfcGyCl8ssa\n9LtbmCXB1Gd1MH0QI69Qn4+A13+PfrspRWXVWcbePxpC8SM7tSwN4bjFyXJVEJvaW/JYdJxKVnpk\nsPj4RiCsE5XPlrz9SHk0USRbTeNTKkfvi7yXkod8HdAsKd6I30VBvG39EZhQiWkykRrQ+pzLIf+i\nHJToftUpTcY4MDuJazgcYjqd5nVFD08vLS1d8RzVi/MxVID11yrqvQR8DcX6YSdablMSrPcH+Y3m\noQKWhq5VhtRo8jlJPhV4/c1PSvQq1aBRXeU6qGTMs2+j/CHFB5/vviSgyXnkl0sTNNZYLg9dohxO\np9PMMyMu7Cfd+hudo+10K4AZAHb2hjg9X0WvOxNshiZ8HYnkAtoErP6sg3npXqWS8rwuKNcBDX8v\nTV62QyeX/6ZWuU9CgjafIyCz/Toh6vqipMhLxlGkpL1dkeEQlVPiiffVyYRPjLYA6wqujh9/zr2n\nuvtKBsBNQNl5bWtovApygPK5orJMjyPqJ4Iaz5P2V+tFIEOFTQ8qen0iZY6KmV7S6urqlZe0uOHn\nL8Ioya8avXVlKd8EO77FycFIyynJa+lP+03rJigfHx/PjQ0wf0gK+aI3rOvE/q5hrc/1kAI9y9fn\n2HcRFujvvv4dOXfUC+q5Rnk0OmbA/Csee73eXM6SjwHL5W/eFxpdKNGtAObBag/vPZ/g6CRhbfny\nTSYpXa4X+JoKKfpc52mo4qPwA+3W4ep+K4FyNEF1YkRWNYkCRGWi5XgIyYVCvWR/xgVK1987nctX\ns2mZKuRNXlVkRGg5PlGa+ivqG+/rUjizLpv1pp5itKZdZ6i4IRgBZp0nV2c0LkKu2K7T9lfNg3qc\naiwyskUAICgDQK/XuxLxcvA6OzvDZDLBdDqdS97SaImOB6/p0ZgEDSeWQ578aE71IvVa5DERlFJK\nc68h5FxVfl1fuHOg3qGDoYOy9weT2cbjcX5/Mo2fbrebs87Zn6pLFFi1jQr+5FGNML0eHffLseeb\nq1xO1JFTT5blq3HGseU1GhP9fv+K58x6Ofb6SlnXHb7G73ObckBefRydbgUwb6x2sbG5jufDc7zR\nvwwxayiAjY+UunsFJS+G17UTm7ymErXxktUqKqX4a7iFAqdgQqXkWbpqlanS9/V41qHrKPSMCfr6\nmjZ/iXld2z1xSuvULQNRH9X1f8njKAmyP6cAWDICSl5HG4qArWRc6TN+8pvev0j/tKU6IH8VAH8d\nijwFhv7Yj3xbE+f9eDye6zMCgGZQU94YbiR4MKGHv2k4ukRNy1s6XpqQpH2qXjnLYr0OzDpOnJ9a\nt3qRLqNqdPt6pj7nRojrpslkgoODAxweHs7x0O/3c/QAuHyTFuWZfcrx0PocPHldQ7ts83Q6zd9p\neKmO47h5hIT8s+2asc26dKlEE/po6PHdynSANERNPvVexRo3diLjjEYBZbPOgcxjXvzlI6QKwBtb\ny3g+PMPpvdk1DZkokJa84ehzWFc1fzynCw2ptHZFaqPY3HtVACMvbJ+X6YCi1mcU6tGyo/7SdSoC\nNQW5qi6TL7Sfoj53bzs6wB+Yf7PMov3XBEaRcoomi99fZ0Tx9zbyo5OxBH6lSEBpm0RJvkuAv6hB\nEZXj7S3V3dRvbYgy7F6hAilBjmt1Kysrc/tPS16e8jqdTjEcDjEejwEgG5uamKN9oPyxP6LkLe8D\nBRx/QxvJE01diQOX84nXPcFLn2V9zpPrLYLJysrKXN8Dl4Y8Q9XT6RSHh4c4OjrCdDpFp9OZO/hE\no2fcV83XHWq2tUc81MnQ8L4aC7oOrRnvCvJsq3vgNHA1u5rPsu/8lDbtAwBXEveAyxMoVSdG4xxF\nWvU3N960X1TWSnQrgLmTgE896GFymnCOhOXC+mbknUT3AFc9JH3Ww1IOlO61Xpd8AkUWMjBvXUZK\nMAoLOW8KyNFbcbQ96rlxsqgQR1nZSiVeVeG/TloUrPUZH5M2Bp/XF41RU1lAMyg7ldoZTeq6ORH9\n5oo+mivR2Nd54XV1qFKiQQhceoi6v7SqqgwqmhSkXp96YyRuYSKw6xamaP44r1o2efVXNWq/cC5p\nuxRQtd2ul3wtlvPP69LPGo71+afeGgFVHZuUUn5hBg9D4f7rk5OTDLh8Jaaf2608sj7fHuZ9q84A\n9QxPQGPfLC8v5zCyXucSBN+b4LolcjzUqFL+dZlAdTz1vBozwMy4A2YhfHVkdDlR+96jg+psqS5W\nedI5ENGtAOYE4BMbXRxdREzqADiyTJ2agDtS0v7ZLVze38Y78fp04vrE0nv0Oq81hdUixUdB1Ht0\nkkYgrZY064sm2+sG3euSj1HJKHNjpuTZ+udSPV5WXf/UAUNbrzd6Xseszuv167qM4mN9E5n3usmb\nlq2eq8oqFah7O/o5MqxV4VLR14FxyaBUr8qXHEpLN569rLwqIPA+AlU0NnVyrPPcjWN9XSU/ExgJ\nwjs7O9jf358ro9vt5mQm9YB9jDxiwT89stiTTXVMmdmugEqdc3x8nI0F1qmRBA+RK+lSnvahGj1L\nS0t577HqdgVx8qvrynruhXrRumzC0LnKpHr/WqeOe3QQktKtAOYKwMYKsN3voItzVNXVF2lTKEpe\nRx25ZV3n2VBYvV71IKMJVVLyHCQVWvVQPWQDzK+ZePaoh6n19yhxSCezJtIp6PMa+akDkNtOTcBI\nKgFOk1HXVPd1+63puRJfCkhucJY8Xv2vwOjhuTrDuC2pIqQyA+aPTWW4ORofbZOvWzpP6p0qeGp7\nvS0eCtftMMD8gRrOj/LpWb4M++p39V7dy4741XuVFwfHKJx8dHSUE7i4F3symaDf72MwGGQgjpbI\nvCzVV+wbgtbx8fFcH+m6M/tDly+YNMbPLOf8/DyDN42Kbrc7lxFPPnWM+DxDzr433A0IHSP288nJ\nSeZfD3whj6oTl5aW8is619bWcHx8fOUFTKpbI8CuqsuIUIluBTCfngOjU2BteQbSasU4IDooRt/b\neiX6LOtUD9KfUS+W92vIguV4opYKjO9P5LpaVF/0Ao1o/TiyrnXCkU8KKctTZfJxBeIStQG6Rdv8\nxewjBy39rONKUsApgboq4yjD3OdYqYwmQ9k9LnpwqqgIMHUGgRo+aoh4exwgFeDc6+UcdN2iSp06\n6Ozs7IoB4fNHt2OpQa7GkxriHrqu40W9dB1z3bVxcnKCo6MjHB4e5utcO97a2srbvxxIvC16uElV\nVfmUM9dt1GcEXw2hM7F0NBphNBrlMvgbgVfXeplstry8jMFgMKeL+Vm/a6a1hr1pILhO9JA0cJl1\nzpA+iTwyzJ5Swng8zsB9eHiIyWQyZ0zoHGJf0HBgv+i6fYluBTAvdYDpKTA9q7C50kGveymsCng6\nMd2CjZSmN9ytY71Oy1BDTjoRSJygKysrcxPp9PR0LtVfQ8EK5MDs8AIKWASorCcKe0ehIvcSorD4\nHc3Tx7lPHBi8LW2jS27Y+fVXxSsNXoYtx+NxnleRFxN5rDrPNbTp/LIOP1mLilNDzjrH1BvXsjX8\nrB6qEw1s9eBKRoSPlyp0j2K5Aa0Z6eTz9PQUk8kE4/E4J74RJJeWlvIBGeolKx8O9ORFT+ait8iw\nNPlSw4cZzRo+1yxuXfvu9/tzIV+Ctm7R4tYsAqF6y8yknk6nWF5ezveen5/nOjSxTA0CjWawT5mM\nOBgM5sZ9dXU1P0ePmkYWn/GjPLV/lV/Og36/PyfvEd0KYD4+m73E4vM7J/jy7QH6/ZU8sXzyRmFk\ntT742SerT261PPWae6R+Xa+x01VwgPlQECcsB5qD6XzrxNT1Bw2jqHJQK/rjCL5tDKmblhvJDe95\nleDzxaYoUrIIKEfe8nXqLhH7mwdRAJhbV9aIj0bIVNY1+qPbWxQ8VIHq/ZERq8AfzSOWT/74m+6B\n1SRJLhMpgGk0wPtKdVQExpzfCqSaO6J1Hx0d4eDgID+/vr4ehr1dT2ib2Kf0CPmmJYaXvbyzs9m7\npvf393O9TLrTviPPKysrObGsqmZrtaxLQ9nsN+p/j/QpuPJzSmlOx3a73bmkNp0L/F294FIklp6w\nRzxVhtlPvV5v7lk1DEnk+eDgYG4LbES3Apgnx2f47LNjPD84xfrgAZaW0txesjrgKSlZV8SRxcrr\nqhAc1NWK12f0ux6/pgKgYSwt2wGf/OoE1ElQ4r2Joue8rz5qgPKxKIUt2/yuikXDlLqmpEDlfRqN\nu6/pf5zI5bkNOUAB9Zb8dUjDmvRWojFWUI7WKkm6R1kThBw83Rj3OlW3uLGt8kL50S2Grlg17M36\n2B6t22XODW39i9aOgcu9xJPJBKPRKINyv9+fe8Wk649ID+pRk8PhML/acGlpCb1eLydseZLXcDjE\n06dPAcxeAEGw1bawv1gXMMt4ZlSD67O6jqwOjRpvagzxu0ZitG/oDfsbo1yGGOXUw2donDAKoeFw\nJnvxsJOUUgZvRlwZLVDDgvdphINedoluBTCfnJ7jx79wgK9+awv9pQ6Ojg7zpnZXrgCuKBD1gB38\nVDAjr8AVWGkC8TcPBXn2pk5urydSmLoVxBXJouTPRoqobdtvQs57nXdc4oG/RVECzTpdXV3FYDDI\na2u8znUqJlmcnJxgZWUlX/PQGICs7Fm3God1fHqbFumbujIWHYdFQFllRL2R6459ZCCrwiwZmnov\nMB+q5TjzpQcEd42maRa3tiUin4MKvOqFKUgC83pGwcFzNHQt2/sy6ttOZ7ZnmOCifcQy9b9mMe/s\n7GAymaCqLl+g4GvHPj7aNwpsBCHOJwX34XB4JWv5/PwcBwcHOD8/x8bGxpW9w/5CCnrQurSgBhp/\n53iyL9zA0tPVUpqF6v2wJBoT2ucsRyMgmk+kRtJ4PM7r89QV6i2zD6qqyuv1TB5jspiPc7/fzzx1\nOp25vi7RrQDm87NzTCcn2Fzt4OhwHyeT8Zylo5u8HTz0PrcGPQSi5Jac/xaFN9QqpqBpYoc+W/L6\nfN3oOgCsbYw+k6JEsdL9bbzrJl6i9tQBTV2kg2O6vLyMXq+Hfr+fx5ST5ezsDFtbW0gp4fDwEOPx\nGKurqzmkRqudoExgXltbyzLFrNLT01McHBzkNTlug1DjzHMCXD6axrLJKNR7IllvouuC6nUAOTJy\nI15KYOOk9+k67mg0yid/Rffo3HTDvMSjPqtzGcCcl0pPUQE38ry1Db5O6/d5WJoKW8OevreZ8ndy\ncoLxeIydnR0cHh5idXUVq6urVzxV5Yd1uuesSa5nZ2cZZFZWVnB4eIjhcJjnXrfbxXg8xtHRUZ4X\n4/EY/X4f/X4/87u6uprnFsGW80yPoWSfKyirDuc81Tmne92rqsr7n3VtmaBMTFBjh2WpIUcji9eB\nmSfNfmAeEfmmQaF5T7oOri//oZFPL5sG/9nZGQ4PD7OeKdGtAOaT6RR7+0M82Rlh/OXr6Fs2nZ+X\nrRYrMG+Zu9VLYVQPtrSm5t5RBCR6PqzeqyFQJVUkJfDifdHz+pt/dt78cxsvr3S9raJ2D6gUGWhL\nauWura1hbW0th8qoLDip1tfXsbS0lBNeKAf0qjnBuE+TSXs8Y1knNYF9fX09K2L2Ta/Xy8kok8lk\nTkF4X6nnGAHxosaOl/mqqVSuzw/noQ0oK0iop+nPR/NQt9RQkTrAOJ+am1GadwqcquwZOuX99NLZ\njtL8VMWv93gonECsnqXKMnD5mkJGe6js2Q9HR0c4OjrC8fFxBkUmeHl/uiFBECaf/N/tdrPXznD2\n0dFRDs32+31Mp1Ps7+9noOHJYDSQT05OsLGxgfX1daSUcHBwkBO+PBLhxoJHFsnbcDice+kIDQIS\n13rH4zFOTk7m+NE9y5GcaIhdjUDmCnmImS+s4Pxl3ywvL2MymWRDgtnWXGJRcGcUYTwe5zGrm9O3\nApgB4HB3D599dwXv/MwNfHorAVW8zqWgqqEJTnz+zhCNPleaROwgDbVESlSFS8sqWc3qsdeBsZbv\n1/VaiacSLeqNl4CmVLZHCUoGQVteWMba2hoePXqUAfn8/BzD4TBPipQS1tbW0Ov18ulFHHNdn+z1\nevn4QPLEEBZw+aJ2rgmdnJzMgT8tf3oTo9EoK03KkydZqaem/6O2NnnQ0T11IN3kxS5CbcdL71UZ\njTzYKLIQzUPgcp5r/kaJL6+j5FUTKDyxKKX5g0D4u8/fkqFbMkZZLmUp2rVB+WYIlTqMyUT6/l+G\nTNfX17OxqEdcOiCrx+nhYraNCXmaPKVGC/liGJl8sUx9L/Hh4SFGo1GejzqO5I/1anKc6lMaZRpB\nSOkyw1x3pehxrd1ud87QbooWqmGiW8MUqFkn26lePw8tOT09xerq6pXT6wjKfGZ1dRW9Xi8bMjRu\nSnQrgLnT7WJ1Yx3odvGPf2qIe//GGh4uX90orpZoRJ5952u/wNUDNDih2Kma7KB73tT694lYslCd\nSsrFy4qecQCI7mmiEuDWAWoJHDR6URL8Ut11vG1tbeHTn/40+v0+9vb25t56xUnCzf1cV/ZtEpyo\nPEiB1ja95bW1tax0COjALIzF0CCVBNs3Go3w8uXLOcXknmG0pFLq09LvbcbyowJnL6+NkaE5ARpF\nqpN9/a9zR9cB6/oxmnMKAqpYNdpGoHSDALjcg6ogRIND57/PKTfMdc2VdVF5M9zJrGqukVLOqexT\nSnk5h/OAMqr6UXUa26v9onqURqk6NdomGguMXOi+YPVeq6qaW1oCkL1EnSeRt+xbP73v1HByZ4dl\n6zo07+d9Eam+J8+6BYrlrq2tYWNjAwDyK0ZpkBNsaaywLzWETmdCddd4PMbBwQGqqsLm5mZexojo\nVgBzd2kJW/c28WB7gM3VJRyfA0gJ3c7lkXC0+JRcIfJev4feNTuIk8mtSa5rMB1eQzAOQLS2moC4\nydKP7i1RSeHXebhtFXidVenX60C5xEcdr6xnc3MTn/nMZ7C1tYWnT5/mMBEnMcGV61h7e3uYTCb5\n+mAwyIpnMBhki5QTaDAYYG1tDSsrKzkBg8p5PB7PrWVTLjgZGd5jFidlhOFETtjIc/J+KXmMdePv\nAFYy5LTcJmOoDbXhyecYx4CkYB1tQSqV6/NHlbwCrO/pJR9UugQgB3ytR8dNvTnyzzIj8pA5QZjy\noslHk8kEw+EwZwK7LOlZ1VVVhV4V66M+8/ViGqq6nh05I9rHbnDouBBANIrB52jgaNujLGjnn56u\nfvfsbN3qpGPD53wcybdGP0g6xmo0qEHpGHNwcJCTuQjMrIf8cj829z97VjbrY3id/VlVs3B9iW4F\nMK/2l/Ho/hoebvXwMx718Gi1QtdAI1IyTconUhK65kHvWye7hrgisGoKq/F6tC5VaocKSltyhe7C\nqfXW8er36b0lUGbfteWxdE3rXVtbw1d+5Vdia2srJ58AmNtLyM35DBGllHJWKIWeWxk2NjbyWrJ6\n2cAs5La3t5dPRNJkG4a0U0o5hK3bfPQlCVz/1pDeon2k/dIEtlG/lX6nonwV4FxXpnvKDnLKL/ta\nk2RK9fiz0TXN4AYw5/1wPnsCKflgmd4/ul7LdnCtWeeFen+uE6jACcydTicnbh0dHWF/fz/3F5U6\nAY7PUDepp619Tl2kRpEmUXmIW/NxSBpK1//8U4OH5foav/ZFXUa4j6X2vYetff7wFZR6qhjLUyOa\nfc92RXLo81J5Y54BZYd75IkXPPyEyWKMyHnOUafTmdv7rdGPbreblyLq5uatAOZBr4OvfLSKbqeD\nN9cSeuk8HEy1jvibCowrQrWCKGiRgLtl7luimoDYy9O112gNsslLiO69iWdaVy9wNexTAnRtVxuq\nM0L4mV7BV33VV+Gtt97KynVra2su61q3L/DZ5eXlvOVgMpnkfYfMJuXbYUiTyQSHh4fY2dnBaDSa\nazeT+riPk4B/enqKnZ2dK4fgr6+vY2trKytcgjo9fAJ1NA7X6TelJjn0e181OEd8UO5LURRVlKr0\n1fv1cpuMbpbBOU4PlGDmBjKvlTx1va5GunuJ+qftcO+TbTg5OcFoNMLh4SH29/fzuiRP0FJe9UCL\nJifA14R1jBQkdVuUgqDKp4OVArKuteucIZCp5+ogr/KneT78rveyDM61KDyu4Xcl6oMIlL1O9onf\np7JCo4jypVjCSAXH1ZPN+Gxk2HB7Z3QAidKtAObz89nJX/cGHWz3KqgMNln+KoClUFPT75Hwt1V+\nPlE5qVwwgKuJXBGVlFobPuq+N11vc09khfrvdUaHlq1ez5d/+Zfjy77sywDMQspbW1vodmen9/D/\ncDjEyspKPneXiVhc46GXu7Gxgel0ivF4nJUhM6m5X3MymcwpG7W2ueVjMBjkgywI4imlvA69sbGB\nqpqFoxi+63Rmpy7xoAbg8lCC10GvEnQjqjPmlAf+qcflz7GPfVtSxL+uDXt57rlzDZRKnErTeeR/\nBVHlTeuggaaGmCYWant5P/l2r5rRFMpjVc32v/KYzKhN9BD53ftSAU/1DX8DcCVyoLsJtE98DNlH\n+sdydVxotCiYss4IGCOQ1LFnPQrIlBmv12VFQZoGtkZLIuMskgP1qHVMtH7N3Nf1Y/Xy6W0z0qL9\nt7y8jN3d3TweJboVwFwBePfpED/rE/fnvGUgzvzU60olK7gJsBfxQKKyKXic0G6VRyGcaIJE5BMu\nmqTKTxMge3ltqen+ut9LigUAHj58iE996lMAZiFmhq6BmYe7t7eHFy9e5DVers1pYpEK/Xg8BoAc\nqtbtMA5kui7J79PpFE+fPsXz58/zbxzT0WiEXq+XD9dnXfRG7t27h/Pz83wS0/r6erbwdVvMq6K6\nPq9T6G2pZGRpPy4iRxynCBRKAB0ZuixL54SuNbfxiKM20XBQT00zmJV3ypMSx1fXo6kTqNSrqpo7\nVMRBmWAXOQsRuAHzIWn3DPndDRZvk+olXTfWZCcAObyr80//O39uUNWRGiIcU/KqbY08YNXzkeEV\nkbZfx9Yjg7yu8qVeux6TStL2upHBttX1x60A5vHxGdZXKnxqs4NOisNaQPs1U6frAm+pHA19APNJ\nZC6Afm9bQC7VXUclb0XrbeO1X4cWbU9VVVhfX8dXfdVX5T3C3CNZVRWePXuGd955B7u7u3lzvlve\n2l71ngDk9SCST25ecwuc4W6Goui98Mg+Jp4xtM5yuS1rb28PJycn2N7ezr8xm3Y6nb5ycGZf1vX/\n6/asvY4SLwrM7F8Nx7onys9Nhm1piaoUfXLDQj1L9bwJ9mrU6XpzBNbqseq2LHrNup+Z9UbA4zxr\nv/rvLEsjEEw60/21+kpDBWqCLw1eyru/xMJ5cSOrFP52g8qXH6uqutInnpwX/dfQsmfvK/i54cM5\nr141+17HwuVNzxSPMsq1Ds8NcCdO+yuiWwHMm6td/Kp/8yHWOme1QuiT7VUrm0jgWY9b7JoJ6esz\nrhg4Ydp6+a+Smur0CfdR8bG8vIyv+IqvwL179zIfvV4Pp6enePbsGT73uc/l7VLA/JqQEgU+SjxR\ni5Yhbwd2Tk5OOj1UQhM2uIcTmHnRXAM/OjrKyoSHIty/fx/r6+uYTqfo9/vY2NiYC283RXAWpTog\n1HlUZ7gtWl9kJJdkyIFXQc35ZxkKhq4T3Mh0z7PETwQwkUzRS1bDAZh/sxuBXH+PQqo0/ngu89ra\nWr6m6+TKYyTj3k/eHuWL0SM9O5uhZn3xwsnJCSaTSTYY6S1rgp7W63IbyYH2EX+rW0sF5pc16vSk\n/qbevYJyFFXwMVddodth/TAUTZjrdrtz28JUPl3f+9oyPyufUU6U0q0A5n4XeLAyP3g6ACXALFmW\nbUk7NiIVlMgyrANl3heV8TpB0L0BXvN21X1/XXxpf7/99ts5hA0gn3S0v7+Pd999N+/3a+oz9m2d\nkFOJqrLRrRj04FJKc+twOkmBy5D35uYmUko5y1b3qt6/fz9vedF6j46OcmKSykiJXgWA3rRsv680\nF+u80yjUyOuRYavP6n+tX3lQL7UOOJR3rY/A6l4kML+Dg8pZ5VEPldA66FG5B6XGHqMvzOrV6E0E\n1O6dRsS+0ZOl9vf38+sgmZxIOdf6dCmA/Khny/K1Hr1G8uQwnUMM4Xs4WPtY+zLyvDX07kaYO1L6\nuaTLI4Oev/sWOd3a5jkqpfwCtkOz7vlMHTUCc0rpjwH4lQCeVVX1sy+u3QfwZwF8JYB3APyaqqp2\nL377XgDfAeAMwHdVVfXXmuogRdYxrws/RYuqrSIrTfqmz8pXXdha+fS1Hv/8qkgtxjpPwemjAGUS\nBfjRo0c52UvpxYsXePfdd7GzszO3JsO+9mQhn4gRiOsEifjReqis9EXmKldUqPQ+9vb2MB6Psba2\nhtXVVWxtbWWvnycR0SPhNfJXSnzytt2UtF+uU6b3q2+7qZMrX1dTGSXoRYlO+l/roBejCi/ykJ0n\nnYN6T5Tk40q95IWrTJbWW1ke26f5Dv1+PydlsXzmMqhxoP2vYB+BD9tFD5hJkDQodZshgCznus5K\nUmPV/yIjwQ00HWO2l2Fg/dO+8jFQfjqdTvb+VV5ct2mdbfWeOlIuP67HeQ/5UO/ZZZd1aIY2DfWm\nhNA2HvMfB/BfA/iTcu17APytqqq+P6X0PRffvzul9HUAvg3AZwC8BeBvppQ+XVVVfSwDV1PpS1Tn\nBZZAUq81Ab/+3gRaJSDgAEQJIq+SfCJEvDRRnYItKbxFwFzv7Xa7ePToET71qU+hqqp8vCUAPHv2\nDJ/97Gfx8uXLK1tFtG79r5Z0xD/rjkJpbpFr4p6GvXUCAphbfyOIr6+vY2NjA2traxmQT09PMRqN\n8vo5AZ3r6CmluazN101t5Ln0HIkK0dfXImMwyojVe3SZgX3jvDrPqrQ5TgRY5YNjVprveq+Dstbv\nhpn3C+t2D1fBg94SgYp1cRnEkxKVd223gpi3y+vk/WdnZ/nox6iNCkhar37nfZ6I5eF94PJwF/dQ\nWa4e1qHrwjqX2U6NVjA0z7ZGHjafjSKYPm6RbKmM6D1O7Ac1lnTs/bMCth4XqmVE1AjMVVX9aErp\nK+3ytwL4povPfwLA3wHw3RfXf6iqqimAz6eUPgfgGwD8w6Z6XHC0cQW+5p7lteiZ64JjSZm5RaT8\nUHG59f8qKaqfPLQxbCJh5W/+3T2mRUjvX1lZwaNHj/Dw4cO5VzACwP7+Pj7/+c9jd3c3Ky/lLbLQ\nPZSkfPrk0zJ0nVkzTzWkWVWXxw6qkiNx/bDb7eb9qCQqp+l0iq2tLRwfH2M4HGIymWB5eRkbGxvo\ndDpz272uu+YceQ11QKSfF5VJ9Q4jpeP3agJM9BswD/S8n4ovCvWpotY9pmyPArTu63Wwcy+tjkc3\nTLS/VbYUxLUe/u4RE52/+lk9KS034kUBzdviQK0g5fPLz23wdVuOx/n5+ZWkNj3JjX2tYXHgcm3V\nwdWXChg61jFlGXwxhC9DRFvF+JxubfI2l3S7A31pXnnSopbrfcz72T9tjOTrrjG/WVXVkwuGn6SU\nHl1cfxvAP5L73r+41khqYS5KJYCMfo+oDnhKE7cJsEsD1LZuvd5WibYB5dL3Ur+15d/LYT/wZKO3\n3norA5WewjWZTLC7u4ujoyMAV99xGwn4IsZHdC/Xg+mpeb/45GcZnkVMJcLjPRlC5Oljp6en+PDD\nD7Gzs4OlpSWsrq7OrffxRRslr6yuXdrXbamNQmgqO/K4IjmK1nu179TbraoqPKmKAOkypfkC6nnp\nPToPvc+8Hx1M69oTrZl7v2pZbIcCREnHlXj2OqLfPQzrBqm3l89ovysIM0ubwOteKstMKc0ly/EP\nuHx3s5aph3F4Dgd58v7Sdqtc+Dj42FNGXJ+4px7pMAdlNdp9XKKMcC9Px6gNverkr0i7hDM8pfSd\nAL4TAB49ejQnsJECKQFkUO6iPNeW0QacdEAivvVanRHQhpdXTdGELX2PKOofjiPDvA8fPsx7fHmq\nFj0lAjOPudR6fcLSm67jmdeY/OKhJvUGNAFG14DqylZFQyXBE6f0d76E48MPP8RkMsHW1lZ+UxCA\nOf7qDttoojYGXAREi5at3/V57S/1JEt8RJ5lpABJHpLmfTxQhuDC8lThKm++F1oB3T2uqI/IW7Tk\n5qF0pwhESv0T1R3x5EAc/fdoTLQ2Sy+Yfx4O1vcXKx+8pvNGwVfXUXm/nsCo1yPjQSNi2ndqFGs/\n+L3sz2hsfCmGdSlfvjc5Mu60Xu3TSK5LMl+i6wLz05TSJy+85U8CeHZx/X0An5L7vgzA46iAqqp+\nAMAPAMDXfu3XVnWeZkng21ITGNaVXTdJShM5akOJh+vwdBNqw1Pb30mq2Ki4+FKJN954A+vr63ls\nuQ8YmIWnPvzwQzx//vzKOnCdEoomY0RqkRNIFRD9jF41DlWxlzwo8qOZuayHp4zxGFGu8/FFG/Su\nuX4WeQ83oZuUFXnm0Wlai8wbT7Qq3atj456NeswAcvIY1+qZ+epeaqScPaSosu7r4nXA7d5vqX1+\nYpiX2cbjivrY14Kj0LaGnQmYek11r4KUZlGzrco7QZ1ecCQTHAsFc84t8ufLFw7K2ifeRv/dyZcP\n+Fllgn2hWzO1bEbY1DircwrrDOS2dF1g/hEA3w7g+y/+/yW5/oMppT+IWfLX1wD4J4sW/qpAKZpc\nbZ6JFFOdkmszEK8LaBehiM9IGXjb25CCcr/fx/r6Ot5++23cv38/v+OYh/UDyNuinjx5Mpf4E/FE\nAFNQjTwpL0OzQFkeFaMm4jgAqwHgHqH+rmXpW3X4udvt4t69e3OKpNvtYjwe4/j4OB9mzzLarj/V\nURtjq6l89wYZqvToQymq5UZ2nccdySQBRJPzNPlI56iCjL5ZisahKn2v341BBRD1LPVPPW2N4Ljx\n6O3VflfPSoHcPasojK+gomu2vK5rrpo7wb7TMqN+BZDzP7RM7kRQA4tzEcCV9dySbJAv7S/y5Z50\nSe+WDDs3nPy7G1/ar9ovmj0fbW8sRbgWmVdN97bZLvVnMEv0ephSeh/A78YMkH84pfQdAL4A4Fdf\nVPYvU0o/DODHAJwC+K1Vi4xsMnpTpRSVqf9Jr8or0bLbCMttpLq+cAVbR1R8g8EA9+7dw5d92Zfh\n/v37ee2V7zatqstTvfb29vK7Tvlb5HEAl4pDt9dE2dYKpg68vKaKKTru0e/jvfyvCk4VCjAfok4p\nZeCtqiqfl8yjPZn8RhDhm6wYpn1d1HaesW3aT01yrTLjSrpJMemzjCJwvJk8xS0zVK7kjX9+sIe/\nVSql+dcCuo6IvGX+puun2kb3WCMZduWvyYYA5gBV71UAcUNV13AdeNgWP/3KgVnHRU/5Go1G+buG\npX0JQPclK0Vj7f1Tal9dGSV9tAhIRn2lBxix/8mbvpCGXj/bHcmLe9Ta7rZ40CYr+9cWfvrmwv3f\nB+D7WtV++cwit1+rrEUUXVtvuQT4bjV/sT3ntuAKNK9vlRTq+fk51tfX8cYbb+REL+Dy1XfATPm8\n9957eO+99/JLxJlQpXU4mHpyij6j/JXW0JRPVfbuTbMuluVrZKpI1EBgPQraALLXQcW2u7sLANjY\n2MjZ3DwykaBzcHCQE8PqFNWi5KDaJBPqHbqiKcmAyr8f3NDkRany1us6Tuq1c4w0dK17o7mUwLGg\nMtXMb7ZRvTP9rlvk1CuO+pOAr4qZ/zUC4N4y20fZUeMhWgdPKeX14KqavRCDrxD0/buUS82L8O8e\n5lYZ1jb725KcNNztxHLUEFAe2Te+R97BLep7v68NleaV9nOUQe/9yPtUP7gRouOs/dTE9604+Qu4\nGWC1BWMHFx9gHZhFwSwanIjHVx0VaMvfdUgnTx3Pp6en6PV6+MQnPoGHDx+i0+ng6Ogoe8nn5+fY\n2dnBs2fPcjIU+XJh1jNyHeg0MzLyZsmzfi55xHqvJ34poCs4+XOqWD3LWD18lvPgwQOsrKzkRDe+\n05Vtm06nePHiRT5rOzqVqNQGp6YxV+VYd0/dmpryoGPp+4vrDFvlw+vQvuUYq+zwJCX2pWbsMrSt\nL11Qr1q9H5anIUoCli6h6HzwOd/pdHIWcylCo2FjBVw1RtywBJBDyeSLSz/b29vo9XpXjtXUcpSX\n6OAN55W8EeydvzYATHKjKDLutZ/dU2WZEWk7vZ/bUmQARG3S6IA/pwYcSXMb+Kxu32tqG3CLgPk6\nVGeBR9/dyq2jJsUV1V0CgLrnXhctYljUPd+kuBkuvHfvHjY3N/NrFXmu9MnJCZ4/f57fgawH6QMI\nlQmJB304MDtIRt6zT3AlBbzoIBNVpKroPYTOl1no5OVkdHCnwmOoleuDfGkGj+08PDycA+WbjmMd\nNZWr3kLT3HFF6d5hXR11c0w9WB3/8/PzDFYEZgDZK+b7uAnEuj9YvW6dszTiWD7BmksLTAJyoFHj\nhb+pXPq6r4K/yg/LcAOA5atMsG18i5quoWu/uWFdSpzSeaWhfh1T7TNStLauv6kB5SeXkTdtV9t9\n/e5AXReU/VqbuaZ6UQFby9V2Kq8+xiX6WANzk0dMci8qErimMj4OpIP9KpV51BcUvLOzM2xsbGBj\nY2PuoPx+v59DiePxGHt7e3OWpVvqShq25GR2cI7a7QpRxznqFz7nSop/engCf6PnxfA8w87aJuUj\n8op4IMlkMslZrXz24cOH2NzczPug23i2Tu6J3pSaDDS9z5WR/6486jX3JCJvm2Os/csx0heO8Pxp\njqGeTc1yuX9cvWfyrUYFQZFLDppfwGxkfTVoSil/Z10aNXGgBK6+Uz7yyPmbyizL5xvZgHkZ1OiS\nJsUptV379DnngOxlKOiqMaUGiJK2tw1F87gtXQeUIydP/7t+8TwX57fJiftYAzPQLqEk+h4NQt1v\ndfXW1f9R0uvwrEpt4mTv9Xp48OABBoNBVk5ra2v5gH6G2fxEL+c7WvONnqmzpiMFowrMn1VQVoXn\n3oLLmCYdabkEbIY1lehhKajwHr7VqtfrYX19PXvTy8vLc6+8vC3UxtNu80wJECLQdi+LvytQ6sEW\nzNAmcK6vr+dr/X4fALIhSSNL5YS8KcDy7Gl64Go4qbenQKpJQr627SDb1qhWOVZPlN99ycf7MzJs\nPafDvWI3kPyzzxe2S0PU7GM3nCODYRG66dxYBNjdyYv4YJ+rkaXJZW14/lgCs4cLmu7T7zoIpU5a\nFGBvk9L8KEi9yfv37+Phw4fZK+j3+1heXs5bgp48eYKXL18CiNenfB1Zla8qnWitmbyQPFzH3yNr\nXxU8w4ScTMBluDM6IjJaD6NHQp7dI1JPIaU0t5WLRk6/30e/38916glhi1CT5d9GvhdVVNH9Pkbu\nUbUlrUP73pWeJzYpYALIb/eibPId251OZy4BiXUwK7nT6WAymcxdo+Hk3q4Cou7h1aiKJ5xpX3nf\nKbnMalneV04K2F6m16dA2hRWVp3qCV5alofzWbYnt30x6DqGJp+LsMMNyLp1+RJ9LIH5ptbVotb7\nxwV4XwWvbUI6FL719XV88pOfzNt8+P5XrgtPp9O5ELaHsFzJeP97SLGkvKLQdcQzE3lUeaqC53N6\n+Ii2l3Wo8lG5iUCc93g0gGuh9L76/T7W1tbQ7/dzmJRrmvpGoJsosLYGbelZ79fIwyQpSF2X7yhy\noWOmXqcrQR0nAHPAyOQpesrKs2ZWK6ASQBwIPSITGYe+ne66ETmVgeh3D7szQlACf+W/5K1HslIa\nax8TNZaiSJaGuW+jnr2OIen9qPPfve26Mj+WwNyWSt6xWqulDvKwRBur9Isdyq6jRQW/Tnlz8r/9\n9tvY3t7G8fFx3h7FF8IvLy9jZ2fnyuEhUVlap372kJp66u4tUAkwOYfGgYOyn/xFkFRP1wGeFr8n\n/Gg/ORBFQEEeNRmmqqrsJW9ubqLX62WwXl5ezv3XBKaRLLdR7m1l1o0Q9ZK0blVIGkpdVOk6n75O\n6aDs9QOXa/0kDTFqeZr0BMy/+Uq9Xh9bHWMFOO8X3ute6XWjF5Eu4n/OPV1/j8pjP6i8K8+uOyPg\n1va7nqWsu97lvNXIhBoG16U2BsQiVBcJasNLVJZ/rqMvSWD2TnXh8PsWLbtuolyHbgKadc+WJlQb\nfkpEpfXgwYP8pqilpSWsra3h8PAQnc5sb+nz58/xzjvvYDgcztW/yGRRjziy9jVsScXnoUng8kg9\nKl49MIFgSzCMlLmH2Z0/DVVr5rD2pRoOvJc8LC8vo9frze3HZVLY8fHxHDj4lo2b0CJl+PhplEGV\ne92z/jkygtwgJum2mihi4QAVKWnn2ZdR+JwaYJpLwCiHGyB8Llq75b2lNca6uVv3e6TbaHxyXV3b\nHfEFzK/58prODTU22vLooKwGGjB/5GZ0/n3UvjqqM0CvQ2rAtaE6INbyFuHvYw3MruQjAK67phR1\nZtu6b2KZLUJNVvarNhi8bv6tra1he3s778fd3NzMQMPtUe+//z6Ojo6Ka2V15MpSeSjd76E0VS78\noxfhoTNVZF6HK65ofVBBWcv0fci6pqZrmhpeBWZbzMbjMQ4PD5FSyseZDgYDVFWVE8KaKDLa3Mts\nSyXF0nYOuEdb5037nFWjxAFNx9IVYAn8Is+61FYNaXtuQtQvzj/Lp8FVArlS/ZEhU+ov90Cj9WTX\nl352NZ/T8HxpyYjg5Y6Cru/rNfaFzgFvo34vOVhRf7lM30QfL+IotTVO/Zkm+lgDczQRF7G0+Ezp\nepOi0TrrFvabymozUCVheZXg62Wyfwk63W43H7l5fn6O0WiUX2V4cnKC3d1dHB4e4uDgAJPJZO5A\nhVL/qPJQDxhAGDqOni8pawIhFaOCryZ8uSXvHhR5LCXaeMjOPTtedwBheTQYuF/2/Pwch4eH2N/f\nR6/XQ7/fx2AwwNraGo6OjjLvyoO236+/SlLZqFOodUakypT2rxu72o7IEHblHxnhN2kjcCkTavhp\nO7zuyCtmW5m139YoikA5IjUC3UBU2fbfVP5KusXD9NrOqD8clL0vPRGyTfsXlWU3mq9jgC4iP68a\nlIGPMTB7B6rl4ut/i07URe71dZo6XtvU29YbbkOLWH7Og4PNysoKer0eNjY20Ol0soc3mUwwGo0w\nnU4xGo1wcHCA8Xg8570C8bYlD6GV1qpKFCXiAMjJUgw9djqX25cir8E9DLf0FeDdCHNgdCWtypl1\n6NuTeJ/KLZU439C1vr6O9fX1vHau3oavbSsvJWqrEKPn2gLKIsqqxJePU+Rt6303BeSIIu9Uxysy\nhiIZrqpqLvFrEcNfv0fknrKfX62GkLdL91/rvbxH9Zv3u3uLvmav2fJVdblE4HU09XcbcjCu+71t\nOW0pkvXIqIoMphLdOmBeJITg5CHKkrXedC2yzOu81UWs31cB4G2pqS9LwkMvRreDpJSytU9Sz5DP\nvnz5cu5tS1ECSkrzSVzA/HptVV0epVnKuI6OqyRo0qOkctLsVDUUWK4qJg+Nsi49XKIubKZ8ukcS\nvSPaPVD+bW1toaqqbAxtbGzg5cuXGA6HmEwmmX++RrKtZ9BG8bixEfX/66YSKPO7K8NXYXw3zU39\n0/F1HRHxpl53dI/WX2egeKa7zhFdAyfpGd3eFl07VyNUQ/ZupLM9EWjzNxqxrB+Yz0ZfBGwXpZLO\nft1yW2fM8v8i3vutA+brWv6uYCMr+lUBH4FrEa+g7r5Fwy03JQdXFx6+Q5gAqhNZD9BfW1ubA4bJ\nZJItcHqpJSUfJXZpiEuVTd3hJFoen6dyUZBXHjyS4pY/y2QGNQ0S7St/G5Cv56nBonX52h0T1VTR\n07DpdDpYWVnBysoKhsMhXrx4gZ2dHQAzQF5dXQ0P0ffxfFXUNAYOOnXKSo02BywH4dL4kKc6vur0\nAb+70REZWnp/tO7q9/M+9QxVBjTkWzL6vc2RcRKRhs5dNiNj141n8s6ylO8mr1R3LzCfQ4/ZrDPw\nvH3++SayfNO5UKej2+jvpvY63RpgLnV+G/DzydYGlNt2ZBOg+qAsKgAlQVcP7qYU8aPrTZyIuqbL\ncCsnFifaZDKZO5JS167G43E+/hC4BNVIuZQUolrovO7r0NHZtOq183uU1OJtV4XhYLG8vDyXyU2e\n9dWCrnC1Tap8o3AzQRnA3P5ZbfvZ2RkODw8xmUzw4sULHB8fY3t7G5ubm3kMdJsPyyitGzZR6ZkS\nmPk1Hd8S0DHZjt5VCZS1HNcDaoy1aYuOk5bRNL+itlZVNZe8F+mskuGuoOwhX6+z1Mf6vGepa4Ij\ncPXtXCpfagySbx83jyy5Aab9qADflBleR972V2lcNtUb1XdTQK8ru0S3BpjrKJrgddY4UL/227aT\nrmMFLUqujNoYBIuQGw/qQeor8xhqIiC7p6egtry8jPX1dSwtLeX3tk6nUzx//hyTyQT9fj8Dvb7H\nV70IZiN7wpX3qR97qQCnwM21WyqT6I1A5D96naMrSn09oCa+cFuVh/vYb570FRlt/K8vw4g8ewA5\nDH9ycpIT7zY2NvIrJVkXoxOsj/3bZFiWDNY62W4ylkuKTUHFl0WavBHtOzW2mnhwoPHkpDremzwh\nDfv6aW++VKPt4H2cSyUDxvnnM5GcePvUWCQPOheVdzfivO+03528TXrdw/fRcyUZiAy7Jt1Ycora\n6Gnnp1ROZEC9DsPhYwHMpDYdsKh15p6bAlgdRZbjdSmakKW6blI+lX9KKb9wXkFYLd+ULtdE6dkA\nyM9WVZUzr4+Pj7G3t5eP3uQJYKV+IbiW1uj8XpKvR/OzA6gf86fepwK9l02DQQ0W9U70UBL1JLSP\nvE2sXw0DbYPyzn3UfEa9+uXl5fxyC7aZzygwaIJapHTJc+TB+z1tr7cBcwUoH586+S6Bsi8dOHmf\nR+NRAr82OkDnvvIfha5Lz7px5/V7OzX/w2XQn9PvuhzEec37vEzlUeUsMvJcb3ibImpyrhzs2oJy\nVH6p7LrfmwzStnXdlD42wNzUqbwHuDr4keL3QYisfD5XUgKugKNyFqWSkrsOOJc8Op1sAOZOovLk\nKa4Z8eACYN7y7na7GI/H2N/fx9nZGe7du4c333wTKSXs7e1dGTP3btk2t8ZVgesr+/ib95MqGA1r\nu4fifUNQU+9V9zxrBMGf9TZFMlQCAQcCX29V5a1tZr+fn59jOByi3+/n7wDyliu26ybUVvFEsupg\n4WFfB8qm8hfxlAkqEaDwdwfQ6ypZ9U71s16L9IMakV4e71OPVq8BuCLnyouWowaY5jV4PVFSWVVV\nOWKjvPu81TB5WyoZZiUDxX9rU76X08bQ8s9exnUper5Ojm89MLcZlGgQ9LsPvE7umwjC67SYojoW\nAWf1Ehl61UMSCGIMpXHd1BUNcPWlEL6WNJ1Osbq6irfffhuf/OQncX5+jmfPns0Bqve7Z4SSFIiA\nq3uE/ZAJVdq8ph6u1+uWvYa2tS+okPR59VScB5fByKNxudM/1q8GUQRiOqZcfphOp/mUsKWlpRwN\niUBIeVyUmmTP26NtdsOlJNcRSPr4ljwt/d2NKb0n8hIj46ItRWVEhqa303MD9D6VTzVqdE56mRFP\nJE0SdH4cJGnAaxt0KSsydiOPORrjNkZQBJLRM6Xx9XtKjlNUdsnIjOpq0542PEZ0a4C5juFS50YW\nXNSxep8nM7CcOmXwxaTrWIskgo5ue/J+pJJ3AOIEpXJQj4W/d7tdrK6uYmNjAysrK7h37x7Ozs7y\nWjPLjfiKSJWBZ4WWJoaOvY6te//qPfE5rt1W1dU19vPz8yunPbncLSKHeh+Jcq18ss30QrRM/k4j\nii+4OD09xWg0ytncHDNtD3B1y0qTYinNyWiulPqlrbFb8mrUsI5AuQSKdcqWBl6p7kUMYNanuRKR\nUUDifazfjb9oXDSrWYmyE4G16zdvf9RubbvrTI9y+f2+d/qmpPz7coy3L2qjUt1cLOW2qAFS6kfy\n0oYW7ZdbA8xAPHl9Yis1eZPeaR4aigRTeShlXb5uajJSFuHH36ak5UR7iX0CUrkrb6x/eXk5J3r1\nej2cn59jf38/nwTmXqy3Tyeye84O3gTP0rJCtBan5UbhQE32UhDUfcvKrystbY8Cvt6rfVYHgqoA\nKXdudKgiVuNCPWh9u1e328XJyclcUpoCUhM/dfPRqa3Sivqi5A1FY9amLpe1kuEQeUKLzi8lPdM8\naiOvqbx5VrTPAV/acG9Vl6OWl5ezsQk0v26wNP5uIOjSDn/zuePz0se25J2Wxl4/u9HFZ0uA2QYs\n2Y9q0PO6zrFXaWw0eddKtwaY21g8dY1q6jwOZBsvuKqqHP6tW1/+KGmRQfVndDI7WABX1zX1On/T\nwwtUoej7go+Pj3F0dJS9ZS1LeSLpmqGWC1waDpqsRtBpMsJ8sgHzwK/tjNZ0fV2XZTpA1IGUlqte\nqytVrhWPx2M8ePAg7wt3T1Gf9fXFTqeD9fV1rKys4PT0FJPJJLdZIwLRiVB11AaEI0MlGp8SmJbC\n7CUvua7/S3PSQe11zl1V9sq7hs+BS0+5zjgBrh6k432vuyqA2RxkNEWNagdb6sKoD3WeKL8kjpm/\n0MPbUQfKdVQyCFX+eV9Ud1tdGclutLf+OsAf1eXf68q5NcBcx+R1OkLJQTnycHgduLpFp1TedcCy\njpqUxiL1tRFSFzxtlwJySgn9fn8um9N/Pz8/zy9eKIWHgPmjKYH5E7j0Hlr+y8vLWFtby++XjZQe\nn1m0j0rGScnr9TZ5iE29CnqtJycn2N/fzy/80BAZ94U/f/48rwsTSHWvL5ciaAhpv7MvGdY+OjrC\neDye247G/c4OTKUIQJ0cOjBon6vM6TX/nXX4soX+7p6KGkx1YcYmL7yNAd+WfI5FhmbUbu6B9shU\nRPR+ldy40nMG6FBovfqWKH1phXqGTnp6l+6L5m+RJ9nWUKojly8fzwiUo8jHoqTj1xSdKRkii9ZX\nR7cGmF816UC6FRiBMqnu4AA+31RGiZdFBLVUttZ9HSpZgSQCn4Y+l5aWcqhM31Gr67Hn57MXLzBs\nWrIQFVh53T1KKq1Op4Otra0cXvc1LgcZ3Wqk/32s+D16W5Ebag4qbCv3bnMyHx8fYzwe599pWIzH\nY0yn0+zxsxy2h6Fmbk0bjUY4PDzEaDTC+fl53noGIN/LcVDDgOPFc8vZx+vr6xmYmWHv4+3j1CRb\n/ownL/Ga3x9FNbTfI0NYjUDKRpSdHdUXyeCromhuljKbWbcbGjqXdZlNn9Vlt8hgBJD3tHN+aAhc\nIysa8dJolPeLr5fTOGgLyItQkw513c3/KmeRE1IC+Oj7IqCsdJMIQBN9rIB5EUCM1m4iC0xJQbkp\nhB1Z+RG9So/aeViEdDJ3Op25sLAqA/7W6/VQVVUG5F6vl//Oz2evKSSgsMylpSX0er25MHak+Am0\nQHxgAn8fDAZYXl7G8fHx3HnXqgBZRh1g+73AvEdQsvSrqsrgOplMMh8nJyc5E5rlaPjYt5esrq7O\nvW9ZlUBKs2jE5uYmBoMBptNp7ncaRN1ud+7MY3pG9J61zZPJJAM4vW2OJbdWNclJG4r6t2lOOGi7\nV+2kIXs/zaqO35vOjzakXimfpWwQGNUjpdfKZ3UuEmgYYdFrugSjoWOtVw1XNTgV3COwdadF+1KN\nZSBOQKubc4s6IXXk68olUNZ7Snw66Xws8eHGeVM7/L6of5vafOuAuW5Q60A1esYt1ugzMA8OdaDs\nYZRS9uV1wbhtiOQmHnhKCb1eD8vLyzm8SgAiMK+urgJADiUTIKngmeylIJlSyl5e08RUReRrOtx+\nVVVV5kM9h2jy83M0gVQx8fvS0tLcmtzx8fEVnk9OTvKrF4+OjuYSqFgPy0lptga+tLSU37pFUNWI\nAoFSDZeVlRU8ePAAW1tbuY29Xi97yZRHXcvnd46J8s+xHI1G6Pf7ADDnZbeRmci7cA9E723jZWtZ\nXoZ7KQouGtJ3QF4UAF4VlQx+zg1N7FJ+XQ61HM3Ej/STRlz4m0YJKF+UEb3O+2k0qOHoyzH6Odpi\npe2J+sQ/L0o+pr4276BcV29JhzqOOOArtQHliA/XQV5mE906YAYWs4KbPOjIilFh9ZOgSoOjA0cL\nVcNM1wlXXIdcIEv8Oi86iX1tyyc4PURmXR8cHMwlf/EVhAzfHh8fZyAjYLhgqtKikvX3IwOz10iO\nx2O8+eabGAwGc+HhCHS9D1yR6MTWCT6ZTHB0dISdnR0Mh8MMmgSDyWSSvWD2Q6/Xm/M6NOOdEYiT\nkxMsLS1hbW0t9xFBlUBKwwNATtqiglxdXcXKykpO4tF1Z187HAwGeczosfG55eVlbGxsZCOMvLPf\n2gIpEOdaKCgtYlC6geVAoIaxep9R2a8SlBctq6SsS7xy/EoOhHrH7sWyTEapFJg96qP9qmvOHh3j\ncw54UZ981MaPGzslGYtAuQmM6/R7dL2uzBLfUb1RPXXl3kpgVtJBavKkgdhq0kmkygGoB2SWp6FT\nTZzwOus6uq1w1xkZLqh1ZaqXxmcVSNgWhkoZnq6qKq+dnp2dYXd3NwMP3zpFr+/Zs2fZm2S/anKJ\n9r3zwlDs+fk5JpMJVlZWckh4MBhge3s7v8nKM1i1P9xT9n5RwGXG8tHREfb397G/v4/RaIROp5OB\nlGCu26XoGavBwfV33sNw89nZGVZXV7G6ujoHQuxretFcMtje3kZVVTg4OMi/a6ifJ64RvPksoxg8\nmxyYKe719XX0+/3MR6/Xm0vcazOfXB4dKPQ3v1YiHT+vl6DE/mVIlwDic/aLTe6tR+1ZxIiIAMav\n6TIGeQAul9+AeZD15SHtw7Z7jtt6oK+D6nS6UxtQdsOoSZ7ayrbOi1J/Rbq6yQu/dcDsFkXJwlAB\njrwzvd+BOEr6iUjDShpqjRJNXgXVgbLeU/JgtAwHBU0uISisra0BAI6OjrLXy3AZAVzBQvtuMpng\n8PAQ0+k086CeDnlinZp4RM+SIFlVs/cOU5kQrEajUZiVGskBvW/3INiOw8NDPH/+HLu7u5hMJnMZ\ny8z8ZuicY8wscN+rqVEGTZQDkEPGek37nO1cWVnBYDDAyspK9sw9pMtXb3JMGI3odDro9/u5b/g7\nwY2eFdumGdptSA2dm3qpWk5kUKaU5oweGjcauvfnP2qAiJRqSXFf13Ap1a3laN+pwRuF+IFLeYqO\ne20DdlH9H2XfRzyVqAmUfd39VUcB6rx6HbO2PNw6YFaqc/ubQhb6fFsg1udU2Gm1+0lWkdKK+IoU\nSnRvG6GPrOvodw0Zs2xmZq6treHevXtYWlrC8+fP8eLFi7zeDCCvH9NDpmB3u93sfTFE656DCqQq\nhtFolH87OzvLHmu328VnPvOZvOa9vLyM+/fvYzgc1pat3z2ZinUwXP7s2TN84QtfwNHREZaWlrIn\nSdDu9/tYX19Hr9cDgDngptfOxLZOp5PXkfW91eqZ0/vXvu92u3lLlL69ipENPZ2NwOtrysy21vVx\nrj3qNip62eRPjYdFIjfkPZL5knIvGclet/YDx4vypJEXjfB81MBcqsfnYKRkbwrKUV1R3R46j6IL\npeM4S3VF0Y0vFiA3OWpulERGlDpYi7ajTt7qyip5z20Ng1sDzE0CUAfSpfui7234cCXimcMRD031\nukd7HUHXZz1JxPvPQzZUhGtra7h//z5WVlbw7NkzPH78OIewqSg1y3M4HGJlZSUDRafTwWQywcHB\nAcbj8RxvGuInEAHA/v4+3nnnHWxtbWF1dRWnp6fY2dnB5uYmvv7rvx5vvfUWXr58mddyj4+Pczhd\nlbMr+dLeyqqahfzG4zGePn2K9957L4d8GQKmkUHw6vV6GAwG+XAUeszHx8c52W0wGGTFx3VbhpjZ\n9/RiqQjZ76xTj/tkWDulNJe1zWcJ1FyrpofP8WGZ+oIRPkce9fWVdfJZJ3OlqFSb6E7kIesSCqMn\n7lVEa59cIvko6CZe1avyyEplNBn/US7BIqCs176YHrLyQYocL15v8qjbyKsCqH+P7vM6moxV57tE\njcCcUvpjAH4lgGdVVf3si2u/B8D/EsDzi9t+V1VVf+Xit+8F8B0AzgB8V1VVf62Rixqm64ToVZIq\nEQ1BRklHJYos+zbtWYRKYTRVllH4pNvtYjAY4OHDh+j1enj27Bnef//9rPBTShgMBnnLju5HphKl\n93hwcIAvfOELePnyZfZuCDLj8Ri9Xg9ra2t4+PAhOp0OPve5z+Hw8BA/5+f8HGxsbGBvbw9vvfUW\nftbP+ll4+PAhPvjggwzyNBL0PGgPyfnE0BBdVVU4Pj7Gy5cvc+j6/PwcvV4Pq6urGAwG2fvd2NjI\nhgafPTo6yvuOCWwM+zMDml4yfyfPakjo1hbN3gaQPW/uRfZMWsog28jQtBqMOi4R+Cl/5O0mgBF5\niW3u9es0JDhOzB/QvARgfhmKfPs2q9dJPoea7q377Sa8lsChTT3uTb4uHl8ntZHXOo+2CazrxrkO\nUF93JKGNx/zHAfzXAP6kXf+vqqr6A3ohpfR1AL4NwGcAvAXgb6aUPl1VVeP750qd+Lo7IFJqwNV9\nfE0UKZTXYUB4ffwchQxpWPR6PWxubuLBgwfodDr48MMP8fjx4xyipUe4traGlNLc/lj2z3Q6xWg0\nwmQywe7uLvb29jAejzM4cM2Z7aa3vLe3h729PXzjN34jftEv+kUYDofY39/PHuiLFy9wdHSU66Xn\nrnWrJ+D9TIXOe6bTKR4/foynT5/m9xtzuwnXkWmELC8v56zvlFJ+O9PW1tbcaVndbjeDqSZ90cPV\nBDDNLPboC++LwrocL31Gx1iT0fQZNwDJDyMWCsqlAyWuQ4uUoQYKx0vPXSaP0f5mkmfXu2f9OmhR\nneNRsVfRz00RxJJHF3327x8XsG7Tl5Hh6MB7XWoyvF4HNQJzVVU/mlL6ypblfSuAH6qqagrg8yml\nzwH4BgD/sM3DJeEC6rchLEIe2mEZqhDbgHLkCVP5AZfr0tcVjtKzDlakKPxOQNra2sL9+/cBAI8f\nP8bOzg7Oz8/zCyiWlpYwGAxyMhgTiJgZS6+abzGaTCZ5Gw7XMJnctby8jO3tbayurqKqKuzs7GB7\nexsPHjzABx98gJRS9lBXVlZwcnJSDLXq2528XWyzysXx8TE+/PBDPHv2LHuvTLTa3NzM4EpvmFue\nPGGLnh3HgGBLMPHkLvd4vR0OqBw/3bfK3/gM/yvQqmxo0k+kfGnocAxfFVA0UQQK9N6rqpp7Fahn\n7tc9z/s9kfPjRDcBu6h/6kLa0VhrGU0JrIuEahelNs6W9lWks+v6sm2Es6kvP6o5E9FN1ph/W0rp\nNwL47wH8zqqqdgG8DeAfyT3vX1xrJFc80WdSySJcNLymA68JXqV1rNIgUVGyHE82aMsjf3fFW2cN\nR5YzFd7S0hLW19exvr6O8/NzfPjhh3j58iVSutyLTPAmMHNtlWFrKvilpaV8CpYeIUnq9/vY2NjA\nW2+9hTfffDNnXTPs++jRIwwGA5ycnODo6CgDPLO6FaC0zxykHMzIw8nJCR4/fowPP/xwbgx1uxAT\nv/r9/lwI2A8P8XrVU/f7HByivaWlcVbPKlK8zoeWSc9T6/ctafqGr6jsV00loHRjV73eSLZ9zjjQ\naITC8w9uM13HA2+r6xSQo4SwqNzr9FnTMz6GUT1NhkCJt7bXlA/lpS1dB5BfNYhfF5j/LwD+cwDV\nxf//EsBvBhBxFvZcSuk7AXwnALz55pt5va1uL2wuMBBK/d9E7mlqeK3NQCvp3tIohOVZpZGQRnX6\nKU0+mfjd18Dp4alnWFUVnj9/jp2dHXS7Xayvr+dEJuDypKmqqrC7u4v9/f3MO5NzqqrK23o2NjZy\nXf1+H48ePcL9+/cxGAzwxhtv4Pz8HLu7uzg5OcHW1hbu3buH5eVlnJyc4OXLlwAwF0LWPtK2RZ+j\nvjw7O8vhecrQ2dkZ1tfXsbq6mteUB4NBTrLSMS1l7UdjXgJk8uNnrJcmrHrDkdxEdTo5Dx7e5mdt\np9f/Kijy0NQo5Tiroi6tF6vhEfUJoxc0gnVd+nXQTb3cEjh5Hby/zfU6/af7vhf1Aut+0/Lq9Kzf\n07bv6njyeV83L9uS69IStQFcHee2dTfRtYC5qqqnUsn/DcD/6+Lr+wA+Jbd+GYDHhTJ+AMAPAMDX\nfu3XVvRy2EDuZwTmz8yNrD3vmDaWld7Ptc0m79zL8yxk5UUzdfUZbYN62FqX8hVZyV6Obi3hd54A\nxQzog4MD9Pt9rK2tYTAYYH19PbeBmcJM+tra2sLS0lJeV6a3ub6+js3NzRzC3tzcxNtvv52Bejwe\nYzwe54NHeN/+/n4+a5ogr5Z9pKRdkfM+78+zszO8ePEC77//PgDkZKytrS1sbm7msDMzsL2vNdM3\n+qzAqUAYgWlbhaHjHHnN0X1tFF6pLm7jcgPuJhS12+Wz1A43gHXeA1ejZ5RjYH4pQM9PfxXeSqSE\nSwZj6dk6w770W5s6mgA+emtUXT1anuqjSLeqwxTd09bDbTNGkb4kRRGZV+ml1pXZNLZtDJHIwSjR\ntYA5pfTJqqqeXHz99wH8jxeffwTAD6aU/iBmyV9fA+CfNJWnFjSAubf2RAkjrtiAS/AmmOk9pYzU\npozryPNxUOT6H7esuPJ2j0H3quq2Hy9f36PrIKYTyddZAeQwNK3nbreL7e3t3JcEqOFwOLcXmfdx\nyxLLJ8hvbm7mIyMHgwE+8YlPYGlpCUdHR/jggw+wv7+P4+NjDIfD3CccN65B66sjXblTAajy1Xax\n39hPJycnePHiBR4/fpzXkukpv/HGG9jY2Jg7bEP70o0gpbYK4CZWetv7gatJRV63e2Zah2ZmExDr\n+C4pmCYDJPJsonuiBC8qfRo/Kc2fQ6/Xgcs5wOMmtX7PSfgoKPJoF+EhMmKUSmPL3wjKfvhRXV1u\nFPrvCsi6yyDircS3t62pnc676/Eo+bMtaTtvapTW1RFRBMhN4Nxmu9SfAfBNAB6mlN4H8LsBfFNK\n6edhFqZ+B8BvuajsX6aUfhjAjwE4BfBbqxYZ2cBVb0g9yehgDxUsTkauqel+UF5TcIuEO+okT5Bw\nxahGgXe0vwxDv6ticaNBvXBNMtJEI04UHmtJg4CTaTqd5u1Rg8Egt109DJ5xreXzsAoAGA6H+fzr\nw8PD/KIFhoM3NzeRUsK7776Ld955BwcHB9k4oUHhbz8iOLA/Iitdx4n9oUYNx5qRgMePH+P09BT9\nfj9noD98+BD37t3D2dkZDg8P88EhCsopXSYk8cAQjqV+dgWi7YkUmn/25/TZNgCpFAG0tstBkWFf\nPss3CrWp0xWuG6V110plKyhrmzSK4aAcGeFuyLvyXsQzKbW/6T73+PVkKaUmL7ttvaW2cO4SlNsY\nBU3ernvJdYZqSXc2tdmfUdlo0gl19epvbQzhtv3lPEX3lOY1+7OkLyJqk5X9a4PLf7Tm/u8D8H1N\n5dozV9ahOJF1IB1cdVLrQf98joc9qJdVomiyaVgzUhDRs975GtLWox61XD5P71K9HLaZB1qoJ3pw\ncJC9BvYXX0W4tbWF7e3tnInMwzLIJ89S9jDnZDLB3t4eXrx4kV9v2O/3sb29jfv37+c15ydPnuDF\nixfY3d2dO2iERgJBgG+kUu/IlyX0T/ctaz8qcb/03t4eOp3ZOdc0ODY3N7G2tobxeIznz59nsFZi\nf6aUMBwOM09RdriO8SLf21joEcDq/U3KJ/qsa7OaHFW3yyAyMrw9DoRaZt2cIB8Oqi4TmoHva+Uq\nJ3ouOzB/kI3LjS5RLQrSEXmfawSiLaiXymuiiHfdLRAZPVF9bYCKuk/f3xwBk/dHndxqOdqeSA/o\ndV/GbKK2fXpTWSjVXdL/et0dxYhuxclf2iDP2FRwihS6Kx19zR4wn7nZhtyrc6VCUgVIi1XLoLLX\nMDxweaazA5CGjOjx8z5mFnPvbVXNzn4+OTnB6elpPkKTFi4zrPkyBWY/68ThaVcAsvfMVxweHBzg\n8PAQKSVsbGxgMBig2+3i5cuXePnyJcbjcS5Pw/f8zEQvtot1qvEVTcY6Ba/jXlWXZyrr6xHZP8Ph\nML9cY21tbe7MaG7pOj8/x2g0yoZMW0/SZaX0u4OltqOJIiVYZ7X7fWqwci7UhThdIUb/ldpkersC\n1znA59349Rdt+Nqm9h/nFg0sLZ+gybPfo/lZ6geVs+g31zl1xpzWV+qntmDpREOyaWxL9btB4XNM\nQTkqr2Q861wu1a2/83PkTUbj3kSRQfAqqG4Mo7o8WU6dryZQBm4JMFPQde8oyUNTwGUHuEftlrSH\nrus6Vye2rsNGXnvEe5TsBczASkOlegiFt0mvE6iZQKXnOY9Go6x4yC/57Pf7eYtSp9PB3t5ezsam\nh8xsbQA4ODjIb1niSyOYMb29vZ1ftMAzp/f29uYmjq7tEyjdenZwJanBxb7yeyIFwnvZRzyNi0lv\np6enOSxPUCaR1/F4nA9Y8UzqNspyEVJ5bUsKAnWgXHqO9fm8cM+F1whuLEP59vHQZ0p1a9nn5+dz\nx256O3yu8jcN/zHS5JE1XndvTHdKcG4TbDxU7kZIpCP0HlWuTYaS93F0b51eqgNld0oWociYi+as\nfue9EYjW1VECd5IDGjB/6E7b9kR6ookWBe7SHKyTo0VBGbglwAxcfRMKqa7jtIM0HLmoh6ICzjAy\ny1RlxUmu67l1ZTrYK2/KPweL4KIJbDw/ejAYoN/vI6WEvb09PH36FAcHB3PRgNXV1bw/udvtYjgc\n4uXLlzmkzGMpz8/P84lce3t7eZ9yt9vFaDTC0dFRzt6mQcCkLp84LmS04rXP2EYPWdaNkf/m3vXx\n8TFGo1FWtIPBIBsoemiInhVNXpl9rm/NisCqDbUBdPdG9HpE3m+LUNSv6mWWqEl5evlteWE56g27\nF0/5j3jm5yhxzQ0OfY7GlgK3G9H+IpHrKHM3CJz3pr4qgXKJF1XsHgVo0pWRfLu81CXPlcDPjb3I\nkNP7SvNA5XQR4zgCxLYG03WorYHlBnJbUAZuCTBHDKtnVEccRM8ajKz/Om8ZuHyrEid81PlqqddZ\n2Lym4fDIYNCkLoImfyMor62t5Xfsvv/++/jc5z6H4XCI8Xicy7537x4+8YlP5NcBMhN6Y2Nj7oCN\n0WiE999/H/v7+3NZl/SKR6MRAGBtbS176lU1C5376wkZZlRlRw9Ww4rsO1/HdS+EeQK6rUnr0nyC\nvb297PH2+32srq7OJbLpEZrumZOX0iRpUnI+xiUZ8Ha6V9j0jC/BlJRf9GydcvSyShR513V1+nPA\nfHIS5UTnNEPYNLA0zM3fI8OiqqorYeQoasUESeY9RBE1lk2Pps7T8/ZFUQO9L4ogNAF3ySAin+71\nl54vGY0OysDV5KtS+0qg7HWXZE51qPPvxndb7zeS00WA/aZEPACu9kuUp9SGbgUwA1fXJoD5tYy6\nSUIqebB1oKwWtn6PhE0NCDcmSlagtktDa8qXAkdKKZ/xTE95MBgAAJ4/f55fPEHFtL29jUePHuHR\no0dYW1vLb2YCkEGdR2y+fPkST548we7u7pwXw76jF8ktU8DsFYjcfqXjAWBuOxRBmpOPnisjAOxj\n97jZH9PpdO7dzj6WvO/8/BxHR0f5FY56LCh54nGhaqWyn/l6QVV0pTEsUUlhRIqu5GX4c2091hKx\nngi8Nbu2VE/JyIiAx9tTZ6BW1eXLKnxMaahR5hWwo6iLG1b62a+pHDoAA5fznUmDvC9SqG7oRH1W\nB3qlfvNnI3L9wb55VaDDtnK+NvFQ4retnGgfRyBf6rdSvVq/11lqb8RvU5ku527AeduUFgVl4BYB\nc0RtPJI6C63p2dLv3rHMhFavXC1XeqiqlFVI/M1AJComXefUM5kJzN1uF7u7uxlQ+/1+DjFzbzHX\nTjudztxpV0tLSzg8PMSHH36InZ0dTCaTOY+RvDLcR1BjeR4ZcA+G7y5mopUKpyZvMMvcAYR1TyaT\nuWQuHgvK/te1dx4LytA/17X9AAS3vquqmiuT5amR0JYiT1aVuq6PatklD1hJ72W5Xl4dTwpK3W43\nZ/PrGm1d/XXk4NU01zyaovfo/NAln6gcb5/zH3lLdcpfX5WpBowqYM73EhAu0ndNgBHJxyKgXGpn\nE5ixvOuARwlgHfwcuPTZCJT9t1dphChfiz5X+u5Gnf6Vxr2Jh1sNzG0ompjXLUfBgsS3DdGDpELi\nvTwMRb0BFUQVfNbDv06nk8PM+kIFtmV9fT0f4DEajXB+fp7fAkVvmMqXb086PT3FcDjEaDTC3t4e\nDg8PcXBwgOPj47ntUsDV5CAaA8Dl27UIsGwvT9ZKKeXyqqrKIXMCP5P5tF94DZhfA2Qfcl2b9/KY\nUO1zTgB9P7S+o5hls33AvFesbdHf2hiBOr6l6w4IKltR+Q7qdeXW8RZ5Y5QnymHTlqmozIg8IVLv\n5ZgqqTGnZUYesNetHqv+6RJFBMRuKCm4eXsjQyjy3nhvnaIu9QuvNY13HcA25bTo8xHQKznQlwyc\nyGmJDLrIe9TrUW5NybOM5Kst+bj689fFidIc1mTEKFE4kuuSbCl97IH5puQWmgvmYDDA1tYWptMp\njo6O8poViWG6lNJcOFwnk4ZSeR+vKVAxA1oBjW9iIgjz5C16QVxnBYDd3V08ffo0b6Xy/Y3uLfm6\nCBX5dDrN3rcqQPV6Hex4hKeHAMlfFCrkfyaf8ZWM5EWVB8sluKd0+XpKGjP69idtr4bodEJcx1PW\ncshb6bfI0Cspi9I9kVfh5boXwvHScK0mLTqfJYqUYh2YOE+RsvffI+DVvtM26XUtt2QQqZxp9Cby\n3PW/yqbypwZf1HfetyXwqrtWMlzqAKoJlCPjQtvZBBIsq1S+y2rJuCg943kGTe1tIpap9UfGhJPP\nwyYeXLfqvIwiEG5I1hnKP+2BmcRBUC+Sk3k8HmNnZydnJasi4ORVReinAOl2HN6nymF5eXnOW6RH\nynVmAHj58mU+GpNJLOfn53NrwKPRCOPxeE451wEISY0GBWw1InQNkGu6VTV/LGJKKa9F0xPmudtM\nHCOYUrnR0NGsaho+3gYCPU8jY3Ic+eBJZqurq3MnkLkC55j5+mNbgHYlVbLuI4WgykLHpGSR+xi1\nUdBanisaN6ralFVH2g86rhqZKIFltCZc115XuEpRxEq9NV4vKWntL1foukYf8V7qkyYwKBlp2o5S\nHX5PyVjxe718H6OmcS/dH5XlbSrxQyO87s1+dby86nvbkIMyUI4MqREKXBrKdX1964E5UlZ1k+G6\n5ALFSXN6epoPqwDm9wzrYGuGM+9TEObALC0t5XXUlFLe4kRg4qEI6r0+efIE77//PqbT6dx5z36a\nl05OBTTPkHZvhn++j5yfeQIYTxTrdrsYj8e5rfTi9/b25vpoeXkZq6urOarAcpmYdXZ2hslkkl8Z\nycQshl05aYFZJvbx8XE+8Yv7slOavd+ZIXY9gpRGjypYlts0UUuWv4O7Xncw0X7WkJeWqfVFZXi9\ndc/rGEegzLGJ7r8OuQyRPzVMI1DW7Gdei8qKMl29bs5F5nGwfO1LN4D43+9xUFblCyAbp23W++so\nGiuNqkVKO5LXNqAcyYsbUU3zgM+UZK4O4CMDRstUw9/zWW5CJb5Lxu0iAB/JlkZW3PDVfJ4255kD\nHwNgXqTDFrm/VI+GTwFkL5WHhHjSlNbNQdGDNzSpa2VlJYMYvWtu9eEeZSr8/f197O3tYX9/H7u7\nu5hMJnNJUFxTJdB4fe4x8Z6Swtewp/aph/V6vd6Vl4zw0BNgBroMN6+urmalyXLp4bJfV1dXce/e\nPVRVNXc8JnC5VkxA5tnXPBv79PQU0+kUa2tr2ZDR/tV+UUWhk8n7pEn5uLxEz+lvbQDZM5DdOHJw\nU6VQ8n5LyofXtQ+iOVNSYNq+qF26jKDXSyHKqH3KX2So6L1uKHniXR24N7VTn9GlIT+QxtuiMua8\nRgYxcHXN16kOlH0MSkaa1unP13lvJWMzarfWXwfK1Dm63bKNMeLlNfFcZ/AuUqbrUi0riqSQqGO8\njZ1OJztoEd16YI7optZUHblCZNiY4WIAOaFGFb8PlE4Cesqrq6vY2trK50fzNCQK6P7+Pj788MP8\nTmQ99lLrVP4UqKMEC1cEKkAutAxDe19rW6vqMnzNgz30TG1/MYfuadalAQA56/z09BR7e3v5zVBs\nE5/nGAyHQxwcHOSIwnA4nDNMeNKXRym8H7R8/V+6VlJKJa80+s2VW53cRQBWeq5JqXodCnzR6XhN\n9el1VX400pj3oHW6N+pzzNvc1nDSz1EkoMmwqAOwqJyU0txbyvSZqKzICIiMLFJTeNPb3QaUvW73\n9K5DPgYOytou50v5YS5N6VCpJj3fBgfqjO5Fno2Ane30xFQSHSUFZRokfPVuiW4lMJe8i4+CSgqK\nHcytPQCyB8nwqQu7Ctjy8jI2NzexubmZ11kpmJ3O7OjMn/zJn8Tz589z8hUwv1WIZTmgqiHANkRg\nEyl8Cg8VqAO5eq/ccsN7gZmHdHBwMOdx62RUj0UT59bX17N3zWS1jY2N7JGT6J0Ph0Ps7++j3+/P\nrUWrgaLRjMiy175oe137okkRRODDz6XtQN7fvN/rUo9KvawIgKJrzpsm3HhSoJcV9YeWRxlSJURA\nLa2je+iW1BRhKPWVK0SnEsC7cRC1W+cY20k5UyOkyStTY1jrqDOOInLDyNtUZ1RGbb2pnq3TL6Xy\nPUJQMohd/pp4LRld1yHnyeVUeYxk0JdWgMsk342NjVrj6NYAcyRsdZ7FIp7CdflRnuiNMaSlb4hS\n4XKlwHKoBHkONddUp9MpDg4O8Pz5c7x8+TKDtofL1XvQDGdXUnV94kpMw72qEFmPKlv2t77dZ21t\nDdPpFCcnJ9jc3MzJVzwghe9HJl9sd6/Xy2DL/uRJY5PJBMfHx3nNmGBOT5lGBHnmmdjaDobS2Wdt\nqKRQoklX18eRIuEEjM7kjp4tKRP3EEueTx0wAPP7LllfW0XmRoEaGrpk4UaZAzllTNdsXT4jD7bU\nT5Eh4veXAMDvK7XbwY3RID/yta7f+NmN67aecqnNdV50JL+RQRP1YR1vdd651uPtpoHjy4FtDF+v\no/RbJDPXwYtIFimvnlRIoqFKXcTfudS2uroKoD4h99YBc3S9zYDcxDIqUTSQfCmC/sb152iCUkHx\nrGk9DYvnPU+nUwyHw1y+r6+xbAqFhmJd6ZT6imCm1z304tu93MInMbGr0+ng6OgI/X4/t4keL5O2\nWD4zr3u9HiaTSQ5np5Ty6WR8WxaNgaqqskeu+6IJ7Howi+7Z9TOyS2NZIlXe0SQvKTzvc+3X6N7r\nEsvyEHGbZ1w+Sgk3fl9kCBKMPZnLvXP++XKLr8vVGSR1IBApdQf6Np5WG12ixgTv1y18deVrH/l1\nzyqPxiIq33+LgMTvbwPKbcmNAK8r4tnD3qXnrjtfIp14EycuAmVPlCV5+JrXNKG30+lgOp3mZNmI\nbg0wk7QxpKijb7JGcl1SPlS4OBB+0D4BA0Bee6VHyIxkZlX7u2PpcaoA+FtxlIcoNKThO72f3/0e\nEu9Thcm28F6u8aaU0Ov15t7JPJlMsLGxkb1eXbseDod5bza3O/EZJpLx/l6vl/uHZ1/zvdRMHiMv\nfHOWhlWvO8Y6fgp+rtRcLvW5kmLU36/LW6Rs6pSZgqQafQ6iWo96xFqmhuYiHko8A1cNVuDq1kLn\nV8tRvvWa3xP1Rxtl7/0aGWY+R9skqXlbSnOy7pk6xyVqe9NvXk9ksC0KjJE8Rvx6lLGNAVXqi8jY\ndEOndG/bNvEZjXzpb9rPUQKi6lg6Gi9evJjTmU63BpijCeETlPf4dohX5Ym04VH/K4CRXz2snwdv\nkD8NzTLMoe9JVuBdWVnJW6h0XZvKgEKiAqBZ2t43vh4ZgQvvU6FmG7nNiyF8toOnjR0fH6Pf7+c9\n2IPBIIOyrkt3u90MoL1eLx+9Sa+ZEQTdn8wXeOir+0ajEU5PT7G5uZmPHtXXWZZCvC4rURKcy5n2\njRorJe+DdamB5OSenk/yOgWi9+p9UfJf1P46cHeKDJPrktdVUt4lagKnUj11zzbVF42z972fNKd1\nOyhGfeA8R4Bax6OWV/qtVEdpzP35kvFUMgRLhkIUhfP+9LlRKjcypErh5VJb2xJ1rh5mFBnkvFfD\n2dQb3e7snfY7OztzSyAR3RpgVnJr3pWCr5G1sbheJ6niViDUxDAOplpdBGcOGvnnwSIppbyGCyB7\nl/QWNawCXHrouobnvEURCRUgJYaFT05O5hKueISo8kMvlWFsHy+2S9eG6Rlvbm4CQH7lJMM+R0dH\nOQzEhDD2yXg8xubmZj4ulfvBGYlgX9BYqqr5qIL2gQOdZ8dqGNfbFCVgads9nO4Wtj7bNgmopMwj\n+S/d40rQn3md84h9XAppNhklddduyjf7Q8eVf+7xc3cCZYxzr63REfW7g1IdgJZA2Y2J6Hdec1lS\nuS0BZF05vB7JGf/72rLzHbW7jQGsHuurIpavy3fuGJJfjn1KKUf49AyJ8XiM0Wg0d45FiW4NMPvk\n0sarhwjMA+Ai1uXr4lfrZ7g1pcuEMU5cvgeY4emTk5Mcslag5RGWBGQOpHrjvK6ZtZps4Nmtdf2k\ngKUJX3puNxWRe+AUPGZYq6HBNmiWbqczO5mLW8UGgwHOzy/fLlVVVd52xfVmhrhpKADAw4cP0e/3\nc2Rhc3MTy8vLODo6ynuoaSiw3ewXfncF5gagKwJXZB7m1d9oiOj3yEsoKZE2nrODQBuF9MUwXL1u\nN6RLwNoEbiWwcuVed39EJVDwa1EkpFS3X2OZCiTR79GzXl+TZ1kCZTdAFdxu0nelMVjU8PT2lLxl\n8qy7SxblO6qPn93oisZLZYEvGaJOZo4RdWP0jNOtAmYNBWhCCckTUD5qMCap1Vc3KRQo9bg5BU49\nyUuJwrC0tDT39iWWT0HhNQK2e3KRQCu5oLBtmqRW2gJFpcIyyIMaFuTl/Pw8e7aHh4dX9kCnlHLY\nutvt5rdW0fvv9XrZQOEhIuyj7e1tbGxs5OcJ/jqBGE6PxjEaPzVk6hSFgzG/exa9P6//I9lpul7y\nutt4Wl5mSYlHz/j9i1Abj8jvL9Vf8rT0WhQR0XLqSI01r1/rLAFGGyrx0wZU6wzt6Hn3bqkzVE8A\ncR5AHf9uWC1iJPh3L0vLjMpV54Mevm5fuy4tCsoqa5p1racV+tJm0xy6VcBMIsD42o1adHXPf1RU\np0w4iPq2JQIqM5AJWrp1gM/Rcz05OcmnXvEa+0BfpqGWbjRRPIyr17xNrJ+gqdZ1lOmsQstJ4uEe\nhqTH4zEODw/zVqvpdJoz0RnC5z5urY9GCkP7VTXL2F5bW8tnY9PL59u3uAeafOqLOdSIYT/pRC8B\npBo9qkjceo+UaFtAqPseXfeErsgzqVOeUdl1nmebZ53aeLJRW+sMCO9jHYMoa1bBNKqvBMSudyIA\nceCLSA16r6NEJX7qwLyOL9cxboSWni/x5PeW+GI9TVEdH5u6+tVTpvFd4qNEJecKuPoec5UvbRf1\n8sbGBgaDASaTCQ4PD7G/v4/Dw8PsWBDgI3B3uhXA7BPTBy/K0Nb7P2py76hEXJPVteROp5O3SQGX\noOYZ3VU12yc5mUzyNiTNAgUuX2HIulz5UXBLIST1diNlpV6mJpWxbn+O5XFtVycieT86OpprH7ef\nTSaTbLAwE5u80xOuqmoOlD2jO6WU9zX7mbTkj0aAbvNxA2URUNb+0t8UJP2+JgByKoGa8qCyEJWt\nZdUZCm0BvMkTqit30WfcUGjijfeWIhWlZ/xeBxUHNgW3NuRrq2oElsAkGit3VOraF+lUOghtx7fE\nl8p1Ux9EZemcinguASZ/IygDVw82aktRHQ78QNznql/X1tawvr6Ovb29/F4Dtm15eXnuxT9eT0S3\nApiBS0GIXoVVCmXcBmpSOPzMQRmPx/mNUBsbGwCQM5M5YQDMha79tYa00AhUWpdb9johS1sInDyz\nUbMM/RhHTlDdd+yJI1VV5Yzu09PTDMDA5QstmFSm3jrXulk3txf0ej0sLy/j9PQ0bynTozg9m5rl\nE5AZnte9ziXFr8q0BMquXCPwjOqou6dOMfqYUT6YpKdWecSnltPW83Kq440y02QEXJdKSl5/izLq\nS7w6yPC3CDB0bNTjrBvbiF99Tsv28alrv1LJE3VAjnSpltnWWFLvvxR5cwArveYwMi6aZF89Wc9B\n8vsXkb1ojKKEWW0X73v8+DG+8IUv5AOXKB/UTSy/jQFxa4DZs4IjS/R1TPLrkNcfhSd0Auga7GQy\nwdHREVZXV6+sxWro3kGWoWxuCzo7O8vZ2er1etYgy1NeSdqf/tk9QYKYZlw7QHW7XfR6vTmPhf3A\nVz+y/tPTU6yurqLT6WA8Hucws4b5NQlNJ58acARttWx9n/hoNJqrO6WEfr+f+989LF1rdwXj415S\ncj75SoCrfedLAvq8K20Sn9OIi3p1KkM3MW7beEZavxuI16UIrNooWwfayMCKgFHr8DFQOdCy1GDT\nLNw6wyEC7BJo1hlPdUYf66grt2Q8RfKnv7G/ouUz51F1UlR/qZ1RvysY+nJUqV0RRfMx0uGlXCIa\nZjT2d3Z2sLOzg6qqsL29jV6vl3mhQxMl25Xo1gCzKrkozFI3gK+LImXkE1jPj/Z9adHkW1lZwf37\n93MWM08NI9gqETgUfHhNEwpKpC944HMsr07Zaf8SjDXhimFg3TcNzGcJ69GMKV0mXxF4NfubR3my\nLfqOZ+9/fqeQ8/3MCsy8zr7lC0gYTmJ/qBfFPde0bl3x6Hhr2ExlotSPBMnIoFOvS42P0slCkQJs\nAny/5jzWKQi9R9sXZb7q73UZp5EhWAc2kWHk7fH+0Otejic4Rf3n5fK7Gxt6j3rpTQabPuPKP9Jv\nTb+X+G1bRqnOEnlEpGQsta0vAiyVOc/biJI5/fmmNug9kczULXXpVkw6Cmtra/lgJdVRpW2bdXRr\ngFnDmE2K4qMEaedFrSo97EOPzCRfLpRMAuBAEuBoXVEQGNZVD5mhW3qEKhzOXwQq6pGSVCAjEEhp\nth/vjTfewGAwmNtTzUNFeNa1grKuqQPzR5VqOB7AnLASULkWrH1HcOV3JnrQK2b5nEh6njlwmTzG\n+7QMTv7BYDC35SKlNLd3kWPHkHzpVYYlUlBjEiDlxY/wUyNKoyYRCEaemRqKLpOLgLLWxWc8tEce\n/KhYBb2IvxLgal/V3Rv1rden0QLtB31O/5eAlXLlwKzGVGR8KUXzy3+vMxKc70gW+ExUtlOdVxmN\nmddbWhsmlfJbnM+SgaXRCDVgSl7sIuS6T+tXWSl5ygrKvE7DX+8F4lePNumNWwHMHIS2Qu2T6FWE\nzEp86cCoR6vKUnmKnvMydTJzfy7BFph/oxQPPtftAJrApJ6n168A44cfkEreATADs62tLWxtbWUQ\ndo9RrVmdPP5qPNZNY8aPYlSvkZ9piOj6MMH1/Px8biJ4SDs6HjUCOF7j6/x8nHnQS1VVWF9fz4YR\nTx7zk9Z8snv7dHzZNr1HDQNdM1el7/LP/lClzrY4H9G4u8dQp9j5Xw1oX8dUgGkCozpgcL4i76vO\ni/R+UtmK7nd+fTw5DiofLDM6kKNubtUBr+uSErlBEz2vMlHiK5KNEs+qc+sAhvNRDbmovMjgYn9S\n17gB3EbPl8bYydtaZ3joOKvR7/drf/sc5bxsasetAeZFKJrsrxqUvT610jTtXderdL1SeVNSJavK\njVnXBCFaYLyuk4HbifR0GVc8wHwoW+9x8gxr4NK7X15ezifWcP+xlgsgh3/ppapAKk+dTicnKClw\ndDqduUNW3CJV5bKysoKqmh1CQgNFhZ1Kk1ul1Cs9Pz/PJ5Ox75QHGj5qzBBAeQIaQ+TMmCfvEViy\nTyNFp8sBmlTnho2HzUvyybp0XEoK0XnT71pmaV55+Rw3V/RNYBndEwGbtq1UblQH762qau41qtEY\n6f1anxsZmtvAP80PUIPY82aaqK0ejPjl9QgoI4Cpe77U1yxbjZBIF2sCYumeUv+rUakRssjQjcoq\nkesRlhXpZ6AelNUZU1yI+sqN7oiPiG4FMHuDSE2C6pOmzkJt6oi6OvTgDF8/UK/B21FS1goEqkzp\njXU6nbnkMB7AoQqGYVWeRa37pHmvhpJKiUrOq3p1DKEzQYteIsvTNVLte/LAsDd/I58MUxPIFYQU\nWHRdXQ0RhuV5prZHLWg8KNhxTZ+eNu/TenQLlfJBgCYgs3wmsPF40RKVlIgaeRxrVYx1iUSuzOo8\nHOXBn/F5o0qjaT7xv7eplOTi5USg7Dz6Z1f2bYCk5AFpuQqmHg3wxDGOl46Rhll1jpQMM9ZdanPJ\nQCn1VXTN19IjUv6i8YjKLMkJy9MoT0RqsDsvmmdS4svlqcR72+8lA9D50vEG5g3fOnxRfaI81xlt\ntwKYdRIAzSDKjlQPVS3ZprraWFish5NM3/S0yJ65kpXmAqAgxdcaTqfTDJAKhN1uF8Ph8MoaqoeO\nPCFK+0B/jyaXgtV4PMbR0VEGH1dauqVJtyCpR0oeuG9ZQ4Ka6c1yCNLcCkSPRNeu9ejO1dXVOcXI\nDHFPJONGf25Rq6oqX+Pz6v10u938MhFtqx4XyiiHt5ufdUwcSFRO1MtSUHTAbmNxN1E05hHINoFD\npPBU/kpg3HYOkg8gXsMt9YMbFbrMEd2rQOJLM2rAqUKloUnetCw/mKLtWJUAw+/xMhcZt6i+SD+4\n0VZXLvmpA2Wvyw2vaKmpzlgrAfcibdW+jOYl9REwP8Zt5VjXyam7ub3zRh5zSulTAP4kgE8AOAfw\nA1VV/aGU0n0AfxbAVwJ4B8Cvqapq9+KZ7wXwHQDOAHxXVVV/rameprAbSSf+RV1zQqHX6ga1DVGp\nM1Tph1a0LdMFuqSsCMCsp9/v51C2CyCFV8FYw8ScIAqmmhxFxRP1Gevj27AODg4wmUyykaDrvJ6Q\nwT+fnCqcADJf9Pz1ndYOfLpezPD62dnZlZC+AhtD3lyX1pCxJq55f5JHetaMGLDv9ahSPdSDE03f\nbkXDUV9R6WPOOt3I0TFhORoerDNAI9nXOnWsmyx9fS4qv6Tkoh0K0Xz0OiI5r0sqi5RpxLt6et4f\neg9lhPNd+1rr0uRNbyPlVuvzfta2XIdKY1PqD/4WgWJUbhQlqAMjvaeNLtc+LdXrPHlfLqLPVV7c\n4IgMSF5X/ept8770/lVPudPp5Nfc8thgnuMfURuP+RTA76yq6v+bUtoA8D+klP4GgN8E4G9VVfX9\nKaXvAfA9AL47pfR1AL4NwGcAvAXgb6aUPl1VVby7HHFoo9TpPtF4LQKvuueb7lFPGbhc72jDY/Sb\nKhPyr/fyj8qAoWzlR61zetR8ngNPENK9wDqpFJxdIFXwWCcBiglo3O7kZ3Pzz9eZGUZWwOLvqgDp\nZei2LhpEHIvBYJBfDcnXYPZ6vbmsdh46wjFj2znJNJFMx4U8kU8aAO6hkl+uRfP0sU6nk8/5Pj09\nxXQ6xfHxcc5k12UFn7zKh97HfuAyinp/JeNTy+dnV34up86X3hsBWd292ibKUjQvS95e9NnLdMO8\nri9K90SgrN6RXvd5r0pa11PVU9Y5rf0Q9Vlbivq8bbkux1HZHmHTNjiwaV11oNqGlOcoxM3fnP9F\ndD0/61iUwF2X6FzHRXy7bHnIOqWUTypkbsx0OsXh4WGR90ZgrqrqCYAnF58PU0o/DuBtAN8K4Jsu\nbvsTAP4OgO++uP5DVVVNAXw+pfQ5AN8A4B/W1dMEctopDspULpFFU1IAdcQyer1eVtoR+NdNgrq2\nlBQlB1e39ug6rU8Aeos8qEPf26whY12Ddg9A26wZxp1OB4PBIBsIeiA791C7YEahcxoMvV5vLnGM\nbWVfa9a4evm8hwLNsHqn08H6+npu+2QywXQ6zbJBAPbXYPo4qxKlgmXd/jYtADm0z/VtvixjZWUF\na2trc0ZLBMouI6r4VI7ZV1QSypuCwk28LiX3InitRMqzlqFtaOPRRF6YttX3Yev9KnN1BorzEQG7\nhywjvrxMTTpUQ9N1ky5z6P0lvr2eEiA71ZXpxqX/5gZi1I4SQLelqC1eT8R/NPZ15eo9DppNgKy6\nQiMiUVu9DJZbwgnWcX4+O5r45cuXYT8BC64xp5S+EsDPB/CPAbx5AdqoqupJSunRxW1vA/hH8tj7\nF9e8rO8E8J0A8OjRI/95jnRAos7QFHYqY30uKs9BhaSJXhwcTkK9t846L1GTlUuw0wSI4XCYQ6qs\nXxOz+Hqx4XCYQYDr4fRe1bBQIWeYkIqe3m1KCaurq7h//z6qqsLBwUEOHbMe4KrXr9649nNKl5EA\n3SpEcNGMcfX02Qa2aTqdYjQa5c386+vrSGn2ntPhcJj3BnMM9bAQzUdgf3Q6l4fLk8gD+46Z8Sml\nnPy1v7+PlFI+KOb4+DgbMDzbnCe8AcgevSuFSPmRB+DqQTs+wdmvbQCwSZG60o2UfATGThr+Y1ua\nFLnOKQU3N6YcPHRJpok0+qC8KiirnJaUed28LxnopbXmOg+zNO5KrsfqeIrGTXnyteHIuPA2NIFV\nWwelDeBGYNrUL2r41wEycHmQkgJyncPl463LLap/mczL19MOBgMcHh7i5cuXN/OYhZF1AH8ewG+v\nquqgptOjH66MYFVVPwDgBwDg05/+dFUa8JIQuJJgtqyuN/rgR5NDy1Vw4mRS5eiDtYjFWDexVQkA\nl4dp8M1Ua2trec2U24D0sIvhcJjXUgky/uapCBDUitf+ImCdn59jZ2cnn+MdgbKCiBoy2u/MHKeB\nQ2LYXcPiLFfBm7zQ8+/1ehgMBlnwR6NRDmnznde61UknvmZeayIYvRqXvbOzs5z4xjX30WiEBw8e\n5HdJAzODbjQaYTweY29vD3t7e3MZ6O41q6Hg48/7VcZVpkmetV0yXEu/a3kKoB7O9fL4m3slbpRF\n+Rh1c8bbrKDrBh5wKXNRqFjHvATIPi9UIZc8u6Y5HwFl9FzJmGgysiK+Sr83eaDAfCSIPKkh4fc3\n9UNdu9yIquvL0hhE7fI5pHqkCZAp76VzBdgfkeHqBiJ1E8872N3dxdnZGTY2NrC9vZ0jatx2WueQ\ntgLmlNIyZqD8p6uq+m8vLj9NKX2ymnnLnwTw7OL6+wA+JY9/GYDHLeqY61jvzDqBpXLXlz1457rA\nenkM5Wr2tQun8xm1oQ1FZblhQgHWtyppctDZ2Vn2zjQ8zCQqFR4HXQ//aUKRehDT6TQnfdHwAa6C\nsmZwq+BGhpECO3nTd1X7bwRYXTcfDAZzSWFra2s5zH18fJwT5pRf9Zg0IkIPnCDNPmMbuU6sE3Z9\nfR3b29vZQ6Z3fX5+noF5NBphdXV1Dmi0fg9tRnLkABWBt8tUSd5coXiEJMo3qCszAmhVYCVvsGQ8\n8L/y6tvpWC4NL+2XqEyt3xV1BJQatnTgivRAqU+8vVHbo3lfojb6JhovbWepDj/fwI0yBae6ut1R\nivhuo0Obrkd63XWHLyk4qbyrLnOnwg0y1q2nAVLfUibX19fR7Xbx4sULHB0d4eHDh3jw4AH6/T5O\nTk6yp8w8mBK1ycpOAP4ogB+vquoPyk8/AuDbAXz/xf+/JNd/MKX0BzFL/voaAP+kqR4lXzwH5gef\n3/lfFYxTSTj8GoFdFb6GepVKghXdp7/5hG+yCPnmJa5ndjqz/c0a0lVvU8vS5AXv00gZRH1O71y9\nV+DqSzEUSCPvwoWd4WIqX24L8zVqfWsSlbRmZauC5lu7dH8315cJmpxIHFeWyy1q6j3TEKBBxLb2\n+/08qWgIMITO8DXfn82T0qqqyuvyHFcHNgfOSCm4vERy5XLn46v3aaKSy6lTHYDwNyp19b7aGqss\nh+Qyzd8ZCVJvmYaUenraDle02r/Kv66N+3i06QdSW9CO2r0IRY6D1qW8usHkesHBbJFxKz3j373f\nne+6euvaVwfEEdBqNE2XLlQnR+WlNAtTM+Fzf38fo9Eo64TNzU0sLy9nUH7w4AHeeustrK6uYjQa\n4enTp3jx4kXWQ3z1b0RtPOZfAOA3APgXKaV/dnHtd2EGyD+cUvoOAF8A8KsvOutfppR+GMCPYZbR\n/VurmoxsUgmw2CGuwPQ5TzZpsvR9olXV5RYHekYstyREbSZayYKMgL1OqPVdnrqezHcPc02TPGsm\nNtsFzCs7CiYQW8Ma8uWzTp69yGv+ncpAFTc9f4ad9Dflw71MT8ij4aIvqdDXQRJcdb393r17+WAS\nlR9mU7NszR7vdC7fsKUvzuD14XCIDz74AI8fP87vk04p5b3UNETIl1vhJdDQ/nCZUkUbyVCJfB61\nDS/WkfIbrStHHpbz5N9dptgXetBMGwMgAoSIj5JRouWXdENb8jpdDuqAQX/3e+qMAToZGkFq8iqj\ncpra1CQ7ixosbZ6lQaV6p+SMMGTNOcn+UE9Z+1PLBZAzq8/Pz/Hs2TM8f/48v7SCjtPLly8xHo/x\nyU9+Em+++SZWVlawv7+PL3zhC3jy5EmOou3u7t5su1RVVX8f8boxAHxz4ZnvA/B9TWXbM40DF1mH\n+qxbvE2kA+EJGk1CX7LgovtLiqF0r/YFwUvPyO52uzkjudvtYm9vb05I1TLU57WdFEZP4Ip40TCu\ntpuCG4E4J4IqMv6m/wlswOyFFGyfj6ECI/lgGLzT6WBzczNvtdJTmei5EmQfPXqEzc3NubPJaTnz\nP+vjSy00nKnyRY/46OgIOzs7eO+993JoS18WQjBnIgj5ixS01qOy42NUmgvRM3pdDZ26o0T1mYhH\nJ63Pw78Rz2qIeN0RKHOsdWmDwOx7zb09JeOy1HbVJ/68ymcTOQ+RY8HfIoCuu1fLLNWr8sy+rDMo\nXN9F+i/SvT63I35uSqqTXb+xDjUKde2cBrImeDFfhEY0jT32qSbRamTp8PAQT58+xcHBAba3t7G9\nvZ114MHBAU5PT/Hmm2/iwYMHSCnhxYsXeOedd/DkyZOcG7Ozs4Pj4+PacPatOPkLuPr+ZR3MUkIG\nycNYdURh1zUqLS/a1sD7dCK70Lon42Xwf5PX7e2iguB2IRUyWm+Hh4dzRzr6Wpm+ctHbVeojVeKe\ntMRQMOvU8CLL1jHxMCqfYRmdzuwMbYZ7+a5k7VcqX+3flNKVjHXlk9u7JpNJzjIfDAZ5GYC8cWy5\nlFFV80l/wLyXz4kIIL/16/j4OGfLn56e5oxwGkVMIOPyxCLeWjQ+OoaRvEVgqPVS6UQhY32O5FEV\n1hNRyeBw3uue0SUklWnORd37rnNW26lGWtO882WU6BnVG4uQjlPdsz6e3m4f02hN3MfSE928D9o4\nIJHxVBrXqJzI4C99V1451zQyRX3GuchoFOWYc5r/CbAEXvYpwZmRNn/zXK/Xy14wc0eePXuW37l8\n//59PHz4EMBsSavX62FlZQUPHjzA9vY2zs7O8OLFC7z77rt49uxZXn9mguhgMMDm5uaV/ibdCmDu\ndrvY2trKEy5KVy9ZYx5KrRN+TixdewTiU2goiCw/2k+sITa1qn0SlrybkkDqNQA5ZM0/gnOv18PO\nzk5ul/afHkZCXimUXpeCrxoiBHSCC0PC3K7U7Xazl6verLdHJwMtWwIzeeD+YIIiFaCeoqRZyNq3\n9H7JM/lleZqspaDM591D4ndViqr4KJ8cAxpOa2trWVEwZDUajXBycoKDg4P8piqWSfDxpDmVj5JS\nc2Xvf+TVjUj9rJEOBwQljXyUSPnyuat9FxlqPp5uZPOzjo+v16t88Tc9ptb7yUFPDdGoTfq/RKo3\nSr+3oZJhE+V4OEVAHOmVSNYcgOv0VYnvtgaIe9m8RkNLDS7WyyU7Rk449zXKpV40jXY3dIbDIQ4O\nDnIeCo1oPtPtdvMZ+MPhEM+fP8eHH36Iw8NDrK2t4f79+3j06BGqqsLu7i46nU7WMaurqzg+Psaz\nZ8/w3nvvYXd3FxsbG9lgPzk5wfr6eo7GlehWADOFiet7wPz6YrQGAFyCsv/Oa+ods/PpJVCAKAwE\nYSYW6XGcPLRDw8C8l99dSbINbT1p/tc1YgocAZlWGYXm8PAQT548wdHR0ZyVr+E/vaaKhkLqE1mf\nZb9wDZfARwuTa908QCRSpqoU+ZnZ1f4qSvYlcHngCC1WHU9vH0PFw+Ewt4sZ9vfv359bh3fFq4YL\n69b1dydtDzBLkONakUYMOOmYVLa8vIy1tbV8epmua6sMaD0OJA6y5FvlSUFZozxOCtqq/FU+OCci\nvvya94+OufdbtFShZauxovPT26YvQ9GyooM+fL3RI2Web9EGYJzaAJePo//m1/S/GyiR/iBx7viO\nCa1LjZkm3p3HUr9EoM7naGRzblPfa4idYMsyaGhrmzUpFLh8jSpP3uOpezwYaTqdYmVlJYebuX94\nMBjMLQnqEbzLy8s4PDzE5z73OTx9+hRVVeHhw4d44403cP/+fZyenuL58+fodrt444038PDhQ3S7\n3RzqfvLkSd6ZQYcAQE4U022jEd0KYJ5MJviJn/gJVNUs3MokHk4uBUG3/Kh8mK3LgaHiJ2hQwVDR\n69ohid4Ps4QJNuPxOAuEhoV5uIR7isBVbyXyiPw6JxIPpBiNRrntFL7V1VX0ej3s7+/js5/9LJ48\neTIHsLquXPKYfBI6OCuPVFhsI9vNSbG1tTUXEo0mLOvk3my+dEKzk1NKObT04MGDXJ8CpI6X9hsz\nu7nfm+NLD1XfCOXeG7/TuKrLwndg4eRi8oda4fSgl5aWsuGkp6g5uSxGoMzr3gf6jBtgTcDi5Tqw\nav2R0qWC99CzP0/ZUPmM+FDQ9JwC/a/eMOvReaDj5lE19djqjAydF6WQv9bT5rcSAPs9pbkayYU/\nS2Bh36jsK7Crwa5tc4NGeeQ4urxqGeRHjSodG+oBvoKVoOmGOJ+h/lMe9KwCbedwOMyHDSkgajSN\nJxpSl9LhYe7C+fk5nj59ip/4iZ/AixcvMBgM8IlPfAKf/OQnsba2huFwiN3dXSwvL+MTn/gEHjx4\ngPPzc7x48QJPnz7Fy5cvM390GDqdTj6lkH0XLQ+RbgUwszP0/cM6wQiQwLwlq1aYrgdwXypfAsGk\nKU00Yll6ChS9ZS7KU6mndJkmz8xeFRbyr+vW3GrkwKyKU6+pkudZqmwf/+gtHx4e4id+4ifwwQcf\nZIEGMBeeBzCnDL3veD/r1//cI63eoE4MnjjGcfG2av+yPB5jCQCrq6t5vDj+e3t7ePnyJba3t7Mn\npOPsxoUqp7OzM6yvr+eN/ezHs7OzKy+QULDSSaKkoO1rj+rlq4I6PT3F0dFRNjoYtqIhsr6+ng07\negnKi3oFrgxV0SmP2ke+3SxaUnHSfmBZ/lsdsfxISev8JZFHN4q0bvKrOwkUVNRYdwD2PuU1zSPw\n5YrS+rQSI2jaV9pHbbxN7zfvL/YPvSl6bszmVz41qkejM+pv3fZJuVMwo2HMswrcIHf5YD16iJF6\n3ryXz+lSDe/hcpiGkTlOumSlYW3eR1BnO3iOgO4lJiDybXMEXF7jbgyC8WAwyPqeuzKePXuGz33u\nc9jf38eDBw/w9ttv4xOf+AR6vR52d3dxcHCA1dVVvPnmm9ja2srPfPjhh3n7FAAMh0NMp9PsHHJM\nPYwf0a0A5qWlJdy7d28uW5UTRAdLrV5gJqAcAODyTUXqKbOTFJj9vcAUAgI4AZtAwmQkWkAMT1L5\n68skAGRB0fWO0oDwHk48TVbREA3DLMPhED/+4z+Od999N4fzKPzq9ZHYnyWPjPWpwcIJr/t02X/0\nBHn2NUEZuJoUx1AUQZmTQ5O1jo+P8eLFCzx58gTdbhf37t27ss/XvSANfdEg4qlfzL5WAFBP2EFM\nLXq23RW51q2ePBUf28AQGcef48m1J76li2OuilBBJQKsklfmRgrX1+nF+r3R81qXGkNav/PgSjza\n7qbGAZ/xSI63iWNDXjTpjuXr+Dv/SlEkSHUIP0eRFC1Tlzi07d5HUd/UXed3LnNsbGzkU+40DEsj\nlk5Ct9vNyzbqHXc6szPj3XkhKDMap8DJbZY0srVP2G4HX5anr1TVucH73EvnuFIP6xzl/Tp/1Wmh\nA0B+Cco8H5/OGeVHDQ2dB9T7BGkuc02nU+zv7+P999/H06dPASADsmZYD4dDbG5u4tGjR+j1ejg4\nOMCzZ8/w5MkTAMD29jYA4OXLl/mwI64nsx1t6FYAMz3SkpXuHg8HeW1tLXsnPIdUD3aglULB0j89\nBEGTivid6xQqRADmrB4aDO7h0ftXT7cUMlKvn/fTmiUorq2toapmp1Q9e/YMjx8/zspQQ2zuEZBP\nBWV+13aoJ6PlUHh1jNin9AZ93zGJ3qq+mlGzrRniefnyJQ4ODrCysoJ79+5hfX19LvLgRoUCmnqt\nXLdmO+jh+LIFr6viUvlgO9wAiLwj3s+zb3WZgzJDy/zo6AiPHz/G8vIy1tfXr4xNVH7bSUzwUO9E\n29j0rJLz4eCpBkDUH24EKR/qjaqxE3l6yp8n/9GIpmFMQ9vbSxlScAXiyIP+VpI5nVsOxh7tUO/b\nn9W2LS8vY3t7O58YxVyITucy54FgRXB1D1jL5HymcUjw5DIKn1f+PHGUn2mI+xiqvtJxLBldGm3T\n5QnNI9DfOYcpT/SKGf3SbUYEY+p7jZhp/kZKKS+T8l4aO0dHR3j69Gne7ri2toZ79+5ha2sLg8Eg\n131ycoLt7W3cu3cPZ2dnePr0KXZ2dvDy5Uv0ej08ePAAJycnePHiBarq8kRCTS5TWbj1HnNJeajl\nxz8NbXCd4PT0NFs/al0DlyB7eHiYz1HWjqYgaiIJhR64TCxQJc2B1SQTvQ+4BEBOOJ2k3j59LSLr\n18l/cHCAvb29/J9Wo76KMvK4eN2tVk2scnDg77SsqYTZTwzRRoc7aNiM+wS73W72sKlcJpMJ9vf3\ncXh4iPPzc2xubqLf7+dwL4C586tdiUeekvcrEL92kIqDEQBtH8c78rLc0+KzR0dHePfdd/NaFpUZ\n+5CZ2h9++CEODg7yi0Ec7EpzogTOfl0NDPIRlV8H1g6eUV1qxPgSSErzIWiSG1PKr/PvQKd9z8RD\nDfPqHNY5yvnhfCh/viatfRUBqxsrPufcYwTmvWw+o21mTgzzEJhoyWNn2U693w0NAho9X41g6BZA\n9gHzYrQ/CNiqDxgZZDhd+1cNebZPl/bYjxoN1HtUf9Fbd9lR2VpfX8/eP8vQ/B7KBRO9SNxmqqFw\n8jgej7G7u4vHjx/j2bNnSCnh4cOHuH//PtbW1jIv5Jt6an9/H5PJJCeSra6u4tGjR9lxAoC1tbW5\nbPFoznkUU+lWADNJlYYDDIBs+dFrY4YrvVK1OI+OjvIa6XA4zFtnOMF0/5vWr+tfvhamk1STXdQS\n08m6sbExpzQUMNTjpsVKfnkYBQd2Op3mYx7VaGCfRAdWaOiIgkXhVGD2ZBwmmHW7XYzH4+yNaF9o\nZECjDARj1s/x0TZwLLjOwz/dIqXruTqRI1CJPDHeo6Cu6+2uiGnsudxpGa6kOV7vvfcednZ2cjs1\nqkDP+MmTJ9jb2wvXmVTeI2ryeP0elUu2je0vlRd5tFFfOPi74aLhZvcU+byGt6PoQ1Svb+viPPC1\nTU8Q4nX/XaMK3o86zhF/mkTqXrAbHZEO0znDsmhoM0/BAZyRMeAy6U2jXerAUAcy8ZEg7SDBZTuW\nz/q63W7eacEX5ygYUqewD3mWP8uYTqdzc0T1kxL7hRElfmbiLZ8l/zQQGAFgH1F3MrEKQHbWqMuc\n59PT2Yt/Hj9+jOfPn+dzDh49eoQ333wzH6HJhFLqpuXlZezv72N3dxdVVeWs6zfffDPvc66q2Vn+\nHI+o7ZpPUqJbBcxK6uWxgaurqznU2el0ckIWhefJkyfY2dnB/v7+3NnEaq3SO9LwV+SVqLVG0s7U\n07DofbMshp729/ev7JnVEA0/azagKo3Ia1DPl/2ioKJAw/+6RYHl8ll9GxfD8imlvB6q5eqf9qEa\nH+rRnJ6e4vDwMAs5JxUnI615Tmo+R9480uBgpmPlylEVjZJ6yVzLcw9ADcGSJ055e/z4cY6gcFJy\nfM7PZ2/n2t3dRb/fz4kmETg7EDSRjgXb7caZ8tumHgcSLU/XG9mv7pVrdMYNWv/vPERhYtarbXMj\nTMdbQ6C8pqDF8tTbLPWD/tf+II+RceFGDa+pzPE6HQs9r12Xszh/GBWg1+yJrARn5tSoh+cGkzoV\n1Dm6zVHLUyNZI2vUb5PJBIPBIDsf5Jd9z6VAhnN1CYvlMDKgyw1nZ2dXZIsyQF3J5UmG2bvdbuZD\nj+JlGexH7l/mnByNRlhbW8Nbb72F+/fv48GDB1haWsLLly9xcnKCzc3NfP41t1rxjVFLS0t5u9Vo\nNMKLFy+yDtMcEk+qi+QsolsFzMqoKgAuoFOp0UPmIQ4vX77EkydP8oK7ngyj4SUFP1cWDjzA/Iuz\nPRznoSyd+EwGc+XjayoKlARIflfL35Wj8qLAE01uBWS2kxOv3+/nPqS3zs8M+ZT6hkTwZB9xjX84\nHObDNQjY9BgjwdWkDbaR7Y/6uUmGIu/aoyPsM+1bklr8eo33vnjxAo8fP859QO9B3++8s7OTDxbh\n0ocaH/pXB8p1bfZoTvSb90t0b/SbAzuv+dKOynTk+Xp9ESg7oGu5lBEvO8qtcAMh+p1elpPONSV9\nPjIy6sYnMpxSSuj3++j3+9mo59GQnL/n5+f5NwB5JwgjU51OJz/DSKJmcKucuWGr3q+OEX/X7aoE\nQ87L09PTfEgPd1AQrA8PD3F0dJSTsvi+cgVizil6tFwP53zTyBnb4nKs80f7lfUwSYy6jNubdnd3\ncXR0lNepP/nJT+KNN97A5uZm5mVvbw8A8Oabb2JzcxMpzV6Qs7+/j4ODAwCX22o5BprQqUaNG0aR\nXJTo1gCzT9yUUk5nX19fz2EVJiQtLS3h+PgYT58+xQcffDB3yAYnH3CZPcvPamH7IPt1VT468CQV\ncJYPXL6qUZWDJkawHlU8mvil66DsD10TcyXB/vN1bp98AOY8aCaEcO8fJ79PVJYVGS1s2+npaY5U\n0DumV6Dn0Hp/6nXtA1XM7rm2lSX3wlSRqxLQ6IeGO5U0tDYajfDBBx/MGTIMm9F4ZKYnT0hTI0n7\nLhpHBZQ2VDIyI7AvGQBepyrsCOAjhaPeaslAKvHkdXEOExB0m4yDjQMQx9eXkZwXygA/67O6ThvN\nN+2PJsPK+5b8sE08+GZ1dRXn5+fY29ube8c6QU3/c+mE+TEsh+VrlIzGsO8AcR49guOfuXzDIyV5\nZsB0Os3bHcnf2toatre3cwQJQD6fAcBcNE8N9Mj4pj70fmZ/qKz5+9C5fs85vrq6mpPtNjY28rn5\nzO7m9dXVVQyHQ7x48QKj0Wguv4bOBnWcLrm5h1yK8tFZKdGtAWYnrtERPDqdy6065+ezzdwffPAB\ndnd35yxHDRVr+MOTwlTZR0Dm3/mfiQ1cc/COp/UYhbsir0w9Wg48f6NCV0BnXa7EUro8Ecd5Jy/q\nTRMA9/b2cHR0NNc/kZKhwqVHzPop/Lq1iiFyGlDkSdfgHWyXlpayoHoosg1FipdtUQNHJz49Eyrw\npvVsejEffPABXr58OWfNE5TPz8+z17C2toa1tbW5+vjnCsgVT9SOuuv8TeUwSm5q049evl5zYGsC\nf+WnzijQPlcDk/LmyU6cO1ofZZhzUPMd1NhUwKYMOMC2AWUtu47cSKmqKq8rM8LHeUKet7e3s9HH\n15lWVRXqBIZuNUlLdZAmeKmMuz5UwHaeabAw27vX6+Hk5ASj0QgHBwc4ODjInnun08lhYG5H0qVD\njiXHlqCndase1990TLgzhPcdHR3lNeDj4+Oc+NXr9bC9vY2trS08ePAAvV4v5xzR4x0MBvnY3ul0\nivfffz+fqsgjNQ8ODvLLJ7jf3J0JN7pVdtRpHAwGORQf0a0DZgoMGWdjGXY9ODjAkydP8OzZszwo\n6hFxUD1ECly1BPWaK0rvZJbLsDMNBgBzHjlT++k58brv+eNaCgVPhZMZzKqcVPmoh8cJpUaCtlnD\n5xqOqqpqbh1ZPUVNrGG/cnLpqVWcZCyf60m6xu3WsEchqGwIyjR8SmDihlBpu5b2s8uWeuLat1Ho\nT5X1aDTCO++8g3feeSf3GTDLwOQk49qaLr8o7wqUJYVeZxwo1QFgXVh2ESrd74aheu0RuJe8TX2e\nfQ5cyrmfkuaetMqpJkcyz0Pv4xzW8eZzqjNcD9SNU12bdA6QJ/JK45ZeOwFkc3MzLy/t7e1lsAOQ\nt5Ry1wMNFgK4RwDYJjouGvXRuaCyrv3jhiLvXVlZwe7ubj6Pnoch8R494e7g4ACj0ejK1iXte15X\n50DB2OcorzFkThkbj8fZm71//34+P4CeMQ+IevnyJUajEba2tnKf04B4+vRpPr1rOp1mg5svw6Gu\n0iUrl4WSrFMvMirJsHlEtw6YAWTgo7u/vr6Ofr+PnZ0dvPvuuzg8PJwDJ2D+MAFPhgLmzzHW//zs\nVo6HjCnIXBti5jItQVqto9EIKaW5c1s9NK3E51knrTzNyFY+FQw1JM1Qv3oBup1KJ4EqTwokreHx\neIzxeDzXD2q90jNXfrzvKYClMKP2vRoMvhdVQ+W+5FDnqel46kTmdY4XywbmvXk1bFjmZDLBZz/7\nWbz33nvZOOO6Mt/JyrAi15Q1M3aRkLyCctReBwv3Yr0PtE9Lnvd1Sb2pEijr58h40v+UNQ2L67Ps\nRypuTZbUPeq+1OJ95qBckq86b1jHSPuV+S/0cmms0zhXI9hBkX+MQNGwU28fQE6a4nfqDM5j7zs3\naN0och1FPtXQU6Cnx59Sygdo8FkaFpwfelIZvWQensI+UeNI26U86Rzl2GnyKB0DGsVcBlhfXweA\nvMac0uz1r1wjn0wmePz4Mfb29vJWKIa1mWzHsxh0F4mOe93SFPUiI5oMndclId4qYKb3RAuRnvPK\nygqeP3+Oz3/+8/nEG+CqkLni085yDzn6rPf6d+6FIxgS+A4PDzGZTPKbSEajETY3N9HpzBIDhsPh\n3PoyB8etU+ByLyGPceRzBF0KLY9903UjXWtRD9wTwdies7OzvLebioAWJxPDyKuCchSqUU9dE4N0\ncqvFrP3K+xxUFcwJoqoEXeFHE4TPqGHF8j1s52tDOukIyu+++27Orubr4jY3N/PaGQ0rVRbKNwHA\nZVLb4bJM3t1z8Tmg95W8vkVBuc5bjMpu8jYVXLWtDuoewdDlo5RSDn/Se4zmvAKT86XXgcvT9pzP\nOiMwIhrJ29vbWFlZwWg0ykc4Mg+BHjHXZgFkA39/fx/9fh9HR0cYDodzW30Y2aqqKodRmYXMMx0I\netxqSR2lRo56yvyuBoL3l0boqH9YFpPEKO96ahf3nFdVlV91yKRS8qh9rcm6aihpRJDgprk4mkjK\nJTTmjRweHmYgZD88fPgQ6+vrSCnl3JoXL17k1zlSrmgQMTFNj9VUXaH6SmVF5zH5pu52mQ1lqZXE\nfQSklgUZp2B++OGHeOedd3LIxCeKdkzkLZVA1ztUwYBEQ4FHUALIFs9wOMyDz7R5lstwk+7t1dCH\n8w9cTgKdRARrEoFeQ9cM33S73bk1F+0L8s0kEb7UmzzRSuSLICjwbv1FfauTSL1OVaQaumf/8h71\n5nUNLFL0StpP7CsCrpapY+mGhXtKJJY5Ho/xkz/5k/ipn/qpDMrsJyYlqsLwrOuIb62j9FtkgPiz\nyqcCUaQoVGG0BeYm0jrc4NAx1HaokaWempfpn3V9lN81QUvHOvKwtA+iNvi1tkYJy2ekhEfKvnjx\nAh9++CGOjo6ynK+srGSvjsmW1HF8UU5VzZaY2C5Gr4bDYY7WdbvdvEYKIBvT6pmRdEsj+0a9al1r\n13voIOiJeh6B4bzWPqbeYMInvVSNhtHg15fzkAf9rJEE3Waohgr5JmZwzo7HY3S7Xdy/fz+HsNkP\nu7u7ePHiBQ4ODnI7z87OcmIxdQmNKc5r1qf4QTnw+UXZ4LhMJpMcZYiS75xuDTCT1BJaXl7GaDTC\n06dPr5xHTXLwAa7u/617hqHaiOgBcZM5Qy8MTdF6pcDofj3dI61C6Ja4Kx/vByVmAfKUM96nIKIJ\nLwByyKSqqpwFSk+a1iWBXL1c7VPPmFSlqxNWAZoTlJNc9yeyDAo7FUvJ260jVcJ6NKoqajcw9LrL\nCvuShz28++67+PznP49utzt32AAjOVVVZUvdx7yJ6gAgAmXlz9fOfSwiYG/yHKN7m9oT8eh1k181\nmkpRAFdsrviBq++H1nA2gV4NQ72u7a2jJrnrdDo5CQhAnpedTgdPnz7Fhx9+iJOTkwzEvKfT6cy9\nWYlJVFV1uQee/cQ1TcoYy2dfaiRKDXpd22b/cZ4zkqb5OwqAAOZeNMH+jjxanfcc46qqcrgXQDZg\nOSbUg3QKdE+/jpEaHdRXVXV5MpqG+fUwEbaJRzSfn1++f/nw8DA/y0xtjgHXjrVPdXunyjbbrHKk\nuo/3sq/0dcau40p0a4BZJ6UOGE+KAuYTEEpKzb2UyGuJOie6z4/KBDB3IpeGUdRK5OTRfXvRQEah\nI+0Pt6wYKvNQFCecKiiCoVqqqrDc46UV6wrNATnyNjU0qRNMFQHrVisypctj9VTZajmqbNhelQV6\nTWrNa8hYy6oj8sN9mi9fvsxZ/5zo/l5sHycFzGhMI7kreYt63cO8+psbRaV2NbU7CmdqaC8in3uu\nVB3Qqfy9bf5ZeVavTqMDlHU+owdiEMg47nquQRvF2BTJWFqanV+/tbWFqqpysh89Ix5Owbl0fn6e\n98/S+9R1WBr5XKsFZp4ywbiqLs9fV0OUc0LPbmCfMdTNF19odEKjS540yrr4G186o23kizUU7M/P\nz7NHyKW86XQ6dwa4JsJxrRe4NCoog5onoZEBlUfdNkv91el0sn4+OTnB8+fP8eLFCxwdHc1lvzN6\nwSQ1Gkeq19kHkcz4XHMDh2NAQNYIJp8vOQukWwPMJAo+O5wneKny533+Xz/75KqbkPxNlTcFVwWe\na8a0Cj1RQcNqVBzMpFRPkZYnj73TNTYNT/sWAv5pCFf/RwCnoWHy5H2n4TgKoyrJSPnX9TGzaTkJ\nx+Px3OvXmJVIj0DXjFiOht/Y1x6u9ExP5cO9NgdnnXCacPPixQs8f/4cw+EQALCxsZHHi6DMpB6u\nP7mhpGPZxnj0z6X71VBxCz2itp6hl+kAWFqC8XL4X9vMshRYlT83xNTgZohSn3UlDWAOhJUfzer2\nKFBdO9zo53fK7+rqKu7du4dut4uDg4P8ux7/y0M4mCw1nU7zG4mY16FGO/NqeJwwgByGrapZeJu8\n6asENSJGgCJIqafGua9RHRL5UCOa7aWXqm3h0pm+c4DjMRgMcju4N5hgyUxthpr1hTjkk/pKt4dx\nCUmz8SkfND7Y79yqyAjn3t5ebs/y8jI2NzextraWQ8uafJbS5cuUInlRo1VlWKOMXNdnn6j8qN7n\nfSW6lcDMhnMQSxPbnykpuiYFRY9BPS0NFVGgh8MhJpNJPjgDmE/lB+YVM4VIFQ6BiOtRnLTedq6n\nq3erSo7leoiO38kX6/ZtU269qXWowKHfVflpO5kXQH44IXk/wUzXkPT1bcqfGh6qaNwL52TX9fCS\nV+lGjxo4o9EIo9Eonw7EtT6u5bGfO535xC6uP+lBC1qXyp0ms6jMuUyXAFnHmrLZ5Am3ITdq3ZiJ\n+G5THj8reEa8q9EE4ErddVmrvEdDthoxIi9t+yry1jlfCYIce4IG8z3Ozs7ymiWTQZkHwsgKE6X2\n9vZy4pMaV51OJ4PEysoKNjY28pkNBHl6wcDlu4O5l7eqqnxksXriDCkzTEu9Qk+YOoF9qNv7+BsN\nHIKXLhlxDGjU6+sj6VErz7qeS7B1g9MNLRr1BLPJZJJPGauqKgOhGg5cWqJ+4O6ewWCQ9zvTcdLo\nA/97ZNCNPjfW2E9qKLnO57306D82WdmcEJpdqG9WAa6uF+qE8nLaKhQN8wCXnq9bmxTKfr+fQzdM\nOtCkH/UwOUHVaiIoM4mDdajVxRAU/3QQfY2NbeU1NRZUOBSAo9/U+2ujyCjwDP8qH7o2Q1KPmtYw\ny9J1aAq6hr4YudAwph7RqhNJ+yAy5M7OZm+WefnyJZ4/f569e/Kt4Xj2EY0DXcdnOJPeiYexFZjI\nl3vGHv1QYIhku+T5+TNN5CG6yIvShL2mcl1ePKKiSV8e3VCDSw0W3h8ZXc6rGqmRoV5Hbkyxb5is\nRRDjXNdllZ2dHTx//jy/mAWYbRfic4z0qHdEL1j7StdwKYOUNR7co97naDTKnh51hcohI1Kauc2E\nL9ahyxfMWyHQsRx6nnpgh44pDQ7dVsT+o9GtETF925Ma5Rr2ZdiZS4F0kNiuo6OjvDTADPelpSVs\nbGzMLWXw5Ei2s6qqfHY/vXI/BMnlhrxHsq7RPzpYamBQl2kUkn3ZpGNvDTCrwuFAeWw+aohOVL9X\nvbZojVE9T044D2ur8tTj17gFQNc3KIQ+sEo80ayqKuzv7+c1ViZ2cVJwHYcgE7WJAqGTQMHVlb0q\nObVqySOFjO31erVdXP9iJOH09HTuBQ7R5vvIClXApTLkZNeMSY8OcLmD/a1KmeOm4Mw95pzU3May\ntLSEBw8eZMULXGayUrHSc2AmqJ57DWBuskUy5PKmMqtyq96i9p1GSiIAjLxd73vnQ7158sT/7Etd\nI70u1c1dHTfni6QJg+TPDQUHOZ+DdeRATrnq9Xp5fZTzg/OKHtf/v72zDbF9u+/6d+15OGce9pmH\nM2fuvbm5mBISMBVMQUrhvqlFNKIYfSGkYC1YjC9SbKEgTd6oSMEXGvWFCtEWI7aGQFsMoVhjtYhQ\nmyY1kqT3FoMN15s758ycmdnzcObp7NnLF3t/1v7u31n/PXNOizOX+//BMHv/9/9h/X/rt37f39Na\na2dnR/v7+yUq1O12tba2VoqOWBEPQE8pFQB0nhLadcPTr/Gxzljzwk0MKPK4FM0SEsfbxPOen5/X\n+fl5KZACoDiHecpso+sLOXlEAvAkRI2BAEjD39rKXfQr98eTl8ZpKg8HM7bgF/oSXlPf43lqjCIM\nG/Q8etVTpi4LtDHKFu/NO/LeeL8ekfQ/jCRfV8INvBrdGmCO3jIWJNaQCzIMhhHxHt7BNY/E7yM9\n6z1EJeAD00NyLLdIB1xlRBBOYY4dncXzAWQEJhZp8Vw2iQAMvBqx5jV4u/x+sRDMeYYHi3XLQOQ9\nUJYYUEzGj8DPs7xt7iHxToAsz2Kg847IhufNGByEyNy4SimVnNPFxUVZhB5jAy+Z8JaH2ySVPaR9\nuhthMf4IZyKznhaAPHriSga+RDlpAteaFxi96xhCi96nR0OiceBjJkZWrhNBiRSNjiajmjbWKKZW\n/DppErQ97N5kBNS8fvdsKOrjmBuMg8GgrAfNIhSMPbzjlZUVDQaDsma8h6YJa+ecC/gBJEy57HQ6\nRZ6QNa+2xuvGaEgpTYS5eYfz8/Oyo9vMzEyZRUII/fLysuRaSRcyvo+OjvTkyRPNzIzXkyCd6MYo\nY85DyLQDQxid5nUC9BuFV3jxNVmWNLGaIHqBZ5BKog6JrX6RA5/77CFyN7bd0ak5EMgO13Bfj2h4\npJN+4H2IzrnR5NHLJro1wOweEdVzdIQrcFcyXBcVCwzy79KYwVAMPUeKIIswuPUWlVaTh4KAYB36\n0oEILZ89tML1CA6l/ymlMuARQADPlXIESj8OGLpSx8JEcOM5rrDJqcQ2x+iFNDlHm/CcNLlIAXzw\nre6wnM/OznRwcFCUGGG0lFIZ3HgiGBa9Xq8MaELt7JHMPMeNjY3yPr6ATM7j+ZCdTmdiJaEYtfBU\nR1PI171evtcoFhK6DNa8Zb93DZRdhmJbIijzXE+VvAgo84zozdfa3HTvaYDe9D5+Xa09/nzuQV50\neXlZS0tL6nTGq4rhEDx58qRU+AJiy8vLZXEZQOjk5ES9Xq8UHzHml5aWivx4HhwD++zsTCsrKwUc\nAVhpcqMN97BYpY9iVPQHck4ol/HA2EN3UbXMHGOiZawHzW5V7gkyHxgP3FN5yD86jT53g6nT6ZQ1\nIebn54sOcLByb5coBGFi/vyZHj6WVAAf8HTP3KOanjZBFmqRoShLHpKGF5A7U67H6EMibdJ4//om\nujXA7MDg4ckoVG6xcA4AgeKUNHG9W0bu2UbFEZVlU1iwCYijxy5p4pmADJ3lwuLegw9e3gGQOj8/\nL7kTz11L49ytA52Dunv3zgfuT7jMQ0O0D4uY98AavLy8LALHQARceLaH9AiXkW/CM2Aqht+DnWFY\np5bFGrCieVcAGh5Imliv2qtKff/u+/fv6969e89s7o7nAPjjFXmleAxJRSvY+xVyOagR52Lk1MBk\nmqxFUHaF6df4O9BWvqNU3NC4DvA1tc+f8zwA/yLGQM279vs5HzwVsrKyUpZVRcbxZPf29rS/v18U\ncbfb1crKSin+9JAxBidyxyYmhMSJAiHvT5480fHxcTEIKQIF8AFMaRwtIoLjfUOfYUCiR1m6eGFh\noeyixD3hhadliCj5uB0MBqX4Df3Ce5+cnBSg9/QBoMpn2spSy7wLy46S72ZMuSN1fHyso6OjicgZ\n8h2nNblOcMCNUceaPMSxEA1DHw/oMZc315VE0DgH/mDouZ5uolsDzB6+oHNijjAWgvX7/WIxLi8v\na319vVQVegd7oUGtE1zguL+H/KaRd14NyBHyWLjkShxPPIZEsMzc6lpZWSkDy5U/goMA1AqvPGTs\n78w0D36Llca0OYY3GbReAMHyg/AEcMWgop8ZjBSxEIYiasA0K3jjiwgwzYJ8tO9QQ/so2Ol0OhN5\n+IWFhZKfWl1dLX3ihg9e0crKSuElFn4Mv3s/uEzFMDCGCwrGPWIHvuhRvwiYudy5oSZpInTo17j3\n6kZhbNvzgHI0GmrnXWeMXYemKTmAGGIrWdIZzDn2Oeys2gUPut2u1tfXC7Ds7e2VMCXLs965c0cr\nKytaWVkpYIxnxBQqn1d8cHBQ5vqi8Akf+1aFEDJKnQptZhvG1dXVErZm1yffBYnxQUU045fxRxTJ\nDUMA1XXy2dlZiSQ58ABKAL2/K3RyclI8bdf3jEH6CgcFQ8FTDK7n4LHrbDcGmtIb7i3zPUap0MHe\nTn9OHA/gFPod3nK96343GGp0a4BZenZqgzMZAeePl1xcXNRLL72kjY2NsvE4HRKVdc1TIZzgIBk9\nDY5DrniiAq6F8FzJetVlDOfVvBIP2TJgPAwTv7s37FXOHmWQJuc1Ao4sMck7eRrBp0hIk4vcDwbD\nSsrt7W31er0CZM5fLNsYGWBgI7wMNvfandfc2/PR5OK8bRTHMNjxfN279q3fvH8Af6ZM8R9vx4sA\naTPkURp4w/vEuZo1ZeH/p1HTtS6HzmtyjaQe3MOpGaURkF124rhwGea9amHB2N6rQLnJ4L0u8Qxk\nmr5YXl7WnTt3dHZ2VnYFOz091aNHj7Szs6Pd3V0dHR2VJTBXV1fV7XarxidAee/ePa2vrxeQv7i4\nKJsiEIJllgm55tPT07JQBl4pVcaEkT3aRcicFejoT3jNdD/C6ky7ksYFjRjXGKDIJYWNXp3NfeFR\nrD3xlIeHjeG11/BAvE/0dslVu1xGAPOwMdchG24wu+6MMhbB2PVo1NfuHLrMR+/Y+wgHBN7Ad1/h\nzJ2pJro1wFyzHly5ubIlj7C4uKiNjQ2trKxIGq4n6wsKYGFi+Xh+RBorFQcyB9cIum6FeSijpjj8\nfVxxe+gp3jcCcwx3uPLzoi0XUDcwEH4/h3dkUFJdzTabLkyEt2PO2z0yaazUdnd3S2iMtnlI3Xnp\nbXQgxgJ++vTphDEAaPqWm9w7eoY+4AmZuyERrXFAuNPplOpYLwYizOhKHiVC+zAsnNexr2vW+1Ve\n5TSKcsd3/8/zUL5RyUy73hUlx+A7Cj0qNvesYn0H9/Z28VvN+/D01VU88Oe7J4dxhzeIDBA2pkJ5\na2tLDx8+nFi5C0OVgk2Mfq5l7JBvxptmT2By0sgYSpsKaBwLX2/dx6gb3p7PpWqawi9pPKeb+6yu\nrk785lE56dmpoG5setGZtwvj14uqaJd7ybwD5yI/7pF6XzHeXCZqIWjXOVEm6Fcf9/Ecl1PuD0+Q\n2Vr0NMob94VHbvy6EwgWwSsfA+Tim+hKYE4pvSbp30p6WdJA0udyzv8spfT3JP1NSTujUz+Tc/61\n0TWflvQTki4l/e2c869f9ZzwzIkB596JNC6mYh7p/Px8mbiPsvV5cFiwdADHHIBdefI75N9rnoOD\nOd8joEYFExWyK0WudeGijQ7Gfp3PH4yC5wLnlZlMlQAgUbYeGnY+xPfq9/va3d0tYT/mBLoxxX3d\nS/QByvmAsBeouAGFYqOQxq1oN8awTOEHHi9T0VAEKaVi6KFgWYmM3+KKY3jq/g4oIOdPlAs+R0/W\nZSh+biJXatM8Sf/N81o1D8HbCs89yoKBhlFEPpLoAvckQkF4FSBx78brDxjXcTEXpgxSgVvjgb8j\nz6DNpDkwQNkAwt+FcPDZ2Zn29va0u7tbvMb19XV1u91iIPq8XsYkgOxrprMr1N7enh4/flymEFJY\nhheMMeCpGKYLMf4wHDGqfPc4wNqNUSIBS0tL6vf7pajL5147MLux7Z4u3338IrPuARMa9+Iw5Ase\n+TKiPmYAUK8fioDpsu5yzPO5T20MuP7hevQLPHCDE0PHDaJoRMfx5p6085b8OdEJXwCJ/vA6oya6\njsfcl/QzOeffTSl1JX09pfSV0W//JOf8j/zklNJHJH1C0vdLep+k/5xS+nDOefoyPkaR2a6M6GCE\nKaU0sZ2YKz4EAIYj5FwL82rPQfAgt8KcoVEpR29AGk9B8fBt7TyeQ5vdK+Y+DmB+b2kcBoYvXgiF\nB+qehFeW+xxegC4qb4yawWC40MbDhw/18OFDDQYDLS0tlYXxaYtXdEeBd+sXIuTjITPuhTdPmBkP\nhjC6b6HHvEuvouZaPA5C4m7I+ZSPuGCMA4RHcrzva4AZjTH3nJuUSg14otE3zdOOsoUSQZ5qoOzP\n874DnHzLO7yk1dVVLS4uFtmhytiXXGS3MkCRBXV8Jaa5uTmdn5/r8PCwbJ3KBhCEUZEj5J92e1t9\nulOv19Pu7q6kYSV+t9st04CWlpY0MzNTNpug6AkjAVkjTOx7Kc/Ozmp5eVkrKyulIptCqPPzcx0c\nHOjJkyean5/X/fv3SxGUywtjFoMEkPWZC/SZAyljm2t8TAEAp6enJdQO31wnYMBI43X/3eD2ULDL\nrOsZxouDk48T5MGBMRpmPnZoj3u7fPc2RCMNefVrvJ3eJn9/aTyjxKdDRqPA21/zol0/SCr5feQb\nXmJs+poMUadEuhKYc85bkrZGn49SSm9IenXKJR+X9IWc87mkP0gpfUfSD0r6rauedRXBWJQK+VFy\nLQwuBMkBlmIjGOIA6fd3RruXGkGUTqwV0rgH7ufHd4mWGWAS25DSOJeBBcr3CHrS2OtEOaJUUDju\nZcNHBgwC5Mo7pTQxfQklenh4qMFgMJG7xZtA6bgF7RY6n2M0YnZ2tszldOXFs3kv2ui7Onk7fclB\nD7XV1rCVNOFFo+Rif0eKobYm69oHt9/H+yIqwyhnHPd71Qy7JqpFepwH8BbFzTvAOxQuynxxcVEL\nCwvKOZdtRH25SSqBmesPKB8dHZVIDZs2HB8fa2trSzs7O8U4W15enthz3IETRQsYY/AigwcHB9ra\n2tL8/LxWV1eL8c64pB3vvPOOnjx5opTGW8x6KNINRIw1wtadTqfMFGBJVzxfDA5JE3LqeUYHJh9n\neJleC+Hjw2XBgYs2Y6w6GLnM8WzGSdQ1HiHytvpYdtlxUG2Se9oRDVk/z5/Fu3GNOwZOLv9xLCKz\nACT9iOMRV+nyCKN/B0scT+K78Dt5cq8Fot3w1MPd0yJez5VjTil9QNIPSPptSa9L+smU0l+X9DUN\nvep9DUH7f9hlb2s6kF/32ROeBnkXrBS8PEJZfMbKhlEANwM9AquDHd8dPKN3FMMu7lVHwYlCROWz\n573xGH1+MN4G9wSUpWeX8nQL1/NO7kUjJIT5yFXhJRD+8rZG8EYIaaP/uTfu93A+uAchTSoXDAnm\nafv0BPqu0+mUfk7p2QIfwtdxoBNqgv/+nu7x17xab3cT0Pm5tWPRAq+Bq983erfxuqbfa6Bfs/rh\niRsvrmyl8ZjC4scLY83i3d1dPX36tBQOAYgUY/Z6PfV6PR0fH5fKZfKsLGm5u7uri4sLLS8va21t\nrXikyG7OuUxBIjWFocMqfLOzwwUrWARkY2OjKOGFhYWy0Az5WZ7HVDivNPZlE3POunv37sSmEicn\nJ2UxDiqz8cbj9oFxnq57XN4XLlP0Tc3w93MdRJEFTxVFMPNxF0HIc63c00PmUZZ4jkcgo/EZr6lF\nyiDAzXWOn+vtnsYTZB9dT1+7t45RAc+8Fga+oStdp3i6xt/LC23pF28bGIThFPs60rWBOaW0LOmX\nJf10zvkwpfQvJf0DSXn0/x9L+huSak97xjRIKX1S0iclaXNz8zrPL0xHAPgPowA0LE8UredRYZor\ndR+A0mQHI7DeBlemkbnR+uQ+KDPCq5ImioqksaeLV4q3gVfCbwAXIVz3hDEWUKa8OxEFlNjJyYn2\n9/cnltFzfnjok7ZJ45Ci8435mg7Szguur4WG3Cp1YPXCNVcgXO+FYX4u7+CKBN4Tau33+6W9UPQ+\nooJ0oPOc6TR5rZF7vTUvvAbe7jV4SC7WXiAfXlPgshxlFr7jEQKsKCoAhqgLCo7VpVhZ7e7du2XN\ncNIt/X6/bJt5dnZWDJ979+4VkH/48KF2d3eLYUX4d2Njo0QufOMQvPa9vT0dHh6WdyWfzcwNxjdT\n3Jjew5QgQJcaFX7LOZfpdMgV9wKUz87OChizmAjXYWzAc49+uey6t+UU5cE9NT/Hay/8PuiGeL7r\npPi8CEbu2fl5fs/oNfPsGE5uGks1uXcDwL1Kb4e3x8eitzFOq/WxwNjyQqyad+x1I5KKLCK3R0dH\nGgzGxaq02/HFowwYjMiKpySa6FrAnFKa0xCUfzHn/Cujl35kv/8rSV8efX1b0mt2+fslvRPvmXP+\nnKTPSdKHP/zha8+HiF6hg5wP0pmZmbLoCIVfvrRbBJxRmyaUlluf9q6NbaOTIA8JekcgxPwxGDju\nyzt69aSHmRA4wq4elnEvA2E9Pj4unoTnkePC9AC+NJ7/TTu9ctOFkVA210d+OUBMM4DoF+7l/Kx5\nsPTftFwNfYjH72sDx7ZGL7PJYOPc+Jz43T3sJq84ttOvmXYe6QJkBCONqnIiRTFnGCnnXDZr8C1K\nqXzn3vv7+9re3i6GGPNtFxYWiveIQvLiJec1BXWXl5c6PDzU6emput2u7t69q+PjY83Ozmp9fb2E\ngmkDedzDw8OyxrlvPAIgS8MlHNfX10ve+ujoqESfqEwn/bK4uKiTk5Oy05Abp4xdxsfl5aWOj48L\nGJObZClaaZyz9WK26GlGOarJg+s3Jx8jPl4dEL2/a8AYj9UM5diuaMjF62MUJxqt0TCsATPHPcIH\noX88Esj9vA7I9VqM0vHfDRV/J8ZM1PvgxmAwKDJL2qRm2Pu0NJwwctnuJADgTXSdquwk6eclvZFz\n/qwdfyUP88+S9FckfWv0+UuSfiml9FkNi78+JOmrVz3nOuQdw1/0TH36DYUr7CaCFwmzYSyD0L0m\nLyJwa8vPiZZgHGy+ao8Lt1u2hGX9NxQ5QoeicG8JS96LQ7x9KFSqTnd2dopw4IlwfgzNk7fz9Xpj\n4Qnn8bxoZdJu/887TvsdqoW/43m1sJlUz0V5vqkJ5P1YbGcMk8W+j/fBA/GURu06N3BoO7LrSs55\nRVQi51xWj6L4CKD1RXVq7+jhvnv37k2sGMV1p6en2t/fL+uMDwYDve9979Pq6moppNva2tJbb72l\nk5OT4jEy95e5wsypBfiRn263O2GELS0taW1tTd1ut8zn7fV6evz4sQ4ODspeu8gt45Rxguc+Ozur\n/f39Eh0hguTro1P4x3rSvmANck80jv3E+/1+4cPMzIxeffXV8iz2S2ZMeCUuipp3r8lAJPo5ylcE\nQQdjHyfIkV/vz0buasZm7XvTeIiRJv9cey8frzzHC918yhi/8eeAHO/tRkP09p0vzgsvxHLAjv0D\nsMJrN3i9PV6sSm2Tp1y4L/I6LeJ2HY/5dUk/JumbKaVvjI59RtKPppQ+qmGY+ruS/tboYd9OKX1R\n0u9pWNH9qfwcFdlNxIu7lyw9G7aM+4n67h++9Zp3JIUZvsQlA9W9yVpuwzsHJRo9bxcoaWxVuaUL\n+YDyd3UlFK0+n9bEEnYoa984HMB1xeEWnAMEPHAPzUNMvKsvlu8LgjRRDYxr4boaf51qA9T7KVLN\nUvb/tefFPwe0pvfhXFc2rgz8HLfOMcQiaNIvKHnaTGU0FdCeN8fLA7xiRMLTIV6FzGIYKBVkKKVh\ncdTGxobW1tZ0584dDQYDPXr0SG+++ab6/b42NzdLARXhYKYg4Y3Tdl9gB0XH/sNMfzw7O9P29ra2\ntrYKwKaUyvQkjyZ52Jp0DZXdGOv8+awGNzDhG+H2fr+vR48e6eLiotSCEAZfXFzU5uamZmdn9b3v\nfU9bW1tl7jC8p788Z+pyVZNpl6kYZoaiDuSdonz6e/O+XMt4c4Dx4r8I7NzPK9Nd5j2K6efXxkgc\nQx4x9O/uybr+jI6R39t1U02fRKcO/tX0jesXT6/5+9TGMnLtOW2vIULmakaz03Wqsv+76nnjX5ty\nzc9J+rmr7v0i5LmA6P3EkCZMIBTlXrRXJUrjnVsIjXoIDu/R56RV3nniL4KrCyq/OUCjzFEstI3Q\nIGvKovy4Hj5IKtXSKGmvKPW8L/mwOPCjkYNi9wgEAodyJ1zp0wOcHDybBDFa0U2A67yOyi1axK4k\n4vFaO2res18X21F7NuR891CjeynS5NSOmKJBbl0heK7MCxsxvLzozfsRAwyAYYlUj4gcHR1pZ2en\nHE9pWJvBlCg8Qc7d39/X22+/rU6now9+8IO6c+dOSSM9fvxYJycnmpmZKYVeXlwDSHuRH2BL7nZ3\nd1ePHz8u+emFhYWJZVZRrvCHsLV7upLK9CbAPk4P4tmsh764uKiUkvb29vT06dOy6QnL1na7Xb30\n0kvq9/t66623tL29rU6no3v37k2kRxyUPdUUZSYaa/AIR8H1G4YG8oUhgedP9M09R3RE1EFuZDOW\n4R3jO95P0gTvHVCjx1wzwN24jFEtv0fUGTEtME1fRCPax6RHrzz/H9vq49Qjmv5MN7bccPDIoRs5\nUDSkmujWrPx1FUWFW1PA0mSneZzfrUXCUm4hwmAGO0LtVjmWea04AUAHzBBuz4lAcSMLrmG7Ntbo\nZVENlDz3cFB0Re/t5X0QQhTQ3bt3S1EZ/HIhc6MCK5l7uqUJT1CWtdBObXDyfZrHULumNvCixRxB\nNIaQ4n1rAyMqmHiNexyeA48yBN9jZWkc/NGj5lr4yn18+s7MzEzJKbtV7nN6HYy599zcXAFNphFR\nEHd4eKhOp1PWefYpcEwHIpfM6lh3797V2tqazs/Ptbu7O7GYBXloANnbwTsCiHjsFxcXpdK51+uV\nGhF/H2T1/Py8LAgiaWJzB8ZiSsMc8cLCQinYwoh0Y555zoD3wcGBDg4O1Ol0ivc9GAzU7Xb14MED\nnZ2dlalWVHWTc6dvvUI56hqXFwcBL+ij7316DzMm0GE557I7FUaOe2cRFNzYpO99vPtuUn59BCYH\nnqZxVBuj8Z5XjX/6O7bBxzbPjzgQz5UmU2Lxz9+N/vD2u7HDuPdaCh/L8b1cx0bvvYluDTBPsyBc\nmU2zMqSx5+FWDVYr13uiPz7bmUXRmFvBrKjjyhfgYrEEAI0cb7TqUKru5WP1ci8UiDRe8MG9AQZ7\nzKsgKOQYfbUmPBcGJe+LgPlgcF40hV04vwZ8TedP+zytb6fdrzYY6B8fCG4MTBvANVl0y1iaDJfX\nLHS3ln2Au0w6r/0ZAKrz9/j4WAcHB894IL4muYdrqbJmFyOqqgEcD/8OBoOS28W7zDmXbQyprAYo\nMfD6/X7ZF3x2draAm4fNGXteb+HThzzUitdLgRXnMy8doGSFLUmlViIqSc+zM++dQkUKyiSVvLak\nks9m68PLy8tyHZtTnJ6e6p133tHFxcUzXjLgSF9GYzXKkzQ2+D3aR1QDfeDRPZcJN/6RJ4+4+BiJ\nz3Vw9LSAX1MDz5rxO20s18amnzftt5qzxfEaKDddH98l6g3Xox5FjJEGzo0R16Z3ANy93dGAb6Jb\nA8xXAa57gDWqKWYXNu8Yt2AiYz3cjbA7+FDVSVKfz0xLoq3MEUYBumIHVD0sJ42LBqRx3tuLE2Zm\nZiZ2YHHLkXA9io8Vh1gHG++NVdKid+yWuqQJK7CJmoSy1ndXXT8NKKddW6twbAprXdcYiOfXwo3w\ns+laB2CMNY/GeF+TIgAkKVQiTNnr9XR0dFQAxxeNAdB4zuLiYunvTmdYYc+CLezVi9FH5GhtbU2r\nq6sFsAgle/XzzMxM2QWJtqY0LKS6f/++lpeXJakAigMOCsqNBq+qpoDtyZMnZcEPrzjHA+71eqX4\nCuAjcsS9PZzvG0HgnXMNXj3LcvZ6Pe3t7ZWtErmOJTVZ4Wxvb08pJa2trU0sIuJRJmlySp0bZJIm\n2soY91Wh3Pv34jHXAx6tiUai502bKn+jt+j38/ERPfvafeI4iuMveqQR0JuAdRrV2hrbEwE4Ps91\nYJNTwmd0NgRf3QuPBjifkXeXkXcNMF/lNTV15FWeHExAwPk9WmOuhL2S0y16t04BYgAcD4fn5JyL\nNe+WE0Lg3z18ktJ4DrZXXLuFGIvYuA5lSRFWSuNiOLwGFBr3ds+G44Byk4fL95o1XgNUF9zatdEq\nneZl+GBjIBFuj6HC2H6PfDR5ErX3jYOa/oxhQldgeK/kZo+OjiSpeHJzc3MFNFmFqNPplPnVOeeS\n12Q51ZzzhAFIvwGY5ElRBIQ5uQeg7+/LFCbm8u7u7paCr7jAi08HYgtE9sZmIxOmQDG3nWlLbJrS\n6YzXq8abZQESSSWXzHlEl1JKpTDNARmexpoJeOqeOf24uro6kW8/Pj7W/v5+SSHNz8+XvDqLj1AM\n1+l0StW4pFIP4oV6UR4c9NjxCaOJOdZe7wIocz9SWm6MMn5jxMrBARmpkY/7CFQu/7Ux6+fH5141\n9pvGl1PU71fp+6Zra22L74Oed0MK2cKJ8boExk9timYEZWmc56dWAmN6GihLtwSYeaGmxr6IRSWN\nhdKLmKRnrSH3Zt0DdSZ6boznez7GvXPaGAsOfGC5N+HWsD/blbzzwgefC51vvEAbKDzJOT+zp6nz\n1vNT0waCA1+tj2oDwwd/Eyi7UNes73gtn0kZNBkOnqNCGaKwOS+2ueZdu3xGEHaPxY0sD0MfHR0V\nhYwRtb+/r8vLy5LPxQtnrjl96ZsfIDO+gEyv1ytrUGN8Mb0Iz5ENIfxdKeR69OhRWYp1bm5OGxsb\nWlxcLKCJYbC4uKhut1u8a8Ks+/v72tra0vHxsTY3N/XgwYOyteLl5WXJYbOSF/za29srXvL8/HyZ\nf8wUJRYEYczEKA/eMcYM4XYfw55znZub09rammZnZ0ufPH36VAcHB2XRCJbc5HwMHJ+lAajj5fr8\nZ5d3B2oMEZYx3d/fL/3qOWi/zq/nff24e3suty6vEVDjWJPGeixGiq7ySmvPrZ0TQbvJWPBnTvN4\nY9uajtU89+iE0C76wXP5Dsicj+Pi966FyV1WffnP6DU30a0AZqgmEH+U93YrM9K0jvbQkRdKuAXl\nnm9cHtTXkfbcp1v4UXhigRH39hAK78V/D2/74MX78fM9nOJhLwexJr5c1zDyNtQ8Yj/mhXNuUdaA\n04/HaIPzw70r+swrmGv9PU3JEIIGWOErxo8bd3jxeIv0Ge/X7/cL2EmaWFEKgCDUjOdNsR1zfGkn\n8yzxKKmFkFTAnbZ3Op0yB5n1zjHaNjc3tba2pk6nU7YuTCmV49RQ9Pv9ko9lat75+bnW19f12muv\n6f79+wWQe72etre3JalUeA8Gw6U4Dw4OJA13c1pbWyv3xYMkhE2uF8JbzTmXNrEqmRfAuRGMjLHY\nzs7OTuE14L+8vFyMDqINg8GgFJC5bPr0La/Cr8kpY4p3OTo6Kgai15o4WNbGtwNJBFGXYf5PM4C5\nVwzRen7a9UgcX37f+JwmPR6NBChGoGpgF/XANBDms+eNWQiG/qAPiSQyXp1HXi8BuRETdbhHILwf\n3FN2XvhqhZFuBTATao6enL/gdWiaEn+R62vXofh8zi7t5xpCYF612OT5QXEgRl64EMRruM7v4QBN\nGyN4cK6Ds0/R8GIWrMcaT/xe3rY4XaDJQqRATRpXlns4DxDyQqpaPih6F55/pm7AIyeRj00eBdeQ\nt6cqmSl1FxcXZZ1lDCBPnSwvL5c2cw+qitn4IaVU5tCixCkC8/n15EkpfmJFrLOzs1LdTEFWt9st\n8kg72dUJD5x5w/Q789KfPn1aCp4o6gI42dYQsExpmHMFlJ8+fVqW4wR4eCahX3LJDx48ULfbLUVf\n7ACFrALQAOzy8rJmZmZKUSW88QgUvGR3qouLC+3u7hbPxQvpMFRYjpTw+unpaTGcFhYWSlgdAEWG\nyAdzvCbjLk+MI+oKGCeerorGdZRFZDaCUfTCHIR8TKBH8Ni5r0fLfIw5KE+LqDVFzZqMB297zNlG\nYPZx6VOYvG2uc3LOZQ8F6gR8XYe4taZ70jwzginPQx6jQ+X9731Cm+N9ptXv3ApglsahYBcEqXl6\nVOzsCEQvQjWBi88DJHwgufKI27x5rguKVlh8Zvzu50cw5r8LKILugBiBNVp6tdAO5wFo00JQfl/3\nLAkRuRKCPw54FK+hlL0QzY0K7uGbVkQrn/bknItCheJSes47HzyAI5Y13g6LblA8RUhzdnZW9+7d\nU87DambejTA1IEaYls0Ucs5lGUlpWAtAiJaqakLQADhh3qOjo7Ju9NLSknLOE4tgADh37twp4E2Y\n1kHPjS+mD9GH0hB8Dg8PixfN0oQUMXW7Xb388st6+eWXNTs7q8ePH2tnZ6eAbEppYhcnZALgxHul\nepzf6UNyzoDnyclJWQ7T78e5VEvTF6wchuxh1PgmGURr3OjCWIkbuyA3XjgJ6LqRGA3VTqdTKtl5\nRs1Ddq/WZTWCtd9bmlxRylN0eH+kFqgX8HGBTHgKwJ/r951G0WOuOQnucfJbLXLo+gp+uA4gOsJv\nrqMxmFj/YXt7u8xNdxDGsHPdxDP8XZ3nMa9f440bSq5La05EjW4VMHvYzgs8omURBwj/o8f7hwmJ\nN4WCULgxLIlwAQbu+dXAIL5H03cXCA+Du/foAu+LA0hjyzpaom6FMxhRLh7S5rk1QYoDDoMFIAG0\nCOVSoBMtczfKfDEG36XHp3MB5gg9cuM8wSNxGcKr8jmmMXQeQd6Ps9funTt3JpZ17HQ6E0VU3iaU\nCekNQpgAdr8/3Nd4bm6ueHEUTRFJSGk8l5gwM4oGwFpcXCwKh3tzLOc8sfQk8nlycjKx8AyralH1\nTXgdUGZpToBtdXVV6+vreuWVV7S5uamlpaUC4uxx7FO47t+/r5mZGT1+/LhEnbgfoMgYI2wPmKNQ\nCdsDwJ1OZ2KfZQqzmOqFMUGVO8/wNa5jMQ5tdj3AMeTJV8WDmiqgpXH6jEp3KuO9doV+iEABRW+Y\nc2mrL0rk74R8kBKIBU0u9xgkvE/MYTtP4udIUU9HD97BtwbU/h0ZcmOItrquYZMeiirRL7yzF3pF\nh6fmIXufRH0bKRoi6GMwIxptXsT7DO/+MOD1R0UppR1JTyQ9vum2vMtoQy3PXoRavj0/tTx7MWr5\n9vz0XuLZH8s5P4gHbwUwS1JK6Ws55z910+14N1HLsxejlm/PTy3PXoxavj0/tTyTpge6W2qppZZa\naqml/6/UAnNLLbXUUkst3SK6TcD8uZtuwLuQWp69GLV8e35qefZi1PLt+ek9z7Nbk2NuqaWWWmqp\npZZul8fcUksttdRSS+95unFgTil9LKX0+yml76SUfvam23ObKKX0Cyml7ZTSt+zYekrpKyml/z36\nv2a/fXrEx99PKf25m2n1zVJK6bWU0n9NKb2RUvp2SumnRsdbvjVQSuluSumrKaX/NeLZ3x8db3l2\nDUopzaSU/mdK6cuj7y3fplBK6bsppW+mlL6RUvra6FjLM6MbBeaU0oykfy7pz0v6iKQfTSl95Cbb\ndMvo30j6WDj2s5J+I+f8IUm/MfquEd8+Ien7R9f8ixF/32vUl/QzOec/LumHJH1qxJuWb810LulH\ncs5/UtJHJX0spfRDanl2XfopSW/Y95ZvV9Ofzjl/1KZFtTwzummP+QclfSfn/H9yzheSviDp4zfc\npltDOef/JmkvHP64pM+PPn9e0l+241/IOZ/nnP9A0nc05O97inLOWznn3x19PtJQYb6qlm+NlId0\nPPo6N/rLanl2JaWU3i/pL0j613a45dvzU8szo5sG5lcl/V/7/vboWEvN9FLOeUsagpCkzdHxlpeB\nUkofkPQDkn5bLd+m0igc+w1J25K+knNueXY9+qeS/o4kX8ux5dt0ypL+U0rp6ymlT46OtTwzuum1\nsmuLjrZl4i9GLS+NUkrLkn5Z0k/nnA+b1rdVyzdJUs75UtJHU0qrkn41pfQnppze8kxSSukvStrO\nOX89pfTD17mkcuw9xzdJr+ec30kpbUr6SkrpzSnnvid5dtMe89uSXrPv75f0zg215d1Cj1JKr0jS\n6P/26HjLyxGllOY0BOVfzDn/yuhwy7drUM65J+k3NczntTybTq9L+ksppe9qmIb7kZTSv1PLt6mU\nc35n9H9b0q9qGJpueWZ008D8O5I+lFL6vpTSvIZJ/i/dcJtuO31J0o+PPv+4pP9gxz+RUrqTUvo+\nSR+S9NUbaN+NUhq6xj8v6Y2c82ftp5ZvDZRSejDylJVSWpD0ZyS9qZZnUynn/Omc8/tzzh/QUHf9\nl5zzrfPKXgAAAQ5JREFUX1PLt0ZKKS2llLp8lvRnJX1LLc8m6EZD2TnnfkrpJyX9uqQZSb+Qc/72\nTbbpNlFK6d9L+mFJGymltyX9XUn/UNIXU0o/IektSX9VknLO304pfVHS72lYmfypUXjyvUavS/ox\nSd8c5Uwl6TNq+TaNXpH0+VG1a0fSF3POX04p/ZZanr0ItbLWTC9pmCqRhvjzSznn/5hS+h21PCvU\nrvzVUksttdRSS7eIbjqU3VJLLbXUUkstGbXA3FJLLbXUUku3iFpgbqmlllpqqaVbRC0wt9RSSy21\n1NItohaYW2qppZZaaukWUQvMLbXUUksttXSLqAXmllpqqaWWWrpF1AJzSy211FJLLd0i+n/ZiypF\ntsmXWwAAAABJRU5ErkJggg==\n",
            "text/plain": [
              "\u003cFigure size 800x800 with 1 Axes\u003e"
            ]
          },
          "metadata": {
            "tags": []
          },
          "output_type": "display_data"
        }
      ],
      "source": [
        "%matplotlib inline\n",
        "\n",
        "def plot(time_secs):\n",
        "  plt.figure(figsize=(8, 8), dpi=100)\n",
        "  plt.grid(False)\n",
        "  plt.title(f'Water level at time = {time_secs} seconds')\n",
        "  rgb = LightSource(azdeg=315, altdeg=45).hillshade(scaled_dem, vert_exag=.005)\n",
        "  plt.imshow(rgb, alpha=1, cmap='gray', vmin=-.5, vmax=.9)\n",
        "\n",
        "  # Apply the `Blues` cmap to the water heightmap manually, with alpha level\n",
        "  # equal to the water height (so that the DEM shows through).\n",
        "  x = scaled_heightmaps[time_secs]\n",
        "  norm_x = .5 + .5 * (x / x.max())\n",
        "  y = plt.cm.Blues(norm_x)\n",
        "  y[..., -1] = x / x.max()\n",
        "  plt.imshow(y)\n",
        "  plt.show()\n",
        "\n",
        "if PUBLIC_COLAB:\n",
        "  slider = widgets.IntSlider(\n",
        "      value=num_secs,\n",
        "      min=start_time_secs + num_secs_per_cycle, max=num_secs,\n",
        "      step=num_secs_per_cycle)\n",
        "  out = widgets.interactive_output(\n",
        "      lambda time_secs: plot(time_secs), {'time_secs': slider})\n",
        "  display(widgets.VBox([slider, out]))\n"
      ]
    }
  ],
  "metadata": {
    "accelerator": "TPU",
    "colab": {
      "collapsed_sections": [],
      "name": "flood_simulation_tpu.ipynb",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
