{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "b7de562a",
   "metadata": {},
   "source": [
    "## Two-Stream Adaptive Graph Convolutional Network\n",
    "Model with temporal attention and spatial attention combined with a graph learning layer. <br />\n",
    "Spatial attention is performed before temporal attention. <br />\n",
    "\n",
    "The model returns the output of the last layer without an output module.<br />\n",
    "\n",
    "The definition in PyTorch Geometric Temporal performs *view* operations on non-contiguous tensors.<br />\n",
    "Small fix applied in the following code.<br />\n",
    "<br />"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "21cbd0cf",
   "metadata": {},
   "source": [
    "### AAGCN Modified"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "d25258e0",
   "metadata": {},
   "outputs": [],
   "source": [
    "import math\n",
    "import torch\n",
    "import numpy as np\n",
    "import torch.nn as nn\n",
    "from torch.autograd import Variable\n",
    "from torch_geometric.utils.to_dense_adj import to_dense_adj\n",
    "import torch.nn.functional as F\n",
    "\n",
    "\n",
    "class GraphAAGCN:\n",
    "    r\"\"\"\n",
    "    Defining the Graph for the Two-Stream Adaptive Graph Convolutional Network.\n",
    "    It's composed of the normalized inward-links, outward-links and\n",
    "    self-links between the nodes as originally defined in the\n",
    "    `authors' repo <https://github.com/lshiwjx/2s-AGCN/blob/master/graph/tools.py>`\n",
    "    resulting in the shape of (3, num_nodes, num_nodes).\n",
    "    Args:\n",
    "        edge_index (Tensor array): Edge indices\n",
    "        num_nodes (int): Number of nodes\n",
    "    Return types:\n",
    "            * **A** (PyTorch Float Tensor) - Three layer normalized adjacency matrix\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, edge_index: list, num_nodes: int):\n",
    "        self.num_nodes = num_nodes\n",
    "        self.edge_index = edge_index\n",
    "        # Precompute the (3, V, V) adjacency stack once; reused by every layer.\n",
    "        self.A = self.get_spatial_graph(self.num_nodes)\n",
    "\n",
    "    def get_spatial_graph(self, num_nodes):\n",
    "        \"\"\"Build the stacked (self-links, inward, outward) adjacency tensor.\n",
    "\n",
    "        ``max_num_nodes`` guarantees a (num_nodes, num_nodes) dense matrix even\n",
    "        when the highest-indexed nodes have no incident edges; without it,\n",
    "        ``to_dense_adj`` infers the size from the edge indices and the\n",
    "        ``torch.stack`` below can fail on mismatched shapes.\n",
    "        \"\"\"\n",
    "        self_mat = torch.eye(num_nodes)\n",
    "        inward_mat = torch.squeeze(\n",
    "            to_dense_adj(self.edge_index, max_num_nodes=num_nodes)\n",
    "        )\n",
    "        # L1-normalize each column so incoming link weights sum to one per node.\n",
    "        inward_mat_norm = F.normalize(inward_mat, dim=0, p=1)\n",
    "        outward_mat = inward_mat.transpose(0, 1)\n",
    "        outward_mat_norm = F.normalize(outward_mat, dim=0, p=1)\n",
    "        adj_mat = torch.stack((self_mat, inward_mat_norm, outward_mat_norm))\n",
    "        return adj_mat\n",
    "\n",
    "\n",
    "class UnitTCN(nn.Module):\n",
    "    r\"\"\"\n",
    "    Temporal Convolutional Block applied to nodes in the Two-Stream Adaptive Graph\n",
    "    Convolutional Network as originally implemented in the\n",
    "    `Github Repo <https://github.com/lshiwjx/2s-AGCN>`. For implementational details\n",
    "    see https://arxiv.org/abs/1805.07694\n",
    "    Args:\n",
    "        in_channels (int): Number of input features.\n",
    "        out_channels (int): Number of output features.\n",
    "        kernel_size (int): Convolutional kernel size. (default: :obj:`9`)\n",
    "        stride (int): Temporal Convolutional kernel stride. (default: :obj:`1`)\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(\n",
    "        self, in_channels: int, out_channels: int, kernel_size: int = 9, stride: int = 1\n",
    "    ):\n",
    "        super(UnitTCN, self).__init__()\n",
    "        pad = int((kernel_size - 1) / 2)\n",
    "        self.conv = nn.Conv2d(\n",
    "            in_channels,\n",
    "            out_channels,\n",
    "            kernel_size=(kernel_size, 1),\n",
    "            padding=(pad, 0),\n",
    "            stride=(stride, 1),\n",
    "        )\n",
    "\n",
    "        self.bn = nn.BatchNorm2d(out_channels)\n",
    "        self.relu = nn.ReLU(inplace=True)\n",
    "        self._conv_init(self.conv)\n",
    "        self._bn_init(self.bn, 1)\n",
    "\n",
    "    def _bn_init(self, bn, scale):\n",
    "        nn.init.constant_(bn.weight, scale)\n",
    "        nn.init.constant_(bn.bias, 0)\n",
    "\n",
    "    def _conv_init(self, conv):\n",
    "        nn.init.kaiming_normal_(conv.weight, mode=\"fan_out\")\n",
    "        nn.init.constant_(conv.bias, 0)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x = self.bn(self.conv(x))\n",
    "        return x\n",
    "\n",
    "\n",
    "class UnitGCN(nn.Module):\n",
    "    r\"\"\"\n",
    "    Graph Convolutional Block applied to nodes in the Two-Stream Adaptive Graph Convolutional\n",
    "    Network as originally implemented in the `Github Repo <https://github.com/lshiwjx/2s-AGCN>`.\n",
    "    For implementational details see https://arxiv.org/abs/1805.07694.\n",
    "    Temporal attention, spatial attention and channel-wise attention will be applied.\n",
    "    Args:\n",
    "        in_channels (int): Number of input features.\n",
    "        out_channels (int): Number of output features.\n",
    "        A (Tensor array): Adaptive Graph.\n",
    "        coff_embedding (int, optional): Coefficient Embeddings. (default: :int:`4`)\n",
    "        num_subset (int, optional): Subsets for adaptive graphs, see\n",
    "        :math:`\\mathbf{A}, \\mathbf{B}, \\mathbf{C}` in https://arxiv.org/abs/1805.07694\n",
    "        for details. (default: :int:`3`)\n",
    "        adaptive (bool, optional): Apply Adaptive Graph Convolutions. (default: :obj:`True`)\n",
    "        attention (bool, optional): Apply Attention. (default: :obj:`True`)\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(\n",
    "        self,\n",
    "        in_channels: int,\n",
    "        out_channels: int,\n",
    "        A: torch.FloatTensor,\n",
    "        coff_embedding: int = 4,\n",
    "        num_subset: int = 3,\n",
    "        adaptive: bool = True,\n",
    "        attention: bool = True,\n",
    "    ):\n",
    "        super(UnitGCN, self).__init__()\n",
    "        # Width of the A1/A2 similarity embeddings used in _adaptive_forward.\n",
    "        self.inter_c = out_channels // coff_embedding\n",
    "        self.out_c = out_channels\n",
    "        self.in_c = in_channels\n",
    "        self.num_subset = num_subset\n",
    "        self.A = A\n",
    "        # Number of graph nodes (joints), taken from the last adjacency dim.\n",
    "        self.num_jpts = A.shape[-1]\n",
    "        self.attention = attention\n",
    "        self.adaptive = adaptive\n",
    "\n",
    "        # One 1x1 output convolution per adjacency subset.\n",
    "        self.conv_d = nn.ModuleList()\n",
    "\n",
    "        for i in range(self.num_subset):\n",
    "            self.conv_d.append(nn.Conv2d(in_channels, out_channels, 1))\n",
    "\n",
    "        # Adaptive mode learns the graph (PA) plus embedding convs; otherwise\n",
    "        # the provided adjacency is frozen.\n",
    "        if self.adaptive:\n",
    "            self._init_adaptive_layers()\n",
    "        else:\n",
    "            self.A = Variable(self.A, requires_grad=False)\n",
    "\n",
    "        if self.attention:\n",
    "            self._init_attention_layers()\n",
    "\n",
    "        # Residual branch: 1x1 projection when channel count changes, identity otherwise.\n",
    "        if in_channels != out_channels:\n",
    "            self.down = nn.Sequential(\n",
    "                nn.Conv2d(in_channels, out_channels, 1), nn.BatchNorm2d(out_channels)\n",
    "            )\n",
    "        else:\n",
    "            self.down = lambda x: x\n",
    "\n",
    "        self.bn = nn.BatchNorm2d(out_channels)\n",
    "        # NOTE(review): soft is defined but not used in any forward path shown here.\n",
    "        self.soft = nn.Softmax(-2)\n",
    "        self.tan = nn.Tanh()\n",
    "        self.sigmoid = nn.Sigmoid()\n",
    "        self.relu = nn.ReLU(inplace=True)\n",
    "\n",
    "        self._init_conv_bn()\n",
    "\n",
    "    def _bn_init(self, bn, scale):\n",
    "        # Constant init: affine scale to `scale`, shift to zero.\n",
    "        nn.init.constant_(bn.weight, scale)\n",
    "        nn.init.constant_(bn.bias, 0)\n",
    "\n",
    "    def _conv_init(self, conv):\n",
    "        # Kaiming (fan-out) weights, zero bias.\n",
    "        nn.init.kaiming_normal_(conv.weight, mode=\"fan_out\")\n",
    "        nn.init.constant_(conv.bias, 0)\n",
    "\n",
    "    def _conv_branch_init(self, conv, branches):\n",
    "        # Scale the normal init by the number of branches so the summed\n",
    "        # per-subset outputs keep a comparable variance.\n",
    "        weight = conv.weight\n",
    "        n = weight.size(0)\n",
    "        k1 = weight.size(1)\n",
    "        k2 = weight.size(2)\n",
    "        nn.init.normal_(weight, 0, math.sqrt(2.0 / (n * k1 * k2 * branches)))\n",
    "        nn.init.constant_(conv.bias, 0)\n",
    "\n",
    "    def _init_conv_bn(self):\n",
    "        # Default init for every conv/BN submodule, then overrides below.\n",
    "        for m in self.modules():\n",
    "            if isinstance(m, nn.Conv2d):\n",
    "                self._conv_init(m)\n",
    "            elif isinstance(m, nn.BatchNorm2d):\n",
    "                self._bn_init(m, 1)\n",
    "        # Near-zero scale on the output BN so the block starts close to its\n",
    "        # residual branch.\n",
    "        self._bn_init(self.bn, 1e-6)\n",
    "\n",
    "        for i in range(self.num_subset):\n",
    "            self._conv_branch_init(self.conv_d[i], self.num_subset)\n",
    "\n",
    "    def _init_attention_layers(self):\n",
    "        # temporal attention: 1D conv over time; zero-initialized so the gate\n",
    "        # starts at sigmoid(0) = 0.5 uniformly.\n",
    "        self.conv_ta = nn.Conv1d(self.out_c, 1, 9, padding=4)\n",
    "        nn.init.constant_(self.conv_ta.weight, 0)\n",
    "        nn.init.constant_(self.conv_ta.bias, 0)\n",
    "\n",
    "        # spatial attention: 1D conv over nodes; kernel forced to an odd size\n",
    "        # so symmetric padding preserves the node dimension.\n",
    "        ker_jpt = self.num_jpts - 1 if not self.num_jpts % 2 else self.num_jpts\n",
    "        pad = (ker_jpt - 1) // 2\n",
    "        self.conv_sa = nn.Conv1d(self.out_c, 1, ker_jpt, padding=pad)\n",
    "        nn.init.xavier_normal_(self.conv_sa.weight)\n",
    "        nn.init.constant_(self.conv_sa.bias, 0)\n",
    "\n",
    "        # channel attention: squeeze-and-excitation style bottleneck (ratio rr).\n",
    "        rr = 2\n",
    "        self.fc1c = nn.Linear(self.out_c, self.out_c // rr)\n",
    "        self.fc2c = nn.Linear(self.out_c // rr, self.out_c)\n",
    "        nn.init.kaiming_normal_(self.fc1c.weight)\n",
    "        nn.init.constant_(self.fc1c.bias, 0)\n",
    "        nn.init.constant_(self.fc2c.weight, 0)\n",
    "        nn.init.constant_(self.fc2c.bias, 0)\n",
    "\n",
    "    def _init_adaptive_layers(self):\n",
    "        # PA: learnable copy of the adjacency stack; alpha scales the\n",
    "        # data-dependent graph added to it in _adaptive_forward.\n",
    "        self.PA = nn.Parameter(self.A)\n",
    "        self.alpha = nn.Parameter(torch.zeros(1))\n",
    "        # conv_a/conv_b produce the two node embeddings whose product forms\n",
    "        # the data-dependent graph (C in the paper).\n",
    "        self.conv_a = nn.ModuleList()\n",
    "        self.conv_b = nn.ModuleList()\n",
    "        for i in range(self.num_subset):\n",
    "            self.conv_a.append(nn.Conv2d(self.in_c, self.inter_c, 1))\n",
    "            self.conv_b.append(nn.Conv2d(self.in_c, self.inter_c, 1))\n",
    "\n",
    "    def _attentive_forward(self, y):\n",
    "        \"\"\"Apply spatial, temporal, then channel attention as gated residuals.\"\"\"\n",
    "        # spatial attention: pool over time, gate each node (y: N C T V)\n",
    "        se = y.mean(-2)  # N C V\n",
    "        se1 = self.sigmoid(self.conv_sa(se))\n",
    "        y = y * se1.unsqueeze(-2) + y\n",
    "\n",
    "        # temporal attention: pool over nodes, gate each time step\n",
    "        se = y.mean(-1)\n",
    "        se1 = self.sigmoid(self.conv_ta(se))\n",
    "        y = y * se1.unsqueeze(-1) + y\n",
    "\n",
    "        # channel attention: global pool, bottleneck MLP, gate each channel\n",
    "        se = y.mean(-1).mean(-1)\n",
    "        se1 = self.relu(self.fc1c(se))\n",
    "        se2 = self.sigmoid(self.fc2c(se1))\n",
    "        y = y * se2.unsqueeze(-1).unsqueeze(-1) + y\n",
    "\n",
    "        return y\n",
    "\n",
    "    def _adaptive_forward(self, x, y):\n",
    "        # x: (N, C, T, V); y accumulates per-subset graph convolutions.\n",
    "        N, C, T, V = x.size()\n",
    "        \n",
    "        A = self.PA\n",
    "        for i in range(self.num_subset):\n",
    "            # A1: per-node embeddings flattened to (N, V, inter_c * T).\n",
    "            A1 = (\n",
    "                self.conv_a[i](x)\n",
    "                .permute(0, 3, 1, 2)\n",
    "                .contiguous()\n",
    "                .view(N, V, self.inter_c * T)\n",
    "            )\n",
    "            # A2: counterpart embeddings shaped (N, inter_c * T, V).\n",
    "            A2 = self.conv_b[i](x)\n",
    "            A2 = A2.contiguous().view(N, -1 , V)\n",
    "            # Data-dependent graph: tanh of the scaled embedding similarity.\n",
    "            A1 = self.tan(torch.matmul(A1, A2) / A1.size(-1))  # N V V\n",
    "            # Mix learned graph with the data-dependent one, weighted by alpha\n",
    "            # (alpha starts at 0, so training begins from the learned graph).\n",
    "            A1 = A[i] + A1 * self.alpha\n",
    "            # NOTE(review): assumes x is contiguous here; .view would raise otherwise.\n",
    "            A2 = x.view(N, C * T, V)\n",
    "            # Aggregate features over the graph, then per-subset 1x1 conv.\n",
    "            z = self.conv_d[i](torch.matmul(A2, A1).view(N, C, T, V))\n",
    "            y = z + y if y is not None else z\n",
    "\n",
    "        return y\n",
    "\n",
    "    def _non_adaptive_forward(self, x, y):\n",
    "        # Same aggregation as _adaptive_forward but with the fixed graph only.\n",
    "        N, C, T, V = x.size()\n",
    "        for i in range(self.num_subset):\n",
    "            A1 = self.A[i]\n",
    "            A2 = x.view(N, C * T, V)\n",
    "            z = self.conv_d[i](torch.matmul(A2, A1).view(N, C, T, V))\n",
    "            y = z + y if y is not None else z\n",
    "        return y\n",
    "\n",
    "    def forward(self, x):\n",
    "        # x: (N, C, T, V) — batch, channels, time, vertices.\n",
    "        N, C, T, V = x.size()\n",
    "\n",
    "        y = None\n",
    "        if self.adaptive:\n",
    "            y = self._adaptive_forward(x, y)\n",
    "        else:\n",
    "            y = self._non_adaptive_forward(x, y)\n",
    "        y = self.bn(y)\n",
    "        # Residual connection (1x1 projection when channel counts differ).\n",
    "        y += self.down(x)\n",
    "        y = self.relu(y)\n",
    "        if self.attention:\n",
    "            y = self._attentive_forward(y)\n",
    "        return y\n",
    "\n",
    "\n",
    "class AAGCN(nn.Module):\n",
    "    r\"\"\"One block of the Two-Stream Adaptive Graph Convolutional Network\n",
    "    (`paper <https://arxiv.org/abs/1805.07694>`_), following the authors'\n",
    "    reference code at https://github.com/lshiwjx/2s-AGCN. A graph convolution\n",
    "    (``UnitGCN``) is followed by a temporal convolution (``UnitTCN``) and a\n",
    "    residual connection.\n",
    "    Args:\n",
    "        in_channels (int): Number of input features.\n",
    "        out_channels (int): Number of output features.\n",
    "        edge_index (PyTorch LongTensor): Graph edge indices.\n",
    "        num_nodes (int): Number of nodes in the network.\n",
    "        stride (int, optional): Time strides during temporal convolution. (default: :obj:`1`)\n",
    "        residual (bool, optional): Applying residual connection. (default: :obj:`True`)\n",
    "        adaptive (bool, optional): Adaptive node connection weights. (default: :obj:`True`)\n",
    "        attention (bool, optional): Applying spatial-temporal-channel-attention.\n",
    "        (default: :obj:`True`)\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(\n",
    "        self,\n",
    "        in_channels: int,\n",
    "        out_channels: int,\n",
    "        edge_index: torch.LongTensor,\n",
    "        num_nodes: int,\n",
    "        stride: int = 1,\n",
    "        residual: bool = True,\n",
    "        adaptive: bool = True,\n",
    "        attention: bool = True,\n",
    "    ):\n",
    "        super(AAGCN, self).__init__()\n",
    "        self.edge_index = edge_index\n",
    "        self.num_nodes = num_nodes\n",
    "        self.attention = attention\n",
    "\n",
    "        # Build the normalized (3, V, V) adjacency stack from the edge list.\n",
    "        self.graph = GraphAAGCN(self.edge_index, self.num_nodes)\n",
    "        self.A = self.graph.A\n",
    "\n",
    "        # Spatial (graph) convolution followed by temporal convolution.\n",
    "        self.gcn1 = UnitGCN(\n",
    "            in_channels, out_channels, self.A, adaptive=adaptive, attention=attention\n",
    "        )\n",
    "        self.tcn1 = UnitTCN(out_channels, out_channels, stride=stride)\n",
    "        self.relu = nn.ReLU(inplace=True)\n",
    "\n",
    "        # Residual branch: disabled, identity, or a 1x1 temporal conv when the\n",
    "        # channel count or temporal stride changes the output shape.\n",
    "        if not residual:\n",
    "            self.residual = lambda x: 0\n",
    "        elif (in_channels == out_channels) and (stride == 1):\n",
    "            self.residual = lambda x: x\n",
    "        else:\n",
    "            self.residual = UnitTCN(\n",
    "                in_channels, out_channels, kernel_size=1, stride=stride\n",
    "            )\n",
    "\n",
    "    def forward(self, x):\n",
    "        \"\"\"Run one AAGCN block.\n",
    "        Arg types:\n",
    "            * **x** (PyTorch FloatTensor) - Input node features of shape\n",
    "            (B, F_in, T_in, N_nodes).\n",
    "        Return types:\n",
    "            * **y** (PyTorch FloatTensor) - Output node features of shape\n",
    "            (B, out_channels, T_in//stride, N_nodes).\n",
    "        \"\"\"\n",
    "        gcn_out = self.gcn1(x)\n",
    "        tcn_out = self.tcn1(gcn_out)\n",
    "        return self.relu(tcn_out + self.residual(x))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c7a950bf",
   "metadata": {},
   "source": [
    "### Multi-Layer AAGCN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "84e813ae",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 200/200 [06:12<00:00,  1.86s/it]\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAu5klEQVR4nO3dd3hc1bX38e8aSZYsy7IsS+5F7jbNYIzpYGqAEAOBBEhCgJDLJQkBEkjeEFJIubkphNwQSOi9JkAIJIRqesC4YOOGca+SLVdZtmRZmvX+cY7kseq4zIzk+X2eZ55TZ87SkXTW7L3P2dvcHRERSV+RVAcgIiKppUQgIpLmlAhERNKcEoGISJpTIhARSXNKBCIiaU6JQKSDMbM3zczN7LJUxyL7ByUCaTfMbGl4gTs31bG0B2Y2ITwfSxttehr4IzA3+VHJ/igz1QGICJhZlrvviGdfd7890fFIelGJQDoMMzvPzKaY2RYzW2Zmd5hZQbitk5ndY2ZlZrbdzFaY2QvhNjOzX4Xrtof7vGxmPVo4Thcz+52ZLTKzSjObYWaXhNuGmFnUzNabWVa4blD4zX19GEemmX3fzOaZ2VYzm2tmV8Z8/s3h/k+b2V/NrAr4cqMYJgBvhIv1n+/htl2qhszswXD5YTP7t5lVmdkrYVzPhDG8b2aDYz7/IDP7l5mtNbPycL+B++DXJB2QEoF0CGZ2FvAscEg43QJ8E3gy3OWrwNeBdcB9wDTgmHDbKcCNQF247W3gYKBrC4d7ALgh3P+vwHDgYTO72N0XA+8BhcCp4f5fDKdPuXsN8AvgN4ABjwM5wF1mdmmj45wPDAUeAcoabVsJPBPObyGoCvpjC/HW+wpQCWwATgNmAgXAYuCoMC7MrHd4Dk4D3gXeBD4PvGxm2W0cQ/ZDSgTSUVwdTn/l7pcCE4Ba4DNmNgLICrfPAh4DLgd6huvqty0kuLBfDfQDljc+iJn1BL4QLp7m7l8DfhgufzucPhxOLwyn9YngYTOzmFj/A2wFZofL32h0uMXAke5+pbu/FLvB3RcC9VVAG9z9One/rnG8jUxy9y8A94TLVQQX+/r4DwunlwDdCc7HcmAVUA6MAk5q4xiyH1IbgXQUJeF0HoC7rzOzdUBvYBDBxXkCcA5wEeDAa2Z2HvAK8GeCC2B9dctUYCJQ2sJxqtx9WTj/STgdFE7/CtwGnGtmo4BxwKfu/oGZFQN54X6XN/rsYY2WP3T32rZ+8N0wL5xuCqcL3T1qZlvC5S7htCScjg5frcUoaUAlAukolobTUQBh/X5RuG4ZUOvuFwL5BBe31wi+DX8eyCD4ll5AcKF7mODi/fVWjtM5ps58ZMxxcPfNwD+AbsDd4bb6UsI6glIAwBh3N3c3gv+1cY2Otb2Nn7kunMb7f1rXxnK9peH07/XxhTH2Iag6kzSjRCDt0W/M7IOY1wnAHeG2H5rZgwT12pnAq+7+KXCxmc0jqN+/lqANAIJvx8cASwiqjL4LHBuzbRfuvpbg9kyAV83sfuBX4XLs3Tr1F/7jCUofj4Tv95hYXwkbsJ8gqAa6ebfOAqwIp/3N7F4z+3+7+f6WPEbws58XNprfZWavhcfrtY+OIR2IqoakPRrRaLnQ3Z8zsy8CPwAuIGgQvYugERhgPsG38bMIGoFLgV8C/yQoBSwgaDQuCPe7k53f5hv7GsFF8TyCdoBFwK3u/njMPi8TNPD2Bt5y99j2hh8B64HLCBpwK4CPgKfi/PkBcPelZnYLQcnlCmAOQSP0XnH31WZ2IvA/wHjgOIK2gjsIzo2kGdPANCIi6U1VQyIiaU6JQEQkzSkRiIikOSUCEZE01+HuGioqKvKSkpJUhyEi0qFMmzZtnbsXN7etwyWCkpISpk6dmuowREQ6FDNb1tI2VQ2JiKQ5JQIRkTSnRCAikuaUCERE0pwSgYhImlMiEBFJc0oEIiJpLm0SwfyyLfz+lfmsr2xrLBARkfSS
Nolg4dpK/jRpIeu31qQ6FBGRdiVtEkFGxACordP4CyIisRKWCMxsgJm9YWZzzWyOmV3bzD4TzGyzmc0IXz9JVDz1iaAuqkQgIhIrkX0N1QLXu/t0M+sKTDOzV919bqP93nH3sxMYBwCZ9YlAI7KJiOwiYSUCdy919+nh/BZgHtAvUcdrS6ShRBBNVQgiIu1SUtoIzKwEOAyY3Mzmo81sppn928wObOH9V5rZVDObWl5evkcxNJQIlAdERHaR8ERgZnnAM8B17l7RaPN0YJC7jwH+BDzX3Ge4+93uPs7dxxUXN9uddpsaGotVIhAR2UVCE4GZZREkgcfc/dnG2929wt0rw/kXgSwzK0pELGosFhFpXiLvGjLgPmCeu9/awj69w/0ws/FhPOsTEY8SgYhI8xJ519CxwCXALDObEa77ITAQwN3vBC4AvmFmtUAVcJF7Ym7ryVQiEBFpVsISgbu/C1gb+9wO3J6oGGJFrL6NQIlARCRW2jxZnJkRJIKoEoGIyC7SJhFkqEQgItKs9EkEYRtBVE8Wi4jsIm0SQWYk+FHV6ZyIyK7SJhGEeUB3DYmINJI2iaC+RKBO50REdpU2iWBnFxNKBCIisdIuEdSp1zkRkV2kXyJQgUBEZBfplwjU+6iIyC7SJhFoPAIRkealTSJQiUBEpHnpkwjUxYSISLPSJhFEIoaZOp0TEWksbRIBBO0EKhGIiOwqrRJBxExdTIiINJJWiSAzokQgItJYWiWCiKqGRESaSKtEkBkxjUcgItJIWiWCjEhEJQIRkUbSLBFAnTobEhHZRVolgsxIROMRiIg0klaJIEN3DYmINJF2iUBtBCIiu0q7RKAuJkREdpVeicCMWvU+KiKyi/RKBBHTeAQiIo2kVSLIzDCNRyAi0khaJYKIqbFYRKSxtEoE6mJCRKSphCUCMxtgZm+Y2Vwzm2Nm1zazj5nZbWa20Mw+NrOxiYoHwttH9WSxiMguMhP42bXA9e4+3cy6AtPM7FV3nxuzz5nA8PB1JPCXcJoQGRGjplZtBCIisRJWInD3UnefHs5vAeYB/Rrtdg7wsAc+AArMrE+iYsqImLqYEBFppNVEYGYZZjbRzEbuzUHMrAQ4DJjcaFM/YEXM8kqaJgvM7Eozm2pmU8vLy/c4DnUxISLSVKuJwN3rgPuAo/f0AGaWBzwDXOfuFXvyGe5+t7uPc/dxxcXFexqKRigTEWlGPG0EjwGXmdkUoLR+pbtvaOuNZpZFkAQec/dnm9llFTAgZrl/uC4hVCIQEWkqnkRwDeDAxzHrvK33mpkRlCbmufutLez2PHC1mT1J0Ei82d1LW9h3r6nTORGRpuJJBG8TXPh317HAJcAsM5sRrvshMBDA3e8EXgTOAhYC24DL9+A4ccuIRNTpnIhII20mAnefsCcf7O7vAtbGPg58a08+f09kqkQgItJEm7ePmlk3M3vQzNaEr/vNrFsygtvXIqY2AhGRxuJ5juA24KtATfi6DPi/xIWUOLprSESkqXgSwZnAb919gLsPAH4HfDaxYSVGRFVDIiJN7MmTxR32SqpO50REmornrqEXge+Z2ZfC5X7Aw4kLKXGCTufU15CISKx4EsF1BCWHM8PlR4DvJCqgRNIDZSIiTbX1UFgG8GPgAXf/anJCSpxMdTonItJEPH0NnQsMTUo0CaYSgYhIU/FUDb0J/MTMstm1r6Hm+g5q15QIRESaiicR1Hf7cFs4NYI7hzISElECZUSMqEM06kQirT70LCKSNuJJBD9LeBRJkmHBxb/OnUjrvV+IiKSNeBqL84F/uvsbyQkpcTIywkQQdbI6XHlGRCQx0qqxODOyMxGIiEggrRqLI2HVkLqZEBHZKa0ai+tLBBqTQERkp3gSwc/pwP0LxcrICGrCVCIQEdkpnoFpbk5CHElRf9eQOp4TEdmpxcZiM5tuZqeZWZdwMJpR4frzzKzNgevbo/qqIZUIRER2au2uoUOB7kAOwWA0fcP1nYAOOUJZRv1d
Q3VKBCIi9doaj2C/umI2JAJVDYmINGirjeBrwGkECeFqMzsXGJ7ooBKlIRFENSaBiEi9thLBZ2Lmz42Z75BfqTPURiAi0kRrieCkpEWRJBl6slhEpIkWE4G7v5XMQJJBXUyIiDS1J4PXd1gRJQIRkSbSKhGoRCAi0lRaJQI1FouINNViG4GZ/aS1N7r7z/d9OInV0MWEEoGISIPW7hq6OWbeoWFIr/qraIdLBJkZKhGIiDTWWiL4Qjg9CTgR+ANBVdK1wPsJjish6scjUBuBiMhOrd0++gyAmd0C/I+73x8uG/D9tj7YzO4HzgbWuvtBzWyfAPwDWBKuejbR1U2ZkaBJRIlARGSneMYjyAZ+amb9CUoElxNfI/ODwO3Aw63s8467nx3HZ+0TaiwWEWkqnkRwA3AvUN94XE3QB1Gr3P1tMyvZ89D2vfpEoPEIRER2imdgmsfN7DXgqHDVB+6+dh8d/2gzmwmsBm5w9zn76HObpRKBiEhT8T5HcARwMrAQON3MxuyDY08HBrn7GOBPwHMt7WhmV5rZVDObWl5evscHzFTvoyIiTbSZCMzsOuAF4NtAb+DzwO/29sDuXuHuleH8i0CWmRW1sO/d7j7O3ccVFxfv8TF3djq3xx8hIrLfiadEcB3wt5jl14Cxe3tgM+sd3oGEmY0PY1m/t5/bGo1HICLSVDyNxd2BmcAF4XIukNHWm8zsCWACUGRmK4GfAlkA7n5n+HnfMLNaoAq4yD2xrbhqIxARaSqeRPAh8I1w/gbgOOC9tt7k7he3sf12gttLk6bhriElAhGRBvFUDX2b4Bu7AWcApQTVRR1OpkoEIiJNtFoiMLMMYARBA3F9xfp8d69LdGCJoPEIRESaarVEEF7w7wPGufvc8NUhkwBoPAIRkebE00bwGHCZmU0hqBYCwN03JCyqBFFjsYhIU/EkgmsIup7+OGadx/nedkXjEYiINBXPxfxtdo5B0KGpRCAi0lQ8fQ1NSEIcSWFmRExtBCIisdpMBOHTvxcBBwM54Wp39+sTGViiZEYi1Kn3URGRBvFUDd0BXEXT4So7ZCLIiJhKBCIiMeJ5oOw84PFw/lrgDeAXCYsowZQIRER2FU8i6A68E86XAk8DVyYsogRTIhAR2VU8VUNl4X5lBCOVdQIqEhlUImVGjFr1Pioi0iCeEsGPgEUEbQLVwGY6aF9DEHQzofEIRER2iuf20UdjFp9MYCxJkRkxjUcgIhIjnttHJzWz2t39lATEk3ARU4lARCRWPG0EE5pZ12FbWzMzVCIQEYkVTyKIHSS4O3AzMZ3PdTQZEVMXEyIiMeJpLPaYVwUwH7g0kUElUoYZUT1ZLCLSIJ4SwTqaVgXNT0AsSZERMVZvqubDJRs4oqQ7Ztb2m0RE9mPxlAjejnm9ATxAMGJZh9QjrxMzVmzii3e9z38WrU91OCIiKZdWvY8C3HXJOJaUb+XCu9/nxVmlHDusKNUhiYikVDy3j97fymZ39yv2YTwJl5edycH9uzFhZDEvz1nDz885qGGcAhGRdBRPG8FlNO15NHa+QyWCep85sDcvzirjo+UbGVdSmOpwRERSJp42glsIOp07FTg9nP89cAQwPnGhJdbJo3rSKSPCS7PLUh2KiEhKxZMIvgo85e6T3P014K/AF919mrtPS2x4idM1J4sTRhTz949WUVVTl+pwRERSJp6qoSrgf83sKIIqoYnAfnG7zZUnDOGLd73PEx8up1d+Du8sKKdbbhZXnTCU7l06pTo8EZGkiCcRfB14FLgkXC4L13V44wcXMr6kkN+9PJ+qHXXk52RSUV3LwMJcvnzkoFSHJyKSFG1WDbn768Ag4NDwVeLubyQ2rOS59tThVNfWcdERA5j6o9PonJXBorVbUx2WiEjStFoiMDPzQI2Z9QEOAnoBryYluiQ4dlgRU246lR5dOmFmDCnuwsLyylSHJSKSNC0mAjN7neD20FPN7Arg7phtP3X3XyYhvqQoystumB/WM4+p
SzemMBoRkeRqrWroIOBf4fxV4fQXwFvAf7X1wWZ2v5mtNbPZLWw3M7vNzBaa2cdmNnZ3Ak+UocV5rNpUpTuJRCRttJYIugHrzawbcBiw3N1vBh4Cesbx2Q8CZ7Sy/UxgePi6EvhLHJ+ZcEOL8wBYpOohEUkTrSWCpQTjFD8a7vdSuH4gcdw+6u5vAxta2eUc4OGwDeIDoCBsh0ipYT2VCEQkvbSWCH4MjAQ+S9AV9e/D9RcBH+yDY/cDVsQsrwzXpdSgHrlEDBaV684hEUkPLTYWu/vfwvGKhwDz3L3SzDKBLxE8S5A0ZnYlQfURAwcOTOixcrIyGFCYy6K1KhGISHpo9TkCd1/v7lPcvTJcrnX3me6+Zh8cexUwIGa5f7iuuTjudvdx7j6uuLi4uV32qWHFecxcuYltNbUJP5aISKrF09dQojwPfDW8e+goYLO7t4uxkC8eP5DVm6r42oNTdPeQiOz3EpYIzOwJ4H1gpJmtNLMrzOwqM6u/FfVFYDGwELgH+GaiYtldpx7Qiz9ceCiTl2zgl/+am+pwREQSKp6+hvaIu1/cxnYHvpWo4++tcw7tx6yVm7n33SVMHNOXI4f0SHVIIiIJ0WaJwMxGmtk9ZvaqmU0KX68nI7hU++7pIxhQ2Jkbnp5J2ebqVIcjIpIQ8VQNPUcwCtkpwISY134vt1Mmt110GBu37uDCu99nTYWSgYjsf+JJBIXAH4A+QHH4iufJ4v3CYQO788gV41m1sYqH/rM01eGIiOxz8SSCh4FhQB5BJ3T1r7Rx2MDujOrTlZkrN6U6FBGRfS6exuLrCS78Z8es8zjfu984dEAB//hoNdGoE4lYqsMREdln4rmYv02alQCaM6Z/AY9+sJzF6yoZ1rNrqsMREdln2kwE7j4hCXG0e4cOKABgxorNSgQisl9pMxGYmRF0NHcwkBOudne/PpGBtTdDivPIy85k5opNXHB4/1SHIyKyz8RTNXQHwcA0DtRXjjtB20HayIgYh/TvxqRP1lL60FQuPGIApx3QK9VhiYjstXjuGjoPeDycvxZ4g2CksrRzREkhqzZV8fan5Xzv6Zmsq9ye6pBERPZaPImgO/BOOF8KPE3YJXS6+caEobz23RP51zXHsXV7LT97Qf0QiUjHF08iKCOoQioD7iUYoCaVvZamTE5WBsN65jG8V1euOnEoL8xczWKNZCYiHVw8F/QfAYsI2gSqgc3AdQmMqUO4aHwwQM6/Zyd1jB4RkX0unttHHwUwswJgkLurYhzoV9CZQwcU8O/ZpXzrpGGpDkdEZI/F0/toiZlNIRi3+Hgze8vMfp740Nq/sw7uzexVFSxfvy3VoYiI7LF4qobuJBhU3oAowZPGFyUyqI7izIP6APDv2e1iYDURkT0STyI4Brg9ZnkRwfjCaW9AYS4H9MnntXn7YghnEZHUiCcRrAMOCud7EpQGVicsog7m1NE9mbZsIxu31qQ6FBGRPRJPIriH4OJvwGPAacBdiQyqIzlpVE+iDm8vKE91KCIieySeu4b+18xWA58NV/3T3R9ObFgdx5j+BfTo0onX563lnEP7pTocEZHdFteYAu7+EPBQgmPpkCIR46RRPXllThm1dVEyM9LyWTsR6cBavGqZWV0rr9pkBtnenTKqJxXVtUxbtjHVoYiI7LbWSgRG0MvoamBTUqLpoI4bXkRWhjFp/lqOHNIj1eGIiOyW1uoxHgC2AkXALOC77n5w/Ssp0XUQXXOyGD+4kEnz1qY6FBGR3dZiInD3K4A+wDeBAcBLZrbUzM5IVnAdycmjerFgbaWeMhaRDqfVlk133wosBpYANQSlA43T2IxTRvUEYNInerhMRDqW1hqLbzKzBcAkYBjwbaCPu/8tWcF1JCVFXRjRK4+npq4kGvVUhyMiErfWSgS/AIYQlAjWAROBx8zseTP7RzKC62i+MWEo80oreHmOuqYWkY6jrecIDBgavmLpK28zJo7px58mLeSWV+azqWoHRw3pweCi
LqkOS0SkVa2VCAa38hqS+NA6noyI8b3TR7KofCs3PjuLa574KNUhiYi0qcUSgbsvS2Yg+4szD+7DjJ+cxmOTl/O7l+fz6ZotjOil9nURab8S2h+CmZ1hZvPNbKGZ/aCZ7ZeZWbmZzQhfX09kPMlSkNuJC48YQGbEeGbaylSHIyLSqoQlAjPLAO4AzgQOAC42swOa2fUpdz80fN2bqHiSrSgvmwkji/n7R6uorYumOhwRkRYlskQwHljo7ovdvQZ4Ejgngcdrd84f25+1W7bz3qL1qQ5FRKRFiUwE/YAVMcsrw3WNnW9mH5vZ02Y2oLkPMrMrzWyqmU0tL+84/f6fPLon3TpnqXpIRNq1VPeZ/AJQ4u6HAK/SQlfX7n63u49z93HFxcVJDXBvZGdmMHFMX16eU0ZF9Y5UhyMi0qxEJoJVBH0U1esfrmvg7uvdfXu4eC9weALjSYnzD+/P9tooL35cirvzi3/O5f89/XGqwxIRaRDXwDR7aAow3MwGEySAi4Avxe5gZn3cvTRcnAjMS2A8KTGmfzeGFnfhttcXsHT9Nu57dwkAV544hKHFeSmOTkQkgSUCd68FrgZeJrjA/9Xd55jZz81sYrjbNWY2x8xmAtcAlyUqnlQxM/7vwsNw4M63FjG+pJDMiPHE5OWpDk1EBABz71i9RYwbN86nTp2a6jB227rK7TwxeTlfOWoQNz03i/cXref9G08hJysj1aGJSBows2nuPq65baluLE4bRXnZfPuU4XTv0omLxw9k47Yd6pxORNoFJYIUOHZoEX275fDs9FVt7ywikmBKBCkQiRjnje3HOwvKWVtRnepwRCTNKRGkyOfH9ifq8NwMlQpEJLWUCFJkaHEehw0s4P9eW8BX7p3M7FWbUx2SiKQpJYIU+u35hzBxTF8+KdvC1x6cwhpVE4lICigRpNDwXl359fmH8MgV46ncXsvlD0xh1aaqVIclImlGiaAdGN0nnz9/eSzLN2zjs7e9wx9e/ZSPlm+kLtqxnvEQkY5JD5S1I0vWbeXGZz9m8pINuENBbha9uubQtyCHuy4ZR6dM5W0R2TOtPVCWyL6GZDcNLurCk1cezcatNbyzcB3vLihn5cYq3phfznsL13HSqJ6pDlFE9kP6itkOde/SiYlj+vLbC8bw4OXjyc/J5IWZq1Mdlojsp5QI2rlOmRHOPKgPr8xdQ/WOulSHIyL7ISWCDuBzY/pSub1WfROJSEIoEXQARw0pZEBhZ657agbXPfmRSgYisk8pEXQAmRkRnvvmsVx5whCem7Gan70wJ9Uhich+RHcNdRA98rK58czRRMz4y5uLWLpuGxXVO1hTsZ2JY/ryk88dkOoQRaSDUomgg7n+tBGcP7Y/m6t20Cs/hwGFnXnwP0tYtn5rqkMTkQ5KJYIOJjMjwu+/OKZheW1FNcf99g3+8uYifn3+ISmMTEQ6KiWCDq5nfg4XHzGARycv550F6xjdJ59fnXcQPfNzUh2aiHQQSgT7gW+dPIwN23YQMXh5Thmn/eFtRvfpSkmPLpw4ophTRveKq3uKuqiTEbEkRCwi7Yn6GtrPLFizhf97fQFrNlczf80WtlTX0q+gM0cP7cG80gqGFOcxsLAzn66ppLK6ll752fzwrNH8+c1F/GPGKh79+pEc2LcbdVHnR8/NIj8ni++ePoLszIxU/2gishda62tIiWA/tqMuyrsL1vGnSQtYvG4rB/bNZ35ZJRu2bmdIcR6FuZ34eNUm3GF7bZTcThnkZWfyyBVH8q9Zpdz2+gIADuiTz6NfP5LCLp1S/BOJyJ5SIpAG0aizIxpt+IY/r7SCHzw7i1NH9eT0A3tzwZ3/YUt1LQAXHN6fMw7szTcfn84RJd156PLxZGYEVUxbt9fyn0Xr2Vy1g6qaWhyYOKYvBbnJSxbujpmqskTioUQgcVtTUc1Ls8tYtn4b3z9jJDlZGfxt6gq+9/THHDesiKOH9mD2qs28Ob+cqkZPOJf0yOXeS49gWM+8
hMe5qLySS+//kBvPHM1nD+mT8OPFY/WmKl6aXcb5h/enW+eshvU76qJM+mQtJwwvpnMnVbFJaigRyF7785sLefT9ZazeXE2/gs6cOLKYiWP60q+gMzlZGSwqr+Tqx6ezdXsd1506nCuOG9xQegCorYuSmRHB3amNOlkZEaJRZ/XmKmpqo/Tr3nmXdojl67fx0+dns3zDNkb3yQegsEsnThrVkwP75HPpA1OYV1pB3245TLphAovLtzKwRy552ZnMWLGJwUVddrkY70sfLtnA09NWcPVJwxnYI5fauijTlm3k6ic+onzLdoryOvHbCw7h5FG9ALh90gJueeVTBhd14ZsThjKgMJfxJYVE1DAvSaREIPuEu1NRVUt+58xmq2RKN1fx4+dm89q8tZw6uhe3f+kwFqyp5H9enMvkJRsY0D2XiuodVFbXcnD/bqzeVMWaiu0ADCjszE/PPpBuuVm8OKuUJz9cQWbEOGJwIQvWbiErEqGsopptNTtLIf99whDuensxYwYUMHPFJoq7ZnNAn3ze+rScQ/p345GvHcmjk5dx+KDuHDWkB6/NXcNbn5azYuM2Vm6s4vCB3fnZOQeSkxX/t/Ro1Dnzj+8wf80WsjMj9MzPZs3m7dTURelX0JmbPjua2yctZFF5JU9eeRR9unXmpFve5KB++azeVN0wFOlXjhrIL845qOE81tRGeW3eGkb3yWdwUZe9+TXttk3bathctYNBPXYed/Li9fQvzKVfQeekxlLfroXB2IHdm03mdVHnlTllHDe8iK45iUn2+yMlAkkad+fh95fx0+fnkNspg201dXTPzeL8sf0p3VxNt9wsumZnMm3ZRorysjlueBFZGWG3Geu3AZAZMT43pi/f+8xI+sZciKp31DFl6QbmrK6gZ9dsPj+2P5fcN5l3Fqzj4vEDmVdawadrtnDOof14cspyOmcFx8/tlMF/HT+EP76+gK45mQwszKW4azZvfVrOwf26cceXxuIO97+3hKE986ipjfLy7DJ2RKPsqIuycesOjhnag0uPKWHlxiquenQaPzxrFEvXb2Pr9lp6d8thRM+unDK6JwW5ndiwtYZz73iPjdtqyM/JorxyO69/90R65eewcuM2Hpu8nPveXcLIXl1ZvmEbRw4pZF3ldmavqiAjYlwwtj83njUqrvaW2rooH63YRNecTAYXdWn27q7K7bXc/fZiDhtYQK+uObw0p4wD+uSTn5PJn99cxPuL12PAXZcczimje3HvO4v55b/mUdw1m7suOZwPl2zgwL75HD+8uM14VmzYRs/87DbvMpu2bAP/+riMa08ZTrfcnRfzHz83m0c+WAYEVY3/uPq4Jsnglpfnc/sbCzl+eBEPXHbELiXPjioZt24rEUjSvTirlNfmrWHswO6cfUifNi9qVTV1vL2gnOzMCAf0yY/7gbiNW2tYun4rhw3svsv6B99bwv3vLeWaU4bz+1fmU7q5mqOGFPLg5eMbSgCvzl3Dd56aQf2/37YddQ3jRB/QJ58eeZ3IiBi5nTJ445OgTSQnK0Lv/Bxe++6JrV6AFpVXcusrn7Jley3nj+3HOYf2a9jm7vz25flMXryekb27MumTtdTWOTd9djRzVlfw0H+WUpDbieOHFzGoRy5fPbqEp6as4OU5ZfzkcwcwNvxZ3Z3r/zaTZ6evAoKhTU8a2ZMl67aG41j0ZmTvrvzmpfnMXLGp2Th75WdzweH9eWfBOj4p28KQoi58UraFk0f15OOVm1hXWQME42I8esWRjB9c2OLP/NLsUq56dDp52UGyXbtlO585sBffOW0ERXnZDftN+mQN33h0Ottro5T0yOXsQ/pSUb2DLtmZ/OXNRVx2TAnjBxdyzRMfccKIYm7/0mHkdsqkekcdT3y4nJ+9MJeD+3Vj1qrNfPnIgfz0cwfu8pzMPW8v5oH3llDUNZuJY/py2TElrf6uVm+q4pU5ZZw3dmfbzsatNWzbUUe/gs48Pnk5s1dv5mcTDyQr/Jya
2igrN26jrKKa7bVRDu1fQPc9uKuuti7K3e8s5k+vL+T600dw2TElvDSnjCNKCunV6H8gGnVq6qK7VYKNpUQgaW1+2RaemrKC75w2vElVwooN2/j+0x8TdeeWL4yhpi6KO00avDdX7eBvU1fw3IxVXHPycE4/sPc+i68u6hg0tBnMWb2ZX704j2Xrt7FqUxUZZtRGnbzsTKp21DGkqAu1Uad7bhbTl2/iv08YwoH9uvHq3DW8s6CcET27srlqB/PXbAGCi/gfLzyUqMOGbTWceVBvZq7YxPqtNUwc05ecrAw2bq3hhr/NpKYuyokjirn82MEsLq/kySkrOP2AXtz491mUV2znnMP6kmHGvLIt9O/emcrqWuaWVnDCiGKen7GawUVdOKhfPqWbq8ntlMHLc9aQETFOGdWTrxw1iHWV27n+rzMZ1acr15w8nJuem836yu10zspga00dhw4o4K//fTSdMiM88v5SfvyPOeRlZzKkuAtL1m1lS3UtRw0p5KGvjefWVz7lrrcXM7JXV44Y3J0u2cHzsXe9tZgjSrpTF3WmL9/EsJ55nDiimBG98uiVn0Nup0wGFHamU0aEP7+5iEc+WBa0UxV05pfnHkSPvE58/aGpbKmu5StHDeSed5YA8Pmx/fj5OQfx71ml/OKfc6kI764DyMvO5PjhRUxesoG+BTkcO7SIf35cSkX1Doq7ZoPDoB65HDawO89MX0nUnXMP7cdLs8tYsLaSfgWdWbWpiiFFXVi8bit52Zn81/FDOGRANzZtq2Haso28MmcNlx5TwrdOGrZHf2dKBCId1CdlFdzxxiKOHtKDs8f04dZXPqVsczVmsHBtJace0Ivvf2Zks202KzZsY/G6rfTv3pmhxXt3J9fKjdv45T/n8dan5ZjByN5dWbWxiuysCMOK83h34Tq65mTxz28ft0t13qLySh55fxkvzFzN+q1B6eKoIYXc89VxdM3JorYuihNUBy7fsI2ivOyGCzrAlKUb+OuUFZRVVNO/ey5nH9KHY4b2aPh5X5u7hl+/9AmbttVQUVVLTV2UU0f34i9fGUtmxHhxVhkP/mcJH6/czPba6C4/U0bEcHcuOLw/px3Qm5+9MIeVG4M2nD7dcujZNZuZKzczdmABxw4r4k+TFja8d3xJIReNH0Dv/BwwePSDZXy4ZANHDy3ik9IKFqyt5LhhRQwt7tJQqpqxYhOrNlVx6IACIgbTl29iVO+uXHvKcE4e3ZP/fmQaH6/czPWnj+DlOWt4+9PyhuN1zspgwshiLho/kBNHtF1F15yUJQIzOwP4I5AB3Ovuv260PRt4GDgcWA9c6O5LW/tMJQKR1KmpjWJGQxVJvfIt24m6N6nOqFe9o46/f7SKpeu28p3TRuxx9UZrdtRFWbWxioGFuU3uyKqti1K6uZq1W4IbDhatraR0czVfGDegofRXvaOOdxasY15pBV8Y15/uuZ14ZvpKzjyoD91zs3h5zhpWbNhGcVjl1NJdX+5ORXVtk7aNaNRZu2U7vfKDarI1FcF8fVKLRoM76uqrudZXbmdR+Va652YxqEeXuLqJaU1KEoGZZQCfAqcBK4EpwMXuPjdmn28Ch7j7VWZ2EXCeu1/Y2ucqEYiI7L7WEkEim9vHAwvdfbG71wBPAuc02ucc4KFw/mngFNOjoiIiSZXIRNAPWBGzvDJc1+w+7l4LbAZ6NP4gM7vSzKaa2dTy8vLGm0VEZC90iBtw3f1udx/n7uOKi/esoURERJqXyESwChgQs9w/XNfsPmaWCXQjaDQWEZEkSWQimAIMN7PBZtYJuAh4vtE+zwOXhvMXAJO8o93PKiLSwSVshDJ3rzWzq4GXCW4fvd/d55jZz4Gp7v48cB/wiJktBDYQJAsREUmihA5V6e4vAi82WveTmPlq4AuJjEFERFrXIRqLRUQkcTpcFxNmVg4s24O3FgHr9nE4+4Li2n3tNTbFtXvaa1zQfmPbm7gGuXuzt112uESwp8xsaktP1aWS
4tp97TU2xbV72mtc0H5jS1RcqhoSEUlzSgQiImkunRLB3akOoAWKa/e119gU1+5pr3FB+40tIXGlTRuBiIg0L51KBCIi0gwlAhGRNLffJwIzO8PM5pvZQjP7QYpjGWBmb5jZXDObY2bXhutvNrNVZjYjfJ2VgtiWmtms8PhTw3WFZvaqmS0Ip93b+px9HNPImHMyw8wqzOy6VJ0vM7vfzNaa2eyYdc2eIwvcFv7dfWxmY5Mc1+/M7JPw2H83s4JwfYmZVcWcuzuTHFeLvzszuzE8X/PN7DNJjuupmJiWmtmMcH0yz1dL14fE/425+377IujjaBEwBOgEzAQOSGE8fYCx4XxXghHcDgBuBm5I8blaChQ1Wvdb4Afh/A+A36T4d1kGDErV+QJOAMYCs9s6R8BZwL8BA44CJic5rtOBzHD+NzFxlcTul4Lz1ezvLvw/mAlkA4PD/9uMZMXVaPvvgZ+k4Hy1dH1I+N/Y/l4iiGeUtKRx91J3nx7ObwHm0XSwnvYkdgS5h4BzUxcKpwCL3H1PnirfJ9z9bYLOEWO1dI7OAR72wAdAgZn1SVZc7v6KB4M9AXxA0A18UrVwvlpyDvCku2939yXAQoL/36TGZWYGfBF4IhHHbk0r14eE/43t74kgnlHSUsLMSoDDgMnhqqvD4t39ya6CCTnwiplNM7Mrw3W93L00nC8DeqUgrnoXses/Z6rPV72WzlF7+tv7GsE3x3qDzewjM3vLzI5PQTzN/e7ay/k6Hljj7gti1iX9fDW6PiT8b2x/TwTtkpnlAc8A17l7BfAXYChwKFBKUDRNtuPcfSxwJvAtMzshdqMHZdGU3GtswXgWE4G/havaw/lqIpXnqCVmdhNQCzwWrioFBrr7YcB3gcfNLD+JIbXL312Mi9n1C0fSz1cz14cGifob298TQTyjpCWVmWUR/JIfc/dnAdx9jbvXuXsUuIcEFYlb4+6rwula4O9hDGvqi5rhdG2y4wqdCUx39zVhjCk/XzFaOkcp/9szs8uAs4EvhxcQwqqX9eH8NIK6+BHJiqmV3117OF+ZwOeBp+rXJft8NXd9IAl/Y/t7IohnlLSkCesf7wPmufutMetj6/XOA2Y3fm+C4+piZl3r5wkaGmez6whylwL/SGZcMXb5lpbq89VIS+foeeCr4Z0dRwGbY4r3CWdmZwDfBya6+7aY9cVmlhHODwGGA4uTGFdLv7vngYvMLNvMBodxfZisuEKnAp+4+8r6Fck8Xy1dH0jG31gyWsNT+SJoWf+UIJPflOJYjiMo1n0MzAhfZwGPALPC9c8DfZIc1xCCOzZmAnPqzxPQA3gdWAC8BhSm4Jx1IRjHulvMupScL4JkVArsIKiPvaKlc0RwJ8cd4d/dLGBckuNaSFB/XP93dme47/nh73gGMB34XJLjavF3B9wUnq/5wJnJjCtc/yBwVaN9k3m+Wro+JPxvTF1MiIikuf29akhERNqgRCAikuaUCERE0pwSgYhImlMiEBFJc0oEkvbCHia90WtTAo5zc/jZF+zrzxbZG5mpDkCkHfmIoKdHgJpUBiKSTCoRiOxUTvDAzmvA62Z2WfgN/tGwL/p1ZnZD/c5m9l9hH/FbzexDMzsuXN/JzP7XzJaFfdm/3eg4J1kwVkC5mX0hfM+xYUds1eH6pPd+KelLiUBkp9MJkkE5u3ancRJBZ2llwO/MbIyZnUwwkHg5QWdkA4HnzawHQZ/xPyB4IvVqgidSY50Sfl434Nfhuu8TPOH9LeDnwLp9/cOJtERVQyI7TQZ+FM5vBA4O5+9397vMrBa4FziR4MIP8FN3f9XMBgI/JBgg5HMEXQVc6EG/8o3d6u53m9k3CPqugaD7gLMJuhSYTtB1gEhSqEQgstM6d38tfE2LWW+NprG80TQe9YOi1LLzf/D/EfR8uYCgT56pFg4vKZJoKhGI7NTXzC6KWc4Kp5eb2XLgmnD5LYKOwK4HfmZmQwku3hsJRgN7ARgHPGVmTwOHuPt1
bRz7RmA7QXXSCoLhGvOBTXv5M4m0SYlAZKfD2HVQku+E09eBbwK9ge+5+0yAcCS37wO3AnOB77j7ejP7NdAZ+DJwMvF1pxwFvh0eYz3BmLnL9/onEomDeh8VaUE4sMsDBBf/W1IcjkjCqI1ARCTNqUQgIpLmVCIQEUlzSgQiImlOiUBEJM0pEYiIpDklAhGRNPf/AZOfklGm0kbBAAAAAElFTkSuQmCC",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train MSE: 0.0515\n",
      "Test MSE: 1.1463\n"
     ]
    }
   ],
   "source": [
    "# --- Imports (AAGCN itself is defined in an earlier cell of this notebook) ---\n",
    "import numpy as np\n",
    "from tqdm import tqdm\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "import torch\n",
    "from torch import nn\n",
    "# from torch_geometric_temporal.nn.attention import AAGCN\n",
    "from torch_geometric_temporal.dataset import ChickenpoxDatasetLoader\n",
    "from torch_geometric_temporal.signal import temporal_signal_split\n",
    "\n",
    "# NOTE(review): torch_geometric.data.DataLoader is the deprecated location in\n",
    "# PyG >= 2.0; the modern import is torch_geometric.loader.DataLoader -- confirm\n",
    "# against the installed PyG version before changing.\n",
    "from torch_geometric.data import DataLoader\n",
    "\n",
    "# Hungarian county-level weekly chickenpox case counts.\n",
    "loader = ChickenpoxDatasetLoader()\n",
    "\n",
    "lags = 10        # number of past weeks used as node features\n",
    "stride = 1       # NOTE(review): unused below -- the AAGCN layers hardcode stride=1\n",
    "epochs = 200\n",
    "batch_size = 32\n",
    "\n",
    "dataset = loader.get_dataset(lags)\n",
    "\n",
    "# The graph topology is static across snapshots, so read it from one sample.\n",
    "sample = next(iter(dataset))\n",
    "num_nodes = sample.x.size(0)\n",
    "edge_index = sample.edge_index\n",
    "\n",
    "# Chronological split: first 40% of snapshots for training, the rest for testing.\n",
    "train_dataset, test_dataset = temporal_signal_split(dataset, train_ratio=0.4)\n",
    "\n",
    "train_loader = DataLoader(list(train_dataset), batch_size=batch_size, shuffle=True)\n",
    "test_loader = DataLoader(list(test_dataset), batch_size=batch_size, shuffle=False)\n",
    "\n",
    "### MODEL DEFINITION\n",
    "class AttentionGCN(nn.Module):\n",
    "    \"\"\"Stack of AAGCN blocks followed by a per-node linear head.\n",
    "\n",
    "    Consumes a PyG batch whose ``x`` is (batch*num_nodes, lags) and returns\n",
    "    flat predictions of shape (batch*num_nodes,), matching ``window.y``.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        super(AttentionGCN, self).__init__()\n",
    "\n",
    "        layer_out_channels = [32, 32, 64, 64]\n",
    "\n",
    "        self.aagcn = nn.ModuleList()\n",
    "        for i, out_dim in enumerate(layer_out_channels):\n",
    "            in_dim = 1 if i == 0 else layer_out_channels[i-1]\n",
    "            self.aagcn.append(\n",
    "                AAGCN(\n",
    "                in_channels=in_dim,\n",
    "                out_channels=out_dim,\n",
    "                edge_index=edge_index,\n",
    "                num_nodes=num_nodes,\n",
    "                stride=1,\n",
    "                residual=True,\n",
    "                adaptive=True,\n",
    "                attention=True\n",
    "                )\n",
    "            )\n",
    "\n",
    "        # Head maps the last AAGCN channel dimension to one scalar per node.\n",
    "        # (Indexing the list avoids relying on the leaked loop variable.)\n",
    "        self.out = nn.Linear(layer_out_channels[-1], 1)\n",
    "\n",
    "        self.bn = nn.BatchNorm1d(lags)\n",
    "        self.dropout = nn.Dropout(0.2)\n",
    "\n",
    "    def forward(self, window):\n",
    "        # window.x is (batch*num_nodes, lags), node-major per graph (PyG\n",
    "        # batching concatenates along the node dimension).\n",
    "        # BUGFIX: the previous view(-1, lags, num_nodes) interleaved node and\n",
    "        # lag entries; reshape to (B, N, T) first, then swap to (B, T, N).\n",
    "        x = window.x.view(-1, num_nodes, lags).transpose(1, 2).contiguous()\n",
    "        x = self.bn(x).unsqueeze(1)  # (B, C=1, T=lags, V=num_nodes) as AAGCN expects\n",
    "\n",
    "        for layer in self.aagcn:\n",
    "            x = layer(x)\n",
    "\n",
    "        x = x.mean(2).permute(0, 2, 1)  # average over time -> (B, V, C)\n",
    "        x = self.dropout(x)\n",
    "        out = self.out(x).flatten()  # (B*V,) matches window.y\n",
    "        return out\n",
    "    \n",
    "model = AttentionGCN()\n",
    "optimizer = torch.optim.Adam(model.parameters(), lr=0.01)\n",
    "\n",
    "### TRAIN\n",
    "model.train()\n",
    "\n",
    "loss_history = []\n",
    "for _ in tqdm(range(epochs)):\n",
    "    batch_losses = []\n",
    "    for window in train_loader:\n",
    "        optimizer.zero_grad()\n",
    "        prediction = model(window)\n",
    "\n",
    "        # Predictions are flat (batch*num_nodes,) and must line up with y.\n",
    "        assert prediction.shape == window.y.shape\n",
    "        mse = torch.mean((prediction - window.y)**2)\n",
    "        batch_losses.append(mse.item())\n",
    "\n",
    "        mse.backward()\n",
    "        optimizer.step()\n",
    "    # Mean training MSE for this epoch (used by the plot and final report).\n",
    "    total_loss = sum(batch_losses) / len(batch_losses)\n",
    "    loss_history.append(total_loss)\n",
    "\n",
    "### TEST\n",
    "# Evaluate on the held-out chronological tail. Accumulate a plain Python\n",
    "# float via .item() rather than summing tensors, so the final report\n",
    "# formats a scalar regardless of device.\n",
    "model.eval()\n",
    "loss = 0.0\n",
    "with torch.no_grad():\n",
    "    for i, window in enumerate(test_loader):\n",
    "        y_pred = model(window)\n",
    "\n",
    "        assert y_pred.shape == window.y.shape\n",
    "        loss += torch.mean((y_pred - window.y)**2).item()\n",
    "    loss /= i+1  # average over the number of test batches\n",
    "\n",
    "### RESULTS PLOT\n",
    "fig, ax = plt.subplots()\n",
    "\n",
    "x_ticks = np.arange(1, epochs+1)\n",
    "ax.plot(x_ticks, loss_history)\n",
    "\n",
    "# figure labels\n",
    "ax.set_title('Loss over time', fontweight='bold')\n",
    "ax.set_xlabel('Epochs', fontweight='bold')\n",
    "ax.set_ylabel('Mean Squared Error', fontweight='bold')\n",
    "plt.show()\n",
    "\n",
    "print(\"Train MSE: {:.4f}\".format(total_loss))\n",
    "print(\"Test MSE: {:.4f}\".format(loss))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "45779ed0",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
