{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "import os,sys,inspect\n",
    "import joblib\n",
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import h5py\n",
    "import scipy.sparse.linalg as la\n",
    "import scipy.sparse as sp\n",
    "import scipy\n",
    "import time\n",
    "\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib.backends.backend_pdf import PdfPages\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "# Path to the synthetic Netflix dataset (MATLAB '-v7.3' / HDF5 file).\n",
    "path_dataset = '../../datasets/synthetic_netflix/synthetic_netflix.mat'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "# auxiliary functions:\n",
    "\n",
    "# import matlab files in python\n",
    "def load_matlab_file(path_file, name_field):\n",
    "    \"\"\"\n",
    "    load '.mat' files\n",
    "    inputs:\n",
    "        path_file, string containing the file path\n",
    "        name_field, string containing the field name (default='shape')\n",
    "    warning:\n",
    "        '.mat' files should be saved in the '-v7.3' format\n",
    "    \"\"\"\n",
    "    db = h5py.File(path_file, 'r')\n",
    "    try:\n",
    "        ds = db[name_field]\n",
    "        try:\n",
    "            # MATLAB stores sparse matrices as an HDF5 group with 'ir'/'jc'/'data' fields (CSC layout)\n",
    "            if 'ir' in ds.keys():\n",
    "                data = np.asarray(ds['data'])\n",
    "                ir   = np.asarray(ds['ir'])\n",
    "                jc   = np.asarray(ds['jc'])\n",
    "                out  = sp.csc_matrix((data, ir, jc)).astype(np.float32)\n",
    "            else:\n",
    "                # a group without 'ir' previously fell through and crashed later with a NameError\n",
    "                raise ValueError('field %s is not a sparse matrix' % name_field)\n",
    "        except AttributeError:\n",
    "            # Transpose in case is a dense matrix because of the row- vs column- major ordering between python and matlab\n",
    "            out = np.asarray(ds).astype(np.float32).T\n",
    "    finally:\n",
    "        # close the file even when an unexpected exception is raised\n",
    "        db.close()\n",
    "\n",
    "    return out"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "#loading of the required matrices\n",
    "M = load_matlab_file(path_dataset, 'M')\n",
    "O = load_matlab_file(path_dataset, 'O')\n",
    "Otraining = load_matlab_file(path_dataset, 'Otraining')\n",
    "Otest = load_matlab_file(path_dataset, 'Otest')\n",
    "Wrow = load_matlab_file(path_dataset, 'Wrow') #sparse\n",
    "Wcol = load_matlab_file(path_dataset, 'Wcol') #sparse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Num data samples: 2250\n",
      "Num train samples: 2250\n",
      "Num train+data samples: 4500\n"
     ]
    }
   ],
   "source": [
    "np.random.seed(0)\n",
    "\n",
    "pos_tr_samples = np.where(Otraining)\n",
    "\n",
    "num_tr_samples = len(pos_tr_samples[0])\n",
    "# materialize the indices: on Python 3 range() is lazy and np.random.shuffle would fail on it\n",
    "list_idx = list(range(num_tr_samples))\n",
    "np.random.shuffle(list_idx)\n",
    "idx_data = list_idx[:num_tr_samples//2]\n",
    "idx_train = list_idx[num_tr_samples//2:]\n",
    "\n",
    "pos_data_samples = (pos_tr_samples[0][idx_data], pos_tr_samples[1][idx_data])\n",
    "pos_tr_samples = (pos_tr_samples[0][idx_train], pos_tr_samples[1][idx_train])\n",
    "\n",
    "Odata = np.zeros(M.shape)\n",
    "Otraining = np.zeros(M.shape)\n",
    "\n",
    "for k in range(len(pos_data_samples[0])):\n",
    "    Odata[pos_data_samples[0][k], pos_data_samples[1][k]] = 1\n",
    "    \n",
    "for k in range(len(pos_tr_samples[0])):\n",
    "    Otraining[pos_tr_samples[0][k], pos_tr_samples[1][k]] = 1\n",
    "    \n",
    "print('Num data samples: %d' % (np.sum(Odata),))\n",
    "print('Num train samples: %d' % (np.sum(Otraining),))\n",
    "print('Num train+data samples: %d' % (np.sum(Odata+Otraining),))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "#computation of the normalized laplacians: L = I - D^(-1/2) W D^(-1/2)\n",
    "Lrow = sp.csgraph.laplacian(Wrow, normed=True) #ATTENTION! Here we do not consider self-rings in the laplacian computation\n",
    "Lcol = sp.csgraph.laplacian(Wcol, normed=True) #ATTENTION! Here we do not consider self-rings in the laplacian computation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "class Train_test_matrix_completion:\n",
    "    \n",
    "    \"\"\"\n",
    "    The neural network model.\n",
    "    \"\"\"\n",
    "    \n",
    "    def frobenius_norm(self, tensor):\n",
    "        # Frobenius norm of a tensor: sqrt of the sum of its squared entries.\n",
    "        return tf.sqrt(tf.reduce_sum(tf.square(tensor)))\n",
    "    \n",
    "    def chebyshev_polynomials(self, x, n): #compute the chebyshev polynomial of order n of a potentially N-dimensional tensor \n",
    "        #iterative three-term recurrence T_n = 2*x*T_(n-1) - T_(n-2);\n",
    "        #the previous double recursion re-built both subtrees, growing the graph exponentially in n\n",
    "        t_prev = tf.cast(tf.diag(tf.ones([tf.shape(x)[0],])), 'float32') #T_0 = I\n",
    "        if (n==0):\n",
    "            return t_prev\n",
    "        t_curr = tf.cast(x, 'float32') #T_1 = x\n",
    "        for _ in range(n - 1):\n",
    "            t_prev, t_curr = t_curr, 2*tf.matmul(x, t_curr) - t_prev\n",
    "        return t_curr\n",
    "        \n",
    "    def chebyshev_polynomials_mul(self, L, x, n): #apply the chebyshev polynomial of order n of sparse L to the tensor x \n",
    "        #iterative recurrence T_n(L)x = 2*L*(T_(n-1)(L)x) - T_(n-2)(L)x;\n",
    "        #avoids the exponential number of sparse matmuls of the naive double recursion\n",
    "        t_prev = x #T_0(L)x = x\n",
    "        if (n==0):\n",
    "            return t_prev\n",
    "        t_curr = tf.sparse_tensor_dense_matmul(L, x) #T_1(L)x = Lx\n",
    "        for _ in range(n - 1):\n",
    "            t_prev, t_curr = t_curr, 2*tf.sparse_tensor_dense_matmul(L, t_curr) - t_prev\n",
    "        return t_curr\n",
    "    \n",
    "    def convert_coo_to_sparse_tensor(self, L):\n",
    "        # Turn a scipy COO matrix into a tf.SparseTensor with canonically ordered indices.\n",
    "        coo_indices = np.column_stack((L.row, L.col))\n",
    "        sparse_mat = tf.SparseTensor(coo_indices, L.data, L.shape)\n",
    "        return tf.sparse_reorder(sparse_mat)\n",
    "    \n",
    "    def bid_conv(self, W, b):\n",
    "        # One bidimensional graph convolution of the current estimate self.X:\n",
    "        # features T_kr(Lr) X T_kc(Lc) for every pair of Chebyshev orders,\n",
    "        # linearly mixed by W, shifted by b and passed through a ReLU.\n",
    "        X = tf.reshape(self.X, [tf.shape(self.M)[0], tf.shape(self.M)[1]])\n",
    "        \n",
    "        #collect one feature map per (row order, col order) pair\n",
    "        feat = []\n",
    "        for k_r in range(self.ord_row):\n",
    "            row_lap = self.list_row_cheb_pol[k_r]\n",
    "            for k_c in range(self.ord_col):\n",
    "                col_lap = self.list_col_cheb_pol[k_c]\n",
    "                #dense implementation of T_kr(Lr) X T_kc(Lc)\n",
    "                c_feat = tf.matmul(row_lap, X, a_is_sparse=False)\n",
    "                c_feat = tf.matmul(c_feat, col_lap, b_is_sparse=False)\n",
    "                feat.append(c_feat)\n",
    "        all_feat = tf.reshape(tf.stack(feat, 2), [-1, self.ord_row*self.ord_col])\n",
    "        conv_feat = tf.nn.relu(tf.matmul(all_feat, W) + b)\n",
    "        return tf.reshape(conv_feat, [tf.shape(self.M)[0], tf.shape(self.M)[1], self.n_conv_feat])\n",
    "    \n",
    "    def compute_cheb_polynomials_sparse(self, L, ord_cheb, list_cheb):\n",
    "        # Append T_0..T_{ord_cheb-1} of L to list_cheb as tf.SparseTensor's.\n",
    "        # A dense copy of each polynomial is kept to evaluate the recurrence;\n",
    "        # the dense->sparse conversion was copy-pasted in all three branches and is now factored out.\n",
    "        list_dense = list()\n",
    "        for k in range(ord_cheb):\n",
    "            if (k==0):\n",
    "                mat = tf.diag(tf.ones([tf.shape(L)[0],]))\n",
    "            elif (k==1):\n",
    "                mat = L\n",
    "            else:\n",
    "                mat = 2*tf.matmul(L, list_dense[k-1])  - list_dense[k-2]\n",
    "            list_dense.append(mat)\n",
    "            list_cheb.append(self._dense_to_sparse(mat))\n",
    "    \n",
    "    def _dense_to_sparse(self, mat):\n",
    "        # Convert a dense tensor to a tf.SparseTensor holding its non-zero entries.\n",
    "        # Use tf.shape(mat, out_type=tf.int64) because the tensor shape is dynamic.\n",
    "        idx = tf.where(tf.not_equal(mat, 0))\n",
    "        return tf.SparseTensor(idx, tf.gather_nd(mat, idx), tf.shape(mat, out_type=tf.int64))\n",
    "                \n",
    "    def compute_cheb_polynomials(self, L, ord_cheb, list_cheb):\n",
    "        # Append the dense float32 Chebyshev polynomials T_0..T_{ord_cheb-1} of L to list_cheb.\n",
    "        for order in range(ord_cheb):\n",
    "            if order == 0:\n",
    "                list_cheb.append(tf.cast(tf.diag(tf.ones([tf.shape(L)[0],])), 'float32'))\n",
    "            elif order == 1:\n",
    "                list_cheb.append(tf.cast(L, 'float32'))\n",
    "            else:\n",
    "                list_cheb.append(2*tf.matmul(L, list_cheb[order-1]) - list_cheb[order-2])\n",
    "        \n",
    "    \n",
    "    def __init__(self, M, Lr, Lc, Odata, Otraining, Otest, order_chebyshev_col = 5, order_chebyshev_row = 5,\n",
    "                 num_iterations = 10, gamma=1.0, learning_rate=1e-4, idx_gpu = '/gpu:2'):\n",
    "        \n",
    "        #order of the spectral filters\n",
    "        self.ord_col = order_chebyshev_col \n",
    "        self.ord_row = order_chebyshev_row\n",
    "        self.num_iterations = num_iterations\n",
    "        self.n_conv_feat = 32\n",
    "        \n",
    "        with tf.Graph().as_default() as g:\n",
    "                tf.logging.set_verbosity(tf.logging.ERROR)\n",
    "                self.graph = g\n",
    "                tf.set_random_seed(0)\n",
    "                with tf.device(idx_gpu):\n",
    "                    \n",
    "                        #loading of the laplacians\n",
    "                        self.Lr = tf.cast(tf.sparse_tensor_to_dense(self.convert_coo_to_sparse_tensor(Lr)), 'float32')\n",
    "                        self.Lc = tf.cast(tf.sparse_tensor_to_dense(self.convert_coo_to_sparse_tensor(Lc)), 'float32')\n",
    "                        \n",
    "                        self.norm_Lr = self.Lr - tf.diag(tf.ones([Lr.shape[0], ]))\n",
    "                        self.norm_Lc = self.Lc - tf.diag(tf.ones([Lc.shape[0], ]))\n",
    "                        #compute all chebyshev polynomials a priori\n",
    "                        self.list_row_cheb_pol = list()\n",
    "                        self.compute_cheb_polynomials(self.norm_Lr, self.ord_row, self.list_row_cheb_pol)\n",
    "                        self.list_col_cheb_pol = list()\n",
    "                        self.compute_cheb_polynomials(self.norm_Lc, self.ord_col, self.list_col_cheb_pol)\n",
    "                        \n",
    "                        #definition of constant matrices\n",
    "                        self.M = tf.constant(M, dtype=tf.float32)\n",
    "                        self.Odata = tf.constant(Odata, dtype=tf.float32)\n",
    "                        self.Otraining = tf.constant(Otraining, dtype=tf.float32) #training mask\n",
    "                        self.Otest = tf.constant(Otest, dtype=tf.float32) #test mask\n",
    "                         \n",
    "                        #definition of the NN variables\n",
    "                        self.W_conv = tf.get_variable(\"W_conv\", shape=[self.ord_col*self.ord_row, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.b_conv = tf.Variable(tf.zeros([self.n_conv_feat,]))\n",
    "                        \n",
    "                        #recurrent N parameters\n",
    "                        self.W_f = tf.get_variable(\"W_f\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.W_i = tf.get_variable(\"W_i\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.W_o = tf.get_variable(\"W_o\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.W_c = tf.get_variable(\"W_c\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.U_f = tf.get_variable(\"U_f\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.U_i = tf.get_variable(\"U_i\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.U_o = tf.get_variable(\"U_o\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.U_c = tf.get_variable(\"U_c\", shape=[self.n_conv_feat, self.n_conv_feat], initializer=tf.contrib.layers.xavier_initializer())\n",
    "                        self.b_f = tf.Variable(tf.zeros([self.n_conv_feat,]))\n",
    "                        self.b_i = tf.Variable(tf.zeros([self.n_conv_feat,]))\n",
    "                        self.b_o = tf.Variable(tf.zeros([self.n_conv_feat,]))\n",
    "                        self.b_c = tf.Variable(tf.zeros([self.n_conv_feat,]))\n",
    "                        \n",
    "                        #output parameters\n",
    "                        self.W_out = tf.get_variable(\"W_out\", shape=[self.n_conv_feat,1], initializer=tf.contrib.layers.xavier_initializer()) \n",
    "                        self.b_out = tf.Variable(tf.zeros([1,1]))\n",
    "                        \n",
    "                        #########definition of the NN\n",
    "                        self.X = tf.multiply(self.M, self.Odata) #we may initialize it at random here\n",
    "                        self.list_X = list()\n",
    "                        self.list_X.append(tf.identity(self.X))\n",
    "                        self.X = tf.reshape(self.X, [-1,])\n",
    "                        \n",
    "                        \n",
    "                        \n",
    "                        #RNN\n",
    "                        self.h = tf.zeros([M.shape[0]*M.shape[1], self.n_conv_feat])\n",
    "                        self.c = tf.zeros([M.shape[0]*M.shape[1], self.n_conv_feat])\n",
    "                        \n",
    "                        for k in range(self.num_iterations):\n",
    "                            #bidimensional convolution\n",
    "                            self.x_conv = self.bid_conv(self.W_conv, self.b_conv) #N, N, n_conv_feat\n",
    "                            self.x_conv = tf.reshape(self.x_conv, [-1, self.n_conv_feat])\n",
    "                            \n",
    "                            self.f = tf.sigmoid(tf.matmul(self.x_conv, self.W_f) + tf.matmul(self.h, self.U_f) + self.b_f)\n",
    "                            self.i = tf.sigmoid(tf.matmul(self.x_conv, self.W_i) + tf.matmul(self.h, self.U_i) + self.b_i)\n",
    "                            self.o = tf.sigmoid(tf.matmul(self.x_conv, self.W_o) + tf.matmul(self.h, self.U_o) + self.b_o)\n",
    "                            \n",
    "                            self.update_c = tf.sigmoid(tf.matmul(self.x_conv, self.W_c) + tf.matmul(self.h, self.U_c) + self.b_c)\n",
    "                            self.c = tf.multiply(self.f, self.c) + tf.multiply(self.i, self.update_c)\n",
    "                            self.h = tf.multiply(self.o, tf.sigmoid(self.c))\n",
    "                            \n",
    "                            #compute update of matrix X\n",
    "                            self.delta_x = tf.tanh(tf.matmul(self.c, self.W_out) + self.b_out)\n",
    "                            self.X += tf.squeeze(self.delta_x)\n",
    "                            self.list_X.append(tf.identity(tf.reshape(self.X, [tf.shape(self.M)[0], tf.shape(self.M)[1]])))\n",
    "                            \n",
    "                            \n",
    "                        self.X = tf.reshape(self.X, [tf.shape(self.M)[0], tf.shape(self.M)[1]])\n",
    "                        #########loss definition\n",
    "                        \n",
    "                        #computation of the accuracy term\n",
    "                        self.norm_X = 1+4*(self.X-tf.reduce_min(self.X))/(tf.reduce_max(self.X-tf.reduce_min(self.X)))\n",
    "                        frob_tensor = tf.multiply(self.Otraining + self.Odata, self.norm_X - M)\n",
    "                        self.loss_frob = tf.square(self.frobenius_norm(frob_tensor))/np.sum(Otraining+Odata)\n",
    "                        \n",
    "                        #computation of the regularization terms\n",
    "                        trace_col_tensor = tf.matmul(tf.matmul(self.X, self.Lc), self.X, transpose_b=True)\n",
    "                        self.loss_trace_col = tf.trace(trace_col_tensor)\n",
    "                        trace_row_tensor = tf.matmul(tf.matmul(self.X, self.Lr, transpose_a=True), self.X)\n",
    "                        self.loss_trace_row = tf.trace(trace_row_tensor)\n",
    "                        \n",
    "                        #training loss definition\n",
    "                        self.loss = self.loss_frob + (gamma/2)*(self.loss_trace_col + self.loss_trace_row)\n",
    "                        \n",
    "                        #test loss definition\n",
    "                        self.predictions = tf.multiply(self.Otest, self.norm_X - self.M)\n",
    "                        self.predictions_error = self.frobenius_norm(self.predictions)\n",
    "\n",
    "                        #definition of the solver\n",
    "                        self.optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(self.loss)\n",
    "                        \n",
    "                        self.var_grad = tf.gradients(self.loss, tf.trainable_variables())\n",
    "                        self.norm_grad = self.frobenius_norm(tf.concat([tf.reshape(g, [-1]) for g in self.var_grad], 0))\n",
    "\n",
    "                        # Create a session for running Ops on the Graph.\n",
    "                        config = tf.ConfigProto(allow_soft_placement = True)\n",
    "                        config.gpu_options.allow_growth = True\n",
    "                        self.session = tf.Session(config=config)\n",
    "\n",
    "                        # Run the Op to initialize the variables.\n",
    "                        # tf.initialize_all_variables is deprecated (removed in later TF releases);\n",
    "                        # tf.global_variables_initializer is the TF1.x replacement.\n",
    "                        init = tf.global_variables_initializer()\n",
    "                        self.session.run(init)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "ord_col = 5\n",
    "ord_row = 5\n",
    "\n",
    "learning_obj = Train_test_matrix_completion(M, Lrow, Lcol, Odata, Otraining, Otest, order_chebyshev_col = ord_col, \n",
    "                                            order_chebyshev_row = ord_row, gamma=1e-8, learning_rate=1e-3)\n",
    "\n",
    "num_iter_test = 10\n",
    "num_total_iter_training = 25000\n",
    "\n",
    "num_iter = 0\n",
    "\n",
    "list_training_loss = list()\n",
    "list_training_norm_grad = list()\n",
    "list_test_pred_error = list()\n",
    "list_predictions = list()\n",
    "list_X = list()\n",
    "\n",
    "list_training_times = list()\n",
    "list_test_times = list()\n",
    "list_grad_X = list()\n",
    "\n",
    "list_X_evolutions = list()\n",
    "\n",
    "path_log = '../../../../log/tmp.txt'\n",
    "\n",
    "# start from a clean log file\n",
    "if os.path.isfile(path_log):\n",
    "    os.remove(path_log)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[TRN] iter = 2340, cost = 1.29e-03, |grad| = 2.27e-01 (8.01e-01s)\n",
      "[TST] iter = 2340, cost = 2.67e+00 (8.40e-02s)\n",
      "[TRN] iter = 2350, cost = 1.29e-03, |grad| = 2.42e-01 (7.96e-01s)\n",
      "[TST] iter = 2350, cost = 2.63e+00 (8.01e-02s)\n",
      "[TRN] iter = 2360, cost = 1.22e-03, |grad| = 1.09e-01 (7.99e-01s)\n",
      "[TST] iter = 2360, cost = 2.49e+00 (7.80e-02s)\n",
      "[TRN] iter = 2370, cost = 2.11e-03, |grad| = 7.48e-01 (7.97e-01s)\n",
      "[TST] iter = 2370, cost = 2.70e+00 (9.93e-02s)\n",
      "[TRN] iter = 2380, cost = 1.32e-03, |grad| = 6.65e-02 (7.98e-01s)\n",
      "[TST] iter = 2380, cost = 2.88e+00 (8.33e-02s)\n",
      "[TRN] iter = 2390, cost = 1.24e-03, |grad| = 1.12e-01 (7.99e-01s)\n",
      "[TST] iter = 2390, cost = 2.53e+00 (8.60e-02s)\n",
      "[TRN] iter = 2400, cost = 1.22e-03, |grad| = 3.86e-02 (7.92e-01s)\n",
      "[TST] iter = 2400, cost = 2.48e+00 (8.61e-02s)\n",
      "[TRN] iter = 2410, cost = 1.23e-03, |grad| = 1.17e-01 (8.14e-01s)\n",
      "[TST] iter = 2410, cost = 2.48e+00 (8.31e-02s)\n",
      "[TRN] iter = 2420, cost = 1.20e-03, |grad| = 3.79e-02 (7.92e-01s)\n",
      "[TST] iter = 2420, cost = 2.48e+00 (8.61e-02s)\n",
      "[TRN] iter = 2430, cost = 1.19e-03, |grad| = 1.58e-02 (7.94e-01s)\n",
      "[TST] iter = 2430, cost = 2.48e+00 (8.28e-02s)\n",
      "[TRN] iter = 2440, cost = 1.18e-03, |grad| = 1.43e-02 (7.99e-01s)\n",
      "[TST] iter = 2440, cost = 2.47e+00 (8.08e-02s)\n",
      "[TRN] iter = 2450, cost = 1.17e-03, |grad| = 6.92e-03 (7.95e-01s)\n",
      "[TST] iter = 2450, cost = 2.47e+00 (8.55e-02s)\n",
      "[TRN] iter = 2460, cost = 1.16e-03, |grad| = 4.77e-02 (7.84e-01s)\n",
      "[TST] iter = 2460, cost = 2.44e+00 (7.38e-02s)\n",
      "[TRN] iter = 2470, cost = 1.22e-03, |grad| = 2.25e-01 (7.97e-01s)\n",
      "[TST] iter = 2470, cost = 2.46e+00 (8.50e-02s)\n",
      "[TRN] iter = 2480, cost = 4.95e-03, |grad| = 9.58e-01 (7.98e-01s)\n",
      "[TST] iter = 2480, cost = 4.27e+00 (8.61e-02s)\n",
      "[TRN] iter = 2490, cost = 2.89e-03, |grad| = 5.71e-01 (7.96e-01s)\n",
      "[TST] iter = 2490, cost = 2.97e+00 (8.39e-02s)\n",
      "[TRN] iter = 2500, cost = 1.66e-03, |grad| = 4.13e-01 (7.98e-01s)\n",
      "[TST] iter = 2500, cost = 3.60e+00 (8.49e-02s)\n",
      "[TRN] iter = 2510, cost = 1.47e-03, |grad| = 3.36e-01 (7.97e-01s)\n",
      "[TST] iter = 2510, cost = 2.56e+00 (7.85e-02s)\n",
      "[TRN] iter = 2520, cost = 1.25e-03, |grad| = 1.72e-01 (7.96e-01s)\n",
      "[TST] iter = 2520, cost = 2.46e+00 (8.24e-02s)\n",
      "[TRN] iter = 2530, cost = 1.20e-03, |grad| = 1.32e-01 (7.98e-01s)\n",
      "[TST] iter = 2530, cost = 2.40e+00 (7.81e-02s)\n",
      "[TRN] iter = 2540, cost = 1.16e-03, |grad| = 7.78e-02 (7.83e-01s)\n",
      "[TST] iter = 2540, cost = 2.38e+00 (8.27e-02s)\n",
      "[TRN] iter = 2550, cost = 1.14e-03, |grad| = 7.61e-02 (7.84e-01s)\n",
      "[TST] iter = 2550, cost = 2.43e+00 (7.80e-02s)\n",
      "[TRN] iter = 2560, cost = 1.15e-03, |grad| = 1.55e-01 (7.97e-01s)\n",
      "[TST] iter = 2560, cost = 2.42e+00 (7.50e-02s)\n",
      "[TRN] iter = 2570, cost = 1.14e-03, |grad| = 1.40e-01 (8.00e-01s)\n",
      "[TST] iter = 2570, cost = 2.33e+00 (8.05e-02s)\n",
      "[TRN] iter = 2580, cost = 1.11e-03, |grad| = 7.86e-02 (7.98e-01s)\n",
      "[TST] iter = 2580, cost = 2.39e+00 (8.51e-02s)\n",
      "[TRN] iter = 2590, cost = 1.28e-03, |grad| = 3.74e-01 (7.99e-01s)\n",
      "[TST] iter = 2590, cost = 2.68e+00 (8.34e-02s)\n",
      "[TRN] iter = 2600, cost = 1.12e-03, |grad| = 1.64e-01 (7.89e-01s)\n",
      "[TST] iter = 2600, cost = 2.32e+00 (7.75e-02s)\n",
      "[TRN] iter = 2610, cost = 1.26e-03, |grad| = 3.50e-01 (7.84e-01s)\n",
      "[TST] iter = 2610, cost = 2.31e+00 (7.93e-02s)\n",
      "[TRN] iter = 2620, cost = 1.16e-03, |grad| = 2.06e-01 (8.00e-01s)\n",
      "[TST] iter = 2620, cost = 2.30e+00 (8.25e-02s)\n",
      "[TRN] iter = 2630, cost = 1.91e-03, |grad| = 5.27e-01 (7.94e-01s)\n",
      "[TST] iter = 2630, cost = 3.86e+00 (8.24e-02s)\n",
      "[TRN] iter = 2640, cost = 2.64e-03, |grad| = 7.30e-01 (7.96e-01s)\n",
      "[TST] iter = 2640, cost = 4.18e+00 (8.30e-02s)\n",
      "[TRN] iter = 2650, cost = 1.16e-03, |grad| = 9.75e-02 (7.98e-01s)\n",
      "[TST] iter = 2650, cost = 2.39e+00 (8.64e-02s)\n",
      "[TRN] iter = 2660, cost = 1.16e-03, |grad| = 1.53e-01 (7.87e-01s)\n",
      "[TST] iter = 2660, cost = 2.35e+00 (7.09e-02s)\n",
      "[TRN] iter = 2670, cost = 1.10e-03, |grad| = 9.49e-02 (7.96e-01s)\n",
      "[TST] iter = 2670, cost = 2.37e+00 (9.20e-02s)\n",
      "[TRN] iter = 2680, cost = 1.07e-03, |grad| = 7.20e-02 (7.96e-01s)\n",
      "[TST] iter = 2680, cost = 2.31e+00 (8.52e-02s)\n",
      "[TRN] iter = 2690, cost = 1.05e-03, |grad| = 2.41e-02 (7.98e-01s)\n",
      "[TST] iter = 2690, cost = 2.28e+00 (8.39e-02s)\n",
      "[TRN] iter = 2700, cost = 1.05e-03, |grad| = 2.68e-02 (7.96e-01s)\n",
      "[TST] iter = 2700, cost = 2.28e+00 (9.19e-02s)\n",
      "[TRN] iter = 2710, cost = 1.04e-03, |grad| = 2.27e-02 (7.84e-01s)\n",
      "[TST] iter = 2710, cost = 2.28e+00 (8.48e-02s)\n",
      "[TRN] iter = 2720, cost = 1.03e-03, |grad| = 3.55e-03 (7.97e-01s)\n",
      "[TST] iter = 2720, cost = 2.27e+00 (8.50e-02s)\n",
      "[TRN] iter = 2730, cost = 1.02e-03, |grad| = 4.61e-03 (7.91e-01s)\n",
      "[TST] iter = 2730, cost = 2.26e+00 (8.72e-02s)\n",
      "[TRN] iter = 2740, cost = 1.02e-03, |grad| = 3.54e-03 (7.97e-01s)\n",
      "[TST] iter = 2740, cost = 2.25e+00 (7.96e-02s)\n",
      "[TRN] iter = 2750, cost = 1.01e-03, |grad| = 7.51e-03 (7.87e-01s)\n",
      "[TST] iter = 2750, cost = 2.24e+00 (7.95e-02s)\n",
      "[TRN] iter = 2760, cost = 1.01e-03, |grad| = 2.94e-02 (7.84e-01s)\n",
      "[TST] iter = 2760, cost = 2.25e+00 (8.36e-02s)\n",
      "[TRN] iter = 2770, cost = 1.00e-03, |grad| = 2.23e-02 (7.94e-01s)\n",
      "[TST] iter = 2770, cost = 2.24e+00 (7.77e-02s)\n",
      "[TRN] iter = 2780, cost = 9.95e-04, |grad| = 1.39e-02 (7.92e-01s)\n",
      "[TST] iter = 2780, cost = 2.23e+00 (8.19e-02s)\n",
      "[TRN] iter = 2790, cost = 1.01e-03, |grad| = 1.29e-01 (7.98e-01s)\n",
      "[TST] iter = 2790, cost = 2.31e+00 (8.42e-02s)\n",
      "[TRN] iter = 2800, cost = 1.28e-03, |grad| = 4.48e-01 (7.99e-01s)\n",
      "[TST] iter = 2800, cost = 2.57e+00 (8.66e-02s)\n",
      "[TRN] iter = 2810, cost = 9.83e-04, |grad| = 3.65e-02 (7.99e-01s)\n",
      "[TST] iter = 2810, cost = 2.22e+00 (8.71e-02s)\n",
      "[TRN] iter = 2820, cost = 1.01e-03, |grad| = 1.64e-01 (7.92e-01s)\n",
      "[TST] iter = 2820, cost = 2.22e+00 (7.90e-02s)\n",
      "[TRN] iter = 2830, cost = 9.82e-04, |grad| = 6.84e-02 (7.84e-01s)\n",
      "[TST] iter = 2830, cost = 2.29e+00 (8.47e-02s)\n",
      "[TRN] iter = 2840, cost = 9.75e-04, |grad| = 2.32e-02 (7.98e-01s)\n",
      "[TST] iter = 2840, cost = 2.19e+00 (7.24e-02s)\n",
      "[TRN] iter = 2850, cost = 1.60e-03, |grad| = 6.51e-01 (7.97e-01s)\n",
      "[TST] iter = 2850, cost = 2.66e+00 (8.37e-02s)\n",
      "[TRN] iter = 2860, cost = 2.33e-03, |grad| = 9.27e-01 (7.84e-01s)\n",
      "[TST] iter = 2860, cost = 2.84e+00 (8.63e-02s)\n",
      "[TRN] iter = 2870, cost = 1.28e-03, |grad| = 4.57e-01 (7.97e-01s)\n",
      "[TST] iter = 2870, cost = 2.53e+00 (7.59e-02s)\n",
      "[TRN] iter = 2880, cost = 1.04e-03, |grad| = 1.89e-01 (8.00e-01s)\n",
      "[TST] iter = 2880, cost = 2.30e+00 (8.37e-02s)\n",
      "[TRN] iter = 2890, cost = 9.74e-04, |grad| = 1.23e-01 (8.03e-01s)\n",
      "[TST] iter = 2890, cost = 2.29e+00 (8.57e-02s)\n",
      "[TRN] iter = 2900, cost = 9.55e-04, |grad| = 7.66e-02 (8.04e-01s)\n",
      "[TST] iter = 2900, cost = 2.17e+00 (8.59e-02s)\n",
      "[TRN] iter = 2910, cost = 9.39e-04, |grad| = 1.85e-02 (7.96e-01s)\n",
      "[TST] iter = 2910, cost = 2.15e+00 (8.78e-02s)\n",
      "[TRN] iter = 2920, cost = 9.32e-04, |grad| = 1.28e-02 (7.83e-01s)\n",
      "[TST] iter = 2920, cost = 2.16e+00 (7.86e-02s)\n",
      "[TRN] iter = 2930, cost = 9.34e-04, |grad| = 7.60e-02 (7.98e-01s)\n",
      "[TST] iter = 2930, cost = 2.19e+00 (8.32e-02s)\n",
      "[TRN] iter = 2940, cost = 9.70e-04, |grad| = 1.83e-01 (7.97e-01s)\n",
      "[TST] iter = 2940, cost = 2.27e+00 (8.48e-02s)\n",
      "[TRN] iter = 2950, cost = 1.09e-03, |grad| = 3.72e-01 (7.83e-01s)\n",
      "[TST] iter = 2950, cost = 2.52e+00 (8.15e-02s)\n",
      "[TRN] iter = 2960, cost = 1.14e-03, |grad| = 3.82e-01 (7.95e-01s)\n",
      "[TST] iter = 2960, cost = 2.44e+00 (8.54e-02s)\n",
      "[TRN] iter = 2970, cost = 1.25e-03, |grad| = 4.24e-01 (7.96e-01s)\n",
      "[TST] iter = 2970, cost = 2.20e+00 (8.17e-02s)\n",
      "[TRN] iter = 2980, cost = 1.05e-03, |grad| = 2.84e-01 (8.15e-01s)\n",
      "[TST] iter = 2980, cost = 2.22e+00 (7.96e-02s)\n",
      "[TRN] iter = 2990, cost = 9.92e-04, |grad| = 2.42e-01 (7.97e-01s)\n",
      "[TST] iter = 2990, cost = 2.32e+00 (8.27e-02s)\n",
      "[TRN] iter = 3000, cost = 9.04e-04, |grad| = 5.55e-02 (7.93e-01s)\n",
      "[TST] iter = 3000, cost = 2.12e+00 (8.23e-02s)\n",
      "[TRN] iter = 3010, cost = 8.98e-04, |grad| = 5.92e-02 (7.92e-01s)\n",
      "[TST] iter = 3010, cost = 2.10e+00 (7.95e-02s)\n",
      "[TRN] iter = 3020, cost = 8.93e-04, |grad| = 6.32e-02 (7.92e-01s)\n",
      "[TST] iter = 3020, cost = 2.13e+00 (8.71e-02s)\n",
      "[TRN] iter = 3030, cost = 9.38e-04, |grad| = 1.98e-01 (7.88e-01s)\n",
      "[TST] iter = 3030, cost = 2.23e+00 (8.15e-02s)\n",
      "[TRN] iter = 3040, cost = 9.45e-04, |grad| = 2.16e-01 (7.98e-01s)\n",
      "[TST] iter = 3040, cost = 2.23e+00 (8.61e-02s)\n"
     ]
    }
   ],
   "source": [
    "with open(path_log,'a+') as f:\n",
    "    for k in range(num_iter, num_total_iter_training):\n",
    "\n",
    "        # one optimization step; also fetch loss and gradient diagnostics\n",
    "        tic = time.time()\n",
    "        _, current_training_loss, norm_grad, X_grad = learning_obj.session.run([learning_obj.optimizer, learning_obj.loss, \n",
    "                                                                                learning_obj.norm_grad, learning_obj.var_grad]) \n",
    "        training_time = time.time() - tic\n",
    "\n",
    "        list_training_loss.append(current_training_loss)\n",
    "        list_training_norm_grad.append(norm_grad)\n",
    "        list_training_times.append(training_time)\n",
    "        #list_grad_X.append(X_grad)\n",
    "\n",
    "        if (np.mod(num_iter, num_iter_test)==0):\n",
    "            msg = \"[TRN] iter = %03i, cost = %3.2e, |grad| = %.2e (%3.2es)\" \\\n",
    "                            % (num_iter, list_training_loss[-1], list_training_norm_grad[-1], training_time)\n",
    "            print(msg)\n",
    "            f.write(msg+'\\n')\n",
    "\n",
    "            #Test Code\n",
    "            tic = time.time()\n",
    "            pred_error, preds, X = learning_obj.session.run([learning_obj.predictions_error, learning_obj.predictions,\n",
    "                                                                     learning_obj.norm_X]) \n",
    "            c_X_evolutions = learning_obj.session.run(learning_obj.list_X)\n",
    "            list_X_evolutions.append(c_X_evolutions)\n",
    "            \n",
    "            test_time = time.time() - tic\n",
    "\n",
    "            list_test_pred_error.append(pred_error)\n",
    "            #list_predictions.append(preds)\n",
    "            list_X.append(X)\n",
    "            list_test_times.append(test_time)\n",
    "            msg =  \"[TST] iter = %03i, cost = %3.2e (%3.2es)\" % (num_iter, list_test_pred_error[-1], test_time)\n",
    "            print(msg)\n",
    "            f.write(msg+'\\n')\n",
    "\n",
    "        num_iter += 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Best predictions at iter: 24180 (error: 0.555117)\n",
      "RMSE: 0.008275\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "\"pp = PdfPages('../../pdf/losses/corrected_gamma_best_iter/supervised_approach_original_training_set_40k_iter.pdf')\\nplt.savefig(pp, format='pdf')\\npp.close()\""
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAABLcAAAJQCAYAAAB8aYYOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XuU7WdZJ/jvU1Wnzi0ggSDNEDrgEi944XYa7cELgo1I\n24TpURtGERTNTC9Ru3VUcNTyoI6XntGxR1qMEAEvRBt1SM8EMaKC6HAJdwgqMQgkE4nt4ZqTy6k6\n7/yxd53sU6mq/auT7LPfqvp81tpr799t72etw1k5fNfzPm+11gIAAAAAu9HCvAsAAAAAgHMl3AIA\nAABg1xJuAQAAALBrCbcAAAAA2LWEWwAAAADsWsItAAAAAHYt4RYAAAAAu5ZwCwAAAIBdS7gFAAAA\nwK61NO8C7k0LCwvt8OHD8y4DAAAAYM84efJka6112yC1p8Ktw4cP59Zbb513GQAAAAB7RlXdNu8a\nttNt6gYAAAAA0wi3AAAAANi1hFsAAAAA7FrCLQAAAAB2LeEWAAAAALuWcAsAAACAXUu4BQAAAMCu\nJdwCAAAAYNcSbgEAAACwawm3AAAAANi1hFsAAAAA7FrCLQAAAAB2LeEWAAAAALuWcAsAAACAXUu4\nBQAAAMCuJdwCAAAAYNcSbgEAAACwawm3AAAAANi1hFsAAAAA7FrCLQAAAAB2LeEWAAAAALuWcAsA\nAACAXUu41Znv+Ik/z8F/ckNu+fit8y4FAAAAoHvCrc78wz+u5c6PfU5OrZ6edykAAAAA3RNudaZq\n9N7afOsAAAAA2A2EW52pSLUAAAAAhhJudUrnFgAAAMB0wq3eWJYIAAAAMNjMwq2qemhV/WlVXVdV\n76+q79vknqqq/1hV11fVe6rqsRPXnlNVHxy/njOrOntTNUq1hFsAAAAA0y3N8LtXk/xAa+0dVXWf\nJG+vqmtaa9dN3PP1SR4xfn1Zkl9J8mVVdf8kK0mOJWnjZ69qrX18hvX2oeZdAAAAAMDuMbPOrdba\nza21d4w/fzrJB5I8ZMNtlyZ5ZRt5c5L7VdWDk3xdkmtaayfGgdY1SZ46q1p7pHMLAAAAYLpZdm6d\nUVUPS/KYJG/ZcOkhST46cXzj+NxW5zf77suSXJYky8vL90q983Smcatp4QIAAACYZuYD5avqgiS/\nl+TftdY+dW9/f2vt8tbasdbasaWl85LVzZaZWwAAAACDzTTcqqoDGQVbv9Va+/1NbrkpyUMnji8e\nn9vq/J5XGrYAAAAABpvlbomV5GVJPtBa+4UtbrsqybeNd0388iSfbK3dnOR1SZ5SVRdW1YVJnjI+\nt2/o3AIAAACYbpbr+J6Q5NlJ3ltV7xqf+5Ek/zRJWmsvSXJ1kqcluT7JySTfPr52oqp+Msnbxs+9\nqLV2Yoa19sOyRAAAAIDBZhZutdbelIn56Fvc05J89xbXrkhyxQxK65pliQAAAADDzXygPOdG5xYA\nAADAdMKt7liWCAAAADCUcKsz68sShVsAAAAA0wm3OlMl1QIAAAAYSrjVqdNatwAAAACmEm51psbr\nEmVbAAAAANMJt3pTBsoDAAAADCXc6sz6QHkAAAAAphNudUrnFgAAAMB0wq3eWJYIAAAAMJhwqzPr\nqxKFWwAAAADTCbd6U1ItAAAAgKGEW53SuQUAAAAwnXCrM+u7JQq3AAAAAKYTbnWmDJQHAAAAGEy4\nBQAAAMCuJdzqlMYtAAAAgOmEW51Zn7mVVtveBwAAAIBwqz9mbgEAAAAMJtzqTGnYAgAAABhMuNUp\nnVsAAAAA0wm3umNZIgAAAMBQwq3OrC9LFG4BAAAATCfc6s2ZgfLSLQAAAIBphFudqYxat2RbAAAA\nANMJtzpjWSIAAADAcMKt3pRUCwAAAGAo4VandG4BAAAATCfc6sx4VaJwCwAAANgVqurfV9X7q+p9\nVfWqqjpUVQ+vqrdU1fVV9TtVtTyr3x
du9ebMbolzrgMAAABgiqp6SJLvTXKstfbFSRaTPDPJzyX5\nxdba5yb5eJLnzaoG4VZn1gfKAwAAAOwSS0kOV9VSkiNJbk7ypCSvHl9/RZJnzOrHhVud0rkFAAAA\n9K61dlOS/y3JRzIKtT6Z5O1JPtFaWx3fdmOSh8yqBuFWbyxLBAAAAPqyVFXXTrwuW79QVRcmuTTJ\nw5P8N0mOJnnqeS3ufP4Y0xkoDwAAAHRmtbV2bItrX5vkQ621f0iSqvr9JE9Icr+qWhp3b12c5KZZ\nFadzqzNVUi0AAABg1/hIki+vqiNVVUmenOS6JH+a5BvH9zwnyWtmVYBwq1M6twAAAIDetdbektHg\n+HckeW9GWdPlSX44yfdX1fVJHpDkZbOqwbLE3pxZlzjXKgAAAAAGaa2tJFnZcPqGJI8/H7+vc6sz\nNQ63WqvtbwQAAABAuNUdM7cAAAAABhNudcrMLQAAAIDphFudOTNyS7gFAAAAMJVwqzfjZYnCLQAA\nAIDphFudKXPkAQAAAAYTbnXqtNYtAAAAgKmEW51Z79ySbQEAAABMJ9zqjHALAAAAYDjhVm9KqgUA\nAAAwlHCrUzq3AAAAAKYTbnXGskQAAACA4YRb3RmlWsItAAAAgOmEW51Z79wCAAAAYLqlWX1xVV2R\n5BuS3NJa++JNrv9gkm+ZqOMLkzywtXaiqv4uyaeTrCVZba0dm1WdvdK4BQAAADDdLDu3Xp7kqVtd\nbK39h9bao1trj07ywiRvaK2dmLjla8bX91WwVeu7JUq3AAAAAKaaWbjVWntjkhNTbxx5VpJXzaqW\nXcWyRAAAAIDB5j5zq6qOZNTh9XsTp1uSP6qqt1fVZfOpbL5ak3IBAAAATDOzmVs78K+S/MWGJYlf\n0Vq7qao+O8k1VfVX406wuxmHX5clyfLy8uyrnbH1gfJ2SwQAAACYbu6dW0memQ1LEltrN43fb0ny\nB0kev9XDrbXLW2vHWmvHlpZ6yOruqVGqJdwCAAAAmG6u4VZVfVaSr07ymolzR6vqPuufkzwlyfvm\nU+H5V1YjAgAAAAw2s1anqnpVkicmuaiqbkyykuRAkrTWXjK+7b9L8kettVsnHn1Qkj+oUcqzlOS3\nW2t/OKs6e6VzCwAAAGC6mYVbrbVnDbjn5UlevuHcDUkeNZuqdoGyLBEAAABgqB5mbjHBQHkAAACA\n4YRb3ZFqAQAAAAwl3OrU6dNCLgAAAIBphFudWRivS7QsEQAAAGA64VZvDJQHAAAAGEy41Zn1gfIA\nAAAATCfc6pTOLQAAAIDphFu9GS9LtGkiAAAAwHTCrc6sr0rUuQUAAAAwnXCrM2ZuAQAAAAwn3OpU\na1IuAAAAgGmEW70Zz9yyLBEAAABgOuFWZ9aXJQq3AAAAAKYTbvWmpFoAAAAAQwm3OqVzCwAAAGA6\n4VZn1sfIC7cAAAAAphNu9cZAeQAAAIDBhFudWR8oDwAAAMB0wq1O6dwCAAAAmE641Zn1zi3ZFgAA\nAMB0wq3uiLUAAAAAhhJudapZlwgAAAAwlXCrMzX+E5FtAQAAAEwn3OrMmZlbzbaJAAAAANMItzpT\nMi0AAACAwYRbnbIsEQAAAGA64VZ3RqmWcAsAAABgOuFWZ+6auTXfOgAAAAB2A+FWb0qqBQAAADCU\ncKtTOrcAAAAAphNudcayRAAAAIDhhFvdMVAeAAAAYCjhVmfWO7cAAAAAmE641SmdWwAAAADTCbc6\nY+YWAAAAwHDCrd6UmVsAAAAAQwm3OmPmFgAAAMBwwq1O6dwCAAAAmE641R3LEgEAAACGEm515syy\nROEWAAAAwFTCrd4YKA8AAAAwmHCrM5VR65ZsCwAAAGA64VZn1pcl6twCAAAAmE641RvLEgEAAAAG\nE251pqbfAgAAAMCYcKtTOrcAAAAAphNudcbMLQAAAIDhhFu9KakWAAAAwFDCrU7p3AIAAACYTrjV\nGc
sSAQAAAIYTbnWnnfUGAAAAwNaEW51Z79wCAAAAYLqZhVtVdUVV3VJV79vi+hOr6pNV9a7x68cn\nrj21qv66qq6vqhfMqsaeWZYIAAAAMN0sO7denuSpU+7589bao8evFyVJVS0meXGSr0/yyCTPqqpH\nzrDOvox3S2xNCxcAAADANDMLt1prb0xy4hwefXyS61trN7TW7kxyZZJL79XiOmagPAAAAMBw8565\n9c+r6t1V9dqq+qLxuYck+ejEPTeOz+0LZm4BAAAADLc0x99+R5JLWmufqaqnJfm/kjxip19SVZcl\nuSxJlpeX790K50jnFgAAAMB0c+vcaq19qrX2mfHnq5McqKqLktyU5KETt148PrfV91zeWjvWWju2\ntDTPrO7esj5za85lAAAAAOwCcwu3quqfVI0W4VXV48e1/GOStyV5RFU9vKqWkzwzyVXzqvN8M3ML\nAAAAYLiZtTpV1auSPDHJRVV1Y5KVJAeSpLX2kiTfmOTfVtVqktuSPLO11pKsVtXzk7wuyWKSK1pr\n759Vnd0pqRYAAADAUDMLt1prz5py/ZeT/PIW165OcvUs6totmtYtAAAAgKnmvVsiGywsjNYlyrYA\nAAAAphNudWd9oHzNuQ4AAACA/gm3OlMyLQAAAIDBhFudsiwRAAAAYDrhVmfWO7eEWwAAAADTCbc6\nUwunkyRra3MuBAAAAGAXEG51ZnFp1LK1dnrOhQAAAADsAsKtzpzp3FqdcyEAAAAAu4BwqzML4z+R\ntTXbJgIAAABMI9zq0cKprOrcAgAAAJhKuNWjWstpA+UBAAAAphJu9Whh1W6JAAAAAAMIt3q0sJbV\nVTO3AAAAAKYRbvWo1rJ2et5FAAAAAPRPuNWjhdWsGSgPAAAAMJVwq0cLa1lbsywRAAAAYBrhVo8M\nlAcAAAAYRLjVo1rLqmWJAAAAAFMJt3q0sKZzCwAAAGAA4VZnKmWgPAAAAMBAwq0elYHyAAAAwO5Q\nVferqldX1V9V1Qeq6p9X1f2r6pqq+uD4/cJZ/b5wq0cGygMAAAC7xy8l+cPW2hckeVSSDyR5QZLX\nt9YekeT14+OZEG71yMwtAAAAYBeoqs9K8lVJXpYkrbU7W2ufSHJpkleMb3tFkmfMqgbhVo9KuAUA\nAADsCg9P8g9Jfr2q3llVL62qo0ke1Fq7eXzP3yd50KwKEG71aGE1q6tmbgEAAABdWKqqaydel01e\nS/LYJL/SWntMkluzYQlia60laTMrblZfzD1gWSIAAADQj9XW2rEtrt2Y5MbW2lvGx6/OKNz6WFU9\nuLV2c1U9OMktsypO51aPFlZzWrgFAAAAdK619vdJPlpVnz8+9eQk1yW5Kslzxueek+Q1s6pB51aP\nas2yRAAAAGC3+J4kv1VVy0luSPLtGTVU/W5VPS/Jh5N886x+XLjVo4W1rJ2edxEAAAAA07XW3pVk\ns2WLTz4fv29ZYo8WVrO2Ou8iAAAAAPon3OpRGSgPAAAAMIRwq0cLq1lbM3MLAAAAYBrhVo8W1ixL\nBAAAABhAuNUjyxIBAAAABhFu9WhhNavCLQAAAICphFs9WlgzcwsAAABgAOFWjxZWc1rnFgAAAMBU\nwq0e1VpWDZQHAAAAmEq41ZmqGi9LnHclAAAAAP0TbvVoYTVrq2ZuAQAAAEwj3OpRrWXt9LyLAAAA\nAOifcKtHC6uWJQIAAAAMINzq0cKaZYkAAAAAAwi3elQGygMAAAAMIdzq0cJqVlfnXQQAAABA/4Rb\nPVpYy+nTldbmXQgAAABA34RbPVoYtW2dtmMiAAAAwLaEWz2q0cAtSxMBAAAAtifc6tHCKNwyVB4A\nAABge8KtHo2XJercAgAAANiecKtHpXMLAAAAYAjhVo/GnVvCLQAAAIDtzSzcqqorquqWqnrfFte/\npareU1Xvraq/rKpHTVz7u/H5d1XVtbOqsVsLBsoDAAAADDHLzq2X
J3nqNtc/lOSrW2tfkuQnk1y+\n4frXtNYe3Vo7NqP6+mVZIgAAAMAgS7P64tbaG6vqYdtc/8uJwzcnuXhWtew6BsoDAAAADNLLzK3n\nJXntxHFL8kdV9faqumy7B6vqsqq6tqquXd0radCCzi0AAACAIWbWuTVUVX1NRuHWV0yc/orW2k1V\n9dlJrqmqv2qtvXGz51trl2e8pPHo0aNt5gWfDwbKAwAAAAwy186tqvrSJC9Ncmlr7R/Xz7fWbhq/\n35LkD5I8fj4VzkkZKA8AAAAwxNzCrar6p0l+P8mzW2t/M3H+aFXdZ/1zkqck2XTHxb2oUpYlAgAA\nAAw0s2WJVfWqJE9MclFV3ZhkJcmBJGmtvSTJjyd5QJL/VFVJsjreGfFBSf5gfG4pyW+31v5wVnV2\nyUB5AAAAgEFmuVvis6Zc/84k37nJ+RuSPGpWde0KpXMLAAAAYIhedktkkoHyAAAAAIMIt3q0YKA8\nAAAAwBDCrR5ZlggAAAAwiHCrRwbKAwAAAAwi3OrRONw6dWrOdQAAAAB0TrjVo/vemCT58IfnXAcA\nAABA54RbPfqsj+TwkZbrrpt3IQAAAAB9E271aKHlYQ9fzYc+NO9CAAAAAPom3OrU0aMtt9467yoA\nAAAA+ibc6tThIy0nT867CgAAAIC+Cbc6deSIzi0AAACAaYRbnTpiWSIAAADAVMKtTh0+HMsSAQAA\nAKYQbnVK5xYAAADAdMKtTpm5BQAAADCdcKtTh46czupqcurUvCsBAAAA6JdwqzNVlWTUuZVE9xYA\nAADANoRbnRJuAQAAAEwn3OrUkSOnkwi3AAAAALYj3OpMZbQs8fC4c+vkyXlWAwAAANC3HYVbdbyq\njtfRWRXDXQ5blggAAAAw1dRwq47XK+t43beO15Ek701yfR2v7599afvbkaPCLQAAAIBphnRufWlb\naZ9K8owk1yS5JMlzZ1nUfra+W+Lhw6OZW5YlAgAAAGxtSLh1oI7XUpJLk7ymrbQ7k5yebVlYlggA\nAAAw3ZBw66VJPpLkwiRvqOP1T5N8ZqZVkcN2SwQAAACYamnaDW2l/WKSX1w/ruP10SRPmmVR+9mZ\n3RIP2y0RAAAAYJohA+WfX8frvuPPv5rkLUm+ctaF7XcGygMAAABMN2RZ4mVtpX2qjtdTkjwoyXcl\n+fnZlsXSgVG4deedcy4EAAAAoGNDwq02fn9akt9oK+3dA5/jHKzvlliVLC4mq6tzLggAAACgY0NC\nqnfX8bo6yTckeW0drwtyV+DFDB04kJw6Ne8qAAAAAPo1JNz69iQ/keTxbaWdTHIoyfNmWRRJay1L\nSzq3AAAAgP2jKp9VlUfu5Jmp4VZbaWtJLkryQ3W8fjbJP2sr7Z3nWCNTrO+WmOjcAgAAAPa+qry+\nKvetyoVJ3pXkN6ryH4Y+P2S3xJ9O8kNJbhi/frCO10+da8EMp3MLAAAA2Afu31o+leRfJ/nN1vK4\nJF839OGlAff8qySPbSttNUnqeF2R5B1JfvQcimWglqZzCwAAANgPlqrywCTflOTHd/rw0F0P77PF\nZ+5l67slJjq3AAAAgH3hp5O8IclHWstbq/I5ST409OEhnVs/n+Qddbxen6SSPDHJj51DoeyQzi0A\nAABgr2stVya5cuL4hiSXDn1+yED530zyFUmuTvL/JPmqttJ+e+elshOtjZYl6twCAAAA9rKq/Mx4\noPxSVV5XlY9V5X8Y+vyWnVt1vL50w6nrx+8PqOP1gLbS3nMuBbO9yd0Sl5Z0bgEAAAB73te3lhdW\n5RlJ/r8kz0ryp0kGNVdttyzxxdtca0m+anCJnBOdWwAAAMA+sJ5PPS3Jf24tJ6rSdvrw3bSV9pX3\ntDLOXUvTuQUAAADsB6+tyvuSrCX57qpclOSOoQ8P3S2R82Ryt0SdWwAAAMBe11p+MMmTkjyutZxK\ncluSfz30+SG7JTInOrcAAACA
va4qS0m+MclXjXt+3pDk14Y+L9zq1PpuibffPu9KAAAAAGbqxUmO\nJrlifPytSR6T5LIhD08NtzbZNTFJPpnko22lnR5YJAPZLREAAADYZ768tTxq4viPqvLuoQ8Pmbn1\nsiRvT/LKJL+R5Nokr0nywTpeT95JpeyMmVsAAADAPnC6Kg9bPxh/HtxQNSTc+rskj2sr7dFtpT0q\nyeOS/E2Sr0vyv++kUoazWyIAAACwT/xwkj+vyh9X5fUZzdz6waEPDwm3vrCttPesH7SV9t4kj2wr\n7fodl8pUdksEAAAA9pPW8kdJPi/JD2UUan1Ba/njoc8PGSj/V3W8/s8kV46P/8343MEkopcZ0rkF\nAAAA7FVVefoWly6uSlrLVUO+Z0i49W1JvifJC8bHf5HkhRkFW2Zuzcj6bok6twAAAIA96pu2udaS\neyncaivtZJKfG782+uSQH2E4uyUCAAAA+0Frefa98T1Tw606Xl+eZCXJJZP3t5X2efdGAWxtcTFZ\nW5t3FQAAAAD9GrIs8dczGuj19iSilvOkpWVxMTk9eONLAAAAgP1nSLj1qbbS/su5fHlVXZHkG5Lc\n0lr74k2uV5JfSvK0JCeTPLe19o7xteck+dHxrT/VWnvFudSw20zulriwINwCAAAA9raqLLV29qaF\nm53bypBw60/qeP1Mkt9Pcsf6ybbS3jPg2Zcn+eUkr9zi+tcnecT49WVJfiXJl1XV/TNaCnksowFi\nb6+qq1prHx/wm3vGwoJliQAAAMCe99Ykjx1wblNDwq2v2PCejAKnr5r2YGvtjVX1sG1uuTTJK1tr\nLcmbq+p+VfXgJE9Mck1r7USSVNU1SZ6a5FUD6t0TWrMsEQAAANi7qvLZSR6c5HBVviQ5s8vefZMc\nGfo9Q3ZL/MpzqnCYhyT56MTxjeNzW52/m6q6LMllSbK8vDybKs+jyd0SLUsEAAAA9rB/meQ7klyc\n5MW5K9z6dJIfG/olW4Zbdbye1Vbaq+p4fe9m19tK+4/Da52d1trlSS5PkqNHj7Y5l3OvsiwRAAAA\n2Ktay68n+fWqfHNr+d1z/Z6Fba5dOH5/4Bave8NNSR46cXzx+NxW5/cNuyUCAAAA+8RnV+W+SVKV\nl1TlrVV58tCHt+zcaivtP43fB7eBnYOrkjy/qq7MaKD8J1trN1fV65L8r1W1HrA9JckLZ1hHN+yW\nCAAAAOwzl7WWX67KUzKawfVdSa5I8rghD0+duVXH66KM1j8+bPL+ttIum/ps1asyGg5/UVXdmNEO\niAeSpLX2kiRXJ3lakuuTnEzy7eNrJ6rqJ5O8bfxVL1ofLr+fWJYIAAAA7APrY6aeluSVreXdVduu\nNjzLkN0SX5PkzUnelGRHUUtr7VlTrrck373FtSsySun2JbslAgAAAPvEu6tydZLPS/IjVbkgdwVe\nUw0Jt462lfYD51odO7Nxt8TWRq+J1YoAAAAAe8m3Z7QE8frWcrIqFyV53tCHh7R4vbaO11POtTrO\n3cL4T0f3FgAAALBXtZa1JJ+T5N+OTx3OsMwqybDOrf8pyQ/X8TqZ5M4klaS1lXb/HdbKDqzvlpiM\nwq31zwAAAAB7SVV+OaMZ7V+V5KeT3JrkJUn+2ZDnh4RbF51zdezYxt0SE51bAAAAwJ7237aWx1bl\nnUnSWk5UZXnow1uGW3W8HtFW2geTfNEWt7xnZ3WyU+vdWnZMBAAAAPawU+PdEVuSVOUBSQa3+mzX\nufWCjIZ3vXiTay2jVjFmpLWmcwsAAADYs6qy1FpWM8qefi/JA6tyPMk3Jzk+9Hu2DLfaSnve+P0r\n72Gt7MDG3RIT4RYAAACwJ701yWNbyyur8vYkX5vRrPdvai3vG/olQ2ZupY7XFyR5ZJJD6+faSvvt\nndXLTlmWCAAAAOxhZzp8Wsv7k7z/XL5karhVx+tHkzwlyRckeV2Sr0vypiTCrRlqsSwRAAAA2N
Me\nWJXv3+pia/mFIV+yMOCef5Pka5Lc3Fbas5M8KsnRQSWyY3ZLBAAAAPaJxSQXJLnPFq9BhixLvK2t\ntLU6Xqt1vO6T5O+TXLLzetkpyxIBAACAPezm1vKie/olQzq33lnH635JrkhybUbDvt56T3+Y7dkt\nEQAAANgtqmqxqt5ZVf/3+PjhVfWWqrq+qn6nqpY3e+ze+O1tw606XpXkJ9pK+0RbaS9O8i+T/I9t\npX3bvfHj3J3dEgEAAIBd6PuSfGDi+OeS/GJr7XOTfDzJ8zZ55sn3xg9vG261ldaSXDNxfH1bae+4\nN36Y6SxLBAAAAHpXVRdn1BD10vFxJXlSklePb3lFkmdsfK61nLg3fn/IssR31fF6zL3xYwxnt0QA\nAACgE0tVde3E67IN1/+PJD+UZD3BeECST7TWVsfHNyZ5yMyK2+pCHa+lttJWkzwmydvqeP1tklsz\nWg/Z2kp77KyK2s/slggAAAB0ZrW1dmyzC1X1DUluaa29vaqeeH7LGtlut8S3Jnlskqefp1rYwLJE\nAAAAoHNPSPL0qnpakkNJ7pvkl5Lcr6qWxt1bFye5aVYFbBduVZK0lfa3s/pxtma3RAAAAKB3rbUX\nJnlhkow7t/7n1tq3VNV/TvKNSa5M8pwkr5lVDduFWw+s4/X9W11sK+0XZlDPvje5W+J655ZwCwAA\nANhlfjjJlVX1U0nemeRls/qh7cKtxSQXJBNpC+fVeueWZYkAAABA71prf5bkz8afb0jy+PPxu9uF\nWze3lfai81EEd2e3RAAAAIDpFra5pmNrDiZ3S7QsEQAAAGB724VbTz5vVbApyxIBAAAAtrdluNVW\n2onzWQhns1siAAAAwHTbdW4xB3ZLBAAAABhOuNUxyxIBAAAAtifc6pTdEgEAAACmE251xm6JAAAA\nAMMJtzpmWSIAAADA9oRbnbJbIgAAAMB0wq3O2C0RAAAAYDjhVscsSwQAAADYnnCrU5O7JQq3AAAA\nADYn3OrM5G6Jhw6N3u+8c07FAAAAAHROuNWxI0dG7ydPzrcOAAAAgF4JtzrVWhNuAQAAAEwh3OrM\n5G6Jwi0AAACA7Qm3OibcAgAAANiecKtTLS0HDiRLS8ItAAAAgK0Itzpz/YnrkyRv+sibkoy6t4Rb\nAAAAAJsTbnXmjz/0x0mS33zPbyYRbgEAAABsR7jVmYUa/ZG0tCTCLQAAAIDtCLc6s75b4trptSTC\nLQAAAIDtCLc6U1VnHR8+LNwCAAAA2Ipwq1PryxKXl5M775xzMQAAAACdEm51Zn1Z4jrhFgAAAMDW\nhFud2bjb5MsvAAAgAElEQVQs8eBB4RYAAADAVoRbnTn24GNJks9/wOcnGXVu3XHHPCsCAAAA6Jdw\nqzPf8ZjvSJI86eFPSmJZIgAAAMB2hFudWVxYTJKsnV5LItwCAAAA2I5wqzOLNQ632ijcMnMLAAAA\nYGszDbeq6qlV9ddVdX1VvWCT679YVe8av/6mqj4xcW1t4tpVs6yzJ5t1bpm5BQAAALC5pVl9cVUt\nJnlxkn+R5MYkb6uqq1pr163f01r79xP3f0+Sx0x8xW2ttUfPqr5ebezcsiwRAAAAYGuz7Nx6fJLr\nW2s3tNbuTHJlkku3uf9ZSV41w3p2BTO3AAAAAIabZbj1kCQfnTi+cXzubqrqkiQPT/InE6cPVdW1\nVfXmqnrG7Mrsi5lbAAAAAMPNbFniDj0zyatbGyc6I5e01m6qqs9J8idV9d7W2t9ufLCqLktyWZIs\nLy+fn2pnaLPOrVOnktOnkwXj/wEAAADOMsu45KYkD504vnh8bjPPzIYlia21m8bvNyT5s5w9j2vy\nvstba8daa8eWlnrJ6s7dZjO3klHABQAAAMDZZhluvS3JI6rq4VW1nFGAdbddD6vqC5JcmOT/nTh3\nYVUdHH++KMkTkly38dm9qKpSqbM6txJLEwEAAAA2M7NWp9
baalU9P8nrkiwmuaK19v6qelGSa1tr\n60HXM5Nc2VprE49/YZJfrarTGQVwPzu5y+Jet7iweNbMrSS5447kPveZY1EAAAAAHZrpOr7W2tVJ\nrt5w7sc3HP/EJs/9ZZIvmWVtPVusRZ1bAAAAAAMYUd6hyc4t4RYAAADA1oRbHdK5BQAAADCMcKtD\nW83cAgAAAOBswq0O6dwCAAAAGEa41SEztwAAAACGEW51SOcWAAAAwDDCrQ6ZuQUAAAAwjHCrQ4tl\nWSIAAADAEMKtDi0uWJYIAAAAMIRwq0ObdW5ZlggAAABwd8KtDk12bq3P3NK5BQAAAHB3wq0OmbkF\nAAAAMIxwq0NmbgEAAAAMI9zqkJlbAAAAAMMItzpk5hYAAADAMMKtDk12bh04MDon3AIAAICdOXUq\nedrTkre+dd6VMEtL8y6Au5vs3FpcHL2EWwAAALAzH/xg8trXJh/6UPKBD8y7GmZF51aHJju3ktHc\nLTO3AAAAYGcWF0fva2vb38fuJtzq0GTnVjKau6VzCwAAAHZmPdw6fXq+dTBbwq0Obda5JdwCAACA\nndG5tT8Itzq0sXPLskQAAADYOeHW/iDc6tDGzi3LEgEAAGDnFsaph3BrbxNudWizzi3hFgAAAOyM\ncGt/EG51yMwtAAAAuOdaG70Lt/Y24VaHzNwCAACAe2493LJb4t4m3OqQmVsAAABw79G5tbcJtzpk\n5hYAAADcc5Yl7g/CrQ5tNnPLskQAAADYGeHW/iDc6pDOLQAAALjnzNzaH4RbHTJzCwAAAO49Orf2\nNuFWhxZL5xYAAADcU5Yl7g/CrQ4tLpi5BQAAAPfUeri1/s7eJNzq0GadW8ItAAAAgLsTbnVoY+fW\n4cPJ7bfPsSAAAADYhXRs7Q/CrQ5t7Nw6ejS59VZ/KQEAAGAn/P/o/UG41aGNnVtHj462LdW9BQAA\nAHA24VaHNuvcSkbdWwAAAMAwOrf2B+FWhzZ2bl1wwehduAUAAADDCbf2B+FWh3RuAQAAAAwj3OrQ\n4sJiWlraOGIWbgEAAMDO6dzaH4RbHVqsxSQ5szRxPdz6zGfmVREAAADsPsKt/UG41aHFhXG4dfrs\ncEvnFgAAAAwn3NofhFsd2ti5ZaA8AAAAwOaEWx3SuQUAAAD3nM6t/UG41aGtZm4JtwAAAGA44db+\nINzqkM4tAAAAgGGEWx3a2Ll16FBSZbdEAAAA2AmdW/uDcKtDGzu3qkbdWzq3AAAAYDjh1v4g3OrQ\nxs6tZLRjonALAAAA4GzCrQ5t7NxKdG4BAADATunc2h+EWx3arHNLuAUAAAA7I9zaH4RbHbr5Mzcn\nSU7cduLMuaNHDZQHAAAA2Gim4VZVPbWq/rqqrq+qF2xy/blV9Q9V9a7x6zsnrj2nqj44fj1nlnX2\n5n6H7pckOXnq5JlzOrcAAABgZ3Ru7Q9Ls/riqlpM8uIk/yLJjUneVlVXtdau23Dr77TWnr/h2fsn\nWUlyLElL8vbxsx+fVb09+cKLvjBJcmrt1JlzF1yQfOxj86oIAAAAdh/h1v4wy86txye5vrV2Q2vt\nziRXJrl04LNfl+Sa1tqJcaB1TZKnzqjO7hxdPpokufXUXa1aOrcAAABgZ4Rb+8Msw62HJPnoxPGN\n43Mb/fdV9Z6qenVVPXSHz+5JRw+Mw607hVsAAAAA25n3QPn/kuRhrbUvzag76xU7/YKquqyqrq2q\na1dXV+/1Audhq84tA+UBAABgOJ1b+8Msw62bkjx04vji8bkzWmv/2Fq7Y3z40iSPG/rsxHdc3lo7\n1lo7trQ0sxFi59VWnVsnT/qLCQAAAEP5/9D7wyzDrbcleURVPbyqlpM8M8lVkzdU1YMnDp+e5APj\nz69L8pSqurCqLkzylPG5feGC5QuSnN25dcEFo7+Ut902r6oAAAAA+jOzVqfW2mpVPT+jUGoxyRWt\ntfdX1YuSXNtauyrJ91
bV05OsJjmR5LnjZ09U1U9mFJAlyYtaaydmVWtvlheXs1iLd+vcSkZzt44c\nmVNhAAAAsIvo3NofZrqOr7V2dZKrN5z78YnPL0zywi2evSLJFbOsr1dVlaPLR+82cysZhVsPfOCc\nCgMAAIBdRLi1P8x7oDxbOHrg6KadW4bKAwAAANxFuNWpo8tH85lTdyVZk51bAAAAwHQ6t/YH4Van\njh44mpOnTp45vmA0Y164BQAAAAMJt/YH4VanDh84nNtO3bU1os4tAAAAoDdV9dCq+tOquq6q3l9V\n3zc+f/+quqaqPjh+v3BWNQi3OnXkwJGzOreEWwAAALAzOrfOi9UkP9Bae2SSL0/y3VX1yCQvSPL6\n1tojkrx+fDwTwq1OHV46nNtWdW4BAADAuZoMtwRds9Fau7m19o7x508n+UCShyS5NMkrxre9Iskz\nZlXD0qy+mHvm8IHDm3Zu2S0RAAAAdq61pGreVexaS1V17cTx5a21yzfeVFUPS/KYJG9J8qDW2s3j\nS3+f5EEzK25WX8w9c+TAkbNmbhkoDwAAADsz2a11+nSyYP3auVptrR3b7oaquiDJ7yX5d621T9VE\nkthaa1U1s945f6yd2rgscXk5WVwUbgEAAMBQG8MtZqOqDmQUbP1Wa+33x6c/VlUPHl9/cJJbZvX7\nwq1ObRwoXzVamijcAgAAgGGEW7NXoxatlyX5QGvtFyYuXZXkOePPz0nymlnVYFlipw4vHc5tp25L\nay3rrXzCLQAAADg3a2vzrmDPekKSZyd5b1W9a3zuR5L8bJLfrarnJflwkm+eVQHCrU4dOXAka20t\np06fyvLicpJRuGWgPAAAAAyjc2v2WmtvSrLVqP4nn48aLEvs1OEDh5PkbkPldW4BAADAMMKt/UG4\n1akjB44kyVlztyxLBAAAgHMj3Nq7hFudOrw07tya2DFRuAUAAADD6dzaH4RbndK5BQAAAPeMcGt/\nEG51arOZWwbKAwAAwLmxW+LeJdzq1MHFg0mSO9buOHNO5xYAAAAMp3NrfxBuderQ0qEkye2rt585\nZ7dEAAAAGE64tT8Itzp1cGncubV6dufWyZP+QgIAAMBO+f/Se5dwq1ObdW4dPTp6v+22zZ4AAAAA\nJunc2h+EW53aauZWYmkiAAAADCHc2h+EW51a79zauCwxsWMiAAAADDEZbtktce8SbnVqfebWxoHy\nic4tAAAA2CmdW3uXcKtTZzq3LEsEAACAc2JZ4v4g3OrU+sytzQbKC7cAAABgOuHW/iDc6tT6ssTN\nZm4JtwAAAGBnhFt7l3CrUwu1kAMLBzbt3DJQHgAAAKYzUH5/EG517ODSwbNmbhkoDwAAAMNZlrg/\nCLc6dmjpkJlbAAAAcC8Qbu1dwq2OHVw8aOYWAAAAnCOdW/uDcKtjh5YO5fa1uzq3lpeTpSXhFgAA\nAAwh3NofhFsdO7h0dudWMureMlAeAAAAdsZA+b1LuNWxjTO3ktFQeZ1bAAAAMN1k59add86vDmZL\nuNWxg4tn75aYjDq3hFsAAAAw3WS4dfvtW9/H7ibc6tihpUObLksUbgEAAMB0k+HWHXdsfR+7m3Cr\nYweXDt5tWaKZWwAAALBzwq29S7jVsUNLh+62LPGBD0xuuWVOBQEAAMAuYlni/iDc6tjBxbt3bl1y\nSfLhD5/9FxQA9osPffxD+bE/+bE0/yEEAAawLHF/EG51bLOZW5dcMpq5deLEnIoCgDm69MpL81N/\n/lP54IkPzrsUAGCXEW7tXcKtjm3VuZWMurcAYL9ZX66vcwsAGMKyxP1BuNWx5cXl3Ll251nnhFsA\nAAAwjGWJ+4Nwq2PCLQDYXIvOLQBgZ4Rbe5dwq2MHlw7ebbfE+98/OXpUuAXA/lSpeZcAAOwiOrf2\nB+FWx5YXl7N6ejWn2+kz56ru2jERAPYbHVsAwE6YubU/CLc6dnDxYJJsujRRuAUAAADD
6dzau4Rb\nHVteXE6yebj1d383h4IAYM4sSwQAdsKyxP1BuNWxg0ubd2597ucmJ04k//W/zqMqAAAA2B0sS9wf\nhFsdW+/cumP17Hj5i7949P7+95/vigAAAGD3WA+3lpZ0bu1lwq2ObbUs8Uu+ZPT+3vee74oAAABg\n9zl0SLi1lwm3OrY+UP6OtbP/Bj74wcmFFybve988qgIAAIDdYb1z69AhyxL3MuFWx7bq3KoaLU0U\nbgEAAMDWJsMtnVt710zDrap6alX9dVVdX1Uv2OT691fVdVX1nqp6fVVdMnFtrareNX5dNcs6e7U+\nUH7jzK3krnBrcjgeAOwXzX8AgY684Q3JS1867yqA7Qi39raZhVtVtZjkxUm+Pskjkzyrqh654bZ3\nJjnWWvvSJK9O8vMT125rrT16/Hr6rOrs2VadW8lo7tYnP5nceOP5rgoA5qeq5l0CwN088YnJd33X\nvKsANnPbqdFaxEOHmnBrD5tl59bjk1zfWruhtXZnkiuTXDp5Q2vtT1trJ8eHb05y8Qzr2XXWZ25t\nFm6t75j4xjcmz31u8ulPn8fCAGBO1m4/lFzzM7n9NiEXADDdb7/nVUmS2/MJM7f2sFmGWw9J8tGJ\n4xvH57byvCSvnTg+VFXXVtWbq+oZWz1UVZeN77t2dXX1nlXcmfXOrY0D5ZPki75o9P6t35q84hXJ\nr/3a+awMAObj4294dvIXL8hvXH7/eZcCAOwCt566NUmyeOCUzq09bGneBSRJVX1rkmNJvnri9CWt\ntZuq6nOS/ElVvbe19rcbn22tXZ7k8iQ5evTonhrAsd2yxPv7Nz0A+9LoP/Wf+qQ9cQCAIUbd3gcO\nrgm39rBZ/svwpiQPnTi+eHzuLFX1tUn+lyRPb62d+Z9aa+2m8fsNSf4syWNmWGuXthsov5ERJADs\nB4uHRtMMbjsp3AIABmjjcGt5zbLEPWyW/zJ8W5JHVNXDq2o5yTOTnLXrYVU9JsmvZhRs3TJx/sKq\nOjj+fFGSJyS5boa1dmm7zq0kefSj7/q84N/4AOwDtXxbkuTkrf7DBwBMV+POrSWdW3vazP5l2Fpb\nTfL8JK9L8oEkv9tae39Vvej/b+++4+Sq6v+Pv8603c3upleSQAotkBBpQaQEUSliwa8gCNJEECHi\nV1GK5QfytYAUpSjYEESFSJEigVBEoiAtNEkCSSAJ6W1Lsn3K+f3xmbszu5ndbJLdnZnN+/l43Me9\nc+fOnTPl3PK5n3Oucy64++F1QAVwn3PuDedcEPyaBLzqnHsTeBa4xnu/0wW3gg7lc/W5BTBrVmZa\nmVsiIrIzCMXskmuDMrdERESkC4K+i2IxC275PtWZkQR6tM8t7/0sYFa7ef8va/rjHbzuBWBKT5at\nGGwtc2vUqMx0PN4bJRIREckvF7abx6hZooiIiHSJz/S5BdDSAiUl+SyQ9AQdGRawoM+tjoJb2Roa\nero0IiIi+edTdujS2KCUZREREdm6pTXLAFjetACAurp8lkZ6ioJbBSxoltiU2Hqvd/X1PV0aERGR\n/FtVuwaAxkYdwoiIiMjWrdm8GoAPWl4HYOPGfJZGeoqODAtYv2g/IqEINU01W11WmVsiIrJT8Hbo\nkkzkuRwiIiJSJCzb25dZVKuqKp9lkZ6i4FYBc84xqHQQ1Y3VHS4zZ46NlbklIiI7BR8GIJVUs0QR\nERHpAh8EtzYACm71VQpuFbjyWDn18Y4jV0ccAQcfDEuX9l6ZRERE8iaVDm7pTkciUoB0FzaRAlZm\nUS0Ft/omBbcKXHm08+AWwH77wT/+AevX91KhRERE8kWZWyJSwJLJfJdARLaUPmboZ80S1edW36Tg\nVoGriFVQ39J5cOvYY218++29UCAREZF8Sve5lUrluRwiIjkouCVSgIJmiaUbKCmB5cvzXB7pEQpu\nFbjyWDl1LZ3fq/Tkk2H8eHj77V4qlIiISL4EzRIV
3BKRAqTglkjhSpFg3Dh16dNXKbhV4LrSLBFg\nyhT417/UsbyIiPRxQbPElJolikjhUeBdpBAFxwye8eNhyZK8FkZ6iIJbBa48Vr7VZokAM2bA6tVw\n3329UCgREZF8UbNEESlgytwSKUDpZok4r8ytPkzBrQLX1cytj38cxo6FO+/UXVpERKQPU7NEESlg\nCm6JFDLL3Kqqgk2b8l0W6W4KbhW4iljFVvvcAnAOLr0UnnsOZs/uhYKJiIjkg+6WKCIFTIF3kUKU\nOWYYN87GaprY9yi4VeCGlA1hU/MmWpItW132/PNhwgS47DLtWEVEitkTT8Bf/pLvUhSodLNEZUeI\nSCHStkmkAGU1Sxw/3ibVNLHvUXCrwA0oHQDA5ubNW102FoMf/QjeegseeKCnSyYiIj3l+OPh9NPz\nXYoClVKH8iJSuBTcEilEbTuUB2Vu9UUKbhW4/iX9AdjU3LVGwV/4gmVv/eIXPVkqERGRPEk3S1T/\nkiJSiBTcEilsQ4ZAebkyt/oiBbcKXGWsEoDNLVvP3AIIh+Eb34AXXoCXX+7JkomIiORBa7NEZW6J\nSOFR1yAiBSirWaJzsPvu8M47+S2SdD8FtwrctmZuAZxzDvTvr+wtERHpg4JmicqOEJECpMwtkcIz\nonxUesrSvqdNg5deUjC6r1Fwq8BVlqQzt7rQ51brayrhK1+B++6DFSt6qmQiIiJ5kG6WqMwtESlE\nCm6JFDBnwa1DD4WaGnj33TyXR7qVglsFLsjc6mqzxMCMGRaJ/uUve6JUIiIieZJulpiIK7glIoVH\nmSAihe/QQ238n//ktxzSvRTcKnBBn1vb0iwRYPx4+Mxn4I47IB7viZKJiIjkQbpZYiLh1Km8iBQc\nZW6JFB7vM3dLBNhzTxg0SMGtvkbBrQLXmrm1Dc0SA+eeC+vWwWOPdXepRERE8iTdLBEgkchjOURE\nclBwS6SApZslhkJwyCF2EzbpOxTcKnAVsQpg2zO3AI47DkaNsuwtERGRPiGVCW61tOSxHCIiOahZ\nokjhcTnCHkcfDfPnw/LleSiQ9AgFtwpcOBSmf0l/qpuqt/m1kQicdRbMmgWrV/dA4URERHqbzxy6\nNDbmsRwiIjkoc0uk8GS6Mcj0Z3DCCTaeNavXiyM9RMGtIjC031A2NGzYrteec47tZP/4x24ulIiI\nSD5kNUvcvO0t9kVEepSCWyIFzGWCW5Mmwbhx8OCD+SuOdC8Ft4rA0H5DWd+wfrteu+eecMQR1jRR\nHe+KiEjRy2qWuGnbW+yLiPQoBbdECs/gsiFbzHMOzj4bnnwSFi7s/TJJ91NwqwjsSOYWwHnnWYU9\n7zx1visiUkx0USKHrGaJytwSkUKjPrdECs9Bow4C4IBdDmgz/6tfhWgUfvnLfJRKupuCW0VgWL9h\nOxTcOv10uPRS+P3v4fvf18mSiEix0ElSDmqWKCIFTJlbIoXL+7YHViNHwimnWCunDdt/ui0FQsGt\nIrCjmVuhEFx7LZx5po332w9uuQWqt72P+lbf/S4ceuj2v15ERLZO2bY5qFmiiBQwBbdECo/3zsZs\nedXwiiugvh6uuaa3SyXdTcGtIjC031Aa4g3Ut9Tv0Hr+8Ae46y4oLYWLL4ZddoEzzoB//3vbs7l+\n+lN48UXdhl1EpCfpJCkHHwJnX4wyt0Sk0CjjVqTwBOe6Sb/lgdU++9hN2H7xC3jttV4umHQrBbeK\nwLiB4wBYUrNkh9YTCln21iuvWMU95xx45BHrcP7II+1OEU1N27bOBQt2qEht7LuvbVRE8mnDBnj3\n3XyXQsQocysHH4bSGgBqavJcFhGRdnRRQqQAtQa3ch9YXX89DB9u58p1db1YLulWCm4Vgd0H7w7A\n4qrF3bbO/feHX/0KVq2yJopLl8LnPw8jRsCXvwxPPdX5SVUkYuM33+y2IjF/Pnzzm923Pulezz+/\nc2zsd90V9t47
36UQMQpu5ZAKQ1kVlQMSvPdevgsjItKWttsihSuVI3MLYNAga+G0YAGcdpqC1MVK\nwa0iMHHQRKB7g1uB8nKYMQOWLIHZs+Fzn4P774djjoExY+y5J56Axsa2rxs+3MbdFdzqqeaN69Zt\nOc97+4zNzT3znn3R2rVw+OGW7deXeb/lf10kn3SSlEO6WeK43Zu6NXtYRKQ77AwXAkWKTdDn1pq6\nNR0u84lPwM03w6OPwne+01slk+6k4FYRGFQ2iCFlQ3okuBWIRCygdeedFsi4/3447DC7c8Txx8OQ\nIfDpT9ttUmfPzvRz8tBD3RPZbmjY8XW09+ijlon23HNt58+eDSefDFdf3f3v2VetX2/jefPyW46e\nlh3YisfzVw6RgIJbOfgwuBTj92hk/vx8F0ZEpK3a2nyXQETaC/rcqmmu6nS5iy6yvql//nO4/fZe\nKJh0KwW3isTug3fv0eBWtrIya6L4wAOwcSM8/jice64FNmbMgOOOs+DW5Mnw/vsWCNvWDunbq9+x\nvvJzmjnTxovbfW1Ll9o4V1ZXLvfdZwGxXLa1j7JiFVyFrKjIbzl6WnZwS335SCFQWnwOySiEWxi/\nRyPr1+vW3SKSf9nHwbqLq0gh2/pJ6403wic/aee9s2b1QpGk2yi4VSR6M7iVrazMglm33ALvvWft\nkCdaK0kuuwz22w9OPRUOPBBeeKFr69ywYcsTtp7I3Fq2zMZlZW3nB1fUBg7s2nq+8AX7Dtq7915b\n98KF21/GYhEEesrL81uOnpYdrFRwSwqBMrdySMYgHGf8HlZh+3pGqYgUvuzjWgW3ZGe3dm3hHUdv\nSyJGOGznefvtB5/9rPXFJcVBwa0isfvg3fmg9gOaE/nrKMo562h70SKoroYvfckqPsDrr1szxpNP\ntnkd7djr62HYMLjkki3nd7cgC6d9WYLH/fvv2Pp/9jMbL1++Y+spBhs32rivZ25lB7eqq/NXDpGA\ngls5pCxz66CD7OF//pPf4oiIpFKZaQW3ZGc3cqT13VxIWoNbrmtRrspKePZZmD4dzj4bfvhDZdMX\nAwW3isTug3fH43m/+v18FwXnMllPkybZDv3BB60z+n/9C774RQtgHX20BYDmzLEd/bvv0npnqzvu\naLvO7ODWjjZxbL+ejoJb2Qci22PRorbvsz1WrYKnn+6+z9xTqtLN0/t6cCu7WWJXm62K9CQFt7YU\n9mUQbmH4cMekSbaPERHJJ2VuibTVE4kL3aPrJ10DBlizxDPPhKuuskQOZYsXNgW3isSkoZMAmL++\n8HrPdc4CWw8+CCtXWoBrxgzr2+qyyyziPWCAZX0dfLC9JggszZ1r/XtVZfXt110ZM8EdGNsfZARp\nstvaFLJ9m+vgQGZHNt5nnGF35nj55e1fR28IvsPS0vyWo6dlZ279/e/5K4dIQMGtLflEDMK2gT/y\nSHj+eV1NFZH8yr7rd6E1xxKRbQlptRWL2Q3X/vxn68d5//0t0NWcv8ZU0gkFt4rEpGGTcDjeXvd2\nvovSqXAYDj8cbrjBNgAffGAbg6DPqmDnX19vG4aDDrKg2Gc+k1lH0OF7tvXr7ZasQeZXIJHoOLgU\nBGS6K7h1wgltHwcBuh3pL2zuXBsHdyMsVNkZTX1Z9ufUXdikEChok0MyCiG7nenHP27b+GefzXOZ\nRGSnln2iu2RJ/sohIh3YxmaJ2ZyD006zvqdPPtmaKO6zD9x9t47TCo2CW0WiX7QfEwdP5O31hR3c\nyhYKwdixtjF4/HHb8d9+O/zgB7DXXrZhyOW3v4Wbb7ZAw/r11mTv0kvh+uvhy19uu+zZZ1tTuaBZ\n38qVFiyDTMfxmze3fc22BLfi8cx0+47pg/fckeCWczZuX8ZCEwR9sq9M9kVB5taYMbB6dX7LIgLK\n3Mop3eeWx/OpT1lm8B//mO9CicjOrPX4KLaZBQt2vOsLEekpnmRq+yJSw4ZZ0s
YTT1jfzWeeCVOm\nWGZXT9wcTbadgltFZPLwyQWfudWZWAy++lW4+mqLfM+bB6+8AhddZM9//vN218Xbb4dvfAP69YPh\nwy1AduedtsycOTB7tl0VW7TINjBgfVd94xsWlPj85y2AFWRsrVzZthxBs8eubISyl2nfL1bweEea\nJQbBrULvnyEI+vT14FYQxJswwYJbhd4XWm+bPdvugCO9J7uprBifiLY2SywthVNOgQcegLq6PBdM\nRHZarZlbo1+msRH++9+8FkdE2vHetU7HU/FOlty6Y4+11jf33WfJHOecA6NHw8UXq0+ufFNwq4hM\nHjaZRRsX0ZQo/rMd5yyd86CD4NZbLYhw//0WvHrqKbsKf9FFttyAATB0KPzlL9bs8bjjLPiw556Z\n9Y0ZY9legUsvzQQmFi5s+95by9z6/vfhnnu2XGbEiLbLdaVZ4oYN8MtfdnwFr9gyt/p6+/IgkDBh\nggUtCz3o2JuSSat7Rx+d75LsuGIKWm7YkO8SFJ4guPXou48ClsHb0AB/+EN+yyUiO6/W46O9HwLg\nsYe3oB0AACAASURBVMfyVxYR2VL23RJbkjt+tT4UgpNOskD2P/8Jxx8Pv/41TJ4MH/kI3HQTrFix\nw28j20jBrSIyefhkkj7JOxveyXdReky/ftaHyhlnWNArlbII+Pr1dhfG556zjcX118P//I9lbk2b\nlnl9EPD67W+t6eNFF8GyZZbVdfXV8JvfZDK5HnkEHn7YNnbew2c/a527//jH1pQS2mZlBXcKXLMG\nvvWtTBvrzoJbX/qSda7f0a3qO7qjY2dmzty2zkq74+R4Z2mWGHzOD3/Yxs88k7+yFJrguyn2vsh+\n+EM7ICmW5n66a2cO6WaJH9R+AFh9nT4dfvrTnad/QJGumD4dPvrRfJei+7zzTu5+WQtB6/FR/+Xs\nuWfh3yhIZKfTmrnlaYx338GCc7at/ctf7Bzzuuvs/PF//9daH33kI/Daa932drIVkXwXQLru4NF2\nq8E5y+bwoZEfynNp8uOww2wAuOQSG3/xi5ZxU1pqG5g774S33rI7NTY32wHGbbdl+s9yzq70/+EP\ncOKJlg2WqzPAb387ExgaPNg6x3/1VXvf7FvP52qWuO++1gb79dft8dNPW7nvussCdZdcYuUImtF0\nNXPr/ffh1FPhU5+CRx/d+vIzZ9ryr7xiWXLba2cJbq2tqQUGMPqANxkxYip3321BVOk7QYNbb7Xx\nihUwblxei9IlCm7lkIxBOE4iZRFK5yxoedRR8POfw3e/m9/iiRSK7GOVvmDSJLs4UYgdOLdmboVb\nOPhg65Nn0ybrl0dkZ1Ko2fE+6FHeeTY1b2JExYjOX7Adhg6188dvfxvefddaJd13n82X3qHMrSIy\nYdAE9h66N48tUq5zNuess/egid/ZZ8ONN1ozwl13teBWS4sFt95805op3nGHnTT+/OfWhPGCCywb\n67LLLAg0bpw1J7zrLjuQuvJKC2IdfPCWB4s33mjR+T/8wYJYTzxh2S0zZ2ZOTG+/3QJZZ59td308\n9ljrMywIuP3iF207rw/87Gdtr/4FnZy/+ipUVeVOd125MtO8Luhc/+GHt+OLzbKzBLdeX/4uAH9e\neDunn27NCjZuzHOhCkRvB7cWLLAbSHR3hlVwgPH++9273u6UfWCoPs5ySMYg3ELSZ85wp0+3QPRV\nV1lWroj0LatW2bhQO2pvDW5Fmrn4YjtGu/LKvBZJJC+yz2cKKdCVSmYytzY193y/I3vtBd/7Hrzx\nhp2PSu9QcKvInLjXiTzz/jO8V/VevotSdCIR2G8/2H13ezxsmAWlfvITy+y64Qa45hrLclqyxE7m\n1661ANLFF1vTwiuvhI99zLK9BgywQNVRR1lTyS9/2Zo1Hn982/fdbz9ryrjLLpl5Tz3Vts8wsGaT\nQWf3YNOXXQaHHpqZFwSzqqvh05+2dNfsTp
Sbmqz/sbPOssdB+v6sWdv7rWXWCztBn1v1lswaK2vm\nrLNsB/2LX+S5UAWit4NbX/qSBYzfeqt71ztsmI0LObiVnZWgVPYc0sGtIHMr8JvfWNbsZz8LX/mK\n7ngq0pcsXpzvEnSu9eJfuJlp0+wGSjffDHffnddiifS67AvhhXTe0NIUgUgjhHonuCX5oeBWkbn4\nkIuJhqNc/MTFpHyK9fXreXzR4/kuVp81fDiMGmXT06ZZVsDTT1s2SU2NZVY9+6xdoXvvPZt+8EH4\n178so2vGDMv0uu02mDrVIvgzZ1pQCiyTa8UKGDQIfvQja/5YWWmBr8GDbZlUyrLLrrvOAmBgO4sX\nXrDpm26CH/zA3j+Y99e/2tWSBQvs8WuvwYsvbv/3EAQ2Xn21b59sN9RFIVpHOGxBydNPt+89aF66\nM+vt4FZwdb67D4wqK21cyIGP4LNHoxZUr6rKb3kKTrrPrcPGHtZm9pAhtp279FLLup040bbBb76Z\np3KKSLfZlr5G8yG7WSLYscP06XDmmXYhtFj6eRTZUdnBrULq0qK5MQJR60umqlEHVn2V+twqMqMq\nR3H9J65nxuMzOPuhs7n7LbskVP/devpF++W5dDuvQYNsmDAhM+/wwzPTF1xgQ+Dkky3jKjjR/u9/\nLTC2apUNq1dbPw1z51pH9jffnDlw2msva8cd+P73bXz99XYb2sDxx1tfXtdcY4G2U06B886zINvI\nkXYiWFlpQ0lJplnn009bX2OnnGKPW1ra7pwOPNDKXl6+Y99ZPG4n8SUlO7ae7tRUH4WSzNWcG2+E\nf/zDrsA++uiWd8zcmfT2AUrwf+zuuwUG9WjNmu5d7/bwHh5/HI45xjJLA0HmVnjis8Tf+SjPP2+Z\nmmJ9V5CMgUsy54M5nLP/OW2eLymBa6+1bd3//R/87nfWxHzSJDjhBLvj57RpmW2vSF/V17oRKJrg\nVsQmKips+/6tb9nx2YMPwoUXWpb/oEH5K6dIT8tullhdXTj/9+bGCMQsuPVetVpA9VXK3CpCFx58\nIT/66I9aA1sAx//5+E5eIYXGubYnV6NHWzOsSy+1ZnAzZ1qWlPcWoKqvt8ys11+3bKynn7bl7rkH\nPvMZa754xBF2UnzDDdY0Z/ZsyxA7+2y4915r7/2DH9jy06ZZVsPw4dZfWSxmO58BA6xp5Re/CPvv\nD3vvbc+//baVMeiU/sgjLWh2113WxPLtt62vr7q6rrWvv/NOW+8XvtAT3+72a0wHtxwWWRk+3A5K\nX33VPv8JJ1jZly3Lbznzoa8Et4KbNxRCcOupp+w/9eMft50fZG41jXyWWMwyQcW0bjPqR3DnG3d2\nuNzuu9v2adUquOUWq7833WR34x040G74cfbZln37979bM1VlVkhfkn2jmkLsgH1bZQe3CrGuZjdL\njCft7L6kxILrDz9sXUZ8+9u2LTr/fGWUSmGpqrL/Z9ANyY7IDqwXwrFWoLkxDFG7xf2ymp3wQH4n\nocytIuSc43tHfo+xA8Zy1kPWudKcZXOoaaphYOnAPJdOekI43DYr7GMfswHsbojtfetbdhXROQtc\njRhhJ8hr11pgZu1a6yi9rs4OgIOxc9Yn0caNlk3mnAXCVq2yJpVHHw0PPACXXw5XXNFxWfv3t0BZ\n9jBwYGb6ttvsYPuRRyyT7IgjYLfdLKNs2DALKg0bZp1/l5V13/eYSFj5gsBJtldfhVeenAi7vIzL\nWuC00yzQ98c/wp/+lOm/bPx4+PrXrT+2cLj7yliosoNb9fU7nrm3NcFPsL13C3zqKbsz6WmntZ1f\nSMGtoLP4d95pO7/1RDRWz7Rp8OSTlo2U63+706rtWu+sgwdb08QZM+y3//e/7SYdL71k/5G77sos\nGw7bCehuu9nFgN12s2HsWNsmBdulQso2FenIpqwuZerri/+ufdnBrdpayz4vJNmZW42JRqLhaOtz\nn/mMDW
++acGuP/0JfvtbO/Y59VTrW3XKlLYZvCK96Wc/s4vjEyfC1762Y+vKztwqpC4ggmaJw/oN\nY976efkujvQQbUaL2JlTz2T6btMZd9M4AEbfOJoLDryAUyefykG7HNTmBF12PrlOwEaM2PGmdZ//\nvA319bbTCobqajvgzDUsW2Ydg9fW2gF3LGYHeHPnWtbXn//ccZODkhILjAVNP4MMs6BJZUVF59PB\neOlSC9RdcYVlyK1ZYwGqX/8aPvjAbiwAQKy+NXMrMGkS/PSnlmEzbx7885/w0EMWRLz8cjsY2HNP\n2GOPzHjUKDuxHjjQ+k4qdtnBrfPOs2BfTx6IN9jFNR591DITt8XmzdbUD+Ckk+z/BpZVGNz98s03\nLahbUdE95d0R7bMqWu8G5pKccYY1i334YTjxxF4vWkFpkxUa29zhch2prLTm2tk3/aipsWzY+fPt\nRiLLltkwZ45lo+bKeBkwwIJcw4ZltkcDB2455JqvwJj0luys17q6vhXcWriw7c12CkF2n1uN8Ub6\nl2z5hU+daje+uPZau2HKr34FF11kz5WXW1+fU6bA5Mk2njKl8IJ40jcFx3PBXUk7U1VlTf6/9a3c\nx4GFmrnV0mTNEg8efTDPLX2OhniDuvTpgxTcKnK7DdyN1P9LMXPeTO55+x5uefkWbnzxRiYMmsDJ\n+5zMJ/f4JIeOObTNFSSR7lBebk1/grtPdpX3dvLePttp0ybLZFm/3rJ11q+3g/OaGgucVVfb9Nq1\n1udYkG0WBEG66qqr4Oc/t0DbgQdagK2NRAmr6nLv3UOhzAHnjBnW/8/cuXagvWgRPPFE7g7QKyst\n0DVggAVTOhrKy6FfPzsBLi3t2jgYYjErX0+45pq2mXr33AOHHGLNV4cP7/73896CkaEQPP+8dRL+\n4Q93vHxdnV0Vv/RS61Pp3//OPPfOO3bCAHbDhXXrrDnanXda5s7nPtf95e+q4GSt/a3tWwMqLsWX\nv2x97p19tn2+T33Kmh3vDNmCgW9/2363666zx4P3mkfVpy7olkzlgQPtJDnXiXIiYQf6y5e33S6t\nW9d2euFC+y1rarbeXCoI1ucKhFVUWL0uK9tyHExHo9s37Ez/l52B93YXvk9+0jKccwkyQ8H+m9l3\nbM6XF1+0wM32XFTYuNH+x8mkZV72RnArmbTv9+qrLVO7M9nNEhsTnbfjHzTIAgPf/Kbt6/7zH/tu\n3noL7r/fAmCBUaPsO9trL8siHTMmM4werYC5tHXTTdba4MgjM/OeeMKOoQZ2sssMLgJ2pSuIq6+2\n9xk/3voQbi87uLVoUdfK3RuCzK1T9z2VWYtm8bXHvsZvPvUbSiKqRH1Jjwa3nHPHATcBYeB33vtr\n2j1fAvwROBDYCJzivV+afu4K4FwgCVzsvZ/dk2UtZs45Tp18KqdOPpXqxmoeeuch7nn7Hm74zw1c\n+/y1DCgZwCcmfoLpu01ndOVoRlWOYmTFSCpiFZRHyymNlCrLqxMz357JvsP3ZfLwyfkuyjZzP3Sc\nf8D5/PrTv96h9azYtIKxPx/Lnz73J07f7/QdK5PLfaLVv78Ne+yxbetLJi2LLLt5ZTBkP04mLavq\nmWfsZHXhQrsievTR1iQxlYKBo9ey4vBvMWvRS136HF/4Qtt+w5JJu/vlokV20ltVZUG5qiobamut\nrJs22UlzXV1m6I5+DqLRtgGv9sGvWMyWCcbtp5uabBwErMJheO45O5EILFpknZv/7//CJZdYgKui\nwk6cysvtJDz7PdtPB+WIRGwIhzPTweP16y1T7Cc/gVtvtZOYH/zADs5GjbLsw379MgG+3/3Obsiw\nZo2dBDzwQKa8995r6/3gg8zdRH/6U+vX7sILLWtn5EgLLgTNaSsrM+XM9Z11NYi4fLmV+4orMu8B\n9r29/DJ89KP2eMkSK3v//va5WoNzzhOJWPPd886zpsHf+56VZeJEqyuj
R9t3MnJkZhg61A5i+/cv\n7KDGU09ZM4j77+/4ZLe52ZYB+w0Bqg7+JvSrpqbJmuQfuduRuV+8gyIRa564a9daQOK9BduDQFdt\nbWa6/RA8V11tv39NjW0bGhu3DHZ2B+e2PzDWlSG7fgTBtFCo7bzsep5raL9taL9d6GybEYl0f7Nd\n763Z6oc/bP1Pbou6Ots2bU/W7ne+YxdDrrjCmvc0NGS2HYEXXoCzzrKg/sMP515PdsbEs8/CPvts\ne1m6oqbGynLNNZbl3N6zz1pXBDfdZNvyvfbasil2Vyxdaq9ft84yrr/ylZ7PvA0C1xdfvPXg1tKl\nQLgZSjaxqXlT5wunOWcBgvHjM03ovbds+Lfftu4hguHuu2270V7//pms9sGDbVxZafvjXBfQgumg\nv9VgyN4/Zw86PdhSKmX1a2sB4yuusP/Pbbflft57298fdljb44p16zq+cPj1r9sF3ief3PK5eNyO\nzYJ1gx1PHX+8tVx4qZND2yCr/b0u9LMerPvtt3MHt7KbJQbHXd2ls++mM6tXw5L5Q2FChMN2PYyr\npl/FVc9dxZxlc7jwoAs5ZuIxTBkxhZBTd+TFzvmu9P68PSt2LgwsBD4BrABeAb7ovZ+ftcyFwH7e\n+wucc6cCn/Pen+Kc2we4B5gG7AI8Dezpve+0S8zy8nJfX1/fI5+nGNU21fL0+0/z+OLHeXzx46za\n3EE2igvRL9qP8mg55bHyNuPSSCmxcKzNsLZ+LX9f+HcATptyGoeOOZTSSGnrsiEXIuzCNg6FqWqs\n4uyHzuaBLzzAmP5jWLV5Ff1L+jOyYiThUJiwCxMOhTnl/lPw3vPgKQ+2aRYWCUV4d+O7fOnBL3Hv\nSfcyefhkwi6Mx5PyKbxPj9s9XlKzhJP+ehIvnPsCY/uPbX3+m098k1tfuZX3L36f8YPGA7S+JhwK\nc/NLNzN9t+lMHTkV7z2hq21DV3NZDQNKM0eYFz52Ibe9ehvJ/5fscGPYnGjmzbVvMm30tNZ5KZ9q\nXX7uqrlc8NgFvPyVl7stwJhMJQmHwm3K7q/csXr+8DsPc+LME9lvxH68ecGO94KaSCVYXru89fsv\nFL965VdcNMvaCCy4aAF7D93GM5odkEjYCVFjo53QNzV1bdx+aGnJPb+52Q44WlpsnGu6sdFOsEMh\nO3hparKTuiFD4LDDk/z9vb9x361TKK3fi5NOsqvMw4bZCXpPdDg/66k6olRw7rmZwEYuzm15I4Pp\n0y3Ikx3oAhg5uonrZz1A8o3T+dnPrJnptopE2p7Qx2Jbnnw3Nm5544GpUy0w1/6gNBTKBDQ+8QkL\n+gBw5NU0PXNZ61XFFSssODt/vp10LV5sB2zBQWku2f3dBdk/QeZfMJ0r8NBZMCIIXgQBjI4G57ac\n5719L+PGWWC0ocFOEG680b6zW26xE/ajjrLyvfJK25PK446DJw6KQSTOwNKBhFyIGQfP4OjxRzNl\nxBQGlQ4q+os18bjVvcbGzDiYDuprIQzZ2494PP+dlodCuYNfYN/dwIH2nwr6o4zHLTBQWrrl65yz\nk9cgCHPEEW2DdNl1IBjKymz7WVFhd8YbNMgyLZ95xi4KVFbaBZapU23blCswCNYMGWzZhQtt+qij\nLBtj6FAbHnvMgt7OWRPx4cMz5QDbntx6K/ztbzBqVIqqqhAXXGBZn+XlmfqfPQQXIoLvInvINS8U\nst985kw44wwLzC9davuL7BP1gQMtKPO5z1l5AP7yF+trKvivNzTY9mzKlC0DeWDbjV13hQF7v8aX\nPj2WK74xjAMOsADXlCm2XR0+3MrQnd0A3HknnHNO5j/UWZbURz4C/1nxPJxrt8quu6KO8lj3dk65\nebM1l16+3PYHK1bY9j/7Qlp1deaiWX39jt81M/iP5gqCZf83ujre3teEQm3HHU131zzn7OLebbfZ\n3c+/8x37Ll9/3bpL+Otfbb915JFW
91eutO/khRfsvzBvXqbJ6W9+Y9uD6mo7Nlm82OrKwIFw+ukW\nkLruOvu8991nF01POsnWP2aMXQyNxSzAecklts6f/MS6xQh2d6mUBUE/9CF7vOeeFlAuKbGLuGDd\nMQSZ7O2dfrrVy1DI6mNH/3XvbVsUrOuNNzJl8N6mX3jBAnZTD6rnrbnlvPiiBdd21A03WCb35Zdb\nNyHtLzTW1lrm2cSJbedXVdk2fP58ILaZhavWsMeQPXjqvae46rmreGG5ReAqYhWMHziecQPHMbx8\nOANKBtC/pD8DStPj9OP+Jf0pi5YRDUWJhqNEQ1EioUjrdPa4LwbLnHMN3vse7nl3+/VkcOtQ4Crv\n/bHpx1cAeO9/mrXM7PQy/3HORYA1wDDg8uxls5fr7D0V3OqY95619WtZvXk1qzavYm39Wupb6qlr\nqaM+Xk99S72Ns6db6mlJtmwxrNy8Mt8fp9uURkpJppLEU3aZIRqKtk5Xxiqpj9eT8pnL6BWxCpKp\nJEmfpCWZOWIYUT6ClE+R9EkSqUTrMk2Jptb1jqocRX1LPRsb7Ux03MBxLK1Z2rqOCYMmEHZhnHN4\n72lONlPbVMvIipFEQhESqQTNyebW1+w3Yj/iyTjr6texsXEjk4ZOIumTLNy4kIpYBRMGTeCttW8B\nsPvg3amIVbQGBYN6H0x77HEylWTe+nlMGT6lNVAZciFeWpm53LPvsH3btFHvbH25pr33rR057jlk\nT0aUj8A5t0U/V9sjeK/tNWfZnDaPS8IlREKRLYZoOJp7fqjt/GLfqQUHKgCvrnq19b/7sfEfa10m\nCCT4ZJhkcyk+XkYqEcHHYzZOxrIeR206EcOnQpAK41uHECQj+FQInwozr+XvMOlhDht7GIPKBtFS\nPZTGDcNprBlIc+0AkvEoyZYoqXiMZDzKkH1fp3rhJCL96mhuKGHgQU8weFgLG148hkRjBZHSJuL1\n5cyP3QkTn+bI3Y6kf0l/ko3lxOsriDf0I9FYTqKhH4mm9GdIRPDJiJU7GSaViNiQDJOKh0klwyQT\nEVLxCD7l8KkQyWQYnwwTijaTbCmhYc1oBu/3EommMhrXjCHRWE5s0Hr6jV5M44bhbJzwK44+cDeW\nPvkpVr96CLGKzSRbYtSl1sGXjoeKdRy525GUhEta60n7sU9GaakdRHPtIJqrBxOvryReX068oTwz\nbign1RIlGY+RikdJxaMk4/b9pZL2/fuk/RapZBiftN/BJ/Pfe0GkrJ59vvhH6teO4L0DT4KQ1fP5\nF87nkicv4fHFj7cuGw1F6RftR0mkhFg4Rkm4hHAo3LotCwaH22JeyIVwzrU+H2gfLOvouR19TVfX\ntz2v2ZqtHQt2ZduaSvl0fQmDD+FTofR/KYJPhkll/cd8Mv1cyuqLvS4YwngfLBvKLJMKtRu3m9f6\n3237HEAoEifRWE6ypQScx4UT+ITV7ez3IBUilQpDyuHCScKljUTK6mmqGmafIxXGJ8KkkhFIhfDe\ngbfPkmwuIxROkEpEiG8eZL9BOE7p0LXE+leTbCrDe0fjmrGkErFOv8vYwA201GTaG0b7V5FqKSXZ\nlNn3DtjrDRrX7UJLdSdpDAOWwTlHMOSpR6leuDepeO80v3HhBC6UIpWIgM/sB8Nl9VSOWknN+3uC\nS7V5LlDSv5ZwrAUXSoHz6W1zGalEFD76fZj+Yw7YcB3vzDyDhvVbdiLqwgnCsRbCsRZCESuHC6Vw\nzts6g8ehFC6UxDkPeHDYfyP9b3fOU7Uwk4pWOng9pYOrCIUTuHASF0rgwraeRGMZVe9MgaOuhKOu\nbn1NZaySilgFpZFSyqJllIRLcm5vOtouZW/rt/icnWwHspdJJSIkm0tINJWRbC4l2VzW+n2mEhFS\n8Sg+mE7afi8Vj5JKRlv3g6kc85LxTN0hlanvQR1qWzdDOepvCO9DWfufUKYupkIFse/pbS7Sgm+/\n
bXBJ8LlTsF3ItlG4FD4RJdm89TswlQ6qIlLaCC69TU///+vXjsAn0tsHl2LA2OVWX1qXMS315TSs\ny9S7fiNWEauspWH9CJJNZZQOW0OyuYSmDcPhzI8Tvf/vxOsriFVsJtKvgVAkka6X6brnrJ4H9R08\nLmTzXfo5nCdRX07tkj0z7ztyBWXD1uDCyfT+JMLmpbuTaKhk4N5vEC3fTLDn2jA3K7v79OP4743X\nt2mNs7x2Oc8ufZa5q+ayrHYZS2qWsKFhA5uaN1HXUrfV77QzDkckFOG8A87jlyf8cofWVSgKPbjV\nk1uO0cDyrMcrgEM6WsZ7n3DO1QJD0vNfbPfa0bnexDl3PnA+QCzW+cHCzsw5x8iKkYysGMn+o/bv\ntvU2JZrY3LyZpkQTTYkm4qk4yVSyNdCT8imSqSQPv/sw03eb3hocC/pKCYJAKZ/iuWXP8Y8l/+B7\nR3yvzU67JdnC4qrF/OKlX3D87sfzsfEfI+mTbQ4Esk9Igsdzls1h5ryZDC4bzNenfb31YOHNtW9y\n//z7mTpiKp+Y8InWYIXDUd1UzS0v38LUEVP56LiPUh4rZ/Xm1dzxxh2tG6cg0+za568FrGP/0nBp\naxZaJBRpnV5Ss4T75t/HyfueTDQUpSxSxu1zb+eocUcxtv9YyqPlzFs/z5qM9h/dmkEWciFKIiUs\nWL+AUZWjWt+zJFxCRayCftF+jKoYRSwcY0PDBpxzDCkbQkuyhYUbF3LUuKPw3rcGtyYPn0zKp0ik\nEm1OioP/RjDPe8+6+nXsNnC31rIkfZJDRh/SGuDadcCurSc67dfRfn3BMu2nR1aM5Jklz7DrgF1J\npBJtgmM7EuTq6CCwq/Ydti/z1s9jwUULeGzhY2xo2EAilSCeipNIJdoMOeclM/MaE41bPWEsJhMH\nT2TjSgtuBUHb7MAlgI/UQoQ2v2f28w4ItX9NB49Z8wZgWX4rN60kFVoOw4HhEHaOSLv/sXeOQROe\nxjnHpuZNvFv7AZNSk/AH/ZqQ96SC9173NmDbnhWbVth7xWzwA9r+Xpn/ubWtD6fLmX3S0dF09jap\n/XSwbDwV559L/8nS8ATKP/MCEz/jW5drTjbzzoZ1nLzPyaytX8vmls1tAsRbjKNL8UM8fojVoaC8\npdv9i6e/A08mCJkMQyqSmfZ2cu8I41N2kh/Mw4fanPy3zvMhwmWbSTZU4pNRSse8i3PQtGY3fCJG\nbOgKUo2VtGwcRSoZJRRtITZ0OaGhq6kEWGO/yZXTr2TSsEnMOn0W6+vXM3f1XBasX8C6+nU0Jhpp\nTjTTnLQhmUq2Zu62H4LtXPuh/X8g8334Ns+1/m/bze/sNbnmd/Zcd7ymK9vFrW17t3kdDjvKzDrS\n7GwdofSwo+Xoyj4k3+vwHjt5T0VbA3s+GQHvCEVbiFbWkmwqIxRO4iLxTHZGPEa8rj/xuv6UDV+J\nCydpWDWOVEtZawDByuZJJaK8xV1QsZ5dv34OuySgqXowJErx8RJ8oqR1nIrHIFFqYx+GVDhd70Pg\nw+kARnrw6YCGD+FCSUKRBGVjFhGt2ETt/GmQjFogKhXBhZJEK2sZduhsqt84nPI9XiPZbwVl/zqZ\nRGM5LtKMizbhok2AJ1E7ksTmQXaSnQ50REsacJE4qZIqag+8xb6HyX9m4j53E984mqa1Y0lsGkqi\nbiCpljJS8RJSLSX4llJ8MooPtkUp1/pZvM8ERFsDbD74vULg7XHlPv9m4LTHiVbWsGHOSSTj8crh\nbgAAC/hJREFUpSSSYWhJbwdTJfhkGBdKUj7xTer3v4OfffxnHLTLQby44kXW1K2hId5AU7Kp9Ti5\n/Xan/bYpkUq03UZ1sj1pnZcj+LzFciXgSzpeLqh/XV5fjuU6WqajutBRHWrdr6dc62/kUyGcj9i+\nJGue/UdDbebZ72y/t+13wq3LZS/fZl7wmMx0pKKWSEU1ifoBxK
tG4cIJIv020W/XhUQqaqhfOonE\n5iEkm8pJxWOEwnHKRi+hcc2uJOsGERu0jpJhK2hYvgfRyhrCZXXUL90XvCNcvolUUz/KRr9HS9VI\nmtePJhWPES5pZMA+r9BSM5SmtbuSaikjvnkg4bJ6EvX9GTH9IRJ1A2heN4bG9aMhXed9KkwqHmPg\npNct0NhSSuPqsbhwkkhlFZX7z2bj64fR8MFe+GQJ+OD4KWT/9QnPsung71K+7mPElh1HvGZk8AO2\n/ireO0LljkhTiBHH/w58mLoFh5JKxCgbU0OqpYxo+SbwYZqmfxPGP8cePzyWjf85gZb140k19cMn\nY5kLnN7e244R2j4m2W6+s+3EmBNvxydi1C2eSlPdQEiGIZwkFE5QMnwFoU2DSMbDtKzZpbXc/XZZ\nQuPaMQy5+ATiw1/eopuZsQPGcubUMzlz6plb/BeTqSSbWzZT21RLbXMtm5o3UdtU23rOG0/GW88H\ngun242QqycGjD875X++LttYtVY+/fw9mbp0EHOe9/0r68RnAId77GVnLvJ1eZkX68XtYAOwq4EXv\n/Z/S838PPO69v7+z91TmloiIiIiIiIhI9+osc6sr3VL1tJ5sM7MSGJv1eEx6Xs5l0s0SB2Ady3fl\ntSIiIiIiIiIikl/TgMXe+/e99y3AvcBne7MAPRncegXYwzk33jkXA04FHmm3zCPAWenpk4B/eEsl\newQ41TlX4pwbD+wBvNyDZRURERERERERkdwizrlXs4bzs57L1S1Vzq6leqxwPbXidB9aM4DZWJvL\nO7z385xzVwOveu8fAX4P3O2cWwxUYQEw0sv9FZgPJICLtnanRBERERERERER6REJ7/1B+S5ER3qs\nz618UJ9bIiIiIiIiIiLdayt9bh0KXOW9Pzb9+AoA7/1Pe6t8xX2fehERERERERERyaeudEvVo3qs\nWaKIiIiIiIiIiPRtHXVL1ZtlULNEERERERERERHpUGfNEguBmiWKiIiIiIiIiEjRUnBLRERERERE\nRESKloJbIiIiIiIiIiJStBTcEhERERERERGRoqXgloiIiIiIiIiIFC0Ft0REREREREREpGgpuCUi\nIiIiIiIiIkVLwS0RERERERERESlaCm6JiIiIiIiIiEjRUnBLRERERERERESKloJbIiIiIiIiIiJS\ntBTcEhERERERERGRoqXgloiIiIiIiIiIFC0Ft0REREREREREpGgpuCUiIiIiIiIiIkVLwS0RERER\nERERESlaznuf7zJ0G+dcCmjMdzm6QQRI5LsQIkVAdUWka1RXRLpGdUWka1RXRLqmL9WVMu99wSZI\n9angVl/hnHvVe39QvsshUuhUV0S6RnVFpGtUV0S6RnVFpGtUV3pPwUbdREREREREREREtkbBLRER\nERERERERKVoKbhWm3+S7ACJFQnVFpGtUV0S6RnVFpGtUV0S6RnWll6jPLRERERERERERKVrK3BIR\nERERERERkaKl4FaBcc4d55x71zm32Dl3eb7LI9LbnHNLnXP/dc694Zx7NT1vsHPuKefcovR4UHq+\nc87dnK4vbznnDshaz1np5Rc5587K1+cR6U7OuTucc+ucc29nzeu2+uGcOzBd/xanX+t69xOK7LgO\n6slVzrmV6X3LG865T2Y9d0X6P/+uc+7YrPk5j8mcc+Odcy+l5890zsV679OJdB/n3Fjn3LPOufnO\nuXnOuW+k52u/IpKlk7qifUsBUXCrgDjnwsAvgeOBfYAvOuf2yW+pRPLio977D2XdNvdy4Bnv/R7A\nM+nHYHVlj/RwPnAb2EEZcCVwCDANuDI4MBMpcncCx7Wb15314zbgvKzXtX8vkWJwJ7n/uz9P71s+\n5L2fBZA+zjoV2Df9ml8558JbOSa7Nr2u3YFq4Nwe/TQiPScBXOK93wf4MHBR+n+u/YpIWx3VFdC+\npWAouFVYpgGLvffve+9bgH
uBz+a5TCKF4LPAXenpu4ATs+b/0ZsXgYHOuVHAscBT3vsq73018BQ6\nmJI+wHs/B6hqN7tb6kf6uf7e+xe9dcj5x6x1iRSNDupJRz4L3Ou9b/beLwEWY8djOY/J0lknRwP3\np1+fXedEior3frX3/rX09GZgATAa7VdE2uikrnRE+5Y8UHCrsIwGlmc9XkHnlUakL/LAk865uc65\n89PzRnjvV6en1wAj0tMd1RnVJdmZdFf9GJ2ebj9fpK+YkW5KdUdWVsm21pMhQI33PtFuvkhRc86N\nA/YHXkL7FZEOtasroH1LwVBwS0QKzeHe+wOwdN2LnHNHZj+ZvvKn27yK5KD6IdKh24CJwIeA1cAN\n+S2OSOFwzlUADwD/673flP2c9isiGTnqivYtBUTBrcKyEhib9XhMep7ITsN7vzI9Xgf8DUvfXZtO\nbSc9XpdevKM6o7okO5Puqh8r09Pt54sUPe/9Wu990nufAn6L7Vtg2+vJRqwpVqTdfJGi5JyLYifr\nf/beP5ierf2KSDu56or2LYVFwa3C8gqwR/pOCTGsE7pH8lwmkV7jnCt3zlUG08AxwNtYPQjuvHMW\n8HB6+hHgzPTdez4M1KbT6GcDxzjnBqXTg49JzxPpi7qlfqSf2+Sc+3C674czs9YlUtSCE/W0z2H7\nFrB6cqpzrsQ5Nx7r8PplOjgmS2exPAuclH59dp0TKSrpbf3vgQXe+xuzntJ+RSRLR3VF+5bCEtn6\nItJbvPcJ59wMbAcRBu7w3s/Lc7FEetMI4G/pu0RHgL94759wzr0C/NU5dy6wDPhCevlZwCexThob\ngHMAvPdVzrn/w3YgAFd777vaubBIwXLO3QMcBQx1zq3A7k51Dd1XPy7E7jRXBjyeHkSKSgf15Cjn\n3Iew5lVLga8CeO/nOef+CszH7oZ1kfc+mV5PR8dklwH3Oud+BLyOnfCIFKPDgDOA/zrn3kjP+y7a\nr4i011Fd+aL2LYXDWZBQRERERERERESk+KhZooiIiIiIiIiIFC0Ft0REREREREREpGgpuCUiIiIi\nIiIiIkVLwS0RERERERERESlaCm6JiIiIiIiIiEjRUnBLREREZCucc3Xp8Tjn3GndvO7vtnv8Qneu\nX0RERKSvU3BLREREpOvGAdsU3HLORbaySJvglvf+I9tYJhEREZGdmoJbIiIiIl13DXCEc+4N59w3\nnXNh59x1zrlXnHNvOee+CuCcO8o59y/n3CPA/PS8h5xzc51z85xz56fnXQOUpdf35/S8IEvMpdf9\ntnPuv865U7LW/U/n3P3OuXecc392zrk8fBciIiIiBWFrVxJFREREJONy4Nve+08BpINUtd77g51z\nJcDzzrkn08seAEz23i9JP/6y977KOVcGvOKce8B7f7lzbob3/kM53ut/gA8BU4Gh6dfMST+3P7Av\nsAp4HjgM+Hf3f1wRERGRwqfMLREREZHtdwxwpnPuDeAlYAiwR/q5l7MCWwAXO+feBF4ExmYt15HD\ngXu890nv/VrgOeDgrHWv8N6ngDew5pIiIiIiOyVlbomIiIhsPwd83Xs/u81M544C6ts9/jhwqPe+\nwTn3T6B0B963OWs6iY7pREREZCemzC0RERGRrtsMVGY9ng18zTkXBXDO7emcK8/xugFAdTqwtTfw\n4azn4sHr2/kXcEq6X69hwJHAy93yKURERET6EF3lExEREem6t4BkunnhncBNWJPA19Kduq8HTszx\nuieAC5xzC4B3saaJgd8AbznnXvPen541/2/AocCbgAcu9d6vSQfHRERERCTNee/zXQYRERERERER\nEZHtomaJIiIiIiIiIiJStBTcEhERERERERGRoqXgloiIiIiIiIiIFC0Ft0REREREREREpGgpuCUi\nIiIiIiIiIkVLwS0RERERERERESlaCm6JiIiIiIiIiEjRUnBLRERERERERESK1v8HxsTvmv7j
KJgA\nAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f8c1c31e4d0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "fig, ax1 = plt.subplots(figsize=(20,10))\n",
    "\n",
    "ax2 = ax1.twinx()\n",
    "ax1.plot(np.arange(len(list_training_loss)), list_training_loss, 'g-')\n",
    "ax2.plot(np.arange(len(list_test_pred_error))*num_iter_test, list_test_pred_error, 'b-')\n",
    "\n",
    "ax1.set_xlabel('Iteration')\n",
    "ax1.set_ylabel('Training loss', color='g')\n",
    "ax2.set_ylabel('Test loss', color='b')\n",
    "\n",
    "#best_iter = len(list_training_loss)\n",
    "best_iter = (np.where(np.asarray(list_training_loss)==np.min(list_training_loss))[0][0]//num_iter_test)*num_iter_test\n",
    "best_pred_error = list_test_pred_error[best_iter//num_iter_test]\n",
    "print 'Best predictions at iter: %d (error: %f)' % (best_iter, best_pred_error)\n",
    "RMSE = np.sqrt(np.square(best_pred_error)/np.sum(Otest))\n",
    "print 'RMSE: %f' % RMSE\n",
    "\"\"\"pp = PdfPages('../../pdf/losses/corrected_gamma_best_iter/supervised_approach_original_training_set_40k_iter.pdf')\n",
    "plt.savefig(pp, format='pdf')\n",
    "pp.close()\"\"\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "with learning_obj.graph.as_default():\n",
    "\n",
    "    oSaver = tf.train.Saver()\n",
    "\n",
    "    oSess = learning_obj.session\n",
    "    oSaver.save(oSess, './dumps/backup_supervised_approach_synthetic_netflix/backup_gamma=1e-8.dump')  #filename ends with .ckpt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "#best X generated\n",
    "plt.figure(figsize=(20,10))\n",
    "plt.imshow(list_X[best_iter//num_iter_test])\n",
    "cbar = plt.colorbar()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "joblib.dump([list_training_loss, list_test_pred_error, num_iter_test], './dumps/results_supervised_approach_synthetic_netflix/results_gamma=1e-8.dump')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true,
    "deletable": true,
    "editable": true
   },
   "source": [
    "# Profiling"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": false,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": [
    "with open(path_log,'a+') as f:\n",
    "    for k in range(10):\n",
    "\n",
    "        run_metadata = tf.RunMetadata()\n",
    "        _ = learning_obj.session.run(\n",
    "            [learning_obj.optimizer],\n",
    "            options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),\n",
    "            run_metadata=run_metadata)\n",
    "\n",
    "        num_iter += 1\n",
    "        \n",
    "        from tensorflow.python.client import timeline\n",
    "        trace = timeline.Timeline(step_stats=run_metadata.step_stats)\n",
    "        trace_file = open('profiling/timeline_supevised.ctf.json', 'w')\n",
    "        trace_file.write(trace.generate_chrome_trace_format())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true,
    "deletable": true,
    "editable": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
