{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true,
    "pycharm": {
     "is_executing": false
    }
   },
   "outputs": [],
   "source": [
    "import gym\n",
    "import tensorflow as tf\n",
    "from tensorflow.keras import layers\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "outputs": [
    {
     "name": "stdout",
     "text": [
      "Size of State Space -> 3\n",
      "Size of Action Space -> 1\n",
      "Max Value of Action ->  2.0\n",
      "Min Value of Action ->  -2.0\n"
     ],
     "output_type": "stream"
    }
   ],
   "source": [
     "# Create the Pendulum-v0 environment and inspect its observation/action spaces.\n",
     "problem = 'Pendulum-v0'\n",
     "env = gym.make(problem)\n",
     "\n",
     "# Observation is 3-dimensional (printed below); per Gym's Pendulum docs this\n",
     "# is [cos(theta), sin(theta), angular velocity].\n",
     "num_states = env.observation_space.shape[0]\n",
     "print('Size of State Space -> {}'.format(num_states))\n",
     "# Action is a single continuous torque value.\n",
     "num_actions = env.action_space.shape[0]\n",
     "print(\"Size of Action Space -> {}\".format(num_actions))\n",
     "\n",
     "# Torque bounds; used later to scale the actor's tanh output and to clip\n",
     "# noisy actions in policy().\n",
     "upper_bound = env.action_space.high[0]\n",
     "lower_bound = env.action_space.low[0]\n",
     "\n",
     "print(\"Max Value of Action ->  {}\".format(upper_bound))\n",
     "print(\"Min Value of Action ->  {}\".format(lower_bound))"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [],
   "source": [
     "class OUActionNoise:\n",
     "    \"\"\"Ornstein-Uhlenbeck process for temporally correlated exploration noise.\n",
     "\n",
     "    Calling the instance advances the process by one step of size ``dt`` and\n",
     "    returns the new sample (same shape as ``mean``). Successive samples are\n",
     "    correlated, which suits continuous-control exploration in DDPG.\n",
     "    \"\"\"\n",
     "\n",
     "    def __init__(self, mean, std_deviation, theta=0.15, dt=1e-2, x_initial=None):\n",
     "        # theta: mean-reversion rate; larger values pull x back toward `mean` faster.\n",
     "        self.theta = theta\n",
     "        self.mean = mean\n",
     "        self.std_dev = std_deviation\n",
     "        self.dt = dt\n",
     "        self.x_initial = x_initial\n",
     "        self.reset()\n",
     "\n",
     "    def __call__(self):\n",
     "        \"\"\"Return the next noise sample of the process.\"\"\"\n",
     "        # Formula taken from https://www.wikipedia.org/wiki/Ornstein-Uhlenbeck_process.\n",
     "        x = (\n",
     "            self.x_prev\n",
     "            + self.theta * (self.mean - self.x_prev) * self.dt\n",
     "            + self.std_dev * np.sqrt(self.dt) * np.random.normal(size=self.mean.shape)\n",
     "        )\n",
     "        # Store x into x_prev\n",
     "        # Makes next noise dependent on current one\n",
     "        self.x_prev = x\n",
     "        return x\n",
     "\n",
     "    def reset(self):\n",
     "        \"\"\"Reset the process state to x_initial (or zeros when not given).\"\"\"\n",
     "        if self.x_initial is not None:\n",
     "            self.x_prev = self.x_initial\n",
     "        else:\n",
     "            self.x_prev = np.zeros_like(self.mean)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [],
   "source": [
    "def get_actor():\n",
    "    last_init = tf.random_uniform_initializer(minval=-0.003,maxval=0.003)\n",
    "    inputs = layers.Input(shape=(num_states,))\n",
    "    out = layers.Dense(512,activation='relu')(inputs)\n",
    "    out = layers.BatchNormalization()(out)\n",
    "    out = layers.Dense(512,activation='relu')(out)\n",
    "    out = layers.BatchNormalization()(out)\n",
    "    outputs = layers.Dense(1,activation='tanh',kernel_initializer=last_init)(out)\n",
    "    \n",
    "    outputs = outputs * upper_bound\n",
    "    model = tf.keras.Model(inputs,outputs)\n",
    "    return model\n",
    "\n",
    "def get_critic():\n",
    "    state_input = layers.Input(shape=(num_states,))\n",
    "    state_out = layers.Dense(16,activation='relu')(state_input)\n",
    "    state_out = layers.BatchNormalization()(state_out)\n",
    "    state_out = layers.Dense(32,activation='relu')(state_out)\n",
    "    state_out = layers.BatchNormalization()(state_out)\n",
    "    \n",
    "    action_input = layers.Input(shape=(num_actions,))\n",
    "    action_out = layers.Dense(32,activation='relu')(action_input)\n",
    "    action_out = layers.BatchNormalization()(action_out)\n",
    "    \n",
    "    concat = layers.Concatenate()([state_out,action_out])\n",
    "    \n",
    "    out = layers.Dense(512,activation='relu')(concat)\n",
    "    out = layers.BatchNormalization()(out)\n",
    "    out = layers.Dense(512,activation='relu')(out)\n",
    "    out = layers.BatchNormalization()(out)\n",
    "    outputs = layers.Dense(1)(out)\n",
    "    \n",
    "    model = tf.keras.Model([state_input,action_input],outputs)\n",
    "    return model\n",
    "\n",
    "\n",
    "    \n",
    "    \n",
    "    \n",
    "    "
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [],
   "source": [
     "# Exploration noise: zero-mean OU process with sigma = 0.2.\n",
     "std_dev = 0.2\n",
     "ou_noise = OUActionNoise(mean=np.zeros(1),std_deviation=float(std_dev) * np.ones(1))\n",
     "\n",
     "actor_model = get_actor()\n",
     "critic_model = get_critic()\n",
     "\n",
     "# Separate target networks stabilize the critic's bootstrapped targets.\n",
     "target_actor = get_actor()\n",
     "target_critic = get_critic()\n",
     "\n",
     "# Start the targets identical to the online networks.\n",
     "target_actor.set_weights(actor_model.get_weights())\n",
     "target_critic.set_weights(critic_model.get_weights())\n",
     "\n",
     "critic_lr = 0.002\n",
     "actor_lr = 0.001\n",
     "\n",
     "critic_optimizer = tf.keras.optimizers.Adam(critic_lr)\n",
     "actor_optimizer = tf.keras.optimizers.Adam(actor_lr)\n",
     "\n",
     "total_episodes = 100\n",
     "gamma = 0.99  # discount factor for future rewards\n",
     "tau = 0.005  # soft-update rate used by update_target()\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [],
   "source": [
     "class Buffer():\n",
     "    \"\"\"Fixed-size replay buffer plus one DDPG learning step.\n",
     "\n",
     "    Transitions (state, action, reward, next_state) are stored in\n",
     "    preallocated numpy arrays. `learn` samples a mini-batch and updates the\n",
     "    notebook-global critic_model and actor_model, using the global target\n",
     "    networks, optimizers and gamma.\n",
     "    \"\"\"\n",
     "    def __init__(self,buffer_capacity=100000,batch_size=64):\n",
     "        self.buffer_capacity = buffer_capacity\n",
     "        self.batch_size = batch_size\n",
     "        # Total number of records ever written (not capped at capacity).\n",
     "        self.buffer_counter = 0\n",
     "        \n",
     "        self.state_buffer = np.zeros((self.buffer_capacity,num_states))\n",
     "        self.action_buffer = np.zeros((self.buffer_capacity,num_actions))\n",
     "        self.reward_buffer = np.zeros((self.buffer_capacity,1))\n",
     "        self.next_state_buffer = np.zeros((self.buffer_capacity,num_states))\n",
     "    \n",
     "    def record(self,obs_tuple):\n",
     "        \"\"\"Store one (state, action, reward, next_state) transition.\"\"\"\n",
     "        # Wrap around once full, overwriting the oldest entries.\n",
     "        index = self.buffer_counter % self.buffer_capacity\n",
     "        self.state_buffer[index] = obs_tuple[0]\n",
     "        self.action_buffer[index] = obs_tuple[1]\n",
     "        self.reward_buffer[index] = obs_tuple[2]\n",
     "        self.next_state_buffer[index] = obs_tuple[3]\n",
     "        \n",
     "        self.buffer_counter += 1\n",
     "    \n",
     "    def learn(self):\n",
     "        \"\"\"Sample a mini-batch and take one gradient step on critic and actor.\"\"\"\n",
     "        # Only sample among slots that have actually been filled.\n",
     "        record_range = min(self.buffer_counter,self.buffer_capacity)\n",
     "        # NOTE(review): np.random.choice samples WITH replacement by default,\n",
     "        # so a batch may contain duplicate transitions.\n",
     "        batch_indices = np.random.choice(record_range,self.batch_size)\n",
     "        \n",
     "        state_batch = tf.convert_to_tensor(self.state_buffer[batch_indices])\n",
     "        action_batch = tf.convert_to_tensor(self.action_buffer[batch_indices])\n",
     "        reward_batch = tf.convert_to_tensor(self.reward_buffer[batch_indices])\n",
     "        reward_batch = tf.cast(reward_batch,dtype=tf.float32)\n",
     "        next_state_batch = tf.convert_to_tensor(self.next_state_buffer[batch_indices])\n",
     "        \n",
     "        # Critic update: minimize TD error against the bootstrapped target y.\n",
     "        # Gradients are only taken w.r.t. critic_model variables, so running\n",
     "        # the target networks inside the tape does not affect the update.\n",
     "        with tf.GradientTape() as tape:\n",
     "            target_actions = target_actor(next_state_batch)\n",
     "            y = reward_batch + gamma * target_critic([next_state_batch,target_actions])\n",
     "            critic_value = critic_model([state_batch,action_batch])\n",
     "            critic_loss = tf.math.reduce_mean(tf.math.square(y - critic_value))\n",
     "        \n",
     "        critic_grad = tape.gradient(critic_loss,critic_model.trainable_variables)\n",
     "        critic_optimizer.apply_gradients(zip(critic_grad,critic_model.trainable_variables))\n",
     "        \n",
     "        # Actor update: maximize the critic's value of the actor's actions\n",
     "        # (implemented as gradient descent on the negated mean Q-value).\n",
     "        with tf.GradientTape() as tape:\n",
     "            actions = actor_model(state_batch)\n",
     "            critic_value = critic_model([state_batch,actions])\n",
     "            actor_loss = -tf.math.reduce_mean(critic_value)\n",
     "        \n",
     "        actor_grad = tape.gradient(actor_loss,actor_model.trainable_variables)\n",
     "        actor_optimizer.apply_gradients(\n",
     "            zip(actor_grad,actor_model.trainable_variables)\n",
     "        )\n",
     "    \n",
    "def update_target(tau):\n",
    "    new_weights = []\n",
    "    target_variables = target_critic.weights\n",
    "    for i,variable in enumerate(critic_model.weights):\n",
    "        new_weights.append(variable * tau + target_variables[i] * ( 1 - tau))\n",
    "    \n",
    "    target_critic.set_weights(new_weights)\n",
    "    \n",
    "    new_weights = []\n",
    "    target_variables = target_actor.weights\n",
    "    for i,variable in enumerate(actor_model.weights):\n",
    "        new_weights.append(variable * tau + target_variables[i] * ( 1 - tau))\n",
    "    target_actor.set_weights(new_weights)\n",
    "        \n",
    "            \n",
    "            \n",
    "        \n",
    "        \n",
    "        \n",
    "\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [],
   "source": [
    "def policy(state,noise_object):\n",
    "    sampled_actions = tf.squeeze(actor_model(state))\n",
    "    noise = noise_object()\n",
    "    sampled_actions = sampled_actions.numpy() + noise\n",
    "    \n",
    "    # We make sure action is within bounds\n",
    "    legal_action = np.clip(sampled_actions, lower_bound, upper_bound)\n",
    "\n",
    "    return [np.squeeze(legal_action)]\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "outputs": [],
   "source": [
     "# Replay buffer holding up to 50k transitions, sampled in batches of 64.\n",
     "buffer = Buffer(50000,64)\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "outputs": [
    {
     "name": "stdout",
     "text": [
      "Episode * 0 * Avg Reward is ==> -1460.9450147277987\n",
      "Episode * 1 * Avg Reward is ==> -1459.0246206902827\n",
      "Episode * 2 * Avg Reward is ==> -1436.7579114526998\n",
      "Episode * 3 * Avg Reward is ==> -1528.7494174392573\n",
      "Episode * 4 * Avg Reward is ==> -1546.1245101225663\n",
      "Episode * 5 * Avg Reward is ==> -1546.214054367147\n",
      "Episode * 6 * Avg Reward is ==> -1539.7315217393352\n",
      "Episode * 7 * Avg Reward is ==> -1515.0318517242454\n",
      "Episode * 8 * Avg Reward is ==> -1459.7796967829947\n",
      "Episode * 9 * Avg Reward is ==> -1423.7573716429104\n",
      "Episode * 10 * Avg Reward is ==> -1390.3980680118498\n",
      "Episode * 11 * Avg Reward is ==> -1348.6619521372104\n",
      "Episode * 12 * Avg Reward is ==> -1360.9866673610293\n",
      "Episode * 13 * Avg Reward is ==> -1310.0362963190375\n",
      "Episode * 14 * Avg Reward is ==> -1266.0245094794814\n",
      "Episode * 15 * Avg Reward is ==> -1226.397083910079\n",
      "Episode * 16 * Avg Reward is ==> -1161.9215427526258\n",
      "Episode * 17 * Avg Reward is ==> -1112.0353130366636\n",
      "Episode * 18 * Avg Reward is ==> -1060.0707877059854\n",
      "Episode * 19 * Avg Reward is ==> -1013.190495388589\n",
      "Episode * 20 * Avg Reward is ==> -976.2277138413762\n",
      "Episode * 21 * Avg Reward is ==> -931.886825214833\n",
      "Episode * 22 * Avg Reward is ==> -891.3885066186\n",
      "Episode * 23 * Avg Reward is ==> -863.906961514463\n",
      "Episode * 24 * Avg Reward is ==> -834.3171140895925\n",
      "Episode * 25 * Avg Reward is ==> -802.2952952880212\n",
      "Episode * 26 * Avg Reward is ==> -781.4068684523938\n",
      "Episode * 27 * Avg Reward is ==> -758.0060336580757\n",
      "Episode * 28 * Avg Reward is ==> -735.9964033368533\n",
      "Episode * 29 * Avg Reward is ==> -715.3709069642405\n",
      "Episode * 30 * Avg Reward is ==> -696.0723214429232\n",
      "Episode * 31 * Avg Reward is ==> -678.0611013747118\n",
      "Episode * 32 * Avg Reward is ==> -661.0925148353222\n",
      "Episode * 33 * Avg Reward is ==> -645.3790578649991\n",
      "Episode * 34 * Avg Reward is ==> -630.7328510222131\n",
      "Episode * 35 * Avg Reward is ==> -619.6282062166904\n",
      "Episode * 36 * Avg Reward is ==> -615.3092981853595\n",
      "Episode * 37 * Avg Reward is ==> -612.5593977156843\n",
      "Episode * 38 * Avg Reward is ==> -609.5560290360203\n",
      "Episode * 39 * Avg Reward is ==> -594.5117327455151\n",
      "Episode * 40 * Avg Reward is ==> -566.2988691113314\n",
      "Episode * 41 * Avg Reward is ==> -533.0220601424875\n",
      "Episode * 42 * Avg Reward is ==> -501.3639439317734\n",
      "Episode * 43 * Avg Reward is ==> -456.3700379006876\n",
      "Episode * 44 * Avg Reward is ==> -419.0379870367785\n",
      "Episode * 45 * Avg Reward is ==> -383.4471071910502\n",
      "Episode * 46 * Avg Reward is ==> -352.2516397549027\n",
      "Episode * 47 * Avg Reward is ==> -324.83231303786613\n",
      "Episode * 48 * Avg Reward is ==> -302.496544080967\n",
      "Episode * 49 * Avg Reward is ==> -278.243663972985\n",
      "Episode * 50 * Avg Reward is ==> -251.89653772767716\n",
      "Episode * 51 * Avg Reward is ==> -229.80250334563533\n",
      "Episode * 52 * Avg Reward is ==> -198.4388412344055\n",
      "Episode * 53 * Avg Reward is ==> -185.38312166288065\n",
      "Episode * 54 * Avg Reward is ==> -172.24937429532218\n",
      "Episode * 55 * Avg Reward is ==> -163.53673905648617\n",
      "Episode * 56 * Avg Reward is ==> -166.40894760094852\n",
      "Episode * 57 * Avg Reward is ==> -159.87039607500293\n",
      "Episode * 58 * Avg Reward is ==> -162.94412696641643\n",
      "Episode * 59 * Avg Reward is ==> -163.13763648795748\n",
      "Episode * 60 * Avg Reward is ==> -163.09972573285933\n",
      "Episode * 61 * Avg Reward is ==> -169.12861921221537\n",
      "Episode * 62 * Avg Reward is ==> -177.51968756493406\n",
      "Episode * 63 * Avg Reward is ==> -177.4139900229624\n",
      "Episode * 64 * Avg Reward is ==> -183.48685399035062\n",
      "Episode * 65 * Avg Reward is ==> -186.4426569959535\n",
      "Episode * 66 * Avg Reward is ==> -183.7413558544236\n",
      "Episode * 67 * Avg Reward is ==> -186.5295271217137\n",
      "Episode * 68 * Avg Reward is ==> -183.63138839829833\n",
      "Episode * 69 * Avg Reward is ==> -187.0414171128224\n",
      "Episode * 70 * Avg Reward is ==> -187.01962758939243\n",
      "Episode * 71 * Avg Reward is ==> -187.17894697724594\n",
      "Episode * 72 * Avg Reward is ==> -187.41204813722848\n",
      "Episode * 73 * Avg Reward is ==> -192.47107223958545\n",
      "Episode * 74 * Avg Reward is ==> -195.34278784922827\n",
      "Episode * 75 * Avg Reward is ==> -197.49543135079958\n",
      "Episode * 76 * Avg Reward is ==> -189.11551651107044\n",
      "Episode * 77 * Avg Reward is ==> -179.4951239796313\n",
      "Episode * 78 * Avg Reward is ==> -170.2390332683639\n",
      "Episode * 79 * Avg Reward is ==> -170.08683881576852\n",
      "Episode * 80 * Avg Reward is ==> -164.66169366588807\n",
      "Episode * 81 * Avg Reward is ==> -164.51031882616752\n",
      "Episode * 82 * Avg Reward is ==> -164.32315689936092\n",
      "Episode * 83 * Avg Reward is ==> -167.44772027827796\n",
      "Episode * 84 * Avg Reward is ==> -173.99386780666245\n",
      "Episode * 85 * Avg Reward is ==> -177.07379106403172\n",
      "Episode * 86 * Avg Reward is ==> -174.01520798507897\n",
      "Episode * 87 * Avg Reward is ==> -174.2125711225828\n",
      "Episode * 88 * Avg Reward is ==> -174.2901281950169\n",
      "Episode * 89 * Avg Reward is ==> -174.2291856365319\n",
      "Episode * 90 * Avg Reward is ==> -180.22430730860606\n",
      "Episode * 91 * Avg Reward is ==> -183.02172049012674\n",
      "Episode * 92 * Avg Reward is ==> -179.74796752728062\n",
      "Episode * 93 * Avg Reward is ==> -176.74064905462484\n",
      "Episode * 94 * Avg Reward is ==> -179.4044892904645\n",
      "Episode * 95 * Avg Reward is ==> -175.371396980093\n",
      "Episode * 96 * Avg Reward is ==> -177.62459731250962\n",
      "Episode * 97 * Avg Reward is ==> -180.62610361037252\n",
      "Episode * 98 * Avg Reward is ==> -177.4831579238239\n",
      "Episode * 99 * Avg Reward is ==> -174.28919677636395\n"
     ],
     "output_type": "stream"
    },
    {
     "data": {
      "text/plain": "<Figure size 432x288 with 1 Axes>",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZQAAAEGCAYAAABCa2PoAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXyU1dn/8c+VBbIRtoAsIYRVRVSU\noNi6r/jUuhXUWqtPq1J9tHa3Wp+22l9tq621Uq2PVKui1qXWBauodam4IBAWBUQgQICwQwIEyJ7r\n98fc0QhJGMJMZjL5vl+veWXmnLlnrhuSueYs9znm7oiIiByopFgHICIiiUEJRUREIkIJRUREIkIJ\nRUREIkIJRUREIiIl1gHESk5Ojufn58c6DBGRdmXOnDlb3L1XU3UdNqHk5+dTWFgY6zBERNoVM1vV\nXJ26vEREJCKUUEREJCKUUEREJCKUUEREJCKUUEREJCKUUEREJCKUUEREJCI67HUoIh1JXb0zb3UZ\ns4vL6JnVifyemQzsmUFaajLJSUaShZ7nDmaQnpqMmcU2aGl3lFBEEkRdvTN92Wae+HA1H67YSu/s\nzuT1yCCrcwofLN9K6a7qsF8rt3s6Jw3vxYnDe3HCsBwyOiXGR4W7U1VbjzvUu7N+ewUzVpTy4Yqt\nFG3cSZ077v5ZfZ07KUlJ5HZPZ1BOJkN6ZXHCsBwG98qK9anEpcT4LRHpwOrrnWfnljDpzWWUlFWQ\nk9WJrx7Zl+0VNawu3c3SDeWcOCyH00ccxJeH5LCjsoaVW3axpnT3Zx+ude40tEdq6535a7bxwry1\nPDFzNVmdUzh3VD8uGTOAw/t3/cJ7t5dWzK6qWp6bW8IjHxSzfPOuver7dk1jZP+upCYbZoZB0HIz\nquvqWb11N8/PXUt5VS0Ag3tlctohvTmkTzb5OZnkdk9ny84qVm3dzerS3dTU1pMUHG8GRqjl5w4N\nWxoO7JHBl4bm0DU9dZ/xV9fWU1FdR3Z6ymf/5nX1TvHWXawtqyAnqzN9uqbRPSO1yf+T2rp6Ssoq\nWLllFyu27OLLQ3tySJ/s1v5zNksJRaQdW7x+Bz9/YSGFq8oYNaAbN599KGeMOIhOKc0Pj3bP7MTA\nnpn7fO3q2npmF5fyz7klPDe3hL/PXP2F+r5d0zhxWC9OOrgXY/J7kJPV6QsfZpU1dVTV1H/hQxCg\nqraO8spaemZ2inpCKinbzSPvF/N04RrKK2s5vH9XfnTGcFJTkkgy6JbRiWMH9SCvR8Y+Y3F3Ssoq\neOvTTbyxeCOPfFBMTd2B7XibnGSMGtCNo/O6MaBHBgN6ZGDAis27WLFlJyu37GLV1t2s21ZBvUNm\np2T6d08nPTWZpRt3UlFT94XX65SSRF6PDPJ7ZtA7O40N2ysp3rKL1aW7qa3/PNZbvzoiKgnF4m0L\nYDP7PfBVoBpYDnzL3bcFdTcDVwJ1wA3u/lpQPhp4BEgHXgG+5/s4sYKCAtdaXtJe7ayq5U//XsrD\nHxTTNT2Vm88+hK8dnUtSUnQ+oHdU1vDyx+tZv60Cgm/wyzaV8+6yLZRXhr61Z3ZKJq9nJumpSZSU\nVbCpvAqArM4p5HZPJzs9lbVlFazbXoEHH45DemeR1yOD1OTPE2Do23xoXCc5yUhKMlKTjOF9ujAm\nvwdDe2W1eJ7uztzV23j4/ZVMW7gBgLNH9uFbXx7E0XndIpbEqmvrWVO2m1WNWgkDe2aS1zODtJQk\n6typrwfHP2uZNLyzE/oyMH3pZqYv3cynG8qpqq3/wutnp6UwqFcW+T0zGNgzky6dU1i3vYKSsgp2\nV9cy/KAujOibzYAeGZTuqmbjjkrWb69k1dZQEtqwo5I+2WkM7pVJfs9MBuVkMrhXJoNyspptyYTD\nzOa4e0GTdXGYUM4E3nL3WjO7A8Ddf2
pmI4AngWOAfsAbwHB3rzOzWcD3gA8JJZRJ7j6tpfdRQpH2\nyN156eP13P7yJ2wqr+KSMXnceNbBdM/sFJN4auvqmbdmGwvXbv+su6eypo7c7unkds8gPTWZtdsq\nWFO6mx2VNeR2D30L75aeyqqtu1i+eRclZbtp9OUZJ/RBXO8eGseoh6qaus+6m7qmpzIsSEQDemTQ\nr1savbPTyMnszMyVW3l69hqWbdpJl7QULj0mjyu+lE+/bukx+fcJV329s2VnFatLd+PAoJzMNmnB\ntUZLCSXuurzc/fVGDz8Exgf3zwOecvcqYKWZFQHHmFkxkO3uMwDMbApwPtBiQhFpj371r094+P1i\nRvbP5oFvFjBqQLeYxpOSnMSY/B6Mye8R1fdxd1aX7mZ2cRlzVpWyYvMuPlyxlefnr2XP78RH5XXj\ndxcezjlH9iOrc9x9xDUpKcnonR1KjO1ZvP9rfxt4Orjfn1CCaVASlNUE9/cs34uZTQQmAuTl5UU6\nVpGoemfpZh5+v5jLxuZx27kjSY5S91Y8MjMG9sxkYM9Mxo/O/ay8qraOTTuq2FReyebyKgb3ymL4\nQV1iGGnHFpOEYmZvAH2aqLrF3V8MnnMLUAs80XBYE8/3Fsr3LnSfDEyGUJfXfoYtEjPbdldz47Mf\nMax3Fv/7lREdKpm0pHNK8meD2RJ7MUko7n56S/VmdgVwDnBao8H1EmBAo6flAuuC8twmykUSxi9e\nXMTWndU8dMUY0lKTYx2OSJPibukVMxsH/BQ41913N6qaClxiZp3NbBAwDJjl7uuBcjMba6ERrMuB\nF9s8cJEoeXH+WqZ+tI7vnTaMkXtcByIST+JxDOVeoDPw72CGw4fufo27LzKzZ4BPCHWFXefuDZOw\nr+XzacPT0IC8JIh3lm7mJ//4mKPzunHtyUNiHY5Ii+Iuobj70Bbqbgdub6K8EBgZzbhE2toHRVuY\nOKWQIb2z+Nt/jyElOe46FES+QL+hInFo1spSrny0kPyemTxx1bF0y4jNdSYi+0MJRSTOFG/ZxdVT\nCunbLY3HrzqWHjG6aFFkfymhiMSR8soarppSSJLBo986hl5dOsc6JJGwxd0YikhHVVfvfP+p+azc\nsovHrjxG11ZIu6MWikic+MPrS3jz003c+tURfGlITqzDEdlvSigiceDxD1dx/3+Wc+mxeVw2dmCs\nwxFpFSUUkRh7fdEGfvHiQk49pDe/OvewuFxhViQcSigiMTRnVRnffXIeh+d2495Lj9K1JtKu6bdX\nJEYWr9/BlY/Opm/XNB66oiBh9m2XjksJRSQGijaVc9mDM0lLSeaxK48lJ0vTg6X9U0IRaWPFW3Zx\n6V9nYmb8/epjNT1YEoYSikgb2rqzim88OJPaeufvVx/L4F5ZsQ5JJGLUaSvSRmrr6rnhqXls3lnF\nP75znHYWlISjFopIG/n960t4v2grvz5/JEfGeC94kWhQQhFpA9MWrOeBd1Zw6bF5XFQwYN8HiLRD\nSigiUbZ6625+/I+PGDWgG7/86ohYhyMSNUooIlFUV+/86B/zSTLj3kuPonOK9oOXxKVBeZEo+uu7\nK5hdXMZdE44kt7umB0tiUwtFJEoWr9/BH19fyrjD+nDh0f1jHY5I1MVtQjGzH5uZm1lOo7KbzazI\nzJaY2VmNykeb2YKgbpJpdT2Jseraen7w9Hyy01P5zYWHa8FH6RDiMqGY2QDgDGB1o7IRwCXAYcA4\n4C9m1tAhfT8wERgW3Ma1acAie3jwvRV8uqGc3114uLbwlQ4jLhMKcDdwI+CNys4DnnL3KndfCRQB\nx5hZXyDb3We4uwNTgPPbPGKRwNptFfz5zSLOHHEQp484KNbhiLSZuEsoZnYusNbdP9qjqj+wptHj\nkqCsf3B/z/KmXnuimRWaWeHmzZsjGLXI53710iIAfqEpwtLBxGSWl5m9AfRpouoW4GfAmU0d1kSZ\nt1
C+d6H7ZGAyQEFBQZPPETkQby/ZxGuLNnLjuIM1q0s6nJgkFHc/valyMzscGAR8FAxi5gJzzewY\nQi2PxpcY5wLrgvLcJspF2lRlTR23Tl3EkF6ZXHX84FiHI9Lm4qrLy90XuHtvd89393xCyeJod98A\nTAUuMbPOZjaI0OD7LHdfD5Sb2dhgdtflwIuxOgfpuB56byWrtu7mV+eNpFNKXP1pibSJdnNho7sv\nMrNngE+AWuA6d68Lqq8FHgHSgWnBTaTNbNxRyX1vFzHusD58eWjOvg8QSUBxnVCCVkrjx7cDtzfx\nvEJgZBuFJbKXO179lNo652f/dWisQxGJGbXLRQ7QvNVlPDd3LVedMIi8nhqIl45LCUXkANTXO7e9\n9Am9u3Tmf04ZGutwRGJKCUXkAExbuIH5a7Zx47hDyOoc1z3IIlGnhCLSSu7OvW8XMbhXJhccpcUf\nRZRQRFrp7SWbWLx+B9eeNITkJC3+KKKEItIK7s69bxXRv1s656t1IgIooYi0yocrSpm7ehvfOWkw\nqcn6MxIBJRSRVrnv7SJysjpzUcGAfT9ZpINodlqKmf2ZZhZZBHD3G6ISkUicKywu5b2iLdx89iGk\npWqPeJEGLbVQCoE5QBpwNLAsuI0C6lo4TiRhVdbUceM/P6Zf1zS+MXZgrMMRiSvNtlDc/VEAM/tv\n4BR3rwke/x/weptEJxJn7nx1CSs27+LxK4/VdSciewhnDKUf0KXR46ygTKRDmbF8K397fyWXHzeQ\n44dpAUiRPYXzFet3wDwzezt4fBJwa9QiEolDO6tq+cmzH5HfM4Obzj4k1uGIxKUWE4qZJQFLgGOD\nG8BNwf4kIh3G76YtZu22Cp695jgyOqmrS6QpLf5luHu9md3l7sehTaukg5q5YiuPf7iaK48fxOiB\nPWIdjkjcCmcM5XUz+1qwG6JIh1JZU8dNzy1gQI90fnTm8FiHIxLXwmm7/xDIBGrNrBIwwN09O6qR\nicSBe95cxsotu3jiqmPV1SWyD/v8C3H3Lvt6jkgiWlCyncnTV3BxwQBt6ysShrC+cplZd2AYoYsc\nAXD36dEKSiTWdlTW8N0n55KT1Unb+oqEaZ8JxcyuAr4H5ALzgbHADODU6IYmEhvuzo3/+Jg1ZRU8\nPXEsXTNSYx2SSLsQzqD894AxwCp3PwU4CtgczaDM7LtmtsTMFpnZnY3KbzazoqDurEblo81sQVA3\nSRMI5EA89N5KXl20gZvGHUJBvmZ1iYQrnC6vSnevNDPMrLO7f2pmB0crIDM7BTgPOMLdq8ysd1A+\nArgEOIzQlfpvmNlwd68D7gcmAh8CrwDjgGnRilES13vLtvC7aZ9y5oiDuOqEQbEOR6RdCSehlJhZ\nN+AF4N9mVgasi2JM1wK/c/cqAHffFJSfBzwVlK80syLgGDMrBrLdfQaAmU0BzkcJRfZDdW0997y5\nlPv/s5z8nEx+P+FI1NAV2T/hzPK6ILh7a7D8Slfg1SjGNBw4wcxuByqBH7v7bKA/oRZIg5KgrCa4\nv2f5XsxsIqGWDHl5eZGPXNqViuo6VpfuZuWWndz7dhEL1+7gooJcfn7OCLqkadxEZH+FMyj/K+Bd\n4AN3fycSb2pmbwB9mqi6JYipO6HB/zHAM2Y2mND1L3vyFsr3LnSfDEwGKCgoaHavF0ksm8orWbh2\nOwtKdrBiy07WlO6mpKyCTeVVnz2nR2YnHvjmaM46rKlfSxEJRzhdXsXA14FJZlZOKLlMd/dWL8Xi\n7qc3V2dm1wLPubsDs8ysHsgh1PJovD1eLqGut5Lg/p7l0oHV1zuvLtrApDeX8emGcgDMoH+3dAZ0\nz+Ck4b0Y0COD/JxM8ntmMLR3li5cFDlA4XR5/Q34m5n1AS4Cfkyo2yhaFzy+QGhK8n/MbDjQCdgC\nTAX+bmZ/JDQoPwyY5e51ZlZuZmOBmcDlwJ+jFJu0A/9Zsok7X13C
J+t3MLR3Fv/7lUM5vH9XDuvf\nVXuYiERROF1eDwIjgI2EWifjgblRjKkhgS0EqoErgtbKIjN7BvgEqAWuC2Z4QWgg/xEgndBgvAbk\nO6DtFTXc9tIinpu7loE9M7j74iM598j+JCdpcF2kLYTzda0nkAxsA0qBLe5eG62A3L0auKyZutuB\n25soLwRGRismiX/Tl27mxmc/ZvPOKm44bRjXnzKUTinhXGYlIpES9iwvMzsUOAt428yS3T235SNF\noq+qto47X13CQ++tZGjvLCZfPpojcrvFOiyRDimcLq9zgBOAEwnNvnqLUNeXSEyt2LyTG56ax8K1\nO7jiuIHc/F+HkpaaHOuwRDqscLq8zgamA/e4u2ZPSVx4ZcF6fvKPj0hNSWLyN0dzpqb7isRcOF1e\n15nZQEID8+vMLB1IcffyqEcnsofaunp+/9oSHpi+gqPyunHfpUfTr1t6rMMSEcLr8rqa0DThHsAQ\nQtd5/B9wWnRDE/misl3VXPf3uXywfCvfHDuQn58zQgPvInEknC6v64BjCF3jgbsva1iwUaStFG0q\n58pHC1m/vZLfjz+CCQUD9n2QiLSpcBJKlbtXNyyUZ2YpNLO0iUg0vLN0M9c/MZfOqck8NXEsR+d1\nj3VIItKEcBLKO2b2MyDdzM4A/gd4KbphiYQ8OWs1tzy/gIP7ZPPgFQX013iJSNwKJ6HcBFwJLAC+\nA7zi7n+NalTS4bk7f3pjGfe8uYyTD+7FfZceTaaWTRGJa+HM8qoH/hrcMLMzzezf7n5GtIOTjqm2\nrp6fv7iQJ2etYcLoXH5z4eGkJmvwXSTeNftXamanmtlSM9tpZo+b2QgzKwR+S2iHRJGIq66t57tP\nzuPJWWu4/pSh3Dn+CCUTkXaipRbKXYSmC88gdHHjh8DP3f2etghMOp7KmjqufXwOby/ZzP9+5VCu\nOmFwrEMSkf3QUkJxd/9PcP8FM9usZCLRsquqlqunFDJjxVZ+c8HhXHqsdtQUaW9aSijdzOzCRo+t\n8WN3fy56YUlHUlFdx7cfmc3s4lLumnAkFx6tdUdF2qOWEso7wFebeeyAEoocsMqaOq6aEkomd188\nivNG9Y91SCLSSs0mFHf/VlsGIh1PZU0dEx+bwwfLt/KH8UcqmYi0c5o+IzFRW1fPDU/OY/rSzdxx\n4RF8bbS6uUTaOyUUaXPuzs+eX8Drn2zkl18dwUVjtC6XSCJQQpE297tXP+WZwhJuOG0Y3/ryoFiH\nIyIRss+EYmbXmVm3Ro+7m9n/RCsgMxtlZh+a2XwzKzSzYxrV3WxmRWa2xMzOalQ+2swWBHWTrGEl\nS4k79761jAfeWcE3xw7kB6cPi3U4IhJB4bRQrnb3bQ0P3L0MuDp6IXEncJu7jwJ+ETzGzEYAlwCH\nAeOAv5hZw36v9xO6CHNYcBsXxfikFdydP76+hD+8vpQLjurPrecehvK+SGIJJ6EkNf7GH3yId4pe\nSDiQHdzvCjRsO3we8JS7V7n7SqAIOMbM+gLZ7j7D3R2YApwfxfhkP7k7v532KZPeKuLiggH8YcKR\nJCcpmYgkmnCWb30NeMbM/o/Qh/01wKtRjOn7wGtm9gdCCe9LQXl/Qsu/NCgJymqC+3uWS5z40xvL\nmDw91M1127mHkaRkIpKQwkkoPyW0bP21gAGvAw8eyJua2RtAnyaqbiG0tfAP3P2fZnYR8BBwevDe\ne/IWypt634mEusbIy9PSHm1h/ppt/PmtZVx4VH9+dZ66uUQSWbjL199PBFcYdvfTm6szsynA94KH\n/+Dz5FUCNJ5fmkuoO6wkuL9neVPvOxmYDFBQUKBdJ6OssqaOHz0znz7ZadyqZCKS8Fpavv6Z4OcC\nM/t4z1sUY1oHnBTcPxVYFtyfClxiZp3NbBChwfdZ7r4eKDezscFYz+XAi1GMT8L0x38vZfnmXdwx\n/giy01JjHY6IRFlLLZSGVsI5
bRFII1cD9wR711cSdFG5+6IgyX0C1ALXuXtdcMy1wCNAOjAtuEkM\nzVlVyl/fXcGlx+ZxwrBesQ5HRNqAhSZGdTwFBQVeWFgY6zASUnllDV+Z9B717rz6/RPJ0ta9IgnD\nzOa4e0FTdc3+pZtZOc0MbgO4e3ZzddJxuTu3PL+QtdsqeOY7Y5VMRDqQllYb7gJgZr8CNgCPEZpR\n9Q2gS5tEJ+3Os3NKmPrROn585nBGD+wR63BEpA2Fc2HjWe7+F3cvd/cd7n4/8LVoBybtz/LNO/nF\ni4sYO7gH1548NNbhiEgbCyeh1JnZN8ws2cySzOwbQN0+j5IOpaK6juuemEtaahJ/uvgoXQkv0gGF\nk1AuBS4CNgKbgAlBmQjw+XL0SzaWc/fFo+jTNS3WIYlIDIRzYWMxoXW0RJr02IereH7eWn54xnBO\nPrh3rMMRkRgJZ/n6XDN73sw2mdlGM/unmWl7PQFC15v86qVPOP3Q3lx/isZNRDqycLq8HiZ0lXo/\nQosuvhSUSQdXVVvH956aT//u6dx10Sgt+ijSwYWTUHq5+8PuXhvcHgF06bPw95mrKSmr4Nfnj6Rr\nupZWEenowkkoW8zssmCWV7KZXQZsjXZgEt92VtVy71tFfGlIT44fmhPrcEQkDoSTUL5NaJbXBmA9\nMD4okw7swXdXsHVXNT8dd4hWERYRILxZXquBc9sgFmkntu6s4q/TV3D2yD4cOaBbrMMRkTjR0lpe\nN7r7nWb2Z5pY08vdb4hqZBK37nt7ORU1dfzozINjHYqIxJGWWiiLg59aklc+s3FHJY/PXMX40bkM\n7Z0V63BEJI60tDjkS8HPRxvKzCwJyHL3HW0Qm8ShydNXUFfvXH/KsFiHIiJxJpwLG/9uZtlmlklo\nc6slZvaT6Icm8WbrziqemLmK80b1I69nRqzDEZE4E84srxFBi+R84BUgD/hmVKOSuPTgeyupqq3n\nOl0RLyJNCCehpJpZKqGE8qK719DCxluSmLbtrmbKB8V85fC+DOmlsRMR2Vs4CeUBoBjIBKab2UBA\nYygdzMPvF7Oruo7rT1XrRESaFs51KJOASY2KVpnZKdELSeJN6a5qHn5/JWeOOIhD+mjnZxFpWjiD\n8j3NbJKZzTWzOWZ2D9D1QN7UzCaY2SIzqzezgj3qbjazIjNbYmZnNSofbWYLgrpJFlyebWadzezp\noHymmeUfSGyyt7teX8Ku6jp+cpauOxGR5oXT5fUUsJnQtr/jg/tPH+D7LgQuBKY3LjSzEcAlwGHA\nOOAvZpYcVN8PTASGBbdxQfmVQJm7DwXuBu44wNikkUXrtvPkrNVcftxAhh3UJdbhiEgcCyeh9HD3\n/+fuK4Pbr4EDWm/D3Re7+5Imqs4DnnL3KndfCRQBx5hZXyDb3We4uwNTCE0SaDim4VqZZ4HTGlov\ncmDcndumfkK3jE58//ThsQ5HROJcOAnlbTO7JNhPPsnMLgJejlI8/YE1jR6XBGX9g/t7ln/hGHev\nBbYDPZt6cTObaGaFZla4efPmCIeeeP718XpmFZfy4zMP1vL0IrJP4SSU7wB/B6qC21PAD82s3Mya\nne1lZm+Y2cImbi1tJ9xUy8JbKG/pmL0L3Se7e4G7F/TqpS1dWlJZU8dvX1nMYf2yuXjMgFiHIyLt\nQDizvFrVce7up7fisBKg8adXLrAuKM9torzxMSVmlkJowkBpK95bGnlh3lrWba/kzvFHkqydGEUk\nDM22UIKNtBruf3mPuuujFM9U4JJg5tYgQoPvs9x9PVBuZmOD8ZHLgRcbHXNFcH888FYwziKtVF/v\nTJ6+gpH9s/ny0CZ7D0VE9tJSl9cPG93/8x51B7TBlpldYGYlwHHAy2b2GoC7LwKeIbRm2KvAde5e\nFxx2LfAgoYH65cC0oPwhoKeZFQUx33QgsQn8e/FGVmzZxXdOHKLNs0QkbC11eVkz95t6vF/c/X
ng\n+Wbqbgdub6K8EBjZRHklMOFA4pEvmjx9Bbnd0zl7ZJ9YhyIi7UhLLRRv5n5TjyVBFBaXMmdVGVef\nMJiU5HDmbIiIhLTUQjnEzD4m1BoZEtwneDw46pFJTPzfOyvonpHKhILcfT9ZRKSRlhLKoW0WhcSF\nFZt38sbijdxw2jAyOu1zAqCIyBe0tGPjqrYMRGLviZmrSUkyLhubF+tQRKQdUie5AFBRXcc/Ctcw\nbmQfendJi3U4ItIOKaEIAFM/WsuOylouPy4/1qGISDulhCK4O1NmrOLgg7owJr97rMMRkXaqVQnF\nzG6NcBwSQ/PXbGPRuh1cdtxAXcgoIq3W2hbKnIhGITH12IxVZHZK5oKj+u/7ySIizWhVQnH3lyId\niMRG6a5q/rVgPRcenUtWZ00VFpHW2+cniJlNaqJ4O1Do7i82USftyJOzVlNdW883jxsY61BEpJ0L\np4WSBowClgW3I4AewJVm9qcoxiZRVlNXz2MzVnH80ByGa3tfETlA4fRxDAVODXZDxMzuB14HzgAW\nRDE2ibJXF25gw45Kbr9grzU3RUT2WzgtlP5AZqPHmUC/YFn5qqhEJW3i4fdXMrBnBqcc3DvWoYhI\nAginhXInMN/M/kNoYcgTgd+YWSbwRhRjkyj6aM025q7exi/OGUGSdmQUkQgIZwvgh8zsFeAYQgnl\nZ+7esP3uT6IZnETPIx8Uk9U5RasKi0jEhDPLayrwJDDV3XdFPySJtk3llfzr43V849iBdElLjXU4\nIpIgwhlDuQs4AfjEzP5hZuPNTKsHtmMvfbSemjrnsrGaKiwikRNOl9c7wDtmlgycClwN/A3IjnJs\nEiWvLFjPoX2zGdo7K9ahiEgCCetKeTNLB74GXAOMAR49kDc1swlmtsjM6s2soFH5GWY2x8wWBD9P\nbVQ3OigvMrNJFiw6ZWadzezpoHymmeUfSGyJbt22CuasKuOcI/rGOhQRSTD7TChm9jSwmFDr5D5g\niLt/9wDfdyFwITB9j/ItwFfd/XDgCuCxRnX3AxOBYcFtXFB+JVDm7kOBu4E7DjC2hPbKgvUA/Nfh\nSigiElnhtFAeJpRErnH3t4DjzOy+A3lTd1/s7kuaKJ/XaAbZIiAtaIH0BbLdfYa7OzAFOD943nl8\n3mJ6FjitofUie3tlwXpG9M1mUE7mvp8sIrIf9plQ3P1V4HAzu8PMioFfA59GOzBCXWzz3L2K0MWV\nJY3qSoIygp9rglhrCa0z1rMN4mt31m2rYO7qbXxF3V0iEgXNDsqb2XDgEuDrwFbgacDc/ZRwXtjM\n3gD6NFF1y74WlTSzwwh1XZ3ZUNTE0zyMuj1fdyKhbjPy8jrevunq7hKRaGppltenwLuExjSKAMzs\nB+G+sLuf3pqAzCwXeB643IX+Dm0AAA9iSURBVN2XB8UlQOMr8HKBdY3qBgAlZpYCdAVKm4lpMjAZ\noKCgoMmkk8heVneXiERRS11eXwM2AG+b2V/N7DSabg1EjJl1A14Gbnb39xvK3X09UG5mY4PxkcuB\nhlbOVEID+ADjgbeCcRZpZO22Cuapu0tEoqjZhOLuz7v7xcAhwH+AHwAHmdn9ZnZmc8eFw8wuMLMS\n4DjgZTN7Lai6ntDqxj83s/nBrWHlwmuBB4EiYDkwLSh/COhpZkXAD4GbDiS2RPXI+ytJMjRdWESi\nxvbny7yZ9QAmABe7+6n7en48Kygo8MLCwliH0SbWlO7mtLve4bxR/fj9hCNjHY6ItGNmNsfdC5qq\n268tgN291N0faO/JpKP547+XYgY/PHN4rEMRkQTWqj3lpf1YuHY7L8xfy7ePH0TfrumxDkdEEpgS\nSoK749VP6ZqeyjUnDYl1KCKS4JRQEth7y7bw7rItfPfUYXRN1zL1IhJdSigJyt350xtL6dc1jcvG\ndryLOEWk7SmhJKgZK7ZSuKqMa08eQueU5FiHIyIdgBJKgp
r05jJ6d+nMhIIBsQ5FRDoIJZQENGtl\nKR+uKOWak4aQlqrWiYi0DSWUBPTnt5aRk9WJrx+jsRMRaTtKKAlm3uoy3l22hatPGEx6J7VORKTt\nKKEkmL++u4Ku6alcNnZgrEMRkQ5GCSWBrN9ewWuLNnLJmAFkdm5pZwIRkchTQkkgf5+5mnp3tU5E\nJCaUUBJEVW0dT85azWmHHMSAHhmxDkdEOiAllATxyoL1bNlZzRVfUutERGJDCaWd+vW/PuHUu/7D\ntAXrcXce/WAVg3tlcvzQnFiHJiIdlEZu26GVW3bx8AfFdE5J4ton5nJ4/64sWLud2849jNAOySIi\nbU8tlHbonjeWkppsvPWjk/n1+SNZu62CLmkpXHh0/1iHJiIdmFoo7czSjeW8+NE6vnPiEPp0TeOy\nsQM5b1Q/dlbV0iVNS9SLSOwoobQzd/97KZmdUvjOiYM/K+uSlqpkIiIxF5MuLzObYGaLzKzezPba\n7N7M8sxsp5n9uFHZaDNbYGZFZjbJgsECM+tsZk8H5TPNLL/tzqRtLVy7nWkLN3Dl8YPontkp1uGI\niHxBrMZQFgIXAtObqb8bmLZH2f3ARGBYcBsXlF8JlLn70OC4OyIebRxwd347bTHdMlK58oRBsQ5H\nRGQvMUko7r7Y3Zc0VWdm5wMrgEWNyvoC2e4+w90dmAKcH1SfBzwa3H8WOM0ScKrTs3NKeL9oKz86\nYzjZ6t4SkTgUV7O8zCwT+Clw2x5V/YGSRo9LgrKGujUA7l4LbAd6NvP6E82s0MwKN2/eHMnQo2pT\neSX/71+fcEx+D75xrC5cFJH4FLWEYmZvmNnCJm7ntXDYbcDd7r5zz5dr4rkeRt0XC90nu3uBuxf0\n6tVr3ycRJ3754iIqa+v57dcOJykp4RpfIpIgojbLy91Pb8VhxwLjzexOoBtQb2aVwD+B3EbPywXW\nBfdLgAFAiZmlAF2B0lYHHmdeXbieaQs38JOzDmZIr6xYhyMi0qy4mjbs7ic03DezW4Gd7n5v8Ljc\nzMYCM4HLgT8HT50KXAHMAMYDbwXjLO3ezqpafjl1ESP6ZjOx0TRhEZF4FKtpwxeYWQlwHPCymb0W\nxmHXAg8CRcByPp8F9hDQ08yKgB8CN0Uh5H3aVVXLW59uJJK5bNKby9i4o4pfXzCS1OS4Gu4SEdlL\nTFoo7v488Pw+nnPrHo8LgZFNPK8SmBDJ+PZXRXUd33pkNrNWlvLk1WM5bkiTcwL2y7KN5fztvZVc\nXDCAo/O6RyBKEZHo0tfeA1RZU8fVUwopLC7FDGYXH/jwjbvzy6mLyOiUzI3jDo5AlCIi0aeE0go1\ndfVs2F7JwrXb+Z8n5vJe0RbuHH8kBx/UJSIJ5eUF6/lg+VZ+ctbB9MzqHIGIRUSiL64G5duDB95Z\nzm+nffqFst9ccDjjR+fy0ZptPDe3hNq6elJaOeZRX+/c8eqnHNYvm0t1zYmItCNKKPtp9MDu/OD0\n4eR06UROVmcG52Qy7KAuABTkd+exD1fx6YZyRvbv2qrXf3/5FtaUVnDvpYeQrGtORKQdUULZTwX5\nPSjI79Fk3ZigvLC4tNUJ5anZa+iekcoZIw5qdYwiIrGgMZQI6tctnf7d0pm9qqxVx5fuqub1RRu4\n8OhcOqckRzg6EZHoUkKJsIL87hQWl7bqepTn5pZQU+dcPGZAFCITEYkuJZQIK8jvwcYdVZSUVezX\nce7O07PXcHReN4YHYzIiIu2JEkqEjckPXYS4v9OH567exrJNO7lkTF40whIRiTollAgb3rsLXdJS\nmF28f+MoT89eTWanZL5yRN8oRSYiEl1KKBGWlGQUDAyNo4Rr1spSXpi3jnNH9SOzsybeiUj7pIQS\nBQX5PVi2aSdlu6r3+dyiTTu5ekohuT3S+em4Q9ogOhGR6FBCiYLjh+YAcP2Tc1tMKpvLq/jvh2eR\nmmw8+q1j6JbRqa1CFB
GJOCWUKDhyQDfuHH8Es1eWce5977F4/Y4v1JeU7ebRD4q59K8fsnVnNQ9d\nMYYBPTJiFK2ISGSowz5KLioYwLDeWVzz+BzOv+99+ndPB6C6tv6zKcWDczK5/7KjOXJAt1iGKiIS\nEUooUXRUXndeuv547nlzGdsraj7b6P6K4/I57dDeDNaWviKSQJRQoqx3dhq3X3B4rMMQEYk6jaGI\niEhEKKGIiEhExCShmNkEM1tkZvVmVrBH3RFmNiOoX2BmaUH56OBxkZlNMjMLyjub2dNB+Uwzy2/7\nMxIRkVi1UBYCFwLTGxeaWQrwOHCNux8GnAzUBNX3AxOBYcFtXFB+JVDm7kOBu4E7oh28iIjsLSYJ\nxd0Xu/uSJqrOBD5294+C52119zoz6wtku/sMD60LPwU4PzjmPODR4P6zwGkNrRcREWk78TaGMhxw\nM3vNzOaa2Y1BeX+gpNHzSoKyhro1AO5eC2wHerZRvCIiEojatGEzewPo00TVLe7+YgvxHA+MAXYD\nb5rZHGBHE89tuKyjqdZIk7tbmdlEQt1m5OVpmXgRkUiKWkJx99NbcVgJ8I67bwEws1eAowmNq+Q2\nel4usK7RMQOAkmAMpivQ5FK/7j4ZmAxQUFCw/1sqiohIs+LtwsbXgBvNLAOoBk4C7nb39WZWbmZj\ngZnA5cCfg2OmAlcAM4DxwFsexv67c+bM2WJmq1oZZw6wpZXHtmcd8bw74jlDxzzvjnjOsP/nPbC5\nCmvN3ucHyswuIJQQegHbgPnuflZQdxlwM6Fuq1fc/cagvAB4BEgHpgHfdXcPphU/BhxFqGVyibuv\niHL8he5esO9nJpaOeN4d8ZyhY553RzxniOx5x6SF4u7PA883U/c4oS6uPcsLgZFNlFcCEyIdo4iI\n7J94m+UlIiLtlBJK60yOdQAx0hHPuyOeM3TM8+6I5wwRPO+YjKGIiEjiUQtFREQiQglFREQiQgll\nP5nZODNbEqxufFOs44kGMxtgZm+b2eJg1efvBeU9zOzfZrYs+Nk91rFGmpklm9k8M/tX8LgjnHM3\nM3vWzD4N/s+PS/TzNrMfBL/bC83sSTNLS8RzNrO/mdkmM1vYqKzZ8zSzm4PPtiVmdtb+vp8Syn4w\ns2TgPuBsYATwdTMbEduooqIW+JG7HwqMBa4LzvMm4E13Hwa8GTxONN8DFjd63BHO+R7gVXc/BDiS\n0Pkn7HmbWX/gBqDA3UcCycAlJOY5P8LnK7M3aPI8g7/xS4DDgmP+EnzmhU0JZf8cAxS5+wp3rwae\nIrTacUJx9/XuPje4X07oA6Y/X1zZ+VE+X/E5IZhZLvAV4MFGxYl+ztnAicBDAO5e7e7bSPDzJnQN\nXnqwXFMGoaWcEu6c3X06ey9F1dx5ngc85e5V7r4SKCL0mRc2JZT989nKxoHGqx4npGDDsqMILXlz\nkLuvh1DSAXrHLrKo+BNwI1DfqCzRz3kwsBl4OOjqe9DMMkng83b3tcAfgNXAemC7u79OAp/zHpo7\nzwP+fFNC2T9hr2ycCMwsC/gn8H13b2rF54RhZucAm9x9TqxjaWMphBZgvd/djwJ2kRhdPc0KxgzO\nAwYB/YDMYMmnju6AP9+UUPZPw8rGDRqvepxQzCyVUDJ5wt2fC4o3BpudEfzcFKv4ouDLwLlmVkyo\nK/NUM3ucxD5nCP1Ol7j7zODxs4QSTCKf9+nASnff7O41wHPAl0jsc26sufM84M83JZT9MxsYZmaD\nzKwToQGsqTGOKeKCHS8fAha7+x8bVTWs7Ezws7l9bdodd7/Z3XPdPZ/Q/+tb7n4ZCXzOAO6+AVhj\nZgcHRacBn5DY570aGGtmGcHv+mmExgkT+Zwba+48pwKXmFlnMxtEaKv1WfvzwrpSfj+Z2X8R6mtP\nBv7m7rfHOKSIM7PjgXeBBXw+nvAzQuMozwB5hP4oJ7h7k3vPtGdmdjLwY3c/x8x6kuDn
bGajCE1E\n6ASsAL5F6Mtmwp63md0GXExoRuM84CogiwQ7ZzN7EjiZ0BL1G4FfAi/QzHma2S3Atwn9u3zf3aft\n1/spoYiISCSoy0tERCJCCUVERCJCCUVERCJCCUVERCJCCUVERCJCCUUkQsyszszmN7q1eMW5mV1j\nZpdH4H2LzSznQF9H5EBp2rBIhJjZTnfPisH7FhNaOXdLW7+3SGNqoYhEWdCCuMPMZgW3oUH5rWb2\n4+D+DWb2iZl9bGZPBWU9zOyFoOxDMzsiKO9pZq8Hizk+QKM1mMzssuA95pvZA/u7/LjIgVBCEYmc\n9D26vC5uVLfD3Y8B7iW00sKebgKOcvcjgGuCstuAeUHZz4ApQfkvgfeCxRynErriGTM7lNDV3192\n91FAHfCNyJ6iSPNSYh2ASAKpCD7Im/Jko593N1H/MfCEmb1AaGkMgOOBrwG4+1tBy6Qrof1LLgzK\nXzazsuD5pwGjgdmhJapIJ3EXOJQ4pIQi0ja8mfsNvkIoUZwL/NzMDqPl5cSbeg0DHnX3mw8kUJHW\nUpeXSNu4uNHPGY0rzCwJGODubxPa4KsboYUKpxN0WQULVm4J9qVpXH420LAn+JvAeDPrHdT1MLOB\nUTwnkS9QC0UkctLNbH6jx6+6e8PU4c5mNpPQl7iv73FcMvB40J1lwN3uvs3MbiW0k+LHwG4+X3L8\nNuBJM5sLvENoxVjc/RMz+1/g9SBJ1QDXAasifaIiTdG0YZEo07Re6SjU5SUiIhGhFoqIiESEWigi\nIhIRSigiIhIRSigiIhIRSigiIhIRSigiIhIR/x85w68V3DHZMwAAAABJRU5ErkJggg==\n"
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "ep_reward_list = []\n",
    "avg_reward_list = []\n",
    "\n",
    "for ep in range(total_episodes):\n",
    "    prev_state = env.reset()\n",
    "    episodic_reward = 0\n",
    "    while True :\n",
    "        tf_prev_state = tf.expand_dims(tf.convert_to_tensor(prev_state),0)\n",
    "        action = policy(tf_prev_state,ou_noise)\n",
    "        state,reward,done,info = env.step(action)\n",
    "        buffer.record((prev_state,action,reward,state))\n",
    "        episodic_reward += reward\n",
    "        \n",
    "        buffer.learn()\n",
    "        update_target(tau)\n",
    "        if done:\n",
    "            break\n",
    "\n",
    "        prev_state = state\n",
    "\n",
    "    ep_reward_list.append(episodic_reward)\n",
    "\n",
    "    # Mean of last 40 episodes\n",
    "    avg_reward = np.mean(ep_reward_list[-40:])\n",
    "    print(\"Episode * {} * Avg Reward is ==> {}\".format(ep, avg_reward))\n",
    "    avg_reward_list.append(avg_reward)\n",
    "\n",
    "# Plotting graph\n",
    "# Episodes versus Avg. Rewards\n",
    "plt.plot(avg_reward_list)\n",
    "plt.xlabel(\"Episode\")\n",
    "plt.ylabel(\"Avg. Epsiodic Reward\")\n",
    "plt.show()\n",
    "        \n",
    "        \n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": false
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  },
  "pycharm": {
   "stem_cell": {
    "cell_type": "raw",
    "source": [],
    "metadata": {
     "collapsed": false
    }
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}