{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 15.3 Deep Q-Learning"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 15.3.1 Lunar Lander v2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "toc-hr-collapsed": true,
    "toc-nb-collapsed": true
   },
   "source": [
    "#### 解决云端渲染问题\n",
    "\n",
    "通常在本地机器运行 gym 框架能够正常渲染，但是在服务器/云 jupyter 环境运行 gym，很可能会遇到 `NoSuchDisplayException` 错误。这是因为服务器通常不会配显示器，但是 gym 框架渲染需要调用显示器。此时我们可以利用 `pyvirtualdisplay` 框架虚拟一个显示器来运行 gym 框架。\n",
    "\n",
    "**虚拟显示器操作配置只需要在服务器没有显示器情况下配置，本地环境配置将会出错**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!apt-get install xvfb\n",
    "!pip install pyvirtualdisplay\n",
    "!pip install Pillow"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pyvirtualdisplay import Display\n",
    "display = Display(visible=0, size=(1400, 900))\n",
    "display.start()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 15.3.2 随机动作 Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import gym\n",
    "import time\n",
    "import numpy as np\n",
    "from IPython import display\n",
    "from PIL import Image"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class RandomAgent:\n",
    "    def __init__(self, env_name: str):\n",
    "        self.env = gym.make(env_name)\n",
    "        self.action_count = self.env.action_space.n\n",
    "\n",
    "    def choose_action(self, state):\n",
    "        return np.random.randint(self.action_count)\n",
    "\n",
    "    def simulate(self, visualize=True):\n",
    "        records = []\n",
    "        state = self.env.reset()\n",
    "        is_done = False\n",
    "        total_score = 0\n",
    "\n",
    "        while not is_done:\n",
    "            action = self.choose_action(state)\n",
    "            # 执行动作\n",
    "            observation, reward, is_done, info = self.env.step(action)\n",
    "            # 记录总分\n",
    "            total_score += reward\n",
    "            \n",
    "            # 渲染画面，得到画面的像素数组\n",
    "            rgb_array = self.env.render(mode='rgb_array')\n",
    "            # 记录中间过程，用于后期可视化\n",
    "            records.append((rgb_array, action, reward, total_score))\n",
    "\n",
    "            if visualize:\n",
    "                # 清除当前 Cell 的输出\n",
    "                display.clear_output(wait=True)\n",
    "                                \n",
    "                # 使用像素数组生成图片\n",
    "                img = Image.fromarray(rgb_array)\n",
    "                # 当前 Cell 中展示图片\n",
    "                display.display(img)\n",
    "                print(f'Action {action} Action reward {reward:.2f} | Total score {total_score:.2f}')\n",
    "                # 防止刷新过快，可以根据实际情况调小\n",
    "                time.sleep(0.01)\n",
    "        self.env.close()\n",
    "        return total_score, records"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "agent = RandomAgent('LunarLander-v2')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "total_score, records = agent.simulate(visualize=True)\n",
    "print(f'Total score {total_score:.2f}')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "record_list = []\n",
    "for i in range(100):\n",
    "    total_score, _ = agent.simulate(visualize=False)\n",
    "    record_list.append(total_score)\n",
    "    \n",
    "print(f'Average score in 100 episode {np.mean(record_list):.2f}')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 可视化中间过程"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "plt.rcParams['figure.dpi'] = 180\n",
    "\n",
    "def visualize_episode(episode_record):\n",
    "    nrows, ncols = 3, 5\n",
    "    fig, axs = plt.subplots(nrows=nrows, ncols=ncols, figsize=(ncols*5, nrows*4))\n",
    "\n",
    "    for index, ax in enumerate(axs.flatten()):\n",
    "        ax.set_yticklabels([])\n",
    "        ax.set_xticklabels([])\n",
    "        img_index = int(len(episode_record) / (nrows * ncols)) * index\n",
    "        if img_index < len(episode_record):\n",
    "            sample = episode_record[img_index]\n",
    "            ax.imshow(sample[0])\n",
    "\n",
    "            ax.set_title(f'action {sample[1]} reward {sample[2]:.2f} score {sample[3]:.2f}')\n",
    "        \n",
    "    plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "visualize_episode(records)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "### 15.3.3 DQN Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import random\n",
    "import numpy as np\n",
    "from collections import namedtuple\n",
    "\n",
    "# 代表每一个样本的 namedtuple，方便存储和读取数据\n",
    "Experience = namedtuple('Experience', ('state', 'action', 'reward', 'next_state', 'done'))\n",
    "\n",
    "class ReplayMemory:\n",
    "\n",
    "    def __init__(self, max_size):\n",
    "        self.max_size = max_size\n",
    "        self.memory = []\n",
    "\n",
    "    def append(self, state, action, reward, next_state, done):\n",
    "        \"\"\"记录一个新的样本\"\"\"\n",
    "        sample = Experience(state, action, reward, next_state, done)\n",
    "        self.memory.append(sample)\n",
    "        # 只留下最新记录的 self.max_size 个样本\n",
    "        self.memory = self.memory[-self.max_size:]\n",
    "\n",
    "    def sample(self, batch_size):\n",
    "        \"\"\"按照给定批次大小取样\"\"\"\n",
    "        samples = random.sample(self.memory, batch_size)\n",
    "        batch = Experience(*zip(*samples))\n",
    "\n",
    "        # 转换数据为 numpy 张量返回\n",
    "        states = np.array(batch.state)\n",
    "        actions = np.array(batch.action)\n",
    "        rewards = np.array(batch.reward)\n",
    "        states_next = np.array(batch.next_state)\n",
    "        dones = np.array(batch.done)\n",
    "\n",
    "        return states, actions, rewards, states_next, dones\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.memory)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 模型初始化"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "import gym\n",
    "import time\n",
    "import numpy as np\n",
    "from IPython import display\n",
    "from PIL import Image\n",
    "import tensorflow as tf\n",
    "\n",
    "L = tf.keras.layers\n",
    "\n",
    "\n",
    "def create_network_model(input_shape: np.ndarray,\n",
    "                         action_space: np.ndarray,\n",
    "                         learning_rate=0.001) -> tf.keras.Sequential:\n",
    "    model = tf.keras.Sequential([\n",
    "        L.Dense(512, input_shape=input_shape, activation=\"relu\"),\n",
    "        L.Dense(256, input_shape=input_shape, activation=\"relu\"),\n",
    "        L.Dense(action_space)\n",
    "    ])\n",
    "    model.compile(loss=\"mse\", \n",
    "                  optimizer=tf.optimizers.Adam(lr=learning_rate))\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "from IPython import display\n",
    "from PIL import Image\n",
    "\n",
    "# 定义超参数\n",
    "LEARNING_RATE = 0.001\n",
    "GAMMA = 0.99\n",
    "EPSILON_DECAY = 0.995\n",
    "EPSILON_MIN = 0.01\n",
    "\n",
    "\n",
    "class DQNAgent:\n",
    "    def __init__(self, env_name):\n",
    "        self.env = gym.make(env_name)\n",
    "        self.observation_shape = self.env.observation_space.shape\n",
    "        self.action_count = self.env.action_space.n\n",
    "        self.model = create_network_model(self.observation_shape, self.action_count)\n",
    "        self.memory = ReplayMemory(500000)\n",
    "        self.epsilon = 1.0\n",
    "        self.batch_size = 64\n",
    "\n",
    "    def choose_action(self, state, epsilon=None):\n",
    "        \"\"\"\n",
    "        根据给定状态选择行为\n",
    "        - epsilon == 0 完全使用模型选择行为\n",
    "        - epsilon == 1 完全随机选择行为\n",
    "        \"\"\"\n",
    "        if epsilon is None:\n",
    "            epsilon = self.epsilon\n",
    "        if np.random.rand() < epsilon:\n",
    "            return np.random.randint(self.action_count)\n",
    "        else:\n",
    "            q_values = self.model.predict(np.expand_dims(state, axis=0))\n",
    "            print(q_values)\n",
    "            return np.argmax(q_values[0])\n",
    "\n",
    "    def replay(self):\n",
    "        \"\"\"进行经验回放学习\"\"\"\n",
    "\n",
    "        # 如果当前经验池经验数量少于批次大小，则跳过\n",
    "        if len(self.memory) < self.batch_size:\n",
    "            return\n",
    "\n",
    "        states, actions, rewards, states_next, dones = self.memory.sample(self.batch_size)\n",
    "        q_pred = self.model.predict(states)\n",
    "\n",
    "        q_next = self.model.predict(states_next).max(axis=1)\n",
    "        q_next = q_next * (1 - dones)\n",
    "        q_update = rewards + GAMMA * q_next\n",
    "\n",
    "        indices = np.arange(self.batch_size)\n",
    "        q_pred[[indices], [actions]] = q_update\n",
    "\n",
    "        self.model.train_on_batch(states, q_pred)\n",
    "\n",
    "    def simulate(self, epsilon=None, visualize=True):\n",
    "        records = []\n",
    "        state = self.env.reset()\n",
    "        is_done = False\n",
    "        total_score = 0\n",
    "\n",
    "        while not is_done:\n",
    "            action = self.choose_action(state, epsilon)\n",
    "            state, reward, is_done, info = self.env.step(action)\n",
    "            total_score += reward\n",
    "            \n",
    "            rgb_array = self.env.render(mode='rgb_array')\n",
    "            records.append((rgb_array, action, reward, total_score))\n",
    "\n",
    "            if visualize:\n",
    "                display.clear_output(wait=True)\n",
    "                img = Image.fromarray(rgb_array)\n",
    "                # 当前 Cell 中展示图片\n",
    "                display.display(img)\n",
    "                print(f'Action {action} Action reward {reward:.2f} | Total score {total_score:.2f}')\n",
    "\n",
    "                time.sleep(0.01)\n",
    "        self.env.close()\n",
    "        return total_score, records\n",
    "\n",
    "    def train(self, episode_count: int, log_dir: str):\n",
    "        \"\"\"\n",
    "        训练方法，按照给定 episode 数量进行训练，并记录训练过程关键参数到 TensorBoard\n",
    "        \"\"\"\n",
    "        # 初始化一个 TensorBoard 记录器\n",
    "        file_writer = tf.summary.create_file_writer(log_dir)\n",
    "        file_writer.set_as_default()\n",
    "\n",
    "        score_list = []\n",
    "        best_avg_score = -np.inf\n",
    "\n",
    "        for episode_index in range(episode_count):\n",
    "            state = self.env.reset()\n",
    "            score, step = 0, 0\n",
    "            is_done = False\n",
    "            while not is_done:\n",
    "                # 根据状态选择一个行为\n",
    "                action = self.choose_action(state)\n",
    "                # 执行行为，记录行为和结果到经验池\n",
    "                state_next, reward, is_done, info = self.env.step(action)\n",
    "                self.memory.append(state, action, reward, state_next, is_done)\n",
    "                score += reward\n",
    "\n",
    "                state = state_next\n",
    "                # 每 6 步进行一次回放训练\n",
    "                # 此处也可以选择每一步回放训练，但会降低训练速度，这个是一个经验技巧\n",
    "                if step % 1 == 0:\n",
    "                    self.replay()\n",
    "                step += 1\n",
    "\n",
    "            # 记录当前 Episode 的得分，计算最后 100 Episode 的平均得分\n",
    "            score_list.append(score)\n",
    "            avg_score = np.mean(score_list[-100:])\n",
    "\n",
    "            # 记录当前 Episode 得分，epsilon 和最后 100 Episode 的平均得分到 TensorBoard\n",
    "            tf.summary.scalar('score', data=score, step=episode_index)\n",
    "            tf.summary.scalar('average score', data=avg_score, step=episode_index)\n",
    "            tf.summary.scalar('epsilon', data=self.epsilon, step=episode_index)\n",
    "\n",
    "            # 终端输出训练进度\n",
    "            print(f'Episode: {episode_index} Reward: {score:03.2f} '\n",
    "                  f'Average Reward: {avg_score:03.2f} Epsilon: {self.epsilon:.3f}')\n",
    "\n",
    "            # 调整 epsilon 值，逐渐减少随机探索比例\n",
    "            if self.epsilon > EPSILON_MIN:\n",
    "                self.epsilon *= EPSILON_DECAY\n",
    "\n",
    "            # 如果当前平均得分比之前有改善，保存模型\n",
    "            # 确保提前创建目录 outputs/chapter_15\n",
    "            if avg_score > best_avg_score:\n",
    "                best_avg_score = avg_score\n",
    "                self.model.save('outputs/chapter_15/dqn_best.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "import glob\n",
    "\n",
    "# 使用 LunarLander 初始化 Agent\n",
    "agent = DQNAgent('LunarLander-v2')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 读取现在已经记录的日志数量，避免日志重复记录\n",
    "tf_log_index = len(glob.glob('tf_dir/lunar_lander/run_*'))\n",
    "log_dir = f'tf_dir/lunar_lander/run_{tf_log_index}'\n",
    "\n",
    "# 训练 700 个 Episode\n",
    "agent.train(700, log_dir)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "agent.model.load_weights('/Users/brikerman/Desktop/saved_modelstrained_model.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[93.36385 92.80037 91.98748 93.56141]]\n",
      "[[94.84423  94.50144  93.476364 94.84576 ]]\n",
      "[[96.516075 96.33012  95.14998  96.355606]]\n",
      "[[98.54236 98.32191 97.16588 98.35609]]\n",
      "[[100.53853  100.28109   99.160995 100.31969 ]]\n",
      "[[102.42324 102.12711 101.03785 102.1866 ]]\n",
      "[[104.20788 103.87081 102.82997 103.97037]]\n",
      "[[105.8958   105.49106  104.55191  105.688095]]\n",
      "[[107.53094 107.06009 106.22218 107.35267]]\n",
      "[[109.12069  108.584946 107.846375 108.97334 ]]\n",
      "[[110.64673  110.0622   109.419014 110.54106 ]]\n",
      "[[112.18979 111.57501 111.04082 112.1653 ]]\n",
      "[[113.6573   112.98869  112.57815  113.717186]]\n",
      "[[114.7114  114.26757 113.69389 114.6204 ]]\n",
      "[[116.00607  115.521736 115.0548   115.99176 ]]\n",
      "[[117.25826  116.74409  116.368195 117.31511 ]]\n",
      "[[117.99647  117.78689  117.1428   117.831764]]\n",
      "[[119.053734 118.85091  118.23447  118.921425]]\n",
      "[[120.465065 120.27351  119.73811  120.31785 ]]\n",
      "[[122.03222  121.857635 121.4593   121.82127 ]]\n",
      "[[123.69015  123.52641  123.26406  123.410995]]\n",
      "[[125.51608 125.34781 125.21008 125.16224]]\n",
      "[[127.30688 127.13426 127.13365 126.88428]]\n",
      "[[128.96202 128.79329 128.92427 128.46419]]\n",
      "[[130.59386 130.35823 130.73254 130.08018]]\n",
      "[[128.1437  127.90126 128.27391 127.67382]]\n",
      "[[125.21721  124.99427  125.335365 124.7856  ]]\n",
      "[[123.66121  123.374954 123.7992   123.30458 ]]\n",
      "[[123.643456 123.289406 123.83755  123.32116 ]]\n",
      "[[122.345345 122.03965  122.55745  121.98651 ]]\n",
      "[[121.04802 120.69634 121.26066 120.73409]]\n",
      "[[121.29361  120.873695 121.57675  121.01121 ]]\n",
      "[[119.1199   118.701385 119.34845  118.85083 ]]\n",
      "[[119.02413 118.54304 119.31089 118.79265]]\n",
      "[[117.493706 117.044464 117.762024 117.23986 ]]\n",
      "[[117.04038 116.50003 117.33651 116.86229]]\n",
      "[[116.430176 115.88151  116.750465 116.25471 ]]\n",
      "[[115.506805 114.94002  115.83139  115.34886 ]]\n",
      "[[112.851204 112.33205  112.94452  112.68609 ]]\n",
      "[[109.52011 109.05646 109.41143 109.27813]]\n",
      "[[111.123726 110.63584  111.15153  110.93956 ]]\n",
      "[[109.106575 108.63729  109.051834 108.87802 ]]\n",
      "[[110.61672  110.125496 110.7008   110.449036]]\n",
      "[[109.31699  108.88059  109.362946 109.10548 ]]\n",
      "[[105.93654 105.62378 105.86    105.57728]]\n",
      "[[107.48312  107.14676  107.552414 107.17517 ]]\n",
      "[[106.256065 105.98109  106.30652  105.88508 ]]\n",
      "[[105.12045  104.877556 105.086365 104.733215]]\n",
      "[[106.4124   106.121155 106.59244  106.047485]]\n",
      "[[103.6567  103.45251 103.57948 103.24207]]\n",
      "[[104.94286  104.690094 105.078445 104.551414]]\n",
      "[[104.23083 104.02248 104.42227 103.78227]]\n",
      "[[101.5535  101.42207 101.49358 101.06506]]\n",
      "[[102.86181 102.68662 103.02771 102.3824 ]]\n",
      "[[101.40906  101.233246 101.36347  100.967186]]\n",
      "[[102.76829 102.54978 102.94552 102.33352]]\n",
      "[[98.87421  98.801025 98.793976 98.32108 ]]\n",
      "[[100.21558  100.100685 100.35657   99.660225]]\n",
      "[[100.61327  100.45814  100.77676  100.090256]]\n",
      "[[99.25724 99.16365 99.43265 98.6557 ]]\n",
      "[[96.860985 96.816895 96.790085 96.24603 ]]\n",
      "[[98.30411 98.20727 98.47766 97.68162]]\n",
      "[[97.56544  97.49474  97.808044 96.8831  ]]\n",
      "[[97.29336 97.20709 97.5883  96.60322]]\n",
      "[[96.473465 96.391754 96.76462  95.765656]]\n",
      "[[95.40902  95.341606 95.66632  94.68288 ]]\n",
      "[[94.4051  94.36294 94.67537 93.63497]]\n",
      "[[92.00151  91.915016 91.97721  91.30528 ]]\n",
      "[[93.61492 93.50032 93.88034 92.89332]]\n",
      "[[91.28344 91.14494 91.34072 90.6111 ]]\n",
      "[[89.21479  89.12804  89.20693  88.477554]]\n",
      "[[90.90441  90.7953   91.17576  90.145874]]\n",
      "[[87.77602  87.669365 87.81223  87.025764]]\n",
      "[[84.80255 84.53024 84.81262 84.17113]]\n",
      "[[81.961525 81.499725 81.98157  81.482605]]\n",
      "[[78.12332 77.62995 78.05232 77.59082]]\n",
      "[[80.356514 79.96754  80.37112  79.78685 ]]\n",
      "[[77.275566 76.80176  77.24166  76.713684]]\n",
      "[[79.493996 79.12816  79.54709  78.89455 ]]\n",
      "[[76.431274 75.97714  76.43695  75.838135]]\n",
      "[[71.81784 71.33707 71.79564 71.27563]]\n",
      "[[74.08817 73.64171 74.0947  73.46409]]\n",
      "[[72.40644 72.0151  72.40405 71.73506]]\n",
      "[[74.71746  74.39109  74.77094  73.973816]]\n",
      "[[72.79557 72.39616 72.86153 72.09976]]\n",
      "[[72.13778  71.767265 72.21647  71.406906]]\n",
      "[[70.63891  70.1533   70.79492  70.045944]]\n",
      "[[70.080734 69.64593  70.23682  69.42657 ]]\n",
      "[[69.444786 68.932816 69.67618  68.87977 ]]\n",
      "[[65.32898 64.86762 65.52476 64.79076]]\n",
      "[[63.251434 62.755344 63.477997 62.80056 ]]\n",
      "[[62.525497 61.949432 62.822956 62.18683 ]]\n",
      "[[59.24668  58.64367  59.54117  59.028606]]\n",
      "[[57.461414 56.842598 57.737007 57.31209 ]]\n",
      "[[57.333344 56.625217 57.57264  57.302395]]\n",
      "[[56.74259  55.97132  56.933163 56.838036]]\n",
      "[[54.65082  53.971554 54.744034 54.87338 ]]\n",
      "[[57.32523  56.79301  57.2782   57.213196]]\n",
      "[[58.671818 58.109375 58.73667  58.46837 ]]\n",
      "[[57.666306 57.105656 57.65773  57.504517]]\n",
      "[[58.96892  58.400887 59.106544 58.739765]]\n",
      "[[58.103905 57.5699   58.214985 57.845592]]\n",
      "[[57.402355 56.91417  57.51083  57.095787]]\n",
      "[[56.876923 56.32895  56.91116  56.651787]]\n",
      "[[54.85702  54.4331   54.76457  54.778378]]\n",
      "[[56.186604 55.64779  56.211723 56.005093]]\n",
      "[[53.95018  53.569782 53.824417 53.989944]]\n",
      "[[56.179478 56.02618  55.943794 55.840572]]\n",
      "[[57.4688   57.221516 57.370678 57.026108]]\n",
      "[[58.653214 58.395252 58.746056 58.138474]]\n",
      "[[56.648712 56.45269  56.57258  56.214428]]\n",
      "[[57.883926 57.657032 57.984337 57.368675]]\n",
      "[[55.142296 55.044525 55.04253  54.647194]]\n",
      "[[56.37117  56.23555  56.446342 55.776707]]\n",
      "[[54.88758  54.855602 54.749573 54.509815]]\n",
      "[[56.12293  55.99947  56.122486 55.641075]]\n",
      "[[57.3244   57.172924 57.496113 56.759193]]\n",
      "[[55.599022 55.531864 55.569458 55.189472]]\n",
      "[[56.83774  56.69     56.945824 56.328423]]\n",
      "[[55.89315  55.817547 55.9032   55.50994 ]]\n",
      "[[54.4693   54.493492 54.364048 54.165436]]\n",
      "[[54.561794 54.163593 54.84954  54.422596]]\n",
      "[[52.477066 52.193966 52.623825 52.48884 ]]\n",
      "[[51.20997  50.9786   51.266655 51.279606]]\n",
      "[[53.480286 53.367626 53.472992 53.2666  ]]\n",
      "[[54.662285 54.425102 54.822754 54.356922]]\n",
      "[[54.405746 54.156807 54.5853   54.08014 ]]\n",
      "[[53.675484 53.44232  53.825974 53.298965]]\n",
      "[[52.93718  52.756317 53.06924  52.67346 ]]\n",
      "[[50.57113  50.550346 50.49026  50.43303 ]]\n",
      "[[51.701523 51.556114 51.798058 51.476692]]\n",
      "[[50.098133 50.04957  50.05582  49.93093 ]]\n",
      "[[51.292213 51.117523 51.43719  51.037052]]\n",
      "[[50.583714 50.4307   50.70525  50.347908]]\n",
      "[[49.099243 49.024773 49.09366  48.878513]]\n",
      "[[50.57027  50.368046 50.76381  50.256832]]\n",
      "[[50.469635 50.249733 50.70964  50.16855 ]]\n",
      "[[50.26807 50.03488 50.52033 49.90542]]\n",
      "[[48.756752 48.61203  48.902435 48.484306]]\n",
      "[[48.199486 48.071415 48.33139  47.9685  ]]\n",
      "[[46.162792 46.19242  46.071136 45.95618 ]]\n",
      "[[46.17628  45.832554 46.51302  46.11438 ]]\n",
      "[[45.384575 45.085873 45.646477 45.28716 ]]\n",
      "[[43.29524  43.215412 43.3154   43.279316]]\n",
      "[[43.163815 43.075268 43.222572 43.193417]]\n",
      "[[43.00248  42.91484  43.055153 43.045307]]\n",
      "[[42.36841  42.341164 42.324326 42.386703]]\n",
      "[[44.916172 44.855465 44.977417 44.70814 ]]\n",
      "[[43.17266  43.357864 42.965843 43.115818]]\n",
      "[[42.610123 42.402534 42.799255 42.64541 ]]\n",
      "[[41.904587 41.77425  41.974884 41.966843]]\n",
      "[[41.36887  41.30441  41.322533 41.465145]]\n",
      "[[44.201595 44.081676 44.40565  44.174335]]\n",
      "[[42.89708  42.99191  42.779045 42.89568 ]]\n",
      "[[42.57627  42.252365 42.870655 42.660202]]\n",
      "[[41.405888 41.335583 41.29616  41.507687]]\n",
      "[[43.762417 43.609512 43.938087 43.76169 ]]\n",
      "[[42.900536 42.946095 42.764484 42.92425 ]]\n",
      "[[43.03094  42.69497  43.287476 43.115906]]\n",
      "[[43.311485 42.930614 43.60463  43.40134 ]]\n",
      "[[42.40669  42.279976 42.34117  42.50534 ]]\n",
      "[[44.236748 44.01561  44.42114  44.24352 ]]\n",
      "[[43.337055 43.380592 43.14681  43.376694]]\n",
      "[[43.19198  42.7988   43.395767 43.307613]]\n",
      "[[43.73694  43.24545  44.031593 43.829815]]\n",
      "[[43.078568 42.827827 43.070168 43.200684]]\n",
      "[[44.93855  44.613182 45.14172  44.972103]]\n",
      "[[44.734352 44.476715 44.848137 44.784397]]\n",
      "[[43.48665  43.41524  43.23635  43.561623]]\n",
      "[[45.019363 44.879723 45.00959  44.95079 ]]\n",
      "[[45.738285 45.446095 46.045017 45.668446]]\n",
      "[[44.653336 44.533096 44.563915 44.58151 ]]\n",
      "[[45.29799  45.02511  45.531326 45.22361 ]]\n",
      "[[44.562588 44.438774 44.467136 44.50891 ]]\n",
      "[[45.25175 44.97058 45.48154 45.19236]]\n",
      "[[44.28054  44.147926 44.154823 44.235847]]\n",
      "[[45.01838  44.719498 45.210625 44.946274]]\n",
      "[[44.796616 44.482323 44.95657  44.70154 ]]\n",
      "[[44.429832 44.200325 44.430206 44.365913]]\n",
      "[[44.156136 43.915966 44.12978  44.084293]]\n",
      "[[44.84269  44.440983 45.130917 44.745644]]\n",
      "[[44.020977 43.761253 44.00995  43.957294]]\n",
      "[[44.739746 44.3154   45.043438 44.647755]]\n",
      "[[44.725834 44.282444 45.05017  44.63194 ]]\n",
      "[[43.619827 43.318535 43.60259  43.5486  ]]\n",
      "[[44.35516  43.885365 44.64786  44.23512 ]]\n",
      "[[43.988613 43.548805 44.181747 43.86036 ]]\n",
      "[[42.52     42.32267  42.276524 42.51034 ]]\n",
      "[[43.255203 42.864597 43.311283 43.15166 ]]\n",
      "[[42.388527 42.230705 42.107098 42.43945 ]]\n",
      "[[43.217712 43.013382 43.10382  43.028633]]\n",
      "[[43.776333 43.417423 43.971306 43.53287 ]]\n",
      "[[43.71414  43.38616  43.885952 43.50136 ]]\n",
      "[[43.016376 42.829754 42.950024 42.853935]]\n",
      "[[43.710304 43.360718 43.958065 43.519485]]\n",
      "[[43.493404 43.191597 43.670624 43.32501 ]]\n",
      "[[42.381527 42.325157 42.15759  42.305126]]\n",
      "[[43.29247  43.023052 43.420116 43.151768]]\n",
      "[[43.21622  42.905075 43.3774   43.054214]]\n",
      "[[43.124065 42.79697  43.29362  42.954838]]\n",
      "[[41.9128   41.815342 41.676437 41.82495 ]]\n",
      "[[42.82648  42.51206  42.941956 42.65862 ]]\n",
      "[[41.68427  41.61056  41.328434 41.63571 ]]\n",
      "[[42.229332 41.975857 42.202053 42.08828 ]]\n",
      "[[42.88102  42.450222 43.19568  42.70317 ]]\n",
      "[[42.530487 42.177525 42.674263 42.36518 ]]\n",
      "[[41.55386  41.40848  41.36836  41.482685]]\n",
      "[[42.51138  42.141136 42.689407 42.353813]]\n",
      "[[42.292137 41.98587  42.369095 42.166   ]]\n",
      "[[41.520763 41.379498 41.322857 41.47207 ]]\n",
      "[[42.64849  42.267357 42.8261   42.51006 ]]\n",
      "[[41.387444 41.26731  41.10639  41.346024]]\n",
      "[[42.570625 42.21504  42.67862  42.47872 ]]\n",
      "[[42.74228  42.305553 42.93302  42.628788]]\n",
      "[[42.851303 42.409916 43.042862 42.750626]]\n",
      "[[43.071945 42.57932  43.320026 42.96428 ]]\n",
      "[[42.93336  42.436893 43.104977 42.85549 ]]\n",
      "[[43.209564 42.906113 42.88769  43.27944 ]]\n",
      "[[43.168343 42.844616 43.111702 43.068092]]\n",
      "[[43.922836 43.403324 44.20685  43.746838]]\n",
      "[[43.30621  42.866924 43.355938 43.15053 ]]\n",
      "[[43.38488  43.08496  43.060593 43.38242 ]]\n",
      "[[42.572865 42.18533  42.49108  42.431435]]\n",
      "[[43.26101  42.727554 43.46936  43.049767]]\n",
      "[[42.11194  41.74298  41.94676  41.986614]]\n",
      "[[42.842773 42.331543 42.987488 42.64256 ]]\n",
      "[[42.756477 42.236534 42.89662  42.547935]]\n",
      "[[41.875202 41.55132  41.583305 41.814514]]\n",
      "[[42.103832 41.64762  42.157017 41.948257]]\n",
      "[[41.34297  41.046795 41.08488  41.290977]]\n",
      "[[42.018394 41.54     42.135326 41.881367]]\n",
      "[[41.400303 41.114227 41.126945 41.385567]]\n",
      "[[41.80309  41.33658  41.8868   41.700264]]\n",
      "[[41.580135 41.194527 41.326553 41.527325]]\n",
      "[[41.875206 41.365955 41.945038 41.754738]]\n",
      "[[42.477665 42.03321  42.130478 42.464108]]\n",
      "[[41.700798 41.21128  41.5896   41.598175]]\n",
      "[[42.53894  41.917553 42.705143 42.390747]]\n",
      "[[42.008324 41.460625 41.859184 41.87839 ]]\n",
      "[[42.83772 42.18036 42.96001 42.67357]]\n",
      "[[41.884426 41.318615 41.75922  41.751015]]\n",
      "[[42.859535 42.175335 43.002514 42.699978]]\n",
      "[[43.04845  42.37148  43.042114 42.862595]]\n",
      "[[44.05196  43.23885  44.295498 43.88098 ]]\n",
      "[[44.004356 43.17661  44.284966 43.839947]]\n",
      "[[43.524105 42.78255  43.568962 43.33888 ]]\n",
      "[[42.34472  41.73396  41.927624 42.20394 ]]\n",
      "[[42.721962 42.035664 42.592102 42.536877]]\n",
      "[[43.608166 42.81364  43.757965 43.435715]]\n",
      "[[43.495377 42.764935 43.308144 43.29543 ]]\n",
      "[[44.245396 43.422    44.340443 44.057144]]\n",
      "[[42.663326 41.95852  42.5158   42.476025]]\n",
      "[[43.469307 42.661545 43.61918  43.295193]]\n",
      "[[43.56457  42.776436 43.53971  43.373478]]\n",
      "[[44.48484  43.581345 44.763332 44.29891 ]]\n",
      "[[43.1912   42.41531  43.108543 42.999344]]\n",
      "[[44.049442 43.1714   44.286873 43.865147]]\n",
      "[[43.023342 42.221184 43.08503  42.850418]]\n",
      "[[42.186344 41.434383 42.269165 42.03594 ]]\n",
      "[[40.892426 40.3378   40.442207 40.76745 ]]\n",
      "[[41.4899   40.822357 41.358864 41.338577]]\n",
      "[[42.4785   41.67048  42.682877 42.34395 ]]\n",
      "[[41.43232  40.739426 41.424843 41.304466]]\n",
      "[[42.51495  41.683025 42.84674  42.401363]]\n",
      "[[42.536793 41.711437 42.741592 42.4204  ]]\n",
      "[[41.98282  41.309788 41.61931  41.840477]]\n",
      "[[42.89023  42.071213 42.87193  42.75449 ]]\n",
      "[[43.896194 42.957253 44.221516 43.75956 ]]\n",
      "[[42.51232  41.68593  42.629314 42.404747]]\n",
      "[[43.509304 42.619892 43.58377  43.380527]]\n",
      "[[42.672203 41.890022 42.48107  42.54156 ]]\n",
      "[[43.71237  42.791042 43.873474 43.588135]]\n",
      "[[44.216038 43.28438  44.19064  44.068756]]\n",
      "[[45.203167 44.16209  45.555622 45.03751 ]]\n",
      "[[45.802303 44.73413  46.07072  45.599674]]\n",
      "[[44.396484 43.477562 44.18663  44.227253]]\n",
      "[[45.282486 44.251335 45.46078  45.088367]]\n",
      "[[43.90069  43.054474 43.53636  43.733536]]\n",
      "[[44.785095 43.80792  44.815483 44.595764]]\n",
      "[[44.29882  43.356262 44.35847  44.1392  ]]\n",
      "[[43.85788  43.040977 43.46016  43.686188]]\n",
      "[[44.752853 43.803757 44.763092 44.554417]]\n",
      "[[42.815563 42.109932 42.319016 42.674984]]\n",
      "[[43.64564  42.808723 43.531147 43.490334]]\n",
      "[[44.64809  43.69021  44.949173 44.458233]]\n",
      "[[44.60901 43.67998 44.75762 44.4148 ]]\n",
      "[[43.818184 43.0484   43.40896  43.637856]]\n",
      "[[44.699913 43.801907 44.705486 44.48941 ]]\n",
      "[[44.637066 43.805984 44.373264 44.41021 ]]\n",
      "[[45.549416 44.59098  45.711594 45.29575 ]]\n",
      "[[45.314987 44.402298 45.426033 45.063297]]\n",
      "[[45.052334 44.176933 45.19204  44.80832 ]]\n",
      "[[43.669037 43.02137  43.23409  43.446953]]\n",
      "[[44.53842  43.766724 44.516045 44.290077]]\n",
      "[[45.696728 44.811012 46.07534  45.388554]]\n",
      "[[45.639015 44.80903  45.848206 45.32704 ]]\n",
      "[[44.64664  43.959362 44.623898 44.396645]]\n",
      "[[45.946045 45.14489  46.31159  45.63044 ]]\n",
      "[[46.073036 45.298927 46.445644 45.75674 ]]\n",
      "[[46.227276 45.49304  46.4526   45.887085]]\n",
      "[[45.06989 44.5194  44.81355 44.79976]]\n",
      "[[46.32095  45.673573 46.436836 45.968117]]\n",
      "[[45.723133 45.17711  45.792133 45.424335]]\n",
      "[[44.678207 44.296074 44.317867 44.450035]]\n",
      "[[45.83444  45.364697 45.850994 45.521484]]\n",
      "[[44.276196 43.98455  43.841415 43.9878  ]]\n",
      "[[45.38616  45.0431   45.350338 45.094173]]\n",
      "[[46.60514  46.16786  46.94328  46.229866]]\n",
      "[[45.20157  44.957813 44.969425 44.85259 ]]\n",
      "[[46.486507 46.166008 46.634865 46.09846 ]]\n",
      "[[46.783936 46.495975 46.86444  46.343727]]\n",
      "[[46.439186 46.26207  46.330086 45.9788  ]]\n",
      "[[47.665577 47.413185 47.932    47.12675 ]]\n",
      "[[36.81669  35.86128  36.677834 36.679195]]\n",
      "[[36.396416 35.139687 37.62445  36.655094]]\n",
      "[[36.56884  35.29041  37.154182 36.6548  ]]\n",
      "[[47.715378 46.990105 48.30847  47.454575]]\n",
      "[[47.795856 47.009735 48.194695 47.551723]]\n",
      "[[47.782856 47.152584 47.521667 47.459927]]\n",
      "[[46.05568  45.325005 46.190014 45.81983 ]]\n",
      "[[46.380714 45.748436 45.97398  46.10341 ]]\n",
      "[[44.8557   44.107727 44.85844  44.666016]]\n",
      "[[46.95455  46.162506 46.242176 46.74881 ]]\n",
      "[[45.191284 44.330334 44.925682 45.06193 ]]\n",
      "[[44.129944 43.13352  44.3343   44.119404]]\n",
      "[[46.249767 45.165127 45.73204  46.197952]]\n",
      "[[44.734688 43.588238 44.59151  44.77737 ]]\n",
      "[[44.40171  43.22836  44.548687 44.552704]]\n",
      "[[44.33     43.138683 44.844376 44.54373 ]]\n",
      "[[44.817375 43.68221  44.622524 44.882904]]\n",
      "[[44.46048  43.344425 44.644646 44.60994 ]]\n",
      "[[44.719658 43.64083  44.220154 44.73428 ]]\n",
      "[[44.028336 42.986168 43.9346   44.10254 ]]\n",
      "[[45.26013  44.18094  45.537315 45.254852]]\n",
      "[[44.106106 43.141575 43.782646 44.139694]]\n",
      "[[45.283684 44.307053 45.296288 45.250237]]\n",
      "[[45.56732  44.641544 45.05607  45.67415 ]]\n",
      "[[45.516266 44.597153 45.193348 45.46424 ]]\n",
      "[[46.78099  45.741253 46.858032 46.630066]]\n",
      "[[45.96496  45.02518  46.008686 45.825497]]\n",
      "[[43.948082 43.213627 43.536896 43.91312 ]]\n",
      "[[45.366367 44.494427 45.399296 45.233303]]\n",
      "[[44.32023  43.580982 44.25438  44.225937]]\n",
      "[[45.717094 44.81824  46.10526  45.52608 ]]\n",
      "[[43.59429 42.93825 43.29096 43.51565]]\n",
      "[[45.008606 44.18921  45.230076 44.85137 ]]\n",
      "[[45.2877   44.43956  45.45944  45.133545]]\n",
      "[[43.375423 42.751675 43.046394 43.286926]]\n",
      "[[44.813602 44.028282 45.022545 44.66489 ]]\n",
      "[[43.88897  43.193302 43.622494 43.81772 ]]\n",
      "[[45.367756 44.518074 45.568283 45.207664]]\n",
      "[[44.799465 44.007404 44.7187   44.691605]]\n",
      "[[46.133892 45.202393 46.4726   45.953304]]\n",
      "[[44.752567 43.97338  44.590904 44.65375 ]]\n",
      "[[46.073387 45.155888 46.333153 45.900238]]\n",
      "[[34.484306 34.11057  33.602707 33.528187]]\n",
      "[[34.91184  34.700367 34.858448 33.949146]]\n",
      "[[35.0487   34.992317 35.910522 34.136845]]\n",
      "[[34.545418 34.526363 35.235256 33.551785]]\n",
      "[[23.801569 23.379814 21.495138 23.102459]]\n",
      "[[23.829397 23.425508 21.509733 23.119932]]\n",
      "[[23.962576 23.538391 21.491777 23.197718]]\n",
      "[[23.97648  23.608208 21.657934 23.256256]]\n",
      "[[24.043148 23.694323 21.728579 23.31696 ]]\n",
      "[[24.120459 23.754604 21.685701 23.35253 ]]\n",
      "[[24.12559  23.796797 21.8092   23.393513]]\n",
      "[[24.122568 23.793991 21.808285 23.390903]]\n",
      "[[24.113424 23.782187 21.798065 23.38216 ]]\n",
      "[[24.10671  23.77372  21.791069 23.375818]]\n",
      "[[24.116968 23.74912  21.678684 23.348904]]\n",
      "[[24.102505 23.766178 21.783665 23.373344]]\n",
      "[[24.103056 23.768122 21.786974 23.374111]]\n",
      "[[24.10685  23.772596 21.790201 23.377592]]\n",
      "[[24.106867 23.772608 21.79019  23.3776  ]]\n",
      "[[24.105957 23.771463 21.789246 23.376738]]\n",
      "[[24.105373 23.770731 21.788643 23.376186]]\n",
      "[[24.105291 23.77063  21.78856  23.376114]]\n",
      "[[24.10543  23.770798 21.788702 23.376245]]\n",
      "[[24.105547 23.770948 21.788822 23.376352]]\n",
      "[[24.105577 23.770987 21.788853 23.376379]]\n",
      "[[24.105556 23.770964 21.788836 23.376364]]\n",
      "[[24.105537 23.77094  21.788816 23.376347]]\n",
      "[[24.105528 23.770927 21.788805 23.376337]]\n",
      "[[24.10553  23.770931 21.788807 23.376337]]\n",
      "[[24.105536 23.770935 21.78881  23.37634 ]]\n",
      "[[24.105537 23.77094  21.788813 23.376345]]\n",
      "[[24.105541 23.77094  21.788816 23.376347]]\n",
      "[[24.105541 23.77094  21.788816 23.376345]]\n",
      "[[24.105537 23.770939 21.78881  23.376345]]\n",
      "[[24.105537 23.770935 21.78881  23.376343]]\n",
      "[[24.105537 23.770935 21.78881  23.37634 ]]\n",
      "[[24.105537 23.770935 21.78881  23.37634 ]]\n",
      "[[24.105537 23.770937 21.78881  23.37634 ]]\n",
      "[[24.105537 23.770935 21.78881  23.37634 ]]\n"
     ]
    }
   ],
   "source": [
    "score, records = agent.simulate(epsilon=0.0, visualize=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# https://github.com/fakemonk1/Reinforcement-Learning-Lunar_Lander"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.5"
  },
  "toc-autonumbering": false,
  "toc-showcode": false,
  "toc-showmarkdowntxt": false,
  "toc-showtags": false
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
