{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch.utils.data as data\n",
    "\n",
    "import random\n",
    "import numbers\n",
    "from PIL import Image, ImageMath\n",
    "import os\n",
    "import os.path\n",
    "import numpy as np\n",
    "import struct\n",
    "import math\n",
    "\n",
    "import torch\n",
    "import torchvision\n",
    "import matplotlib.pyplot as plt\n",
    "from mpl_toolkits.mplot3d import Axes3D\n",
    "\n",
    "import faiss\n",
    "import time\n",
    "\n",
    "import pcl\n",
    "from PIL import Image, ImageDraw\n",
    "import faiss\n",
    "\n",
    "import timeit\n",
    "\n",
    "%matplotlib qt5"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FarthestSampler:\n",
    "    '''Greedy farthest point sampling (FPS) over an Nx3 point cloud.'''\n",
    "    def __init__(self):\n",
    "        pass\n",
    "\n",
    "    def calc_distances(self, p0, points):\n",
    "        # squared Euclidean distance from p0 (3,) to every row of points (Nx3)\n",
    "        return ((p0 - points) ** 2).sum(axis=1)\n",
    "\n",
    "    def sample(self, pts, k):\n",
    "        '''Pick k points of pts (Nx3) that are mutually far apart.\n",
    "\n",
    "        The seed point is drawn uniformly at random (global numpy RNG, so\n",
    "        results are not deterministic); each subsequent point maximizes the\n",
    "        minimum distance to the already-selected set.\n",
    "        :return: kx3 numpy array\n",
    "        '''\n",
    "        farthest_pts = np.zeros((k, 3))\n",
    "        farthest_pts[0] = pts[np.random.randint(len(pts))]\n",
    "        distances = self.calc_distances(farthest_pts[0], pts)\n",
    "        for i in range(1, k):\n",
    "            farthest_pts[i] = pts[np.argmax(distances)]\n",
    "            distances = np.minimum(distances, self.calc_distances(farthest_pts[i], pts))\n",
    "        return farthest_pts\n",
    "    \n",
    "\n",
    "class KNNBuilder:\n",
    "    '''k-nearest-neighbor search over 3D points via a Faiss flat L2 index (CPU).'''\n",
    "    def __init__(self, k):\n",
    "        # k: number of neighbors returned by self_build_search\n",
    "        self.k = k\n",
    "        self.dimension = 3\n",
    "\n",
    "    def build_nn_index(self, database):\n",
    "        '''\n",
    "        :param database: numpy array of Nx3\n",
    "        :return: Faiss index, in CPU\n",
    "        '''\n",
    "        index = faiss.IndexFlatL2(self.dimension)  # dimension is 3\n",
    "        index.add(database)\n",
    "        return index\n",
    "\n",
    "    def search_nn(self, index, query, k):\n",
    "        '''\n",
    "        :param index: Faiss index\n",
    "        :param query: numpy array of Nx3\n",
    "        :return: D: numpy array of Nxk\n",
    "                 I: numpy array of Nxk\n",
    "        '''\n",
    "        D, I = index.search(query, k)\n",
    "        return D, I\n",
    "\n",
    "    def self_build_search(self, x):\n",
    "        '''\n",
    "        build an index over x and query it with x itself (self-kNN)\n",
    "\n",
    "        :param x: numpy array of Nxd\n",
    "        :return: D: numpy array of Nxk\n",
    "                 I: numpy array of Nxk\n",
    "        '''\n",
    "        # faiss expects float32, C-contiguous input\n",
    "        x = np.ascontiguousarray(x, dtype=np.float32)\n",
    "        index = self.build_nn_index(x)\n",
    "        D, I = self.search_nn(index, x, self.k)\n",
    "        return D, I\n",
    "    \n",
    "\n",
    "class PCSampler:\n",
    "    '''Voxel-grid downsampling via PCL, with a retry policy that shrinks\n",
    "    the leaf size until enough points survive.'''\n",
    "    def __init__(self, leaf_size, minimum_pc_num):       \n",
    "        # leaf_size: voxel edge length; minimum_pc_num: lower bound on output size\n",
    "        self.leaf_size = leaf_size\n",
    "        self.minimum_pc_num = minimum_pc_num\n",
    "    \n",
    "    def sample_pc(self, pc, leaf_size):\n",
    "        '''\n",
    "        :param pc: input numpy array of Nx3\n",
    "        :return: sampled_pc of Mx3\n",
    "        '''\n",
    "        cloud = pcl.PointCloud(pc)\n",
    "        sor = cloud.make_voxel_grid_filter()\n",
    "        sor.set_leaf_size(leaf_size, leaf_size, leaf_size)\n",
    "        cloud_filtered = sor.filter()\n",
    "        sampled_pc = np.asarray(cloud_filtered)\n",
    "        \n",
    "        return sampled_pc\n",
    "    \n",
    "    def sample_pc_wrapper(self, pc):\n",
    "        '''\n",
    "        ensure that the sampled pc is more than a certain amount:\n",
    "        retries with the leaf size shrunk by 0.04 per attempt; gives up\n",
    "        (returning the last, possibly too-small result) once leaf_size <= 0\n",
    "        '''\n",
    "        retry_counter = 0\n",
    "        \n",
    "        sampled_pc = self.sample_pc(pc, self.leaf_size)\n",
    "        while sampled_pc.shape[0] < self.minimum_pc_num:\n",
    "            retry_counter += 1\n",
    "            leaf_size = self.leaf_size - 0.04*retry_counter\n",
    "            if leaf_size <= 0:\n",
    "                break\n",
    "            sampled_pc = self.sample_pc(pc, leaf_size)\n",
    "        \n",
    "        return sampled_pc\n",
    "    \n",
    "    \n",
    "def axisEqual3D(ax):\n",
    "    '''Force an equal aspect ratio on a matplotlib 3D axis by widening each\n",
    "    of the x/y/z limits to the largest extent, centered on its midpoint.'''\n",
    "    extents = np.array([getattr(ax, 'get_{}lim'.format(dim))() for dim in 'xyz'])\n",
    "    sz = extents[:,1] - extents[:,0]\n",
    "    centers = np.mean(extents, axis=1)\n",
    "    maxsize = max(abs(sz))\n",
    "    r = maxsize/2\n",
    "    for ctr, dim in zip(centers, 'xyz'):\n",
    "        getattr(ax, 'set_{}lim'.format(dim))(ctr - r, ctr + r)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def read_txt_as_list(txt_file):\n",
    "    '''Read a text file with one integer per line into a python list.\n",
    "    NOTE(review): a trailing blank line would make int() raise — assumes\n",
    "    the subset files contain no blank lines; confirm against the data.'''\n",
    "    with open(txt_file, 'r') as f:\n",
    "        subset = [int(x) for x in f.readlines()]\n",
    "    return subset\n",
    "\n",
    "\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'train'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "data = np.load(os.path.join(root, 'data_' + phase + '.npz'), mmap_mode=None)\n",
    "print(data['pairs'].shape)\n",
    "print(data['offsets'].shape)\n",
    "\n",
    "# split the flat 'points' array into one .npy file per frame using 'offsets'\n",
    "offsets = data['offsets']\n",
    "# for i in range(len(offsets)):\n",
    "#     pc_np = data['points'][offsets[i]:offsets[i+1]]\n",
    "#     np.save(os.path.join(root, output_folder, '%d.npy'%i), pc_np)\n",
    "#     if i % 100 ==0:\n",
    "#         print(phase+' %d'%i)\n",
    "\n",
    "# the last frame has no next offset, so it is sliced to the end separately\n",
    "i = data['offsets'].shape[0] - 1 \n",
    "pc_np = data['points'][offsets[i]:, :]\n",
    "np.save(os.path.join(root, output_folder, '%d.npy'%i), pc_np)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# get surface normal\n",
    "def Surface_normals(cloud):\n",
    "    '''Estimate per-point surface normals for a pcl.PointCloud using a\n",
    "    kd-tree and 9 nearest neighbors; returns pcl normals whose array form\n",
    "    is Nx4 (nx, ny, nz, curvature — see usage below).'''\n",
    "    ne = cloud.make_NormalEstimation()\n",
    "    tree = cloud.make_kdtree()\n",
    "    ne.set_SearchMethod(tree)\n",
    "#     ne.set_RadiusSearch(2)\n",
    "    ne.set_KSearch(9)\n",
    "    cloud_normals = ne.compute()\n",
    "    return cloud_normals\n",
    "\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'train'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "file_names = os.listdir(os.path.join(root, output_folder))\n",
    "print(len(file_names))\n",
    "# for i, fname in enumerate(file_names):  \n",
    "#     pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]\n",
    "    \n",
    "#     cloud = pcl.PointCloud(pc_np.astype(np.float32))    \n",
    "#     sn = Surface_normals(cloud)\n",
    "#     sn_np = np.asarray(sn.to_array(), dtype=np.float32)  # Nx4, nx,ny,nz,curvature\n",
    "    \n",
    "#     output_np = np.concatenate((pc_np, sn_np), axis=1)  # Nx7\n",
    "#     np.save(os.path.join(root, output_folder, '%d.npy'%i), output_np)\n",
    "    \n",
    "#     if i % 100 ==0:\n",
    "#         print(phase+' %d'%i)\n",
    "\n",
    "# process only the last frame here (the commented loop handled the rest)\n",
    "i = len(file_names) - 1\n",
    "pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]\n",
    "\n",
    "cloud = pcl.PointCloud(pc_np.astype(np.float32))    \n",
    "sn = Surface_normals(cloud)\n",
    "sn_np = np.asarray(sn.to_array(), dtype=np.float32)  # Nx4, nx,ny,nz,curvature\n",
    "\n",
    "output_np = np.concatenate((pc_np, sn_np), axis=1)  # Nx7\n",
    "np.save(os.path.join(root, output_folder, '%d.npy'%i), output_np)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# prepare negative mining and other information\n",
    "# info -> 'pairs', 'icp', sample_num\n",
    "\n",
    "import pickle\n",
    "\n",
    "def save_obj(obj, name):\n",
    "    '''Pickle obj to file path name using the highest protocol.'''\n",
    "    with open(name, 'wb') as f:\n",
    "        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)\n",
    "\n",
    "def load_obj(name):\n",
    "    '''Load and return a pickled object from file path name.'''\n",
    "    with open(name, 'rb') as f:\n",
    "        return pickle.load(f)\n",
    "\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'val'\n",
    "output_folder = 'frames_' + phase\n",
    "info_obj_name = 'info_' + phase + '.pkl'\n",
    "\n",
    "# number of frames per split; must match the files in frames_<phase>\n",
    "if phase == 'train':\n",
    "    sample_num = 2638\n",
    "elif phase == 'test':\n",
    "    sample_num = 1054\n",
    "elif phase == 'val':\n",
    "    sample_num = 972\n",
    "\n",
    "data = np.load(os.path.join(root, 'data_' + phase + '.npz'), mmap_mode=None)\n",
    "pairs_np = data['pairs']\n",
    "icp_np = data['icp']\n",
    "\n",
    "print(pairs_np.shape)\n",
    "print(icp_np.shape)\n",
    "\n",
    "# for each frame, collect the overlapped frames (both directions of every\n",
    "# pair); one pass over the pairs replaces the original accidental\n",
    "# O(sample_num * num_pairs) nested scan, and sets deduplicate on the fly\n",
    "positive_sets = [set() for _ in range(sample_num)]\n",
    "for j in range(pairs_np.shape[0]):\n",
    "    a = int(pairs_np[j, 0])\n",
    "    b = int(pairs_np[j, 1])\n",
    "    positive_sets[a].add(b)\n",
    "    positive_sets[b].add(a)\n",
    "\n",
    "# find frames that have no neighbors\n",
    "# BUGFIX: the original tested len(positive_list) == 0 (the outer list,\n",
    "# never empty here) instead of len(positive_list[i]) == 0\n",
    "# according to experiments of 2018-10-18, every frame has a neighbor\n",
    "positive_list = [list(s) for s in positive_sets]\n",
    "for i in range(sample_num):\n",
    "    if len(positive_list[i]) == 0:\n",
    "        print('empty: %d' % i)\n",
    "\n",
    "# save to python dictionary\n",
    "info_dict = {'pairs_np': pairs_np, 'icp_np': icp_np, 'positive_list': positive_list, 'sample_num': sample_num}\n",
    "save_obj(info_dict, os.path.join(root, info_obj_name))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def cart2hom(pts_3d):\n",
    "    ''' Input: nx3 points in Cartesian\n",
    "        Output: nx4 points in Homogeneous by appending 1\n",
    "    '''\n",
    "    n = pts_3d.shape[0]\n",
    "    pts_3d_hom = np.hstack((pts_3d, np.ones((n,1))))\n",
    "    return pts_3d_hom\n",
    "\n",
    "def hom2cart(pts_3d_hom):\n",
    "    ''' Input: nx4 points in Homogeneous\n",
    "        Output: nx3 points in Cartesian (divide by the last coordinate)\n",
    "    '''\n",
    "    return pts_3d_hom[:, 0:3] / pts_3d_hom[:, 3:4]\n",
    "\n",
    "# validate the icp and neighbor data\n",
    "# (load_obj is defined in the pickle-helper cell above)\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'train'\n",
    "output_folder = 'frames_' + phase\n",
    "info_obj_name = 'info_' + phase + '.pkl'\n",
    "\n",
    "info_dict = load_obj(os.path.join(root, info_obj_name))\n",
    "# print(info_dict['pairs_np'])\n",
    "# print(info_dict['icp_np'])\n",
    "# print(info_dict['positive_list'])\n",
    "# print(info_dict['sample_num'])\n",
    "\n",
    "pairs_np = info_dict['pairs_np']\n",
    "icp_np = info_dict['icp_np']\n",
    "positive_list = info_dict['positive_list']\n",
    "sample_num = info_dict['sample_num']\n",
    "\n",
    "print(len(positive_list))\n",
    "\n",
    "# spot-check one pair: both frames must list each other as neighbors\n",
    "i = 888\n",
    "anc_idx = pairs_np[i, 0]\n",
    "pos_idx = pairs_np[i, 1]\n",
    "assert anc_idx in positive_list[pos_idx]\n",
    "assert pos_idx in positive_list[anc_idx]\n",
    "\n",
    "anc_pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%anc_idx))[:, 0:3]\n",
    "pos_pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%pos_idx))[:, 0:3]\n",
    "\n",
    "# fig1 = plt.figure()\n",
    "# ax = Axes3D(fig1)\n",
    "# ax.scatter(anc_pc_np[:,0].tolist(), anc_pc_np[:,1].tolist(), anc_pc_np[:,2].tolist(), s=5, c=[0, 0, 0])\n",
    "# axisEqual3D(ax)\n",
    "\n",
    "# fig2 = plt.figure()\n",
    "# ax = Axes3D(fig2)\n",
    "# ax.scatter(pos_pc_np[:,0].tolist(), pos_pc_np[:,1].tolist(), pos_pc_np[:,2].tolist(), s=5, c=[0, 0, 0])\n",
    "# axisEqual3D(ax)\n",
    "\n",
    "# transform the anchor cloud by the pair's ICP matrix; if the data is\n",
    "# consistent it should overlap the positive cloud (blue over red)\n",
    "transformed_anc_pc_np = hom2cart(np.dot(icp_np[i], cart2hom(anc_pc_np).T).T)\n",
    "fig3 = plt.figure()\n",
    "ax = Axes3D(fig3)\n",
    "ax.scatter(pos_pc_np[:,0].tolist(), pos_pc_np[:,1].tolist(), pos_pc_np[:,2].tolist(), s=5, c=[1, 0, 0])\n",
    "ax.scatter(transformed_anc_pc_np[:,0].tolist(), \n",
    "           transformed_anc_pc_np[:,1].tolist(), \n",
    "           transformed_anc_pc_np[:,2].tolist(), \n",
    "           s=5, c=[0, 0, 1])\n",
    "axisEqual3D(ax)\n",
    "ax.set_xlabel('x')\n",
    "ax.set_ylabel('y')\n",
    "ax.set_zlabel('z')\n",
    "\n",
    "plt.ion()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# per-frame point-count statistics and global xyz extents of the dataset\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'train'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "file_names = os.listdir(os.path.join(root, output_folder))\n",
    "print(len(file_names))\n",
    "pc_num_list = []\n",
    "large_pc_idx_list = []\n",
    "\n",
    "pc_np_list = []\n",
    "for i, fname in enumerate(file_names):    \n",
    "    pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]\n",
    "    \n",
    "#     fig = plt.figure()\n",
    "#     ax = Axes3D(fig)\n",
    "#     ax.scatter(pc_np[:,0].tolist(), pc_np[:,1].tolist(), pc_np[:,2].tolist(), s=0.1, c=[0.5,0.5,0.5])\n",
    "#     axisEqual3D(ax)\n",
    "\n",
    "#     plt.ion()\n",
    "#     plt.show()\n",
    "\n",
    "    pc_num_list.append(pc_np.shape[0])\n",
    "    pc_np_list.append(pc_np)\n",
    "    \n",
    "    # remember unusually large frames (>8000 points) for later inspection\n",
    "    if pc_np.shape[0] > 8000:\n",
    "        large_pc_idx_list.append(i)\n",
    "    \n",
    "    \n",
    "pc_num_np = np.asarray(pc_num_list)\n",
    "print(np.mean(pc_num_np))\n",
    "print(np.max(pc_num_np))\n",
    "print(np.min(pc_num_np))\n",
    "\n",
    "pc_np_all = np.concatenate(pc_np_list, axis=0)\n",
    "print('x_max: %f, x_min: %f, y_max: %f, y_min: %f, z_max: %f, z_min: %f' % \n",
    "      (np.max(pc_np_all[:, 0]), np.min(pc_np_all[:, 0]), \n",
    "       np.max(pc_np_all[:, 1]), np.min(pc_np_all[:, 1]), \n",
    "       np.max(pc_np_all[:, 2]), np.min(pc_np_all[:, 2])))\n",
    "\n",
    "print('x_mean: %f, y_mean: %f, z_mean: %f' % \n",
    "      (np.mean(pc_np_all[:, 0]), \n",
    "       np.mean(pc_np_all[:, 1]), \n",
    "       np.mean(pc_np_all[:, 2])))\n",
    "\n",
    "\n",
    "plt.hist(pc_num_np, bins='auto')\n",
    "plt.show()\n",
    "\n",
    "# fraction of frames with fewer than n points\n",
    "n = 4096\n",
    "print(np.sum(pc_num_np<n) / pc_num_np.shape[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# indices of frames with more than 8000 points (computed in the stats cell above)\n",
    "print(large_pc_idx_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# investigate how many points should be chosen for training:\n",
    "# overlay a 5000-point random subsample (red) on the full frame (black)\n",
    "i = 2566\n",
    "pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]\n",
    "print(pc_np.shape)\n",
    "\n",
    "choice_idx = np.random.choice(pc_np.shape[0], 5000, replace=False)\n",
    "pc_np_sampled = pc_np[choice_idx]\n",
    "    \n",
    "fig = plt.figure()\n",
    "ax = Axes3D(fig)\n",
    "ax.scatter(pc_np[:,0].tolist(), pc_np[:,1].tolist(), pc_np[:,2].tolist(), s=1, c=[0, 0, 0])\n",
    "ax.scatter(pc_np_sampled[:,0].tolist(), pc_np_sampled[:,1].tolist(), pc_np_sampled[:,2].tolist(), s=10, c=[1, 0, 0])\n",
    "axisEqual3D(ax)\n",
    "\n",
    "plt.ion()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1054\n",
      "node k=12 - mean 0.214459, max 0.852333, min 0.141794 \n",
      "\n"
     ]
    }
   ],
   "source": [
    "# investigate the statistics of the dataset:\n",
    "# for each frame, FPS-sample 768 nodes, then measure each node's distance\n",
    "# to its (knn_k-1)-th nearest neighboring node; report dataset-wide stats\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'test'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "sampler = FarthestSampler()\n",
    "\n",
    "file_names = os.listdir(os.path.join(root, output_folder))\n",
    "print(len(file_names))\n",
    "\n",
    "mean_list = []\n",
    "min_list = []\n",
    "max_list = []\n",
    "for i, fname in enumerate(file_names):    \n",
    "    pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]\n",
    "\n",
    "#     ###### knn analysis\n",
    "#     knn_k=2\n",
    "#     knn = KNNBuilder(k=knn_k)\n",
    "\n",
    "#     start_t = timeit.default_timer()\n",
    "#     index = knn.build_nn_index(np.ascontiguousarray(pc_np[:, 0:3], dtype=np.float32))\n",
    "#     D, I = knn.search_nn(index, np.ascontiguousarray(pc_np[:, 0:3], dtype=np.float32), k=knn_k)\n",
    "#     stop_t = timeit.default_timer()\n",
    "#     print(stop_t - start_t)\n",
    "\n",
    "#     D = np.sqrt(np.fabs(D))\n",
    "#     D = D[:, knn_k-1:knn_k]\n",
    "#     print(D.shape)\n",
    "#     print('pc k=%d - mean %f, max %f, min %f' % (knn_k, np.mean(D), np.max(D), np.min(D)))\n",
    "#     ###### knn analysis\n",
    "    \n",
    "    farthest_pts = sampler.sample(pc_np, 768)\n",
    "    # nearest neighbor analysis for nodes\n",
    "    knn_k=12\n",
    "    knn = KNNBuilder(k=knn_k)\n",
    "\n",
    "    index = knn.build_nn_index(np.ascontiguousarray(farthest_pts[:, 0:3], dtype=np.float32))\n",
    "    D, I = knn.search_nn(index, np.ascontiguousarray(farthest_pts[:, 0:3], dtype=np.float32), k=knn_k)\n",
    "\n",
    "    # faiss IndexFlatL2 returns squared distances; sqrt to get metric\n",
    "    # distance (fabs guards against tiny negative values from fp error)\n",
    "    D = np.sqrt(np.fabs(D))\n",
    "    # keep only the distance to the farthest of the k neighbors\n",
    "    D = D[:, knn_k-1:knn_k]\n",
    "    \n",
    "    mean_list.append(np.mean(D))\n",
    "    max_list.append(np.max(D))\n",
    "    min_list.append(np.min(D))\n",
    "    \n",
    "    \n",
    "node_knn_mean = np.mean(np.asarray(mean_list))\n",
    "node_knn_min = np.mean(np.asarray(min_list))\n",
    "node_knn_max = np.mean(np.asarray(max_list))\n",
    "print('node k=%d - mean %f, max %f, min %f \\n' % (knn_k, node_knn_mean, node_knn_max, node_knn_min))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# average number of points within a radius-r ball around each sampled node\n",
    "# (r = 0.8 below; the original '30cm' note did not match the code)\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'test'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "sampler = FarthestSampler()\n",
    "\n",
    "file_names = os.listdir(os.path.join(root, output_folder))\n",
    "print(len(file_names))\n",
    "\n",
    "r = 0.8\n",
    "within_r_num_list = []\n",
    "for i, fname in enumerate(file_names):    \n",
    "    pc_np = np.load(os.path.join(root, output_folder, '%d.npy'%i))[:, 0:3]  # Nx3\n",
    "    \n",
    "    # randomly halve the frame before FPS (presumably for speed — verify)\n",
    "    choice_idx = np.random.choice(pc_np.shape[0], int(pc_np.shape[0]/2), replace=False)\n",
    "    pc_np_sampled = pc_np[choice_idx]\n",
    "    farthest_pts = sampler.sample(pc_np_sampled, 512)  # Mx3\n",
    "    \n",
    "    # broadcast to an MxN pairwise distance matrix between nodes and points\n",
    "    farthest_pts_Mx1x3 = np.expand_dims(farthest_pts, axis=1)\n",
    "    pc_np_1xNx3 = np.expand_dims(pc_np, axis=0)\n",
    "    \n",
    "    dist_np = np.linalg.norm(farthest_pts_Mx1x3 - pc_np_1xNx3, axis=2)\n",
    "    ball_mask = dist_np < r  # MxN\n",
    "    within_r_num_list.append(np.sum(ball_mask) / farthest_pts.shape[0])\n",
    "    \n",
    "#     if i > 100:\n",
    "#         break\n",
    "        \n",
    "print(np.mean(np.asarray(within_r_num_list)))\n",
    "print(np.max(np.asarray(within_r_num_list)))\n",
    "print(np.min(np.asarray(within_r_num_list)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# quick inspection of the raw npz arrays for this phase\n",
    "root = '/ssd/dataset/SceneNN-DS-compact'\n",
    "phase = 'train'\n",
    "output_folder = 'frames_' + phase\n",
    "\n",
    "data = np.load(os.path.join(root, 'data_' + phase + '.npz'), mmap_mode=None)\n",
    "pairs_np = data['pairs']\n",
    "offsets = data['offsets']\n",
    "icp_np = data['icp']\n",
    "print(pairs_np)\n",
    "print(offsets.shape)\n",
    "print(icp_np.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
