{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# All imports in one place; the original cell imported faiss and\n",
    "# PIL twice -- duplicates removed, groups ordered stdlib -> third-party.\n",
    "import math\n",
    "import numbers\n",
    "import os\n",
    "import os.path\n",
    "import random\n",
    "import struct\n",
    "import time\n",
    "import timeit\n",
    "\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from mpl_toolkits.mplot3d import Axes3D\n",
    "from PIL import Image, ImageDraw, ImageMath\n",
    "\n",
    "import torch\n",
    "import torch.utils.data as data\n",
    "import torchvision\n",
    "\n",
    "import faiss\n",
    "import pcl\n",
    "\n",
    "%matplotlib qt5"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class KNNBuilder_GPU:\n",
    "    '''Builds a flat-L2 Faiss index on GPU 0 and runs k-NN queries.'''\n",
    "    def __init__(self, k):\n",
    "        '''\n",
    "        :param k: number of neighbours returned by self_build_search\n",
    "        '''\n",
    "        self.k = k\n",
    "        self.dimension = 3  # points are 3D (x, y, z)\n",
    "        \n",
    "        # we need only a StandardGpuResources per GPU\n",
    "        self.res = faiss.StandardGpuResources()\n",
    "        # cap scratch memory at 10%; NOTE(review): setTempMemoryFraction\n",
    "        # was removed in newer faiss releases -- confirm installed version\n",
    "        self.res.setTempMemoryFraction(0.1)\n",
    "        self.flat_config = faiss.GpuIndexFlatConfig()\n",
    "        self.flat_config.device = 0  # always the first GPU\n",
    "        \n",
    "    def build_nn_index(self, database):\n",
    "        '''\n",
    "        :param database: numpy array of Nx3 (float32, contiguous)\n",
    "        :return: Faiss index resident on the GPU (not CPU)\n",
    "        '''\n",
    "        index = faiss.GpuIndexFlatL2(self.res, self.dimension, self.flat_config)  # dimension is 3\n",
    "        index.add(database)\n",
    "        return index\n",
    "    \n",
    "    def search_nn(self, index, query, k):\n",
    "        '''\n",
    "        :param index: Faiss index\n",
    "        :param query: numpy array of Nx3\n",
    "        :param k: neighbours per query point\n",
    "        :return: D: numpy array of Nxk (squared L2 distances)\n",
    "                 I: numpy array of Nxk (indices into the database)\n",
    "        '''\n",
    "        D, I = index.search(query, k)\n",
    "        return D, I\n",
    "    \n",
    "    def self_build_search(self, x):\n",
    "        '''\n",
    "        Search x against itself (with k >= 2 the nearest neighbour of\n",
    "        each point is typically the point itself).\n",
    "\n",
    "        :param x: numpy array of Nxd\n",
    "        :return: D: numpy array of Nxk\n",
    "                 I: numpy array of Nxk\n",
    "        '''\n",
    "        x = np.ascontiguousarray(x, dtype=np.float32)\n",
    "        index = self.build_nn_index(x)\n",
    "        D, I = self.search_nn(index, x, self.k)\n",
    "        return D, I\n",
    "    \n",
    "\n",
    "class KNNBuilder:\n",
    "    '''CPU k-nearest-neighbour search backed by a flat Faiss L2 index.'''\n",
    "\n",
    "    def __init__(self, k):\n",
    "        '''\n",
    "        :param k: neighbour count used by self_build_search\n",
    "        '''\n",
    "        self.k = k\n",
    "        self.dimension = 3\n",
    "\n",
    "    def build_nn_index(self, database):\n",
    "        '''\n",
    "        :param database: numpy array of Nx3\n",
    "        :return: Faiss index, in CPU\n",
    "        '''\n",
    "        index = faiss.IndexFlatL2(self.dimension)  # dimension is 3\n",
    "        index.add(database)\n",
    "        return index\n",
    "\n",
    "    def search_nn(self, index, query, k):\n",
    "        '''\n",
    "        :param index: Faiss index\n",
    "        :param query: numpy array of Nx3\n",
    "        :return: D: numpy array of Nxk\n",
    "                 I: numpy array of Nxk\n",
    "        '''\n",
    "        return index.search(query, k)\n",
    "\n",
    "    def self_build_search(self, x):\n",
    "        '''\n",
    "        :param x: numpy array of Nxd\n",
    "        :return: D: numpy array of Nxk\n",
    "                 I: numpy array of Nxk\n",
    "        '''\n",
    "        points = np.ascontiguousarray(x, dtype=np.float32)\n",
    "        nn_index = self.build_nn_index(points)\n",
    "        return self.search_nn(nn_index, points, self.k)\n",
    "    \n",
    "\n",
    "class PCSampler:\n",
    "    '''Voxel-grid downsampling of point clouds via python-pcl.'''\n",
    "    def __init__(self, leaf_size, minimum_pc_num):       \n",
    "        # voxel edge length, in the same units as the point coordinates\n",
    "        self.leaf_size = leaf_size\n",
    "        # desired lower bound on point count after sampling\n",
    "        self.minimum_pc_num = minimum_pc_num\n",
    "    \n",
    "    def sample_pc(self, pc, leaf_size):\n",
    "        '''\n",
    "        Downsample pc with a voxel grid filter of the given leaf size.\n",
    "\n",
    "        :param pc: input numpy array of Nx3\n",
    "        :return: sampled_pc of Mx3\n",
    "        '''\n",
    "        # NOTE(review): pcl.PointCloud expects float32 input -- existing\n",
    "        # callers pass float32; confirm for any new call site\n",
    "        cloud = pcl.PointCloud(pc)\n",
    "        sor = cloud.make_voxel_grid_filter()\n",
    "        sor.set_leaf_size(leaf_size, leaf_size, leaf_size)\n",
    "        cloud_filtered = sor.filter()\n",
    "        sampled_pc = np.asarray(cloud_filtered)\n",
    "        \n",
    "        return sampled_pc\n",
    "    \n",
    "    def sample_pc_wrapper(self, pc):\n",
    "        '''\n",
    "        ensure that the sampled pc is more than a certain amount:\n",
    "        retries with the leaf size shrunk by 0.04 per attempt until the\n",
    "        sampled cloud has at least minimum_pc_num points, or gives up\n",
    "        once the leaf size would become non-positive (in which case the\n",
    "        last result is returned and may still be below the minimum).\n",
    "        '''\n",
    "        retry_counter = 0\n",
    "        \n",
    "        sampled_pc = self.sample_pc(pc, self.leaf_size)\n",
    "        while sampled_pc.shape[0] < self.minimum_pc_num:\n",
    "            retry_counter += 1\n",
    "            leaf_size = self.leaf_size - 0.04*retry_counter\n",
    "            if leaf_size <= 0:\n",
    "                break\n",
    "            sampled_pc = self.sample_pc(pc, leaf_size)\n",
    "        \n",
    "        return sampled_pc\n",
    "    \n",
    "    \n",
    "def axisEqual3D(ax):\n",
    "    '''Force equal data scaling on all three axes of a 3D matplotlib Axes.'''\n",
    "    limits = np.array([getattr(ax, 'get_{}lim'.format(axis))() for axis in 'xyz'])\n",
    "    spans = limits[:, 1] - limits[:, 0]\n",
    "    midpoints = np.mean(limits, axis=1)\n",
    "    # half of the widest span becomes the common half-range\n",
    "    half_span = max(abs(spans)) / 2\n",
    "    for midpoint, axis in zip(midpoints, 'xyz'):\n",
    "        getattr(ax, 'set_{}lim'.format(axis))(midpoint - half_span, midpoint + half_span)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_train_txt(txt_path):\n",
    "    '''\n",
    "    Parse a training-list file into a list of dicts.\n",
    "\n",
    "    Each line has the form 'file|positive indices|non-negative indices'\n",
    "    (three '|'-separated fields); malformed lines are reported and skipped.\n",
    "\n",
    "    :param txt_path: path to the txt file\n",
    "    :return: list of dicts with keys 'file', 'pos_list', 'nonneg_list'\n",
    "    '''\n",
    "    dataset = []\n",
    "    # 'with' guarantees the file is closed even if parsing raises\n",
    "    with open(txt_path, 'r') as f:\n",
    "        for i, line_str in enumerate(f):\n",
    "            # convert each line to a dict\n",
    "            line_splitted_list = line_str.split('|')\n",
    "            # explicit check instead of assert: asserts vanish under\n",
    "            # python -O, and the old broad except masked other errors\n",
    "            if len(line_splitted_list) != 3:\n",
    "                print('Invalid line.')\n",
    "                print(i)\n",
    "                print(line_splitted_list)\n",
    "                continue\n",
    "            \n",
    "            file_name = line_splitted_list[0].strip()\n",
    "            positive_lines = list(map(int, line_splitted_list[1].split()))\n",
    "            non_negative_lines = list(map(int, line_splitted_list[2].split()))\n",
    "            \n",
    "            data = {'file': file_name, 'pos_list': positive_lines, 'nonneg_list': non_negative_lines}\n",
    "            dataset.append(data)\n",
    "        \n",
    "    return dataset\n",
    "\n",
    "\n",
    "def load_bin(file_path, num_cols=6):\n",
    "    '''\n",
    "    Read a flat float32 binary file as (N, num_cols) records and keep xyz.\n",
    "\n",
    "    :param file_path: path to the .bin file\n",
    "    :param num_cols: float32 values per point record\n",
    "    :return: numpy array of Nx3\n",
    "    '''\n",
    "    raw = np.fromfile(file_path, dtype=np.float32).reshape(-1, num_cols)\n",
    "    # drop the remaining columns (surface normals); they are recomputed\n",
    "    # later with python-pcl\n",
    "    return raw[:, :3]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# get statistic and visualization of data provided by 3dfeatnet\n",
    "# NOTE(review): absolute /ssd paths are machine-specific -- parameterize\n",
    "# before sharing this notebook\n",
    "train_folder_path = '/ssd/dataset/oxford/train'\n",
    "train_txt_path = '/ssd/dataset/oxford/train_relative.txt'\n",
    "\n",
    "test_folder_path = '/ssd/dataset/oxford/test_models'\n",
    "test_txt_path = '/ssd/dataset/oxford/test_models/groundtruths.txt'  # with rotation, translation gt, for each pair\n",
    "\n",
    "# each entry: {'file': ..., 'pos_list': [...], 'nonneg_list': [...]}\n",
    "dataset = load_train_txt(train_txt_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# test: inspect sample index 1100 -- compare nearest-neighbour spacing\n",
    "# of the raw cloud vs. the outlier-filtered + subsampled cloud\n",
    "\n",
    "def report_knn_spacing(pc, knn_k=2):\n",
    "    '''Print index build/search time and k-th-NN distance stats for an Nx3 cloud.'''\n",
    "    knn = KNNBuilder(k=knn_k)\n",
    "    points = np.ascontiguousarray(pc[:, 0:3], dtype=np.float32)\n",
    "\n",
    "    start_t = timeit.default_timer()\n",
    "    index = knn.build_nn_index(points)\n",
    "    D, I = knn.search_nn(index, points, k=knn_k)\n",
    "    stop_t = timeit.default_timer()\n",
    "    print(stop_t - start_t)\n",
    "\n",
    "    D = np.sqrt(np.fabs(D))  # faiss returns squared L2 distances\n",
    "    D = D[:, knn_k-1:knn_k]  # keep only the k-th neighbour column\n",
    "    print(D.shape)\n",
    "    print('pc k=%d - mean %f, max %f, min %f' % (knn_k, np.mean(D), np.max(D), np.min(D)))\n",
    "\n",
    "\n",
    "for i, data in enumerate(dataset):\n",
    "    if i<1100:\n",
    "        continue\n",
    "    print('---')\n",
    "    \n",
    "    # original pc\n",
    "    pc_np = load_bin(os.path.join(train_folder_path, data['file']))\n",
    "    print(pc_np.shape)\n",
    "    report_knn_spacing(pc_np)  # was an inlined copy of the block below\n",
    "    \n",
    "    # outlier filter\n",
    "    fil = pcl.PointCloud(pc_np).make_statistical_outlier_filter()\n",
    "    fil.set_mean_k(50)\n",
    "    fil.set_std_dev_mul_thresh(2)\n",
    "    sampled_pc_np = np.asarray(fil.filter())\n",
    "    print(sampled_pc_np.shape)\n",
    "\n",
    "    # random selection; np.random.choice raises if fewer than 16384 remain\n",
    "    choice_idx = np.random.choice(sampled_pc_np.shape[0], 16384, replace=False)\n",
    "    sampled_pc_np = sampled_pc_np[choice_idx, :]\n",
    "    print(sampled_pc_np.shape)\n",
    "    report_knn_spacing(sampled_pc_np)\n",
    "    \n",
    "    break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# get surface normal\n",
    "def Surface_normals(cloud):\n",
    "    '''\n",
    "    Estimate per-point surface normals and curvature with pcl.\n",
    "\n",
    "    :param cloud: pcl.PointCloud of N points\n",
    "    :return: pcl normal-estimation result; to_array() yields Nx4\n",
    "             (nx, ny, nz, curvature)\n",
    "    '''\n",
    "    ne = cloud.make_NormalEstimation()\n",
    "    tree = cloud.make_kdtree()\n",
    "    ne.set_SearchMethod(tree)\n",
    "#     ne.set_RadiusSearch(2)\n",
    "    ne.set_KSearch(9)  # use the 9 nearest neighbours per point\n",
    "    cloud_normals = ne.compute()\n",
    "    return cloud_normals\n",
    "\n",
    "# 1. voxel grid filter\n",
    "# 2. calculate surface normal and curvature\n",
    "# (the statistical outlier filter that used to sit between the steps\n",
    "#  was disabled; its dead commented-out code has been removed)\n",
    "def process_pc(pc_np):\n",
    "    '''\n",
    "    Downsample a raw cloud and attach per-point surface normals.\n",
    "\n",
    "    :param pc_np: numpy array of Nx3, float32\n",
    "    :return: numpy array of Mx7 -- x, y, z, nx, ny, nz, curvature\n",
    "    '''\n",
    "    # 1. voxel grid filter (retries with smaller leaves until >= 20480 pts)\n",
    "    sampler = PCSampler(leaf_size=0.2, minimum_pc_num=20480)\n",
    "    pc_np = sampler.sample_pc_wrapper(pc_np)\n",
    "    \n",
    "    # 2. calculate surface normal and curvature\n",
    "    assert pc_np.shape[1] == 3\n",
    "    cloud = pcl.PointCloud(pc_np)\n",
    "    sn = Surface_normals(cloud)\n",
    "    sn_np = np.asarray(sn.to_array(), dtype=np.float32)  # Nx4, nx,ny,nz,curvature\n",
    "    \n",
    "    output_np = np.concatenate((pc_np, sn_np), axis=1)  # Nx7\n",
    "    \n",
    "    return output_np\n",
    "    \n",
    "    \n",
    "def process_dataset(dataset, train_folder_path, output_folder):\n",
    "    '''\n",
    "    Convert every .bin in dataset into an Nx7 .npy, mirroring the\n",
    "    source sub-directory layout; prints the running minimum point count.\n",
    "\n",
    "    :param dataset: list of dicts from load_train_txt\n",
    "    :param train_folder_path: root folder containing the .bin files\n",
    "    :param output_folder: root folder for the output .npy files\n",
    "    '''\n",
    "    min_pc_num = 1000000\n",
    "    for i, data in enumerate(dataset):\n",
    "        # load bin\n",
    "        bin_file_name = data['file']\n",
    "        pc_np = load_bin(os.path.join(train_folder_path, bin_file_name))\n",
    "        \n",
    "        # process pc\n",
    "        pc_np = process_pc(pc_np)\n",
    "        \n",
    "        # save pc as numpy array; exist_ok avoids the isdir/makedirs race\n",
    "        # and dirname/splitext replace the fragile fixed-width slices\n",
    "        os.makedirs(os.path.join(output_folder, os.path.dirname(bin_file_name)), exist_ok=True)\n",
    "        npy_file_name = os.path.splitext(bin_file_name)[0] + '.npy'\n",
    "        np.save(os.path.join(output_folder, npy_file_name), pc_np)\n",
    "        \n",
    "        min_pc_num = min(min_pc_num, pc_np.shape[0])\n",
    "            \n",
    "        if i % 100 == 0:\n",
    "            print('%d - min_pc_num %d' % (i, min_pc_num))\n",
    "    print('minimum point number: %d' % min_pc_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 - min_pc_num 37907\n",
      "100 - min_pc_num 29706\n",
      "200 - min_pc_num 25185\n",
      "300 - min_pc_num 23140\n",
      "400 - min_pc_num 21409\n",
      "500 - min_pc_num 21361\n",
      "600 - min_pc_num 21361\n",
      "700 - min_pc_num 21361\n",
      "800 - min_pc_num 20889\n",
      "900 - min_pc_num 20889\n",
      "1000 - min_pc_num 20889\n",
      "1100 - min_pc_num 20889\n",
      "1200 - min_pc_num 20889\n",
      "1300 - min_pc_num 20889\n",
      "1400 - min_pc_num 20889\n",
      "1500 - min_pc_num 20889\n",
      "1600 - min_pc_num 20889\n",
      "1700 - min_pc_num 20698\n",
      "1800 - min_pc_num 20698\n",
      "1900 - min_pc_num 20698\n",
      "2000 - min_pc_num 20698\n",
      "2100 - min_pc_num 20698\n",
      "2200 - min_pc_num 20698\n",
      "2300 - min_pc_num 20698\n",
      "2400 - min_pc_num 20698\n",
      "2500 - min_pc_num 20698\n",
      "2600 - min_pc_num 20698\n",
      "2700 - min_pc_num 20698\n",
      "2800 - min_pc_num 20698\n",
      "2900 - min_pc_num 20698\n",
      "3000 - min_pc_num 20698\n",
      "3100 - min_pc_num 20630\n",
      "3200 - min_pc_num 20630\n",
      "3300 - min_pc_num 20630\n",
      "3400 - min_pc_num 20630\n",
      "3500 - min_pc_num 20630\n",
      "3600 - min_pc_num 20630\n",
      "3700 - min_pc_num 20630\n",
      "3800 - min_pc_num 20630\n",
      "3900 - min_pc_num 20630\n",
      "4000 - min_pc_num 20630\n",
      "4100 - min_pc_num 20630\n",
      "4200 - min_pc_num 20630\n",
      "4300 - min_pc_num 20630\n",
      "4400 - min_pc_num 20630\n",
      "4500 - min_pc_num 20630\n",
      "4600 - min_pc_num 20630\n",
      "4700 - min_pc_num 20630\n",
      "4800 - min_pc_num 20630\n",
      "4900 - min_pc_num 20630\n",
      "5000 - min_pc_num 20630\n",
      "5100 - min_pc_num 20630\n",
      "5200 - min_pc_num 20630\n",
      "5300 - min_pc_num 20630\n",
      "5400 - min_pc_num 20630\n",
      "5500 - min_pc_num 20525\n",
      "5600 - min_pc_num 20525\n",
      "5700 - min_pc_num 20525\n",
      "5800 - min_pc_num 20525\n",
      "5900 - min_pc_num 20525\n",
      "6000 - min_pc_num 20525\n",
      "6100 - min_pc_num 20525\n",
      "6200 - min_pc_num 20525\n",
      "6300 - min_pc_num 20525\n",
      "6400 - min_pc_num 20525\n",
      "6500 - min_pc_num 20525\n",
      "6600 - min_pc_num 20525\n",
      "6700 - min_pc_num 20525\n",
      "6800 - min_pc_num 20525\n",
      "6900 - min_pc_num 20525\n",
      "7000 - min_pc_num 20525\n",
      "7100 - min_pc_num 20525\n",
      "7200 - min_pc_num 20525\n",
      "7300 - min_pc_num 20525\n",
      "7400 - min_pc_num 20525\n",
      "7500 - min_pc_num 20525\n",
      "7600 - min_pc_num 20525\n",
      "7700 - min_pc_num 20525\n",
      "7800 - min_pc_num 20525\n",
      "7900 - min_pc_num 20525\n",
      "8000 - min_pc_num 20525\n",
      "8100 - min_pc_num 20525\n",
      "8200 - min_pc_num 20525\n",
      "8300 - min_pc_num 20525\n",
      "8400 - min_pc_num 20525\n",
      "8500 - min_pc_num 20525\n",
      "8600 - min_pc_num 20525\n",
      "8700 - min_pc_num 20525\n",
      "8800 - min_pc_num 20525\n",
      "8900 - min_pc_num 20525\n",
      "9000 - min_pc_num 20525\n",
      "9100 - min_pc_num 20525\n",
      "9200 - min_pc_num 20525\n",
      "9300 - min_pc_num 20525\n",
      "9400 - min_pc_num 20525\n",
      "9500 - min_pc_num 20525\n",
      "9600 - min_pc_num 20525\n",
      "9700 - min_pc_num 20500\n",
      "9800 - min_pc_num 20500\n",
      "9900 - min_pc_num 20500\n",
      "10000 - min_pc_num 20500\n",
      "10100 - min_pc_num 20500\n",
      "10200 - min_pc_num 20500\n",
      "10300 - min_pc_num 20500\n",
      "10400 - min_pc_num 20500\n",
      "10500 - min_pc_num 20500\n",
      "10600 - min_pc_num 20500\n",
      "10700 - min_pc_num 20500\n",
      "10800 - min_pc_num 20500\n",
      "10900 - min_pc_num 20500\n",
      "11000 - min_pc_num 20500\n",
      "11100 - min_pc_num 20500\n",
      "11200 - min_pc_num 20500\n",
      "11300 - min_pc_num 20500\n",
      "11400 - min_pc_num 20500\n",
      "11500 - min_pc_num 20500\n",
      "11600 - min_pc_num 20500\n",
      "11700 - min_pc_num 20500\n",
      "11800 - min_pc_num 20500\n",
      "11900 - min_pc_num 20500\n",
      "12000 - min_pc_num 20500\n",
      "12100 - min_pc_num 20500\n",
      "12200 - min_pc_num 20500\n",
      "12300 - min_pc_num 20500\n",
      "12400 - min_pc_num 20500\n",
      "12500 - min_pc_num 20500\n",
      "12600 - min_pc_num 20500\n",
      "12700 - min_pc_num 20500\n",
      "12800 - min_pc_num 20500\n",
      "12900 - min_pc_num 20500\n",
      "13000 - min_pc_num 20500\n",
      "13100 - min_pc_num 20500\n",
      "13200 - min_pc_num 20500\n",
      "13300 - min_pc_num 20500\n",
      "13400 - min_pc_num 20500\n",
      "13500 - min_pc_num 20500\n",
      "13600 - min_pc_num 20500\n",
      "13700 - min_pc_num 20500\n",
      "13800 - min_pc_num 20500\n",
      "13900 - min_pc_num 20500\n",
      "14000 - min_pc_num 20500\n",
      "14100 - min_pc_num 20500\n",
      "14200 - min_pc_num 20500\n",
      "14300 - min_pc_num 20500\n",
      "14400 - min_pc_num 20500\n",
      "14500 - min_pc_num 20500\n",
      "14600 - min_pc_num 20500\n",
      "14700 - min_pc_num 20500\n",
      "14800 - min_pc_num 20500\n",
      "14900 - min_pc_num 20500\n",
      "15000 - min_pc_num 20500\n",
      "15100 - min_pc_num 20500\n",
      "15200 - min_pc_num 20500\n",
      "15300 - min_pc_num 20500\n",
      "15400 - min_pc_num 20500\n",
      "15500 - min_pc_num 20500\n",
      "15600 - min_pc_num 20500\n",
      "15700 - min_pc_num 20500\n",
      "15800 - min_pc_num 20500\n",
      "15900 - min_pc_num 20500\n",
      "16000 - min_pc_num 20500\n",
      "16100 - min_pc_num 20500\n",
      "16200 - min_pc_num 20500\n",
      "16300 - min_pc_num 20500\n",
      "16400 - min_pc_num 20500\n",
      "16500 - min_pc_num 20500\n",
      "16600 - min_pc_num 20500\n",
      "16700 - min_pc_num 20500\n",
      "16800 - min_pc_num 20500\n",
      "16900 - min_pc_num 20500\n",
      "17000 - min_pc_num 20500\n",
      "17100 - min_pc_num 20500\n",
      "17200 - min_pc_num 20500\n",
      "17300 - min_pc_num 20500\n",
      "17400 - min_pc_num 20500\n",
      "17500 - min_pc_num 20500\n",
      "17600 - min_pc_num 20500\n",
      "17700 - min_pc_num 20500\n",
      "17800 - min_pc_num 20500\n",
      "17900 - min_pc_num 20500\n",
      "18000 - min_pc_num 20500\n",
      "18100 - min_pc_num 20500\n",
      "18200 - min_pc_num 20500\n",
      "18300 - min_pc_num 20500\n",
      "18400 - min_pc_num 20500\n",
      "18500 - min_pc_num 20500\n",
      "18600 - min_pc_num 20500\n",
      "18700 - min_pc_num 20500\n",
      "18800 - min_pc_num 20500\n",
      "18900 - min_pc_num 20500\n",
      "19000 - min_pc_num 20500\n",
      "19100 - min_pc_num 20500\n",
      "19200 - min_pc_num 20500\n",
      "19300 - min_pc_num 20500\n",
      "19400 - min_pc_num 20500\n",
      "19500 - min_pc_num 20500\n",
      "19600 - min_pc_num 20500\n",
      "19700 - min_pc_num 20500\n",
      "19800 - min_pc_num 20500\n",
      "19900 - min_pc_num 20500\n",
      "20000 - min_pc_num 20500\n",
      "20100 - min_pc_num 20500\n",
      "20200 - min_pc_num 20500\n",
      "20300 - min_pc_num 20500\n",
      "20400 - min_pc_num 20500\n",
      "20500 - min_pc_num 20500\n",
      "20600 - min_pc_num 20500\n",
      "20700 - min_pc_num 20500\n",
      "20800 - min_pc_num 20500\n",
      "20900 - min_pc_num 20500\n",
      "21000 - min_pc_num 20500\n",
      "21100 - min_pc_num 20500\n",
      "21200 - min_pc_num 20500\n",
      "21300 - min_pc_num 20500\n",
      "21400 - min_pc_num 20500\n",
      "21500 - min_pc_num 20500\n",
      "21600 - min_pc_num 20500\n",
      "21700 - min_pc_num 20500\n",
      "21800 - min_pc_num 20500\n",
      "minimum point number: 20500\n"
     ]
    }
   ],
   "source": [
    "# train: convert every training .bin into an Nx7 .npy\n",
    "# (output folder name records that no outlier filter was applied)\n",
    "train_folder_path = '/ssd/dataset/oxford/train'\n",
    "train_txt_path = '/ssd/dataset/oxford/train_relative.txt'\n",
    "dataset = load_train_txt(train_txt_path)\n",
    "process_dataset(dataset, train_folder_path, '/ssd/dataset/oxford/train_np_nofilter')\n",
    "\n",
    "# test\n",
    "test_folder_path = '/ssd/dataset/oxford/test_models'\n",
    "test_txt_path = '/ssd/dataset/oxford/test_models/groundtruths.txt'  # with rotation, translation gt, for each pair\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sanity check one processed .npy\n",
    "# NOTE(review): this reads from train_np, not the train_np_nofilter\n",
    "# folder written above -- confirm which output set is being inspected\n",
    "pc_np = np.load('/ssd/dataset/oxford/train_np/2014-06-26-09-31-18/0.npy')  # x, y, z, nx, ny, nz, curvature\n",
    "print(pc_np.dtype)\n",
    "print(pc_np.shape)\n",
    "print(pc_np)\n",
    "\n",
    "# max / min of the three coordinate axes\n",
    "print('max of three axis')\n",
    "print(np.max(pc_np[:, 0:3], axis=0))\n",
    "print('min of three axis')\n",
    "print(np.min(pc_np[:, 0:3], axis=0))\n",
    "\n",
    "# mean of norm\n",
    "print('surface normal mean norm, should be 1')\n",
    "print(np.mean(np.linalg.norm(pc_np[:, 3:6], axis=1)))\n",
    "\n",
    "# statistics of curvature\n",
    "print('curvature mean %f, min %f, max %f' % (np.mean(pc_np[:, 6]), np.min(pc_np[:, 6]), np.max(pc_np[:, 6])))\n",
    "\n",
    "\n",
    "fig = plt.figure()\n",
    "ax = Axes3D(fig)\n",
    "ax.scatter(pc_np[:,0].tolist(), pc_np[:,1].tolist(), pc_np[:,2].tolist(), s=0.1, c=[0.5,0.5,0.5])\n",
    "axisEqual3D(ax)\n",
    "\n",
    "plt.ion()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
