from asyncore import read
from datetime import datetime
import importlib
import json
from math import ceil
from multiprocessing.spawn import prepare
import os
import sqlite3
import struct
import sys

import laspy
import numpy as np

from data_reader.DataReader import DataReader
from data_reader.NsReader import NsReader
from data_reader.OrReader import OrReader
from utils.StatusThread import StatusThread
from utils.Timer import Timer

class LAS:
    """Load point-cloud sweeps into per-sweep ``.las`` files plus a SQLite
    table that maps each sweep to its timestamp.

    For every sweep produced by a data reader, :meth:`load_data` writes
    ``<store_dir>/<sweep>.las`` and inserts one ``(data_file, tstamp)`` row
    into ``self.table``.  :meth:`load_test` drives a whole benchmark run,
    recording timings (``Timer``) and resource usage (``StatusThread``).

    NOTE(review): table and index names are interpolated directly into SQL
    strings.  SQLite cannot parameterize identifiers, and these names come
    from the local ``config.json`` — do not feed untrusted names in here.
    """

    # Prefix used for the status/time output files of this storage backend.
    pcm_prefix = "PCD_"

    # NOTE(review): leftover header template from an earlier ASCII-PCD
    # exporter; nothing in this class references it.  Kept for compatibility
    # in case external code reads LAS.HEADER.
    HEADER = '''\
    # .PCD v0.7 - Point Cloud Data file format
    VERSION 0.7
    FIELDS x y z
    SIZE 4 4 4
    TYPE F F F
    COUNT 1 1 1
    WIDTH {}
    HEIGHT 1
    VIEWPOINT 0 0 0 1 0 0 0
    POINTS {}
    DATA ascii
    '''

    def __init__(self):
        # Per-instance state.  These were previously CLASS-level mutable
        # attributes, so every LAS() instance appended to the SAME lists:
        # from the second test case on, index_columns/index_names carried
        # stale entries and create_index() tried to create duplicates.
        self.cur = None          # sqlite3 cursor (set in prepare())
        self.conn = None         # sqlite3 connection (set in prepare())
        self.table = ""          # name of the sweep-index table
        self.data_dir = ""       # dataset directory from the test case
        self.store_dir = ""      # directory receiving the .las files
        self.index_columns = []  # columns to index after loading
        self.index_names = []    # matching index names, parallel list

    def drop_table(self):
        """Drop the sweep table and its indexes if they exist."""
        self.cur.execute('DROP TABLE IF EXISTS ' + self.table)
        for index in self.index_names:
            self.cur.execute('DROP INDEX IF EXISTS ' + index)
        self.conn.commit()

    def create_table(self):
        """Create the sweep-index table: (id, data_file, tstamp)."""
        self.cur.execute("CREATE TABLE %s( "
                    "id INTEGER PRIMARY KEY AUTOINCREMENT,"
                    "data_file TEXT NOT NULL,"
                    "tstamp TIMESTAMP NOT NULL)" % self.table)
        self.conn.commit()

    def create_index(self):
        """Create one index per (column, name) pair configured in load_test.

        Deferred until close() so bulk inserts run without index upkeep.
        """
        for col, name in zip(self.index_columns, self.index_names):
            self.cur.execute("CREATE INDEX %s ON %s(%s)" % (name, self.table, col))
        self.conn.commit()

    def load_test(self, test_case):
        """Run one load benchmark.

        ``test_case`` is a dict with keys ``reader`` (class name inside the
        ``data_reader`` package), ``dir`` (dataset directory) and ``table``
        (target table name).  Writes timing results to
        ``<dir><pcm_prefix>time.txt``.
        """
        reader_name = test_case["reader"]
        self.data_dir = test_case["dir"]
        self.table = test_case["table"]
        self.store_dir = self.data_dir + "las/"
        # Assign (not append): keeps repeated runs idempotent.
        self.index_columns = ["data_file", "tstamp"]
        self.index_names = [self.table + "_data_file_index",
                            self.table + "_tstamp_index"]

        # Resolve the reader class dynamically.  importlib replaces the
        # previous exec('from ... import ...'), which executed a built string.
        reader_module = importlib.import_module("data_reader." + reader_name)
        reader = getattr(reader_module, reader_name)(self.data_dir)

        timer = Timer()
        timer.Start()
        self.prepare()
        timer.StopAndRecord("Init")

        # Record resource usage of this process while loading.
        status = StatusThread(os.getpid(), self.pcm_prefix + "load", test_case["dir"])
        status.start()

        sweep = 0
        counts = 0
        while reader.has_more_sweep():
            sweep += 1
            if sweep % 10 == 1:  # progress heartbeat every 10 sweeps
                print(sweep)
            points = reader.read_sweep()
            tstamp = reader.read_timestamp()
            timer.Start()
            self.load_data(sweep, points, tstamp)
            timer.Pause()
            counts += len(points)
        print("count " + str(counts))

        timer.StopAndRecord("Load")
        status.end()

        # Index creation happens inside close(); monitor it separately.
        status = StatusThread(os.getpid(), self.pcm_prefix + "close", test_case["dir"])
        status.start()
        timer.Start()
        self.close()
        timer.StopAndRecord("Close")
        timer.PrintAll()
        timer.Save(test_case["dir"] + self.pcm_prefix + "time.txt")
        status.end()

    def close(self):
        """Build the deferred indexes, then release cursor and connection."""
        self.create_index()
        self.cur.close()
        self.conn.close()

    def prepare(self):
        """Open the SQLite database, reset the table, and make sure the
        .las output directory exists."""
        self.conn = sqlite3.connect(self.data_dir + self.table + ".db")
        self.cur = self.conn.cursor()
        self.drop_table()
        self.create_table()
        # makedirs(exist_ok=True): also creates missing parents, where the
        # previous os.mkdir() would raise FileNotFoundError.
        os.makedirs(self.store_dir, exist_ok=True)

    def load_data(self, sweep, points, tstamp):
        """Write one sweep to ``<store_dir>/<sweep>.las`` and register it.

        ``points`` is a sequence of (x, y, z, ...) tuples; only the first
        three components are stored.
        """
        header = laspy.LasHeader(point_format=1, version="1.4")
        header.offsets = np.array([0, 0, 0])
        # Micrometre scale keeps six decimal digits of precision.
        header.scales = np.array([0.000001, 0.000001, 0.000001])

        # Context manager guarantees the file is closed even if an
        # exception interrupts the write.
        with laspy.open(self.store_dir + str(sweep) + ".las",
                        mode="w", header=header) as writer:
            point_record = laspy.PackedPointRecord.zeros(len(points), header.point_format)
            point_record.x = [p[0] for p in points]
            point_record.y = [p[1] for p in points]
            point_record.z = [p[2] for p in points]
            writer.write_points(point_record)

        # NOTE(review): data_file stores the sweep NUMBER, not the actual
        # file name "<sweep>.las"; readers apparently reconstruct the path.
        # Confirm against the query side before changing.
        self.cur.execute("INSERT INTO " + self.table + "(data_file, tstamp) values (?, ?)",
                            (sweep, tstamp))
        self.conn.commit()

if __name__ == '__main__':
    # Load the benchmark configuration from the current working directory.
    with open(os.getcwd() + '/config.json', 'r') as f:
        config = json.load(f)

    # Run every selected test case through a fresh loader instance.
    # (The previously-read "data_base_path" value was never used.)
    for c in config["test_case"]:
        test_case = config["collect_dataset"][c]
        LAS().load_test(test_case)
