import importlib
import json
import os
import sqlite3
import struct
import sys
from asyncore import read
from datetime import datetime
from math import ceil
from multiprocessing.spawn import prepare

import pymongo

from data_reader.DataReader import DataReader
from data_reader.NsReader import NsReader
from data_reader.OrReader import OrReader
from utils.StatusThread import StatusThread
from utils.Timer import Timer

class SQLiteJson:
    """Benchmark loader that stores point-cloud sweeps as JSON blobs in SQLite.

    One row per sweep: the whole sweep's point list is serialized with
    ``json.dumps`` into the ``pjson`` column. Indexes are created only after
    the bulk load (in :meth:`close`) to keep insert throughput high.
    """

    # Prefix for the perf-monitor / timing output file names.
    pcm_prefix = "SQLiteJson_"

    def __init__(self):
        # All mutable state is per-instance. These were previously CLASS
        # attributes, so the list objects were shared by every instance:
        # running several test cases appended the first table's index names
        # into later runs. Initializing here fixes that leak.
        self.cur = None
        self.conn = None
        self.table = ""
        self.data_dir = ""
        self.index_columns = []
        self.index_names = []

    def drop_table(self):
        """Drop the point-cloud table and its indexes if they exist."""
        # Table/index identifiers cannot be bound as SQL parameters, so they
        # are interpolated; the names come from the local config file.
        self.cur.execute('DROP TABLE IF EXISTS ' + self.table)
        for index in self.index_names:
            self.cur.execute('DROP INDEX IF EXISTS ' + index)
        self.conn.commit()

    def create_table(self):
        """Create the point-cloud table (one row per sweep)."""
        self.cur.execute("CREATE TABLE %s( "
                    "sweep INTEGER PRIMARY KEY AUTOINCREMENT,"
                    "pjson TEXT NOT NULL,"
                    "classification INTEGER NOT NULL,"
                    "tstamp TIMESTAMP NOT NULL)"%self.table)
        self.conn.commit()

    def create_index(self):
        """Create the configured indexes; deferred until after the bulk load."""
        for col, name in zip(self.index_columns, self.index_names):
            self.cur.execute("CREATE INDEX %s ON %s(%s)" % (name, self.table, col))
        self.conn.commit()

    def load_test(self, test_case):
        """Run one benchmark case: load every sweep of a dataset into SQLite.

        ``test_case`` keys used: ``"reader"`` (class name under the
        ``data_reader`` package), ``"dir"`` (data directory, also where the
        timing/status files are written) and ``"table"`` (SQLite table name).
        """
        reader_name = test_case["reader"]
        self.data_dir = test_case["dir"]
        self.table = test_case["table"]
        # Index every queryable column; names follow <table>_<col>_index.
        for col in ("sweep", "tstamp", "classification"):
            self.index_columns.append(col)
            self.index_names.append("%s_%s_index" % (self.table, col))

        # Resolve the reader class dynamically, e.g. data_reader.NsReader.NsReader.
        # importlib.import_module replaces the previous exec()-based import,
        # which executed a constructed string.
        loadermodule = importlib.import_module("data_reader." + reader_name)
        reader_class = getattr(loadermodule, reader_name)
        reader = reader_class(self.data_dir)

        timer = Timer()
        timer.Start()
        self.prepare()
        timer.StopAndRecord("Init")

        # Record resource usage on a background thread while loading.
        status = StatusThread(os.getpid(), self.pcm_prefix + "load", test_case["dir"])
        status.start()

        sweep = 0
        while reader.has_more_sweep():
            sweep += 1
            if sweep % 10 == 1:
                print(sweep)  # coarse progress indicator
            points = reader.read_sweep()
            tstamp = reader.read_timestamp()
            # Only the insert itself is timed, not the file reading.
            timer.Start()
            self.load_data(sweep, points, tstamp)
            timer.Pause()

        timer.StopAndRecord("Load")
        status.end()

        # Index creation happens inside close(); time and monitor it too.
        status = StatusThread(os.getpid(), self.pcm_prefix + "close", test_case["dir"])
        status.start()
        timer.Start()
        self.close()
        timer.StopAndRecord("Close")
        timer.PrintAll()
        timer.Save(test_case["dir"] + self.pcm_prefix + "time.txt")
        status.end()

    def close(self):
        """Build the deferred indexes, then release the cursor and connection."""
        self.create_index()
        self.cur.close()
        self.conn.close()

    def prepare(self):
        """Open/create the database file and (re)create an empty table."""
        self.conn = sqlite3.connect(self.data_dir + self.table + ".db")
        self.cur = self.conn.cursor()
        # Trade durability for load speed; acceptable for benchmark data.
        self.cur.execute("PRAGMA synchronous = OFF")
        self.drop_table()
        self.create_table()

    def load_data(self, sweep, points, tstamp, classification=0):
        """Insert one sweep as a single JSON-encoded row and commit.

        The original INSERT omitted the NOT NULL ``classification`` column,
        so every insert failed with an IntegrityError; it is now supplied
        explicitly (backward-compatible default of 0).
        """
        self.cur.execute(
            "INSERT INTO " + self.table +
            "(sweep, pjson, classification, tstamp) values (?, ?, ?, ?)",
            (sweep, json.dumps(points), classification, tstamp))
        self.conn.commit()

if __name__ == '__main__':
    # Load the benchmark configuration from the current working directory.
    with open(os.path.join(os.getcwd(), 'config.json'), 'r') as f:
        config = json.load(f)

    # Run each configured test case with a fresh loader instance so that no
    # per-run state (table/index names, open handles) leaks between cases.
    # (The previously-read "data_base_path" value was never used and is gone.)
    for c in config["test_case"]:
        test_case = config["collect_dataset"][c]
        SQLiteJson().load_test(test_case)
