from asyncore import read
from datetime import datetime
import importlib
from io import StringIO
import json
from multiprocessing.spawn import prepare
import os
import sys
import time

import psycopg2

sys.path.append('/home/ubuntu/benxdb/benchmark/python/')

from data_reader.DataReader import DataReader
from data_reader.NsReader import NsReader
from data_reader.OrReader import OrReader
from utils.StatusThread import StatusThread
from utils.Timer import Timer

class PgBlock:
    """Benchmark driver that loads point-cloud sweeps into PostgreSQL
    using the pgPointcloud extension, one PCPATCH row per sweep.

    Lifecycle: prepare() connects and recreates the schema, load_test()
    streams sweeps from a data reader while recording timing and
    resource usage, close() builds the indexes and disconnects.
    """

    # Connection and cursor are opened by prepare() and released by close().
    cur = None
    conn = None
    table = ""
    # Prefix used to label the resource-usage and timing output files.
    pcm_prefix = "PgBlock_"

    def drop_table(self):
        """Remove any leftover table, schema registration and indexes from
        a previous run so the benchmark starts from a clean database."""
        self.cur.execute('DROP TABLE IF EXISTS patches')
        self.cur.execute('DELETE FROM pointcloud_formats')
        self.cur.execute('DROP INDEX IF EXISTS patches_tstamp_index')
        self.cur.execute('DROP INDEX IF EXISTS patches_sweep_index')
        self.conn.commit()

    def create_table(self):
        """Register the 4-dimension (X, Y, Z, Classification) point-cloud
        schema as pcid 1 and create the patches table (one uncompressed
        PCPATCH plus its timestamp per sweep)."""
        self.cur.execute("""INSERT INTO pointcloud_formats (pcid, srid, schema) VALUES (1, 4326, '<?xml version="1.0" encoding="UTF-8"?>
        <pc:PointCloudSchema xmlns:pc="http://pointcloud.org/schemas/PC/1.1"
                            xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
            <pc:dimension>
                <pc:position>1</pc:position>
                <pc:size>4</pc:size>
                <pc:description>X coordinate as a long integer. You must use the
                    scale and offset information of the header to
                    determine the double value.</pc:description>
                <pc:name>X</pc:name>
                <pc:interpretation>int32_t</pc:interpretation>
                <pc:scale>0.001</pc:scale>
            </pc:dimension>
            <pc:dimension>
                <pc:position>2</pc:position>
                <pc:size>4</pc:size>
                <pc:description>Y coordinate as a long integer. You must use the
                    scale and offset information of the header to
                    determine the double value.</pc:description>
                <pc:name>Y</pc:name>
                <pc:interpretation>int32_t</pc:interpretation>
                <pc:scale>0.001</pc:scale>
            </pc:dimension>
            <pc:dimension>
                <pc:position>3</pc:position>
                <pc:size>4</pc:size>
                <pc:description>Z coordinate as a long integer. You must use the
                    scale and offset information of the header to
                    determine the double value.</pc:description>
                <pc:name>Z</pc:name>
                <pc:interpretation>int32_t</pc:interpretation>
                <pc:scale>0.001</pc:scale>
            </pc:dimension>
            <pc:dimension>
                <pc:position>4</pc:position>
                <pc:size>4</pc:size>
                <pc:description>Classification</pc:description>
                <pc:name>Classification</pc:name>
                <pc:interpretation>uint32_t</pc:interpretation>
                <pc:scale>1</pc:scale>
            </pc:dimension>
            <pc:metadata>
                <Metadata name="compression">None</Metadata>
            </pc:metadata>
        </pc:PointCloudSchema>');""")
        self.cur.execute("CREATE TABLE patches( "
                    "sweep SERIAL PRIMARY KEY,"
                    "pa PCPATCH(1),"
                    "tstamp TIMESTAMP NOT NULL)")
        self.conn.commit()

    def create_index(self):
        """Index tstamp and sweep. Built after loading (see close()) so
        inserts are not slowed down by index maintenance."""
        self.cur.execute("CREATE INDEX patches_tstamp_index ON patches(tstamp)")
        self.cur.execute("CREATE INDEX patches_sweep_index ON patches(sweep)")
        self.conn.commit()

    def load_test(self, test_case):
        """Run one load benchmark described by *test_case*.

        test_case: dict with "reader" (class name inside the data_reader
        package) and "dir" (dataset directory, also used for output files).
        Timing is recorded with Timer; resource usage with StatusThread.
        """
        reader_name = test_case["reader"]
        data_dir = test_case["dir"]
        self.table = 'patches'

        # Resolve the reader class dynamically. importlib.import_module is
        # the idiomatic, safer replacement for exec'ing a synthesized
        # import statement.
        loader_module = importlib.import_module("data_reader." + reader_name)
        reader_class = getattr(loader_module, reader_name)
        reader = reader_class(data_dir)

        timer = Timer()
        timer.Start()
        self.prepare()
        timer.StopAndRecord("Init")

        # Monitor this process and the database server while loading.
        status = StatusThread(os.getpid(), self.pcm_prefix + "load", test_case["dir"])
        status.start()
        # NOTE(review): assumes the postgres backend runs at pid+2 on the
        # benchmark host -- fragile; confirm before trusting these numbers.
        postgres_status = StatusThread(os.getpid() + 2, "postgres_load", test_case["dir"])
        postgres_status.start()

        sweep = 0
        while reader.has_more_sweep():
            sweep += 1
            if sweep % 10 == 1:
                print(sweep)  # progress heartbeat every 10 sweeps
            points = reader.read_sweep()
            tstamp = reader.read_timestamp()
            timer.Start()
            self.load_data(points, tstamp, sweep)
            timer.Pause()

        timer.StopAndRecord("Load")
        status.end()
        postgres_status.end()

        # Time the close phase (index build + disconnect) separately.
        status = StatusThread(os.getpid(), self.pcm_prefix + "close", test_case["dir"])
        status.start()
        postgres_status = StatusThread(os.getpid() + 2, "postgres_close", test_case["dir"])
        postgres_status.start()
        timer.Start()
        self.close()
        timer.StopAndRecord("Close")
        timer.PrintAll()
        timer.Save(test_case["dir"] + self.pcm_prefix + "time.txt")
        status.end()
        postgres_status.end()


    def close(self):
        """Build the indexes, then release the cursor and connection."""
        self.create_index()
        self.cur.close()
        self.conn.close()

    def prepare(self):
        """Connect to PostgreSQL and recreate the benchmark schema."""
        self.conn = psycopg2.connect("dbname=postgres user=postgres password=postgres")
        self.cur = self.conn.cursor()
        self.drop_table()
        self.create_table()

    def load_data(self, points, tstamp, sweep):
        """Insert one sweep as a single PCPATCH row.

        points: iterable of per-point sequences (x, y, z, classification);
        tstamp: POSIX timestamp of the sweep. *sweep* is accepted for
        interface parity but unused -- the table assigns its own serial id.
        """
        # Flatten to [x, y, z, c, x, y, z, c, ...] as PC_MakePatch expects.
        points_flat = [coord for p in points for coord in (p[0], p[1], p[2], p[3])]

        # Parameterized insert; commit per sweep so progress is durable.
        self.cur.execute("INSERT INTO patches(pa, tstamp) values (PC_MakePatch(1, %s), %s)",
                    (points_flat, datetime.fromtimestamp(tstamp)))
        self.conn.commit()

if __name__ == '__main__':
    # Benchmark entry point: read config.json from the working directory
    # and run one load test per configured case.
    with open(os.path.join(os.getcwd(), 'config.json'), 'r') as f:
        config = json.load(f)

    data_base_path = config["data_base_path"]  # read for config validation; unused here

    # Each entry of "test_case" names a dataset in "collect_dataset".
    for case_name in config["test_case"]:
        test_case = config["collect_dataset"][case_name]
        PgBlock().load_test(test_case)
        