from asyncore import read
from datetime import datetime
import importlib
from io import StringIO
import json
from multiprocessing.spawn import prepare
import os
import sys
import time

import psycopg2

from data_reader.DataReader import DataReader
from data_reader.NsReader import NsReader
from data_reader.OrReader import OrReader
from utils.StatusThread import StatusThread
from utils.Timer import Timer

class PgBlockJson:
    """Benchmark loader that stores point-cloud sweeps in PostgreSQL.

    Each sweep becomes one row: the points serialized into a ``json``
    column plus a ``TIMESTAMP`` column that is indexed after the load
    finishes, so per-row inserts are timed without index maintenance.
    """

    def __init__(self):
        # Connection state is per-instance (the original class-level
        # attributes would be shared across instances); opened by prepare().
        self.conn = None
        self.cur = None
        self.table = ""
        self.index_name = ""

    def drop_table(self):
        """Drop the destination table and its timestamp index if present."""
        # SQL identifiers cannot be sent as bound parameters, so they are
        # interpolated directly; both names come from the local config file,
        # not from untrusted input.
        self.cur.execute('DROP TABLE IF EXISTS ' + self.table)
        self.cur.execute('DROP INDEX IF EXISTS ' + self.index_name)
        self.conn.commit()

    def create_table(self):
        """Create the destination table: serial key, JSON points, timestamp."""
        self.cur.execute("CREATE TABLE %s( "
                    "sweep SERIAL PRIMARY KEY,"
                    "points json,"
                    "tstamp TIMESTAMP NOT NULL)" % self.table)
        self.conn.commit()

    def create_index(self):
        """Create the timestamp index (deferred until after the bulk load)."""
        self.cur.execute("CREATE INDEX %s ON %s(tstamp)" % (self.index_name, self.table))
        self.conn.commit()

    def load_test(self, test_case):
        """Run one timed load described by *test_case*.

        Expected keys: ``"reader"`` (class name inside the ``data_reader``
        package, e.g. ``NsReader``), ``"dir"`` (data directory passed to the
        reader), ``"table"`` (destination table name).
        """
        reader_name = test_case["reader"]
        data_dir = test_case["dir"]
        self.table = test_case["table"]
        self.index_name = self.table + "_tstamp_index"

        # Resolve the reader class with importlib instead of exec() on a
        # constructed import statement: same effect, no dynamic code string.
        reader_module = importlib.import_module("data_reader." + reader_name)
        reader_class = getattr(reader_module, reader_name)
        reader = reader_class(data_dir)

        timer = Timer()
        timer.Start()
        self.prepare()
        timer.StopAndRecord("Init")

        # Monitor the database server process while the load runs.
        # NOTE(review): pid+2 assumes the postgres process id is exactly two
        # above this one -- confirm this holds in the benchmark environment.
        status = StatusThread(os.getpid() + 2, "postgres")
        status.start()

        sweep = 0
        while reader.has_more_sweep():
            sweep += 1
            points = reader.read_sweep()
            tstamp = reader.read_timestamp()
            # Only the insert itself is timed, not the file reading.
            timer.Start()
            self.load_data(points, tstamp)
            timer.Pause()

        status.set_end_flat()

        timer.StopAndRecord("Load")
        timer.Start()
        self.close()
        timer.StopAndRecord("Close")
        timer.PrintAll()

    def close(self):
        """Build the timestamp index, then release cursor and connection."""
        self.create_index()
        self.cur.close()
        self.conn.close()

    def prepare(self):
        """Connect to PostgreSQL and (re)create the destination table."""
        self.conn = psycopg2.connect("dbname=postgres user=postgres password=postgres")
        self.cur = self.conn.cursor()
        self.drop_table()
        self.create_table()

    def load_data(self, points, tstamp):
        """Insert one sweep; commits per row so each insert is fully timed."""
        # fromtimestamp() yields a naive local-time datetime, which matches
        # the TIMESTAMP (without time zone) column from create_table().
        self.cur.execute("INSERT INTO " + self.table + "(points, tstamp) values (%s, %s)",
                        (json.dumps(points), datetime.fromtimestamp(tstamp)))
        self.conn.commit()

if __name__ == '__main__':
    # Benchmark configuration lives next to the working directory.
    with open(os.getcwd() + '/config.json', 'r') as f:
        config = json.load(f)

    # Track resource usage of this loader process for the whole run.
    monitor = StatusThread(os.getpid(), "load")
    monitor.start()

    data_base_path = config["data_base_path"]
    cases = config["collect_dataset"]
    for case_name in config["test_case"]:
        PgBlockJson().load_test(cases[case_name])

    monitor.set_end_flat()
