# -*- coding: utf-8 -*-
from __future__ import print_function
from random import randint
from ddl_platform.database.base import BaseTable
from ddl_platform.database.base import main_database
from ddl_platform.ddlib.job import JobStatus, JobTaskStatus

import os
import yaml
import time
import pickle
import ddl_platform.common.utils as utils
import ddl_platform.common.settings as settings
import ddl_platform.scheduler.job_info as job 


class SilenceJobTable(BaseTable):
    """Database-backed table of jobs, keyed by job id.

    Rows are pickled job objects stored in the hash ``silencejobtb``; a
    companion counter key (``silencejobtb-key``) hands out unique ids via
    ``incr``.

    NOTE(review): rows are deserialized with ``pickle.loads``, which is
    only safe while the backing database is fully trusted -- never point
    this table at data an attacker can write.
    """

    def __init__(self, db=main_database):
        self.db = db
        self.TABLE = 'silencejobtb'        # hash holding job_id -> pickled job
        self.IDKEY = self.TABLE + '-key'   # auto-increment id counter

    def _save(self, job):
        """Pickle *job* and write it back under its own ``_job_id``."""
        self.db.hset(self.TABLE, job._job_id, pickle.dumps(job))

    def query(self):
        """Return every job in the table, unpickled, in db order."""
        return [pickle.loads(j) for j in self.db.hvals(self.TABLE)]

    def query_waiting_jobs(self):
        """Return the jobs whose status is WAITING."""
        return [j for j in self.query() if j._status == JobStatus.WAITING]

    def query_profiling_jobs(self):
        """Return the jobs whose status is PROFILING."""
        return [j for j in self.query() if j._status == JobStatus.PROFILING]

    def query_waiting_and_profiled_jobs(self):
        """Return the jobs that are either WAITING or PROFILED."""
        wanted = (JobStatus.WAITING, JobStatus.PROFILED)
        return [j for j in self.query() if j._status in wanted]

    def get(self, job_id):
        """Return the job stored under *job_id*, or None if absent."""
        raw = self.db.hget(self.TABLE, job_id)
        return pickle.loads(raw) if raw else None

    def update_status(self, job_id, status):
        """Set the job's status and persist it.

        As in the original code, an unknown *job_id* surfaces as an
        AttributeError on the None job.
        """
        job = self.get(job_id)
        job._status = status
        self._save(job)

    def update_gpu_mem(self, job_id, bs, gpu_mem):
        """Record the GPU memory occupied at batch size *bs* and persist."""
        job = self.get(job_id)
        job.update_occupied_mem_with_bs(bs, gpu_mem)
        self._save(job)

    def update_iteration_time(self, job_id, iteration_time):
        """Record the measured per-iteration time and persist."""
        job = self.get(job_id)
        job._iteration_time = iteration_time
        self._save(job)

    def update_job_info(self, job_id, bs, gpu_mem, iteration_time):
        """Record both GPU memory and iteration time for *bs* and persist."""
        job = self.get(job_id)
        job.update_occupied_mem_with_bs(bs, gpu_mem)
        job.update_iteration_time_with_bs(bs, iteration_time)
        self._save(job)

    def get_unique_id(self):
        """Atomically allocate and return the next job id."""
        return self.db.incr(self.IDKEY)

    def insert_with_id(self, job, job_id):
        """Stamp *job_id* onto *job* and store it under that id."""
        job._job_id = job_id
        self._save(job)

    def insert(self, job):
        """Store *job* under a freshly allocated id and return that id."""
        unique_id = self.get_unique_id()
        self.insert_with_id(job, unique_id)
        return unique_id

    def delete(self, job_id):
        """Remove the row stored under *job_id*."""
        self.db.hdel(self.TABLE, job_id)

    def clear_table(self):
        """Drop every job and reset the id counter."""
        self.db.delete(self.TABLE)
        self.db.delete(self.IDKEY)

def clear_table():
    """Empty the shared job table, dropping all rows and the id counter."""
    tb.clear_table()

def create_microsoft_dataset():
    """Placeholder for building a job set from the Microsoft trace (not implemented)."""
    pass

def create_random_dataset():
    """Generate a randomized job set and insert it into the shared table.

    For each slot in the first three arrival windows of ``job_dist`` a
    network is drawn at random, its ``job.yaml`` template is specialized
    (id, batch size, ngpus, logfile), written out as ``job_<n>.yaml`` in
    the network's code folder, and a JobInfo built from it is inserted
    into ``tb``.
    """
    SUPPORT_NETS = ['resnet20', 'fcn5', 'resnet50', 'vgg16']
    CODES = {
        'resnet20': '%s/examples/cifar10_resnet20' % settings.ROOT_DIR,
        'fcn5': '%s/examples/mnist_fcn5' % settings.ROOT_DIR,
        'resnet50': '%s/examples/imagenet_resnet50' % settings.ROOT_DIR,
        'vgg16': '%s/examples/imagenet_vgg16' % settings.ROOT_DIR,
    }

    # Load each network's template configuration once, up front.
    net_yamls = {}
    for net in SUPPORT_NETS:
        with open(os.path.join(CODES[net], 'job.yaml'), 'r') as f:
            net_yamls[net] = yaml.safe_load(f)

    job_dist = [0, 4, 47, 60, 75, 79, 80]   # cumulative job counts per arrival window
    net_ids = [randint(0, len(SUPPORT_NETS) - 1) for _ in range(80)]
    yaml_list = []
    for i in range(3):                      # only the first three windows are generated
        for j in range(job_dist[i], job_dist[i + 1]):
            net_type = SUPPORT_NETS[net_ids[j]]
            basic_yaml = net_yamls[net_type]
            basic_yaml['id'] = str(j)
            basic_yaml['dataset']['batch_size'] = 2 ** randint(2, 4)
            basic_yaml['optimizer']['ngpus'] = 2 ** 3
            basic_yaml['log']['logfile'] = 'job_%d_%s.log' % (j, net_type)
            # Compute the config path once (the original built it twice).
            config_fn = os.path.join(CODES[net_type], 'job_%d.yaml' % j)
            yaml_list.append(config_fn)
            with open(config_fn, 'w') as f:
                yaml.safe_dump(basic_yaml, f)
            # BUG FIX: the original rebound the loop counter ``j`` to the
            # JobInfo instance; use a distinct name to keep the counter intact.
            job_info = job.JobInfo(CODES[net_type], config_fn)
            tb.insert(job_info)
            utils.update_yaml_attr(job_info._conf, 'id', job_info._job_id)



def create_debug_dataset():
    """Insert a small, fixed debug job set (two jobs) into the shared table.

    Each job's ``job.yaml`` template is specialized (id, batch size,
    ngpus, logfile, evaluation disabled), written to the network's code
    folder, and registered with a pre-allocated id so the yaml ``id``
    attribute matches the table's job id. Finishes by printing the table.
    """
    SUPPORT_NETS = ['resnet20']
    CODES = {
        'resnet20': '%s/examples/cifar10_resnet20' % settings.ROOT_DIR,
        'fcn5': '%s/examples/mnist_fcn5' % settings.ROOT_DIR,
        'resnet50': '%s/examples/imagenet_resnet50' % settings.ROOT_DIR,
        'vgg16': '%s/examples/imagenet_vgg16' % settings.ROOT_DIR,
        "yolov3": "%s/examples/yolov3" % settings.ROOT_DIR,
        'bert': '%s/examples/bert' % settings.ROOT_DIR,
    }

    # Read each supported network's job template once.
    net_yamls = {}
    for net in SUPPORT_NETS:
        yaml_file = os.path.join(CODES[net], 'job.yaml')
        print('yaml file: ', yaml_file)
        with open(yaml_file, 'r') as f:
            net_yamls[net] = yaml.safe_load(f)

    # Networks that get the reduced batch size (all branches of the original
    # if/elif chain assigned 8; everything else defaults to 64).
    SMALL_BS_NETS = ('vgg16', 'resnet20', 'bert', 'yolov3')

    yaml_list = []
    for idx in range(2):
        net_type = SUPPORT_NETS[idx % len(SUPPORT_NETS)]
        basic_yaml = net_yamls[net_type]
        basic_yaml['id'] = str(idx)
        basic_yaml['scheduling'] = True
        basic_yaml['dataset']['batch_size'] = 8 if net_type in SMALL_BS_NETS else 64
        basic_yaml['optimizer']['ngpus'] = 4
        basic_yaml['log']['logfile'] = 'job_%d_%s.log' % (idx, net_type)
        basic_yaml['evaluation']['enabled'] = False
        config_fn = os.path.join(CODES[net_type], 'job_%d.yaml' % idx)
        yaml_list.append(config_fn)
        with open(config_fn, 'w') as f:
            yaml.safe_dump(basic_yaml, f)
        # BUG FIX: the original rebound the loop counter ``j`` to the
        # JobInfo instance; use a distinct name instead.
        job_info = job.JobInfo(CODES[net_type], config_fn)
        job_id = tb.get_unique_id()
        utils.update_yaml_attr(job_info._conf, 'id', job_id)
        tb.insert_with_id(job_info, job_id)
        print('a new job inserted: %s' % job_info._job_id, job_info._conf)
    query()


def create_job_set_on_folder(folder, use_arrival_time=False):
    """Insert the jobs described by the yaml files in *folder*.

    *folder* must contain ``arrival.yaml`` mapping each job yaml filename
    to its arrival time in seconds. When *use_arrival_time* is True the
    function sleeps between insertions to replay that schedule. At most
    ``max_njobs`` jobs are inserted; a copy of each job yaml is written
    into the sandbox directory and registered with the shared table.
    Finishes by printing the table.
    """
    arrival_time_fn = 'arrival.yaml'
    job_fns = [f for f in os.listdir(folder) if f != arrival_time_fn]
    with open(os.path.join(folder, arrival_time_fn)) as f:
        arrival_time = yaml.safe_load(f)
    print('job_fns: ', job_fns)
    print('arrival time: ', arrival_time)

    max_njobs = 30
    start_time = 0
    for i, fn in enumerate(job_fns):
        if i >= max_njobs:
            break
        at = int(arrival_time[fn])
        if use_arrival_time:
            # Sleep only for the forward gap since the previous arrival.
            if at - start_time > 0:
                print('sleep: ', at - start_time)
                time.sleep(at - start_time)
            start_time = at

        yaml_file = os.path.join(folder, fn)
        sandbox_yaml = os.path.join(settings.SANDBOX_DIR, fn)
        print('current fn: ', fn)
        # BUG FIX: the original reused the name ``f`` for the sandbox output
        # file while the source file's ``with`` block was still active; read
        # first, then write with a distinct handle.
        with open(yaml_file, 'r') as src:
            data = yaml.safe_load(src)

        sandbox_job_yaml = data.copy()
        with open(sandbox_yaml, 'w') as dst:
            dst.write(yaml.dump(sandbox_job_yaml))

        code_folder = data['root']
        job_info = job.JobInfo(code_folder, sandbox_yaml)
        job_id = tb.get_unique_id()
        utils.update_yaml_attr(job_info._conf, 'id', job_id)
        tb.insert_with_id(job_info, job_id)
        print('a new job inserted: %s, info: %s' % (job_info._job_id, data))
    query()

def query():
    """Print every job currently stored in the shared table."""
    print(tb.query())


# Module-level singleton table shared by the helper functions above/below.
tb = SilenceJobTable()
silence_job_table = tb  # public alias for importers
# NOTE(review): import-time print of the db handle; consider logging instead.
print(tb.db)

def test():
    """Smoke test: build a JobInfo, then dump the table's current contents."""
    j = job.JobInfo(
        '/home/esetstore/repos/ddl-platform/examples/cifar10_resnet20',
        '/home/esetstore/repos/ddl-platform/examples/cifar10_resnet20/job_0.yaml',
    )
    jobs = tb.query()
    print(tb.db)
    print(jobs)
    print(jobs[0].is_waiting())


if __name__ == '__main__':
    # Reset the table, then seed it with the small debug job set.
    # The commented calls below are alternative seeding strategies.
    clear_table()
    jobset_folder = '/home/esetstore/repos/ddl-platform/job_configs/microsoft'
    create_debug_dataset()
    #create_job_set_on_folder(jobset_folder, True)
    #test()
    #create_random_dataset()
