import os, glob, sys, functools
from random import sample, randint
import collections
import json, yaml
import numpy as np
import random, math
import operator
import pandas as pd

#SUPPORT_NETS = ["resnet20", "lstm", "lstman4", "resnet50"]
# Networks for which job templates (TEMPLATES) and convergence traces exist.
SUPPORT_NETS = ["resnet20", "resnet50", "yolov3", "bert"]# "bert", "deepspeech", "neumf"]
#SUPPORT_NETS = ["resnet20", "resnet50", "yolov3"]# "bert", "deepspeech", "neumf"]
#SUPPORT_NETS = ["resnet50"]
# Per-network base job configuration files (YAML), keyed by network name.
TEMPLATES = {
        "resnet20":  "examples/cifar10_resnet20/job.yaml",
        "resnet50":  "examples/imagenet_resnet50/job.yaml",
        "yolov3":    "examples/yolov3/job.yaml",
        "bert":      "examples/bert/job.yaml"
}
# GPU counts cycled round-robin by job_generator.random_generate.
NGPUS = [1, 2, 4, 8]
# Directory for per-job debug log files (referenced by trace_generate).
DEBUG_LOG_DIR='/home/esetstore/repos/ddl-platform/debug-logs'
#NGPUS = [8]


class job_generator:
    """Generate sets of DDL job configuration files under job_configs/<set_name>/.

    Base hyper-parameters come from the per-network template YAMLs in
    TEMPLATES; per-network convergence curves are read from
    traces/<net>/validation-<batch_size>.csv.
    """

    def __init__(self, set_name, num_jobs):
        # Job-set name; generated configs land in job_configs/<set_name>/.
        self.set_name = set_name
        # Number of jobs to generate (used by random_/microsoft_generate).
        self.num_jobs = num_jobs

        # net_type -> OrderedDict(batch_size -> validation-curve DataFrame)
        self.validation = {}
        # net_type -> epoch cap (length of the shortest validation curve)
        self.max_epochs = {}
        self.load_convergence()
        print(self.max_epochs)

    def load_convergence(self, max_epochs=None):
        """Load per-network validation curves and derive per-network epoch caps.

        Args:
            max_epochs: if given, use this fixed epoch count for every network
                instead of the minimum curve length found on disk.
        """
        for net_type in SUPPORT_NETS:
            curves = {}
            for path in glob.glob(os.path.join("traces/%s" % net_type, "validation-*.csv")):
                # File names follow the pattern validation-<batch_size>.csv.
                batch_size = int(path.split("-")[-1].split(".")[0])
                curves[batch_size] = pd.read_csv(path)

            # Keep curves ordered by batch size for deterministic iteration.
            self.validation[net_type] = collections.OrderedDict(sorted(curves.items()))
            # default=0 guards against a network with no trace files on disk;
            # the original raised ValueError from min() on an empty sequence.
            self.max_epochs[net_type] = max_epochs or min(
                map(len, self.validation[net_type].values()), default=0)

    def random_generate(self):
        """Generate num_jobs configs, cycling round-robin over nets and GPU counts."""
        self.job_root = "job_configs/%s" % self.set_name
        if not os.path.exists(self.job_root):
            os.makedirs(self.job_root)

        for i in range(self.num_jobs):
            job_json = {"dnn": SUPPORT_NETS[i % len(SUPPORT_NETS)]}
            with open(TEMPLATES[job_json["dnn"]]) as f:
                print(TEMPLATES[job_json["dnn"]])
                job_json.update(yaml.safe_load(f))

            job_json["id"] = i
            job_json["name"] = "%s-j%d" % (job_json["dnn"], i)
            # These jobs only exercise scheduling: disable wandb logging,
            # evaluation and checkpointing.
            job_json["log"]["wandb"]["enabled"] = False
            job_json['scheduling'] = True
            job_json['evaluation']['enabled'] = False
            job_json['checkpoint']['enabled'] = False
            job_json['optimizer']['ngpus'] = NGPUS[i % len(NGPUS)]
            with open(os.path.join(self.job_root, "job_%d.yaml" % i), "w") as f:
                yaml.safe_dump(job_json, f)

    def trace_generate(self, csv_file):
        """Generate one config per trace row, plus arrival.yaml mapping
        config file name -> (scaled) arrival time.

        Rows whose network is not in SUPPORT_NETS are skipped. The trace's
        global batch size is converted to a per-replica size and clamped.
        """
        self.job_root = "job_configs/%s" % self.set_name
        if not os.path.exists(self.job_root):
            os.makedirs(self.job_root)

        df = pd.read_csv(csv_file, header=0)

        # Per-network caps on per-replica batch size — presumably GPU-memory
        # limits of the target cluster; TODO confirm. Unlisted nets
        # (e.g. resnet20) are not clamped.
        max_batch_size = {"resnet50": 64, "bert": 8, "yolov3": 8}

        job_idx = 0
        arrival_time_dict = {}
        for _, item in df.iterrows():
            net_type = item.network.split("-")[0]
            if net_type not in SUPPORT_NETS:
                continue

            job_json = {"dnn": net_type}
            with open(TEMPLATES[net_type]) as f:
                job_json.update(yaml.safe_load(f))

            job_json["id"] = job_idx
            job_json["name"] = "%s-j%d" % (net_type, job_idx)
            job_json["log"]["logfile"] = os.path.join(
                DEBUG_LOG_DIR, job_json["name"] + ".log")

            # Trace batch size is global across replicas; derive per-replica
            # size and clamp to the per-network cap.
            bs = item.batch_size // item.num_replicas
            bs = min(bs, max_batch_size.get(net_type, bs))
            job_json["dataset"]["batch_size"] = bs

            job_json["optimizer"]["ngpus"] = item.num_replicas
            job_json["optimizer"]["epochs"] = self.max_epochs[net_type]
            job_json["log"]["wandb"]["enabled"] = False
            job_json['evaluation']['enabled'] = False
            job_json['checkpoint']['enabled'] = False
            job_json['scheduling'] = True

            yaml_fn = "job_%d.yaml" % job_idx
            with open(os.path.join(self.job_root, yaml_fn), "w") as f:
                yaml.safe_dump(job_json, f)
            # Arrival times scaled down by 20 to shorten experiment duration.
            arrival_time_dict[yaml_fn] = item.time // 20
            job_idx += 1

        with open("job_configs/%s/arrival.yaml" % self.set_name, "w") as ft:
            ft.write(yaml.dump(arrival_time_dict))

    def microsoft_generate(self, scale=100, arrival_max=1000):
        """Generate a Microsoft-trace-like job mix with a fixed GPU-count
        distribution (1/2/4/8/16/32 GPUs).

        Args:
            scale: iteration-count scale; each job runs randint(1*scale, 6*scale)
                iterations.
            arrival_max: jobs arrive uniformly at random in [0, arrival_max).

        NOTE(review): the original referenced undefined globals `scale` and
        `ARRIVAL_MAX` (NameError); they are now keyword parameters — confirm
        the intended default values against the experiment setup.
        """
        # Integer step so the range() boundaries below stay ints (the
        # original's true division produced floats and broke range()).
        step = self.num_jobs // 80
        # Cumulative job counts (out of 80) for the 1/2/4/8/16/32-GPU buckets.
        job_dist = [40, 47, 60, 75, 79, 80]
        job_dist = [d * step for d in job_dist]

        self.job_root = "job_configs/%s" % self.set_name
        if not os.path.exists(self.job_root):
            os.makedirs(self.job_root)

        min_iter = 1 * scale
        max_iter = 6 * scale

        # One pass per GPU-count bucket instead of six copy-pasted loops.
        boundaries = [0] + job_dist
        for nworkers, lo, hi in zip([1, 2, 4, 8, 16, 32],
                                    boundaries[:-1], boundaries[1:]):
            for i in range(lo, hi):
                job_json = {}
                job_json["job_id"] = i
                job_json["job_name"] = "j%d" % i
                job_json["dnn"] = sample(SUPPORT_NETS, 1)[0]
                # Load the template YAML; the original passed the template
                # *path string* to dict.update(), which raises ValueError.
                with open(TEMPLATES[job_json["dnn"]]) as f:
                    job_json.update(yaml.safe_load(f))

                job_json["nworkers"] = nworkers
                job_json["nsteps_update"] = 1
                job_json["cuda_enabled"] = 1
                job_json["iters"] = randint(min_iter, max_iter)
                job_json["start_time"] = int(math.floor(np.random.uniform(0, arrival_max)))

                # Historical ".json" file name kept even though the payload
                # is serialized as YAML.
                with open(os.path.join(self.job_root, "job_%d.json" % i), "w") as f:
                    yaml.safe_dump(job_json, f)

def _main():
    """Script entry point: build the 160-job Microsoft-trace config set."""
    # microsoft trace
    trace_name = "traces/microsoft.csv"
    jobG = job_generator("microsoft", 160)
    jobG.trace_generate(trace_name)

    ## random job set for test
    #num_jobs = 10
    #jobG = job_generator("test_%djobs" % num_jobs, num_jobs)
    #jobG.random_generate()

    ## microsoft-80 job set
    #jobG = job_generator("microsoft-80", 80)
    #jobG.microsoft_generate()

    ## microsoft-160 job set
    #jobG = job_generator("microsoft-160", 160)
    #jobG.microsoft_generate()


# Guard the entry point so importing this module does not trigger
# job generation as a side effect.
if __name__ == "__main__":
    _main()

