# -*- coding: utf-8 -*-
from __future__ import print_function
from datetime import datetime
from ddl_platform.database.base import BaseTable
from ddl_platform.database.base import main_database
import pickle


class SchedulingJobTable(BaseTable):
    """Redis-backed table of scheduling jobs.

    Each job is stored pickled in a single hash (``self.TABLE``) keyed by
    ``job_id``; per-worker task statuses live in one additional hash per job
    (``self.JOB_WORKER_TABLE % job_id``).
    """

    def __init__(self, db=main_database):
        # db: redis-like client exposing hget/hset/hvals/hgetall/hdel/
        # delete/scan_iter; defaults to the shared platform database.
        self.db = db
        self.TABLE = 'schedulingjobtb'
        self.JOB_WORKER_TABLE = 'schedulingjobworkertb-%s'

    def _worker_table(self, job_id):
        """Return the name of the per-job worker-status hash for *job_id*."""
        return self.JOB_WORKER_TABLE % (str(job_id))

    def _save(self, job):
        """Stamp *job* with the current time and persist it pickled."""
        job._updated_time = datetime.timestamp(datetime.now())
        self.db.hset(self.TABLE, job._job_id, pickle.dumps(job))

    def _assign_worker_status(self, job_info):
        """Fill ``job_info._worker_statuses`` from the per-job worker hash."""
        if job_info:
            statuses = self.db.hgetall(self._worker_table(job_info._job_id))
            for rank, status in statuses.items():
                # redis returns bytes for both field and value
                job_info._worker_statuses[int(rank)] = status.decode()

    def query(self):
        """Return every stored job, with worker statuses filled in."""
        decoded_jobs = []
        for raw in self.db.hvals(self.TABLE):
            # NOTE(review): pickle.loads is unsafe on untrusted data; this is
            # acceptable only because the hash is written solely by _save().
            job_info = pickle.loads(raw)
            self._assign_worker_status(job_info)
            decoded_jobs.append(job_info)
        return decoded_jobs

    def get(self, job_id):
        """Return the job stored under *job_id*, or None if absent.

        Worker statuses are intentionally NOT populated here; use query()
        when they are needed.
        """
        raw = self.db.hget(self.TABLE, job_id)
        return pickle.loads(raw) if raw else None

    def update_info(self, job_id,
                    task_status,
                    iteration,
                    batch_size,
                    model_mem,
                    throughput,
                    tforward,
                    tbackward,
                    tcomm):
        """Update runtime metrics of a job; no-op if the job is missing."""
        job = self.get(job_id)
        if job:
            job._task_status = task_status
            job._updated_bs = batch_size
            job._throughput = throughput
            job.update_progress(iteration)
            # model_mem <= 0 means "no measurement"; keep the old estimate.
            if model_mem > 0:
                job.update_occupied_mem_with_bs(batch_size, model_mem)
            job.update_time_info(batch_size, tforward, tbackward, tcomm)
            self._save(job)

    def add_running_time(self, job_id, running_time):
        """Accumulate *running_time* onto the job; no-op if missing."""
        job = self.get(job_id)
        if job:
            job.add_running_time(running_time)
            self._save(job)

    def update_job_status(self, job_id, status):
        """Set the job-level status; no-op if the job is missing."""
        job = self.get(job_id)
        if job:
            job._status = status
            self._save(job)

    def update_status(self, job_id, task_status, checkpoint_path=None):
        """Set the task status and, optionally, the checkpoint path."""
        job = self.get(job_id)
        if job:
            if checkpoint_path is not None:
                job._checkpoint_path = checkpoint_path
            job._task_status = task_status
            self._save(job)

    def update_worker_status(self, job_id, rank, task_status):
        """Record *task_status* for worker *rank* and touch the job record."""
        job = self.get(job_id)
        if job:
            self.db.hset(self._worker_table(job_id), rank, task_status)
            # Re-save the job so its _updated_time reflects this change.
            self._save(job)

    def insert(self, job):
        """Store *job* and seed its per-worker status hash; return its id."""
        self._save(job)
        worker_table = self._worker_table(job._job_id)
        for rank, status in enumerate(job._worker_statuses):
            self.db.hset(worker_table, rank, status)
        return job._job_id

    def delete(self, job_id):
        """Remove the job record (worker hash is left for clear_table)."""
        self.db.hdel(self.TABLE, job_id)

    def clear_table(self):
        """Drop the job hash and every per-job worker-status hash."""
        self.db.delete(self.TABLE)
        for key in self.db.scan_iter("schedulingjobworkertb-*"):
            self.db.delete(key)

def clear_table():
    """Wipe the scheduling-job table and all per-job worker tables."""
    SchedulingJobTable().clear_table()

# Shared module-level table instance, exported under both names.
tb = SchedulingJobTable()
scheduling_job_table = tb


def test():
    """Smoke test: insert one example job, then query everything back."""
    import ddl_platform.scheduler.job_info as job
    sjt = SchedulingJobTable()
    example = job.JobInfo('/home/esetstore/repos/ddl-platform/examples/cifar10_resnet20', '/home/esetstore/repos/ddl-platform/examples/cifar10_resnet20/job_0.yaml')
    print('a new job: %s' % example._job_id)
    inserted_id = sjt.insert(example)
    print('a job inserted: ', inserted_id)
    print(sjt.query())

def query():
    """Print every job currently stored in the shared table."""
    print(tb.query())


if __name__ == '__main__':
    # Running this module as a script wipes the scheduling tables.
    # Uncomment test()/query() below for manual inspection instead.
    clear_table()
    #test()
    #query()
