# -*- coding: utf-8 -*-
from __future__ import print_function

import redis
import pickle
from .job import JobTaskStatus, JobStatus
import ddl_platform.common.settings as settings
from ddl_platform.database.silence_job_table import silence_job_table
from ddl_platform.database.profiled_job_table import profiled_job_table
from ddl_platform.database.scheduling_job_table import scheduling_job_table

# Channel the trainer publishes lifecycle messages on; '%s' is the job key.
PUB_CHANNEL_TO_RESOURCE_MANAGER = '/channel/trainer-to-resource-manager/%s'
# Channel the trainer subscribes to for scheduler commands; '%s' is the job key.
# NOTE: an earlier per-rank variant ('.../%s-%s') was dropped — one channel per job.
SUB_CHANNEL_FROM_SCHEDULER = '/channel/scheduler-to-trainer/%s'


class CommMsg:
    r"""String tokens exchanged between Trainer and Scheduler over pub/sub."""

    FINISHED = 'FINISHED'  # job has completed
    WAITING = 'WAITING'    # job is parked, waiting for resources
    CONTINUE = 'CONTINUE'  # job may keep running
    INQUEUE = 'INQUEUE'    # job is queued for scheduling
    STOP = 'STOP'          # job must stop

def get_job_key(jobid):
    r"""Return the key used to address *jobid* in channels/records (its string form)."""
    key = str(jobid)
    return key

class Communicator(object):
    r"""The communicator between Trainer and Scheduler.

    Publishes job-lifecycle messages to the resource manager, subscribes to
    per-job control messages from the scheduler (both over Redis pub/sub),
    and mirrors job state into the scheduling/profiling database tables.

    Args:
        rank (int): Rank of the trainer process that owns this communicator.
        host (str, optional): Host of an intermediate channel for communication,
            e.g., a Redis host. Defaults to ``settings.REDIS_HOST``.
        port (int, optional): Port of an intermediate channel for communication,
            e.g., a Redis port. Defaults to ``settings.REDIS_PORT``.

    Attributes:
        _host (str): The same as ``host``.
        _port (int): The same as ``port``.
        _comm (object): The communication object (a ``redis.Redis`` client),
            or ``None`` when the client could not be constructed.
        _job (Job): A job object that uses the communicator.
        _sub_channel_from_scheduler (Redis.Pubsub): Pubsub object; created by
            :meth:`create_job_record` (stays ``None`` for profiling jobs).
        _running (bool): Cleared by :meth:`delete_job_record` to break out of
            the blocking subscribe loop.
    """
    def __init__(self, rank, host=None, port=None):
        self._host = host if host is not None else settings.REDIS_HOST
        self._port = port if port is not None else settings.REDIS_PORT
        self._rank = rank
        self._comm = None
        self._job = None
        self._sub_channel_from_scheduler = None
        self._running = True

        try:
            self._comm = redis.Redis(host=self._host, port=self._port,
                                     db=settings.REDIS_DB_INDEX)
        except Exception:
            # Was a bare ``except:``. Keep the best-effort behavior (every
            # method no-ops when ``_comm`` is None) without also swallowing
            # SystemExit/KeyboardInterrupt.
            self._comm = None

    def get_sub_channel(self):
        r"""Get the subscribe channel str.

        Returns:
            channel (str): The channel name for this job.
        """
        return SUB_CHANNEL_FROM_SCHEDULER % get_job_key(self._job._id)

    def get_pub_channel(self):
        r"""Get the publish channel str.

        Returns:
            channel (str): The channel name for this job.
        """
        return PUB_CHANNEL_TO_RESOURCE_MANAGER % get_job_key(self._job._id)

    def create_job_record(self, job, profiling=False):
        r"""Attach ``job`` and (unless profiling) subscribe to its channel.

        Args:
            job (Job): The job object.
            profiling (bool, optional): When True, skip creating the pub/sub
                subscription; profiling jobs never receive scheduler messages.

        Returns:
            bool: True on success, False when no Redis client is available.
        """
        if not self._comm:
            return False
        self._job = job
        if not profiling:
            self._sub_channel_from_scheduler = self._comm.pubsub()
            self._sub_channel_from_scheduler.subscribe(self.get_sub_channel())
            # Drain any stale message already sitting on the channel.
            self._sub_channel_from_scheduler.get_message()
        return True

    def update_job_record(self, status, checkpoint_path=None):
        r"""Update the job status in the scheduling table.

        Args:
            status (JobTaskStatus): The job status.
            checkpoint_path (str, optional): Latest checkpoint location.

        Returns:
            bool: True on success, False when not attached to a job/client.
        """
        if not self._comm or not self._job:
            return False
        scheduling_job_table.update_status(self._job._id, status, checkpoint_path=checkpoint_path)
        return True

    def update_job_info(self,
                    task_status,
                    iteration,
                    batch_size,
                    model_mem,
                    throughput,
                    tforward,
                    tbackward,
                    tcomm):
        r"""Persist runtime statistics for the current job.

        Args:
            task_status (JobTaskStatus): Current task status.
            iteration (int): Current training iteration.
            batch_size (int): Batch size in use.
            model_mem: Model memory footprint.
            throughput: Training throughput.
            tforward: Forward-pass time.
            tbackward: Backward-pass time.
            tcomm: Communication time.

        Returns:
            bool: True on success, False when not attached to a job/client.
        """
        if not self._comm or not self._job:
            return False
        scheduling_job_table.update_info(self._job._id,
                task_status,
                iteration,
                batch_size,
                model_mem,
                throughput,
                tforward,
                tbackward,
                tcomm)
        return True

    def accumulate_running_time(self, running_time):
        r"""Add ``running_time`` to the job's accumulated running time.

        Returns:
            bool: True on success, False when not attached to a job/client.
        """
        if not self._comm or not self._job:
            return False
        scheduling_job_table.add_running_time(self._job._id, running_time)
        # BUGFIX: previously fell through and returned None; siblings return True.
        return True

    def update_profiling_job_info(self, bs, gpu_mem, iteration_time):
        r"""Record profiling results (batch size, GPU memory, iteration time).

        Returns:
            bool: True on success, False when not attached to a job.
        """
        if not self._job:
            return False
        silence_job_table.update_job_info(self._job._id, bs, gpu_mem, iteration_time)
        return True

    def delete_job_record(self):
        r"""Delete the job record and tear down the subscription.

        Returns:
            bool: True on success, False when not attached to a job/client.
        """
        if not self._comm or not self._job:
            return False
        scheduling_job_table.delete(self._job._id)
        # BUGFIX: profiling jobs never subscribe, so the pubsub object may be
        # None here; unsubscribing unconditionally raised AttributeError.
        if self._sub_channel_from_scheduler is not None:
            self._sub_channel_from_scheduler.unsubscribe(self.get_sub_channel())
        self._running = False
        return True

    def sub_info_from_scheduler(self):
        r"""Block until a message arrives on the subscribe channel.

        Returns:
            The unpickled payload (a list determining the next runs), ``[0]``
            when no client/job/subscription is attached, or ``None`` when the
            communicator was stopped or a non-data message arrived.
        """
        # Also guard against a missing subscription (profiling jobs), which
        # previously crashed with AttributeError on ``None.listen()``.
        if not self._comm or not self._job or self._sub_channel_from_scheduler is None:
            return [0]
        for msg in self._sub_channel_from_scheduler.listen():
            if not self._running:
                return None
            if isinstance(msg['data'], bytes):
                # SECURITY NOTE: pickle.loads on channel data is only
                # acceptable because the Redis instance is assumed to be a
                # trusted, internal component — never expose it publicly.
                return pickle.loads(msg['data'])
            # Subscribe confirmations carry an int payload; stop listening.
            return None
        return None

    def pub_info_to_resource_manager(self):
        r"""Publish a FINISHED notification on the publishing channel.

        Returns:
            bool: True on success, False when not attached to a job/client.
        """
        if not self._comm or not self._job:
            return False
        self._comm.publish(self.get_pub_channel(), CommMsg.FINISHED)
        return True

