"""
Asynchronous Job Manager
"""
from cache import *
from gputools import *
from pool import *
import cPickle, multiprocessing, threading, os, os.path, time, hashlib, random, gc

class Job(object):
    """
    A unit of asynchronous work: wraps a function and its arguments, and
    supports evaluation, persistence to disk as a pickle file, and later
    retrieval by a unique identifier.
    """
    # Directory where pickled jobs are stored (JOBDIR comes from a star import).
    jobdir = JOBDIR
    
    def __init__(self, function, *args, **kwds):
        """
        Constructor.
        
        *Arguments*
        
        `function`
          The function to evaluate, it is a native Python function. Function
          serialization should take care of ensuring that this function is correctly
          defined in the namespace.
        
        `*args, **kwds`
          The arguments of the function.
        """
        self.function = function
        self.args = args
        self.kwds = kwds
        self.result = None
        self.status = 'queued'  # one of: 'queued', 'processing', 'finished', 'crashed'
    
    def computeid(self):
        """
        Computes and stores a unique identifier of the job.
        
        The identifier is a SHA1 digest mixing the process id, the current
        time, a random number and the pickled arguments, making collisions
        between concurrently created jobs very unlikely. Must be called
        before `self.id` or `self.filename` are accessed.
        """
        m = hashlib.sha1()
        pid = os.getpid() # current process id
        t = int(time.time()*10000) % 1000000 # sub-second time component
        s = str(self.function) # function name/repr
        s += cPickle.dumps(self.args, -1) # args dump
        s += cPickle.dumps(self.kwds, -1) # kwds dump
        m.update(str(random.random())) # random salt
        m.update(str(pid))
        m.update(str(t))
        m.update(s)
        # renamed from `hash` to avoid shadowing the builtin
        digest = m.hexdigest()
        self._id = digest
        return self.getid()
    
    def getid(self):
        # NOTE(review): raises AttributeError if computeid() has not been
        # called yet -- callers are expected to call computeid() first.
        return self._id
    
    def getfilename(self):
        """Full path of the pickle file associated with this job."""
        return os.path.join(Job.jobdir, self.id+'.pkl')
    
    id = property(getid)
    filename = property(getfilename)
    
    def evaluate(self):
        """
        Evaluates the function on the given arguments.
        
        Returns the result. On error, the exception instance is stored as
        the result and the status becomes 'crashed' instead of propagating,
        so that a crashed job cannot bring down the worker process.
        """
        try:
            # TODO: function code pickling
            self.status = 'processing'
            self.result = self.function(*self.args, **self.kwds)
            self.status = 'finished'
        except Exception as inst:
            # deliberate catch-all: the exception becomes the job result
            self.result = inst
            self.status = 'crashed'
        return self.result
    
    def record(self):
        """
        Records the job after evaluation on the disk.
        
        NOTE(review): this pickles the whole object including
        ``self.function``; pickling fails for functions that are not
        importable by name (lambdas, closures) -- see the disabled
        workaround in the original code.
        """
        if not os.path.exists(self.jobdir):
            log.debug("creating '%s' folder for code pickling" % self.jobdir)
            os.mkdir(self.jobdir)
        log.debug("writing '%s'" % self.filename)
        # `with` guarantees the file is closed even if pickling raises
        # (the original leaked the handle on a pickling error).
        with open(self.filename, 'wb') as f:
            cPickle.dump(self, f, -1)

    @staticmethod
    def load(id):
        """
        Returns the Job object stored in the filesystem using its identifier,
        or None if the job file does not exist.
        """
        filename = os.path.join(Job.jobdir, id+'.pkl')
        try:
            log.debug("opening file '%s'" % filename)
            # `with` closes the file even if unpickling raises.
            with open(filename, 'rb') as f:
                job = cPickle.load(f)
        except IOError:
            log.warn("file '%s' not found" % filename)
            job = None
        return job

    @staticmethod
    def erase(id):
        """
        Erases the Job object stored in the filesystem using its identifier.
        """
        filename = os.path.join(Job.jobdir, id+'.pkl')
        log.debug("erasing '%s'" % filename)
        os.remove(filename)

    @staticmethod
    def erase_all():
        """
        Erases all Job objects stored in the filesystem.
        """
        files = os.listdir(Job.jobdir)
        log.debug("erasing all files in '%s'" % Job.jobdir)
        # plain loop: executed for side effects, not to build a list
        for filename in files:
            os.remove(os.path.join(Job.jobdir, filename))

def eval_job(job):
    """
    Evaluates a single job and persists it to disk afterwards.

    Must be a module-level function so it can be passed to CustomPool.
    Exceptions raised by the job are captured by Job.evaluate() itself.
    """
    outcome = job.evaluate()
    job.record()
    return outcome




class AsyncJobHandler(object):
    """
    A Handler object handling asynchronous job management, on the server side.

    Jobs are submitted with submit(), evaluated in worker processes via a
    CustomPool, and their status/results are read back either from the job
    files on disk or from the in-memory Job objects.
    """
    def __init__(self, max_cpu = None, max_gpu = None):
        """
        max_cpu is the maximum number of CPUs dedicated to the cluster
        idem for max_gpu
        None = use all CPUs/GPUs available
        """
        # BUG FIX: this was initialized as a list, but get_idle_cpu() and
        # get_idle_gpu() iterate it with .itervalues() -- it must be a dict
        # (of dicts of handlers).
        self.handlers = {}
        if max_cpu is None:
            max_cpu = MAXCPU
        if max_gpu is None:
            max_gpu = MAXGPU
        self.max_cpu = max_cpu
        self.max_gpu = max_gpu
        self.cpu = 0  # CPUs currently reserved by this handler
        self.gpu = 0  # GPUs currently reserved by this handler
        self.pool = None  # CustomPool, created by submit()
        self.docollect = False
        # BUG FIX: `client` is compared against in get_idle_cpu/gpu but was
        # never initialized; presumably it is set later by the RPC layer --
        # TODO confirm against the server code.
        self.client = None
        self.jobs = []    # Job objects submitted to this handler
        self.jobids = []  # their identifiers, parallel to self.jobs
        self.all_jobs = {}  # id -> Job, for in-memory status lookup
    
    def add_jobs(self, jobs):
        """
        Registers new jobs with the handler and computes their identifiers.
        """
        self.jobs.extend(jobs)
        # BUG FIX: the original iterated over self.jobs here, which
        # recomputed the ids of ALL previously added jobs and appended
        # duplicate ids on every call; only the new jobs must be processed.
        self.jobids.extend([job.computeid() for job in jobs])
        for job in jobs:
            self.all_jobs[job.id] = job
    
    def get_idle_cpu(self):
        """
        Returns the maximum number of CPUs that can be used for the current handler.
        """
        n = self.max_cpu
        # subtract the CPUs reserved by handlers belonging to other clients
        for hs in self.handlers.itervalues():
            for h in hs.itervalues():
                if isinstance(h, AsyncJobHandler) and h.client is not self.client:
                    n -= h.cpu
        return max(n, 0)
        
    def get_idle_gpu(self):
        """
        Returns the maximum number of GPUs that can be used for the current handler.
        """
        n = self.max_gpu
        # subtract the GPUs reserved by handlers belonging to other clients
        for hs in self.handlers.itervalues():
            for h in hs.itervalues():
                if isinstance(h, AsyncJobHandler) and h.client is not self.client:
                    n -= h.gpu
        return max(n, 0)
        
    def set_cpu(self, cpu = None):
        """
        Reserves `cpu` CPUs (capped by availability), or all idle CPUs if None.
        """
        if cpu is None:
            self.cpu = self.get_idle_cpu()
        else:
            self.cpu = min(cpu, self.get_idle_cpu())
        return True
        
    def set_gpu(self, gpu = None):
        """
        Reserves `gpu` GPUs (capped by availability), or all idle GPUs if None.
        """
        if gpu is None:
            self.gpu = self.get_idle_gpu()
        else:
            self.gpu = min(gpu, self.get_idle_gpu())
        return True
        
    def get_cpu(self):
        """
        Returns the CPU count for each active handler in the current machine.
        """
        if self.cpu == 0:
            self.set_cpu()  # lazily reserve on first use
        return self.cpu
    
    def get_gpu(self):
        """
        Returns the GPU count for each active handler in the current machine.
        """
        if self.gpu == 0:
            self.set_gpu()  # lazily reserve on first use
        return self.gpu
    
    def submit(self, jobs, use_gpu = False):
        """
        Submit jobs.
        
        *Arguments*
        
        `jobs`
          A list of Job objects.
        
        Returns the list of all job identifiers known to this handler.
        """
        self.add_jobs(jobs)
        if use_gpu:
            units = self.get_gpu()
            unit_type = 'GPU'  # renamed: `type` shadowed the builtin
        else:
            units = self.get_cpu()
            unit_type = 'CPU'

        self.pool = CustomPool(units, type=unit_type)
        self.pool.map(eval_job, jobs) # evaluates the jobs in new processes
        
        return self.jobids
    
    def get_status(self, jobids):
        """
        Returns the list of statuses for the given job identifiers.
        
        Finished jobs are read back from their files on disk; jobs whose
        file does not exist yet are looked up in memory instead.
        """
        # TODO: check this
        if jobids is not None:
            statuses = []
            for id in jobids:
                job = Job.load(id)
                if job is not None:
                    log.debug("job file found")
                    status = job.status
                else:
                    # no file yet: the job has probably not finished, so
                    # fall back to the in-memory Job object
                    log.debug("job file NOT found")
                    status = self.all_jobs[id].status
                statuses.append(status)
            return statuses
        if self.pool is not None:
            return self.pool.get_status()
        # unreachable `return False` after the raise has been removed
        raise Exception("Please specify job identifiers.")
    
    def get_results(self, jobids):
        """
        Returns the list of results for the given job identifiers,
        waiting on the pool for jobs that have not finished yet.
        """
        log.debug(jobids)
        if jobids is not None:
            results = []
            for id in jobids:
                job = Job.load(id)
                if job is not None:
                    result = job.result
                else:
                    # if job is None, it means that it probably has not finished yet
                    result = None
                    # BUG FIX: guard against self.pool being None before
                    # joining (the original joined unconditionally and the
                    # guard was commented out).
                    if self.pool is not None:
                        log.debug("Tasks have not finished yet, waiting...")
                        self.pool.join()
                        job = Job.load(id)
                        if job is not None:
                            result = job.result
                results.append(result)
            return results
        if self.pool is not None:
            return self.pool.get_results()
        raise Exception("Please specify job identifiers.")
    
    def has_finished(self):
        """
        Returns whether all submitted jobs have been processed.
        """
        if self.pool is None:
            raise Exception("Jobs have not been submitted yet.")
        return self.pool.has_finished()
    
    def close(self):
        """
        Closes the worker pool.
        """
        if self.pool is None:
            raise Exception("Jobs have not been submitted yet.")
        self.pool.close()
        # BUG FIX: collect_thread is only created by the disabled collection
        # code, so joining it unconditionally raised AttributeError.
        thread = getattr(self, 'collect_thread', None)
        if thread is not None:
            thread.join(10.0)
    
    def kill(self):
        """
        Kills the worker pool immediately.
        """
        if self.pool is None:
            raise Exception("Jobs have not been submitted yet.")
        self.pool.kill()
