# -*- coding: utf-8 -*-
#
# Copyright 2009 Vanderbilt University
# 
# Licensed under the Apache License, Version 2.0 (the "License"); 
# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at 
# 
#     http://www.apache.org/licenses/LICENSE-2.0 
# 
# Unless required by applicable law or agreed to in writing, software 
# distributed under the License is distributed on an "AS IS" BASIS, 
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and 
# limitations under the License. 

"""
HORNET provides a system for queueing and distributing CPU intensive tasks.
The system allows scaling proportional to the number of available CPUs.
The jobs can even be distributed to remote computers.

.. moduleauthor:: John Paulett <john.paulett -at- vanderbilt.edu>
"""

from __future__ import with_statement

import thread
import threading
import time

import processing
import pyjobs

class Job(processing.Process):
    """A unit of work that runs in its own process.

    Parameters:
    callback - a zero-argument callable invoked by the runner once the
        job's process has finished, or None for no notification.
    """

    def __init__(self, callback):
        # Initialize the underlying process, then remember the
        # completion hook so JobRunner can fire it after the job dies.
        processing.Process.__init__(self)
        self.callback = callback

class CallbackThread(threading.Thread):
    """Invoke a job-completion callback on a dedicated thread.

    Bug fixes versus the original:
    - __init__ accepted ``callback`` but never stored it.
    - run() referenced an undefined global ``job``, so every started
      thread died with a NameError; it now uses the stored callback.
    """

    def __init__(self, callback):
        """
        Parameters:
        callback - a zero-argument callable to invoke, or None for a no-op.
        """
        threading.Thread.__init__(self)
        self.callback = callback

    def run(self):
        # None means the job carries no completion hook; do nothing.
        if self.callback is not None:
            self.callback()
        

class JobRunner(threading.Thread):
    """Scheduler thread that drains a FIFO job queue, keeping at most
    ``nprocesses`` jobs running concurrently.
    """

    def __init__(self, nprocesses=None):
        """
        Parameters:
        nprocesses - the number of processes to have concurrently running.
            Default (None): the number of processors on the machine,
            resolved per-instance here rather than once at import time
            as the original default argument did.
        """
        threading.Thread.__init__(self)
        if nprocesses is None:
            nprocesses = processing.cpuCount()
        self._nprocesses = nprocesses
        # FIFO queue: add_job() inserts at index 0, __start_job() pops
        # from the end, so the oldest job starts first.
        self._queue = []
        self._running = []
        # Initialized up front so the ``finished`` property is usable
        # before run() has executed (the original raised AttributeError).
        self.__finished = False

    def add_job(self, job):
        """Enqueue *job* for later execution.

        Possibly not thread-safe if an external thread calls this method
        while run() is executing; Effbot indicates the GIL protects
        single list operations:
        http://effbot.org/pyfaq/what-kinds-of-global-value-mutation-are-thread-safe.htm
        """
        self._queue.insert(0, job)

    def kill_job(self, job):
        """Terminate *job* early.

        TODO(review): not implemented yet -- currently a silent no-op.
        """
        pass

    def run(self):
        """Scheduling loop: reap finished jobs and start queued ones
        until interrupted by KeyboardInterrupt or SystemExit.
        """
        try:
            while True:
                # Currently asymmetric: multiple jobs can be reaped per
                # iteration, but only a single one is started.
                self.__handle_finished()
                self.__start_job()
                # Yield the CPU between polls; the original loop
                # busy-spun at 100% CPU.
                time.sleep(0.01)
        except (KeyboardInterrupt, SystemExit):
            # Make sure to kill child processes to avoid runaways.
            for job in self._running:
                try:
                    job.terminate()
                except Exception:
                    # Best-effort cleanup; the process may already be gone.
                    pass
            raise
        finally:
            # NOTE: the original also called self.runtime.stop() here,
            # but no ``runtime`` attribute exists anywhere in this class,
            # so the finally block always raised AttributeError and
            # masked the real exit path.  That call has been removed.
            self.__finished = True

    def __handle_finished(self):
        """Reap finished processes, firing each job's callback, so that
        slots free up for new processes.
        """
        remove_jobs = []
        for job in self._running:
            if not job.isAlive():
                # Avoid removing finished jobs from the list while
                # iterating over it.
                remove_jobs.append(job)

                # FIXME move to another thread to allow further processing
                #CallbackThread(job.callback).start()
                if job.callback is not None:
                    job.callback()

        for job in remove_jobs:
            self._running.remove(job)

    def __start_job(self):
        """Start the oldest queued job if a process slot is available."""
        if len(self._running) < self._nprocesses and len(self._queue) > 0:
            job = self._queue.pop()
            self._running.append(job)
            job.start()

    @property
    def finished(self):
        # True once run() has exited, normally or via exception.
        return self.__finished


