"""
Created on 23-Jul-2009

@author: dexen
"""



import os
import sys
import time

import rpyc
from rpyc.utils import server
from multiprocessing import Process, Queue, Manager, Lock

import dexen
from dexen.system import util
from dexen.system.constants import NS_NAME
from dexen.system.agent import queue_manager as qmgr, population as pop

#===============================================================================
# GLOBALS
#===============================================================================
DISCOVERY_INTERVAL = 60 # SECS between naming-server polls in discover_slaves

#===============================================================================
# SYNCHRONIZED SHARED DATA STRUCTURES 
#===============================================================================
# task_name -> multiprocessing.Queue of pending work; populated lazily by
# TaskService._setup_queues, consumed by the per-slave dispatcher processes.
queues = {} 
# task_name -> Lock guarding the queue with the same key.
qlocks = {}
# Manager-backed dict of individuals shared across dispatcher processes.
# NOTE(review): Manager() starts its server process as a side effect of
# importing this module.
inds_db = Manager().dict() 
ind_db_lock = Lock()  # guards access to inds_db

#===============================================================================
# HELPER FUNCTIONS
#===============================================================================
def dispatch_task(schema_name, schema_zip, pop_name, tasks_info, queues, 
                   slave_name):
    print "I am in job dispatcher"
    conn = rpyc.connect_by_service(service_name=slave_name, 
            config={ "allow_pickle":True, "instantiate_custom_exceptions":True})
    
    sys.path.append(os.path.join(os.getcwd(), "temp")) #TODO: check
    os.chdir(os.path.join(os.getcwd(), "temp"))
    
    slave = conn.root
    slave.setup_schema(schema_zip, schema_name, tasks_info)
    slave_attrs = slave.get_attributes()
    pop_mgr = pop.Population(inds_db, ind_db_lock, tasks_info)
    queue_mgr = qmgr.QManager(queues, qlocks, tasks_info, pop_mgr)
    
    while True:
        print queue_mgr
        task_name, input = queue_mgr.get_inds()
        print "executing ", task_name
        try:
            output = slave.execute_task(task_name, input)
            output = rpyc.classic.obtain(output)
            print "finished"            
        except Exception, e:
            print e
            print "Slave has an exception I am recovering and exiting!"
            #import traceback
            #traceback.print_tb(sys.exc_info()[2])
            queue_mgr.recover_inds(input, task_name)
            break
        queue_mgr.set_inds(task_name, output)

def discover_slaves(schema_name, schema_zip, pop_name, tasks_info):
    """Administer connecting/disconnecting slaves.
    
    """
    conn = rpyc.connect_by_service(service_name=NS_NAME)
    naming_server = conn.root
    procs = []
    while True:
        slave_names = naming_server.get_free_slave_names()
        print "free slaves", slave_names
        for slave_name in slave_names:
            proc = Process(target=dispatch_task, 
                           args=(schema_name, schema_zip,pop_name, tasks_info, 
                                 queues, slave_name))
            proc.daemonic = True
            procs.append(proc)
            proc.start()
        time.sleep(DISCOVERY_INTERVAL)


#===============================================================================
# SERVICE CLASS
#===============================================================================
class TaskService(dexen.Service):    
    def on_connect(self):
        super(TaskService, self).on_connect()
    
    def on_disconnect(self):
        super(TaskService, self).on_disconnect()
    
    def _get_task_obj(self, file_name, class_name):
        module = __import__(file_name, fromlist=[""])
        classobj = getattr(module, class_name)    
        return classobj()
    
    def _execute_init(self, schema_name, tasks_info):
        prev_dir = os.getcwd()
        os.chdir(os.path.join(prev_dir, "temp"))   
    
        for task_name, item in tasks_info.items():
            #obj = self._get_task_obj(item["file_name"], item["class_name"])
            obj = item["obj"]
            if obj.__isinit__():
                print task_name, "is initdir"
                obj.__initialize__()
                res = obj.__execute__()
                pop_mgr = pop.Population(inds_db, ind_db_lock, tasks_info)
                q_mgr = qmgr.QManager(queues, qlocks, tasks_info, pop_mgr)
                q_mgr.set_inds(task_name, res)
        os.chdir(prev_dir)
        
    def _setup_schema(self, schema_name):
        schema_zip, modified_time = backend.get_schema(schema_name)        
        temp_dir = os.path.abspath("temp")
        sys.path.append(temp_dir)
        util.unzip(schema_zip, temp_dir) # TODO: consider folder structure
        return schema_zip
        
    def _setup_queues(self, schema_name, pop_name):
        """Initialize queues and their associated locks.
        
        After initialization of data structures, init task has been executed
        to kick-off the run. tasks_info is retrieved from the main database.
        
        """
        global queues, qlocks

        tasks_info = backend.get_tasks(schema_name, pop_name)
        tasks_info = rpyc.classic.obtain(tasks_info)
    
        for task_name, item in tasks_info.items():
            obj = self._get_task_obj(item["file_name"], item["class_name"])
            item["obj"] = obj
            if obj.__hasinput__():
                queues[task_name] = Queue()
                qlocks[task_name] = Lock()
        
        self._execute_init(schema_name, tasks_info)
                
        print tasks_info, "*"*23   
        return tasks_info
    
    def run_population(self, schema_name, pop_name):
        print "running in population"
        schema_zip = self._setup_schema(schema_name)
        tasks_info = self._setup_queues(schema_name, pop_name)
        self.slave_mgr = Process(target=discover_slaves, 
                args=(schema_name, schema_zip, pop_name, tasks_info))
        self.slave_mgr.daemonic = True
        print "starting slave manager"
        self.slave_mgr.start()
          
    def stop_population(self):
        pass
    
#===============================================================================
#  MAIN
#===============================================================================
def main():
    """Entry point: bind the backend globals, register with the task
    manager, and serve TaskService requests forever."""
    global db_conn, backend

    # Backend connection is held in module globals so the service class
    # can reach it from request handlers.
    db_conn = rpyc.connect_by_service(service_name="BACKEND")
    backend = db_conn.root

    # Register with the task manager; it hands back the alias under which
    # this service should advertise itself.
    tm_conn = rpyc.connect_by_service(service_name="TASK_MANAGER")
    (task_name,) = tm_conn.root.register_task()
    TaskService.ALIASES = [task_name]

    threaded = server.ThreadedServer(TaskService,
                                     protocol_config={"allow_pickle": True})
    threaded.start()

if __name__ == '__main__':
    main()
