'''
Created on Jan 25, 2011

@author: dexen
'''

import os
import sys
import zlib
import time
import cPickle
import threading

import rpyc

from dexen.system.common import service, utils
from dexen.system.common.excepts import (JobNotExistsException,
                                         DexenInvalidJobDefError,
                                         DexenConnectionError,
                                         DexenInvalidJobNameError)
from dexen.system.client.controller.action_mgr import GetActionMgr
from dexen.system.client.controller import timeIt



#===============================================================================
# Server Proxy
#===============================================================================
class ServerProxy(object):
    """Client-side proxy to a Dexen server.

    Holds the rpyc connection to the server, caches basic server identity
    (ip, name, start time) and hands out per-job JobProxy objects, which it
    re-binds whenever the connection is re-established.
    """

    def __init__(self):
        self.srv_conn = None          # active rpyc connection; None while disconnected
        self.job_proxies = {}         # job_info -> JobProxy, cached across reconnects
        self.lock = threading.RLock()
        self.svr_ip = None
        self.svr_start_time = None
        # Initialized here so get_name() is safe to call before connect();
        # previously this attribute only existed after connect() succeeded.
        self.svr_name = None

    def connect(self, reg_host):
        """Connect to the server registered at reg_host and cache its identity.

        Existing cached JobProxy objects are re-bound to the new connection
        so callers can keep using them after a reconnect.
        """
        self.srv_conn = service.get_svr_conn(reg_host)
        self.svr_ip = self.srv_conn.root.get_ip()
        self.svr_start_time = self.srv_conn.root.get_start_time()
        self.svr_name = self.srv_conn.root.get_name()

        # Reconnect all the cached job_proxies
        for job_proxy in self.job_proxies.values():
            assert isinstance(job_proxy, JobProxy)
            job_proxy.reconnect(self.srv_conn)

    def disconnect(self):
        """Close the connection; is_connected() reports False afterwards."""
        self.srv_conn.close()
        self.srv_conn = None

    def is_connected(self):
        """Return True if a server connection is currently held."""
        return self.srv_conn is not None

    def check_connection(self):
        """Raise DexenConnectionError when no server connection is held."""
        if self.srv_conn is None:
            err_msg = "Server is not reachable.\n"
            err_msg += "Please start a server first."
            raise DexenConnectionError(err_msg)

    def get_job_proxy(self, job_info):
        """Return the cached JobProxy for job_info, creating it on demand."""
        # dict.has_key() is deprecated (and gone in Python 3); use `in`.
        if job_info not in self.job_proxies:
            self.job_proxies[job_info] = JobProxy(job_info, self.srv_conn)
        return self.job_proxies[job_info]

    def get_svr_info(self):
        """Build a ServerInfo snapshot from the cached server identity."""
        from dexen.system.client.db import ServerInfo
        svr_info = ServerInfo()
        svr_info.set_info(self.get_ip(), self.get_start_time(), self.is_connected())
        return svr_info

    def get_ip(self):
        """IP address reported by the server, or None before connect()."""
        return self.svr_ip

    def get_name(self):
        """Name reported by the server, or None before connect()."""
        return self.svr_name

    def get_start_time(self):
        """Start time reported by the server, or None before connect()."""
        return self.svr_start_time

    def _generate_job_def(self, settings_path):
        """Generate the job definition files from a settings.xml file."""
        from dexen.system.client.generator_selector import GeneratorSelector
        gen = GeneratorSelector(settings_path)
        gen.generate()

    def _is_valid_job_name(self, job_name):
        """Return True if job_name is non-empty and made up only of
        alphanumeric characters and underscores."""
        if not job_name:
            return False
        return all(ch.isalnum() or ch == "_" for ch in job_name)

    def start_job(self, job_name, job_def_dir):
        """Validate the job definition directory and name, then start the job.

        Raises DexenInvalidJobDefError when job_def_dir holds neither a
        settings.xml nor a master_task.py, and DexenInvalidJobNameError when
        job_name contains anything other than alphanumerics or '_'.
        """
        settings_path = os.path.join(job_def_dir, "settings.xml")
        master_task_path = os.path.join(job_def_dir, "master_task.py")

        # The directory must contain at least one of the two entry files.
        if not os.path.isfile(settings_path) and \
           not os.path.isfile(master_task_path):
            err_msg = "The directory for the job definition is not valid.\n"
            err_msg += "It must contain either master task (master_task.py)"
            err_msg += " or settings file (settings.xml)."
            raise DexenInvalidJobDefError(err_msg)

        if os.path.isfile(settings_path):
            self._generate_job_def(settings_path)

        if not self._is_valid_job_name(job_name):
            err_msg = "The specified job name is not valid.\n"
            err_msg += "The job name can only contain alphanumeric characters"
            err_msg += " or '_' character."
            raise DexenInvalidJobNameError(err_msg)

        job_def_zip = utils.get_schema_zip(job_def_dir)
        self.srv_conn.root.start_job(job_def_zip, job_name)

    def stop_job(self, job_info):
        """Stop the job identified by job_info via its JobProxy."""
        job_proxy = self.get_job_proxy(job_info)
        assert isinstance(job_proxy, JobProxy)
        job_proxy.stop()

    def delete_job(self, job_info):
        """Delete the job identified by job_info via its JobProxy."""
        job_proxy = self.get_job_proxy(job_info)
        assert isinstance(job_proxy, JobProxy)
        job_proxy.delete()

    def get_node_states(self):
        """Fetch node states from the server as local (non-netref) objects."""
        node_states = self.srv_conn.root.get_node_states()
        node_states = rpyc.classic.obtain(node_states)
        return node_states

    def get_job_states(self):
        """Fetch job states from the server as local objects.

        An EOFError from the dropped rpyc connection is translated into
        DexenConnectionError so callers handle one exception type.
        """
        try:
            job_states = self.srv_conn.root.get_job_states()
            job_states = rpyc.classic.obtain(job_states)
            return job_states
        except EOFError:
            raise DexenConnectionError()

    def get_job_infos(self):
        """Return a JobInfo per server-side job, each tagged with the
        current server's ServerInfo."""
        from dexen.system.client.db import JobInfo
        from dexen.system.client.db import ServerInfo
        job_infos = []
        job_states = self.get_job_states()
        svr_info = ServerInfo()
        svr_info.set_info(self.get_ip(), self.get_start_time(), True)
        for job_state in job_states:
            job_info = JobInfo(job_state)
            job_info.set_svr_info(svr_info)
            job_infos.append(job_info)
        return job_infos

#===============================================================================
# Job Proxy
#===============================================================================
class JobProxy(object):
    def __init__(self, job_info, srv_conn):
        self.job_info = job_info
        self.srv_conn = srv_conn
        self.job_id = job_info.get_id()
    
    def reconnect(self, srv_conn):
        self.srv_conn = srv_conn
    
    def stop(self):
        self.srv_conn.root.stop_job(self.job_id)
    
    def delete(self):
        self.srv_conn.root.delete_job(self.job_id)

    @property
    def action_mgr(self):
        return GetActionMgr(self.job_info)
    
    @property
    def last_act_id(self):
        return self.action_mgr.last_act_id
   
    def synch_new_actions(self, num_actions=0):
        new_actions = self.srv_conn.root.get_actions(self.job_id, self.last_act_id, num_actions)
        new_actions = rpyc.classic.obtain(new_actions)
        self.action_mgr.process_actions(new_actions)
        return {
            "new_actions" : new_actions, 
            "updated_inds" : self.action_mgr.get_last_updated_inds()
        }
    
    def num_new_actions(self):
        return self.srv_conn.root.num_actions(self.job_id) - self.last_act_id
        
    def get_ind_archive_names(self, ind_id):
        archive_names = self.srv_conn.root.get_ind_archive_names(self.job_id, ind_id)
        archive_names = rpyc.classic.obtain(archive_names)
        return archive_names
    
    def get_ind_archive(self, ind_id, name):
        content = self.srv_conn.root.get_ind_archive(self.job_id, ind_id, name)
        content = rpyc.classic.obtain(content)
        return content
    
    def get_ind_archives(self, ind_id, archive_names):
        archives = self.srv_conn.root.get_ind_archives(self.job_id, ind_id)
        archives = rpyc.classic.obtain(archives)
        return archives # {name: content}
    
    def get_ind_archive_all(self, ind_id):
        archives = self.srv_conn.root.get_ind_archive_all(self.job_id, ind_id)
        archives = rpyc.classic.obtain(archives)
        return archives # {name: content}
    
    def get_job_info(self):
        job_state = self.srv_conn.root.get_job_info(self.job_id)
        job_state = rpyc.classic.obtain(job_state)
        return job_state
    
    def get_zip(self):
        zip_data = self.srv_conn.root.get_job_def(self.job_id)
        zip_data = rpyc.classic.obtain(zip_data)
        return zip_data
    
    def get_archives_in_batch(self, archives_info_list, batch_size=1000):
        # batch_size is in KB
        archives_info_list = cPickle.dumps(archives_info_list)
        print "len of archives info list", len(archives_info_list)
        archives_info_list = zlib.compress(archives_info_list)
        print "len of compressed archives info list", len(archives_info_list)
        print "before get_archives_in_batch", time.time()
        archive_list = self.srv_conn.root.get_archives_in_batch(self.job_id, archives_info_list, batch_size)
        print "after get_archives_in_batch", time.time()
        archive_list = rpyc.classic.obtain(archive_list)
        print "after obtain archives", time.time()
        return archive_list
