'''
Created on Aug 12, 2012

@author: Cihat Basol
'''


import zlib

from dexen.system import client
from dexen.system.client import db
from dexen.system.client.controller.db_syncher import GetDBSyncher
from dexen.system.client.proxy import JobProxy
from dexen.system.client.controller import timeIt

# Shared client-side singletons used by every syncher in this module.
svr_proxy = client.GetServerProxy()
db_syncher = GetDBSyncher()

ARCHIVE_BATCH_SIZE = 2000 # in KB

# Cache of ArchiveSyncher instances, keyed by job_info (one syncher per job).
# Managed via GetArchiveSyncher/clear/delete_syncher below.
_synchers = {}

def GetArchiveSyncher(job_info):
    """Return the cached ArchiveSyncher for ``job_info``, creating it on
    first request (one syncher per job).
    """
    # ``dict.has_key`` is deprecated (removed in Python 3); ``in`` is the
    # portable spelling.  No ``global`` needed: we mutate the dict, we do
    # not rebind the name.
    if job_info not in _synchers:
        _synchers[job_info] = ArchiveSyncher(job_info)
    return _synchers[job_info]

def clear():
    """Forget every cached ArchiveSyncher.

    The shared dict is emptied in place, so any module that already holds
    a reference to ``_synchers`` observes the reset.
    """
    _synchers.clear()

def delete_syncher(job_info):
    """Remove the cached syncher for ``job_info``; no-op if absent."""
    # pop() with a default replaces the deprecated has_key() test and the
    # double dict lookup, with identical behavior.
    _synchers.pop(job_info, None)

class ArchiveSyncher(object):
    """Synchronises individuals' archives between the server and the local
    DB for one job.

    Archives travel and are persisted zlib-compressed; ``get_archive``
    decompresses on the way out.  ``self.archives_info`` maps each known
    ``ind_id`` to the set of archive names already persisted locally.
    """

    def __init__(self, job_info):
        self.job_info = job_info
        self.svr_info = job_info.get_svr_info()
        self.job_id = job_info.get_id()
        # ind_id -> set of locally-persisted archive names.
        self.archives_info = {}

    def on_inds_updated(self, inds, islocal):
        """Register newly seen individuals.

        Locally-originated individuals start with the archive names already
        in the local DB; remote ones start empty so mass_synch fetches
        everything.
        """
        for ind in inds:
            ind_id = ind._get_id()
            if ind_id in self.archives_info:
                continue
            if islocal:
                names = db_syncher.get_ind_archive_names(ind_id, self.job_info)
                self.archives_info[ind_id] = set(names)
            else:
                self.archives_info[ind_id] = set()

    def get_archive(self, ind_id, name):
        """Return the decompressed archive ``name`` for ``ind_id``.

        Consults the local DB first; on a miss, fetches from the server and
        persists the compressed blob locally.  Returns "" when the archive
        is empty or unobtainable (e.g. server disconnected).
        """
        content = db_syncher.get_ind_archive(ind_id, name, self.job_info)
        # BUGFIX: the original guard (``content is not None or not
        # ...is_connected()``) returned "" exactly when the archive WAS
        # found locally.  Local blobs are stored compressed (see the fetch
        # path below), so decompress a cache hit just like a fresh fetch.
        if content is not None:
            return zlib.decompress(content) if content else ""
        if not self.svr_info.is_connected():
            return ""
        job_proxy = svr_proxy.get_job_proxy(self.job_info)
        assert isinstance(job_proxy, JobProxy)
        content = job_proxy.get_ind_archive(ind_id, name)
        db_syncher.persist_ind_archive(ind_id, name, content, self.job_info, True)
        if content:
            return zlib.decompress(content)
        return ""

    def synch_item(self, ind_id, archive_name):
        """Fetch one archive from the server into the local DB if missing.

        Intentionally returns early on a local cache hit (already synched)
        or when disconnected.  Does not commit; the caller commits once per
        batch (see ``synch``).
        """
        already_local = db_syncher.get_ind_archive(ind_id, archive_name,
                                                   self.job_info) is not None
        if already_local or not self.svr_info.is_connected():
            return
        job_proxy = svr_proxy.get_job_proxy(self.job_info)
        assert isinstance(job_proxy, JobProxy)
        content = job_proxy.get_ind_archive(ind_id, archive_name)
        db_syncher.persist_ind_archive(ind_id, archive_name, content,
                                       self.job_info, False)

    @timeIt
    def synch(self, ind_id):
        """Synch every archive of ``ind_id`` from the server, then commit."""
        if not self.svr_info.is_connected():
            return
        job_proxy = svr_proxy.get_job_proxy(self.job_info)
        assert isinstance(job_proxy, JobProxy)
        for archive_name in job_proxy.get_ind_archive_names(ind_id):
            self.synch_item(ind_id, archive_name)
        db_syncher.commit()

    @timeIt
    def mass_synch(self, ind_id):
        """Batch-fetch missing archives for all inds with id >= ``ind_id``.

        Returns {"len": <inds retrieved>, "last_id": <highest ind id seen,
        or the input id if nothing was retrieved>} so the caller can resume.
        """
        if not self.svr_info.is_connected():
            return { "len": 0, "last_id": -1 }

        job_proxy = svr_proxy.get_job_proxy(self.job_info)
        assert isinstance(job_proxy, JobProxy)

        last_id = ind_id
        # Tell the server what we already hold, so it only sends the rest.
        archives_info_list = [
            { "ind_id": known_id, "archives": known_names }
            for known_id, known_names in self.archives_info.items()
            if known_id >= last_id
        ]

        archive_list = job_proxy.get_archives_in_batch(archives_info_list,
                                                       ARCHIVE_BATCH_SIZE)
        # Single-argument print() behaves identically on Python 2 and 3.
        print("retrieved %d inds for archive in batch" % (len(archive_list),))
        for item in archive_list:
            for name, content in item["archives"].items():
                db_syncher.persist_ind_archive(item["ind_id"], name, content,
                                               self.job_info, False)
        db_syncher.commit()

        # Only record names as known after the persist/commit succeeded.
        for item in archive_list:
            self.archives_info[item["ind_id"]].update(item["archives"].keys())

        if len(archive_list) > 0:
            last_id = max(item["ind_id"] for item in archive_list)

        return { "len": len(archive_list), "last_id": last_id }