'''
Created on 15.12.2009

@author: claudiugh
'''

from apriori import AprioriRulesGenerator 
from threading import Thread, RLock, Condition 

class GlobalCandidates(object):
    """Thread-safe container for the candidate itemsets shared by all workers.

    Holds the current generation of candidate itemsets plus a backup of
    the previous generation, so the final frequent itemsets can still be
    retrieved after the last (empty) generation replaces the current set.
    """

    def __init__(self):
        self._candidates = set()        # current generation of candidate itemsets
        self._prev_candidates = None    # previous generation, saved by backup()
        self._lock = RLock()            # guards every mutation of _candidates

    def __iter__(self):
        # NOTE(review): iteration is deliberately unlocked; callers
        # synchronize through the barrier so the set is stable while read.
        return iter(self._candidates)

    def __nonzero__(self):
        # Python 2 truth protocol: non-empty means "there is work left".
        return bool(self._candidates)

    # Python 3 spells the truth protocol __bool__; alias it so bool()
    # keeps working if the module is ever run under Python 3.
    __bool__ = __nonzero__

    def backup(self):
        """Archive the current candidates and start a fresh generation.

        Does nothing when the current set is empty, so the last
        non-empty generation is preserved for get_backup().
        """
        # BUGFIX: take the lock — backup() mutates the same state that
        # add()/add_collection() protect.
        with self._lock:
            if self._candidates:
                self._prev_candidates = self._candidates
                self._candidates = set()

    def get_backup(self):
        """Return the generation saved by the last backup() (or None)."""
        return self._prev_candidates

    def add(self, itemset):
        """Atomically add a single candidate itemset (a tuple)."""
        with self._lock:
            self._candidates.add(itemset)

    def add_collection(self, collection):
        """Atomically add every itemset of *collection*."""
        with self._lock:
            self._candidates.update(collection)
                
class Barrier(object):
    """A reusable (cyclic) barrier for a fixed number of threads.

    Every thread calls enter(); the call blocks until *size* threads
    have arrived, then all of them are released and the barrier resets
    itself for the next round.
    """

    def __init__(self, size):
        self.size = size            # number of threads that must arrive
        self.cond = Condition()
        self.counter = 0            # arrivals in the current round
        self.generation = 0         # bumped each time the barrier trips

    def enter(self):
        """Block until all *size* threads have called enter()."""
        with self.cond:
            gen = self.generation
            self.counter += 1
            if self.counter < self.size:
                # BUGFIX: wait on a predicate (the generation number)
                # instead of a bare wait() — protects against spurious
                # wakeups and against a fast thread re-entering the next
                # round before the others have woken up.
                while gen == self.generation:
                    self.cond.wait()
            else:
                # Last arrival: reset for reuse and release everyone.
                self.counter = 0
                self.generation += 1
                self.cond.notify_all()


class AprioriWorker(Thread):
    """One worker thread of the parallel Apriori frequent-itemset search.

    Each worker owns a horizontal partition of the transaction database
    (subdb) and cooperates with the other workers through a shared
    candidate container, a shared support matrix and a cyclic barrier
    (count-distribution style parallelization).
    """

    def __init__(self, id, threads_no, minsupp, db_size, global_candidates, subdb, barrier, sh_supp):
        Thread.__init__(self)
        self.id = id                                # worker index, 0-based
        self.threads_no = threads_no                # total number of workers
        self.minsupp = minsupp                      # minimum support threshold (a fraction)
        self.db_size = db_size                      # size of the whole database
        self.global_candidates = global_candidates  # shared GlobalCandidates instance
        self.subdb = subdb                          # this worker's list of transactions (sets)
        self.barrier = barrier                      # shared Barrier
        # sh_supp[i][j]: supports of worker i's candidates as counted by
        # worker j on its own partition.
        self.sh_supp = sh_supp

    def _local_count(self, itemset):
        """Return how many local transactions contain *itemset*.

        itemset must be a set
        """
        count = 0
        for t in self.subdb:
            if t & itemset == itemset:
                # the itemset is included in the transaction
                count += 1
        return count

    def _compute_local_support(self, itemset):
        """Return the support of *itemset* within the local partition."""
        count = self._local_count(itemset)
        return float(count) / float(len(self.subdb))

    def _get_initial_candidates(self):
        """Return the 1-itemsets that are frequent in the local partition."""
        freq = {}
        for transaction in self.subdb:
            for item in transaction:
                freq.setdefault(item, 0)
                freq[item] += 1
        candidates = []
        for item in freq:
            if float(freq[item]) / float(len(self.subdb)) >= self.minsupp:
                candidates.append(tuple([item]))
        return candidates

    def run(self):
        local_candidates = self._get_initial_candidates()
        for itemset in local_candidates:
            self.global_candidates.add(itemset)

        # BUGFIX: synchronize before the first test of global_candidates.
        # Without this, a worker whose partition has no frequent items
        # could observe the shared set still empty, skip the loop and
        # terminate, leaving the other workers blocked forever on the
        # barrier below.
        self.barrier.enter()

        while self.global_candidates:

            self.barrier.enter()

            if self.id == 0:
                # exactly one worker archives the current generation
                self.global_candidates.backup()

            self.barrier.enter()

            if local_candidates:
                local_candidates = ParallelRulesGenerator.gen_candidates(local_candidates)
            # local prunning: keep only candidates frequent in our partition
            self.sh_supp[self.id][self.id] = {}
            for candidate in local_candidates:
                local_supp = self._local_count(set(candidate))
                if float(local_supp) / float(len(self.subdb)) >= self.minsupp:
                    self.sh_supp[self.id][self.id][candidate] = local_supp

            self.barrier.enter()

            # count every other worker's surviving candidates on the
            # local partition and publish the partial counts
            for i in xrange(self.threads_no):
                if i != self.id:
                    local_supp = {}
                    for candidate in self.sh_supp[i][i]:
                        supp = self._local_count(set(candidate))
                        local_supp[candidate] = supp
                    self.sh_supp[i][self.id] = local_supp

            self.barrier.enter()

            # sum the partial counts into the global support of our own candidates
            global_supp = self.sh_supp[self.id][self.id]
            for j in xrange(self.threads_no):
                if j != self.id:
                    for candidate, supp in self.sh_supp[self.id][j].iteritems():
                        global_supp[candidate] += supp
            # global prunning: keep candidates frequent over the whole database
            local_candidates = []
            for candidate, supp in self.sh_supp[self.id][self.id].iteritems():
                if float(supp) / float(self.db_size) >= self.minsupp:
                    local_candidates.append(candidate)
            self.global_candidates.add_collection(local_candidates)

            self.barrier.enter()


class ParallelRulesGenerator(AprioriRulesGenerator):
    """Apriori rules generator that parallelizes the frequent-itemset search
    over NO_OF_THREADS worker threads (count-distribution scheme)."""

    NO_OF_THREADS = 2

    def __init__(self, db, minsupp, minconf):
        AprioriRulesGenerator.__init__(self, db, minsupp, minconf)
        # BUGFIX: create one fresh candidate container per instance.
        # The previous class-level GlobalCandidates() was shared by all
        # generator instances, leaking candidates between runs.
        self.global_candidates = GlobalCandidates()
        self._spawn_threads(db)

    def _get_partition(self, thread_no, db):
        """Return the contiguous slice of *db* assigned to *thread_no*.

        The first len(db) % NO_OF_THREADS workers get one extra
        transaction, so partition sizes differ by at most one.
        """
        # // keeps integer semantics under Python 3 as well (identical
        # to / for Python 2 ints).
        items_per_thread = len(db) // self.NO_OF_THREADS
        rest = len(db) % self.NO_OF_THREADS
        if thread_no < rest:
            items_per_thread += 1
        l = thread_no * items_per_thread
        if thread_no >= rest:
            l += rest
        r = l + items_per_thread
        return db[l:r]

    def _generate_sh_matrix(self):
        """Build the NO_OF_THREADS x NO_OF_THREADS matrix of support dicts."""
        m = []
        for i in xrange(self.NO_OF_THREADS):
            mi = []
            for j in xrange(self.NO_OF_THREADS):
                mi.append({})
            m.append(mi)
        return m

    def _spawn_threads(self, db):
        """Create (but do not start) one AprioriWorker per partition."""
        self.barrier = Barrier(self.NO_OF_THREADS)
        self.threads = []
        for i in xrange(self.NO_OF_THREADS):
            self.threads.append(None)
        self.sh_supp = self._generate_sh_matrix()
        # spawn the threads
        for i in xrange(self.NO_OF_THREADS):
            self.threads[i] = AprioriWorker(i,
                                            self.NO_OF_THREADS,
                                            self.minsupp,
                                            len(self.db),
                                            self.global_candidates,
                                            self._get_partition(i, db),
                                            self.barrier,
                                            self.sh_supp)

    def _find_freq_itemsets(self):
        """Run every worker to completion and return the frequent itemsets
        (the last non-empty candidate generation)."""
        for thread in self.threads:
            thread.start()

        for thread in self.threads:
            thread.join()

        return self.global_candidates.get_backup()

