from sqliteDB import sqliteDB
from svnClient import svnClient
from datetime import datetime
import commands
import os
import math
## @brief execute fts pim indexing
# @author Alessio Di Pietro 
# @author Irene Scionti
class pimIndexer(object):
    '''
    Full-text-search (FTS) indexer for PIM entries (contacts, calendar, memos).

    Keeps an sqlite FTS database in sync with the svn repository so the user
    can quickly search (full text search) a specific entry that has to be
    restored.
    '''
    ##@param settings settings object providing svn, database and garbage
    # collector configuration
    def __init__(self, settings):
        '''
        Constructor
        '''
        self.__settings = settings
        # svn parameters
        self.__svnworkingcopy = settings.svnWorkingCopy
        self.__svnserver = settings.svnServer
        self.__entry_paths = [settings.contacts_diffname, settings.calendar_diffname, settings.memos_diffname]

        # WINDOW SIZE: number of versions of a single entry that are indexed
        self.__fts_db_window_size = int(settings.fts_db_window_size)

        # database parameters
        self.__fts_db_name = settings.fts_db
        self.__fts_db = sqliteDB(self.__svnworkingcopy + '/index/' + self.__fts_db_name)
        # parallel lists: position selects the entry type
        # (0=contacts, 1=calendar, 2=memos)
        self.__fts_table_names = ["contacts_index", "calendar_index", "memos_index"]
        self.__modified_db_table_names = ["modified_contacts", "modified_calendar", "modified_memos"]
        self.__removed_db_table_names = ["removed_contacts", "removed_calendar", "removed_memos"]

        # garbage collector parameters
        self.__garbage_year_threshold = settings.garbage_year_threshold
        self.__garbage_size_threshold = settings.garbage_size_threshold

        self.__svnclients = []
        # initialize a list that contains one svn client object for each entry type
        for entry_dir in self.__entry_paths:
            self.__svnclients.append(svnClient(self.__svnworkingcopy + entry_dir, settings.svnServer, settings.svnRepoName, settings.svnUser, settings.svnPassword))

    ## @brief avoids hidden files or directory in ls command
    # @param item file or directory name
    # @return True when item is not hidden (does not start with a dot)
    def __nodot(self, item):
        return item[0] != '.'

    ## @brief avoid index db overgrow
    # @param year_threshold age in years of the entries to purge
    # @param entry_type entry type index (0=contacts, 1=calendar, 2=memos)
    def garbage_collector(self, year_threshold, entry_type):
        '''
        Cleans index db removing permanently too old entries (year_threshold)
        '''
        # NOTE(review): this purges only rows whose timestamp starts with the
        # exact year (current_year - year_threshold), not every older year —
        # confirm this matches the intended retention policy.
        c = self.__fts_db.open()
        current_year = datetime.utcnow().year
        th_current_year = current_year - int(year_threshold)
        # table names come from fixed internal lists and the year is an int,
        # so the string concatenation cannot inject user-controlled SQL
        c.execute("DELETE from " + str(self.__removed_db_table_names[entry_type]) + " WHERE timestamp LIKE '" + str(th_current_year) + "%'")
        c.execute("DELETE from " + str(self.__fts_table_names[entry_type]) + " WHERE timestamp LIKE '" + str(th_current_year) + "%'")
        self.__fts_db.commit()
        self.__fts_db.close()

    ##@brief execute indexing
    def indexing(self):
        '''
        Execute PIM indexing: for each entry type, index every not yet
        indexed revision (up to the configured window size) of every
        modified or removed entry.
        '''
        # for each entry type (contacts, calendar, memos)
        for i in range(3):
            # garbage collection: purge old rows when the db file grows past
            # the configured size threshold (threshold expressed in MiB)
            if int(os.path.getsize(self.__svnworkingcopy + '/index/' + self.__fts_db_name)) > (int(self.__garbage_size_threshold) * math.pow(2, 20)):
                self.garbage_collector(self.__garbage_year_threshold, i)

            # retrieve existing entries
            print("\nindexing " + self.__entry_paths[i][1:] + "...")
            c = self.__fts_db.open()
            # Retrieves existing entries in incremental way: existing_entries=existing-indexed
            c.execute("SELECT uid as entry,revision,timestamp FROM " + self.__modified_db_table_names[i] + ' WHERE NOT EXISTS( SELECT uid,rev,timestamp FROM ' + self.__fts_table_names[i] + ' WHERE uid=entry AND rev=revision AND timestamp=timestamp)')
            existing_entries = []
            existing_entries_timestamps = []
            # existing entries (uid) and existing timestamps lists
            for row in c:
                existing_entries.append(row[0])
                existing_entries_timestamps.append(row[2])

            ## Retrieves removed entries in incremental way: removed_entries=removed-indexed
            c.execute("SELECT uid as entry,revision,timestamp AS t,removed_timestamp AS rt FROM " + self.__removed_db_table_names[i] + ' WHERE NOT EXISTS( SELECT uid,rev,timestamp,removed_timestamp FROM ' + self.__fts_table_names[i] + ' WHERE uid=entry AND rev=revision AND timestamp=t AND removed_timestamp=rt)')

            # parallel lists: removed_entries[j] was removed at revision
            # removed_entries_revisions[j]
            removed_entries = []
            removed_entries_revisions = []
            removed_entries_timestamps = []
            removed_entries_rem_timestamps = []
            for row in c:
                removed_entries.append(row[0])
                removed_entries_revisions.append(int(row[1]))
                removed_entries_timestamps.append(row[2])
                removed_entries_rem_timestamps.append(row[3])

            # Total entries to be processed = removed + existing; removed come
            # first so index j below stays aligned with the removed_* lists
            entries = removed_entries + existing_entries
            entries_timestamps = removed_entries_timestamps + existing_entries_timestamps

            j = 0
            # for each entry (removed or existing)
            for entry_uid in entries:
                # defaults used when the entry is an existing (not removed) one
                start_log_revision = "head"
                rem_timestamp = "null"
                try:
                    # determines start revision for svn log command; the
                    # lookup raises IndexError once j walks past the removed
                    # entries, i.e. for every existing entry
                    start_log_revision = int(removed_entries_revisions[j])
                    rem_timestamp = str(removed_entries_rem_timestamps[j])
                    # Set state DELETED for removed entry at start_log_revision
                    c.execute("SELECT state FROM " + self.__fts_table_names[i] + " WHERE uid=? AND rev=?", (entry_uid, start_log_revision))
                    row = c.fetchone()
                    if row != None:
                        state = row[0]
                        if state == 'm':
                            # execute the state update to "deleted"
                            c.execute("UPDATE " + self.__fts_table_names[i] + " SET state='d',removed_timestamp=? WHERE uid=? AND rev=?", (rem_timestamp, entry_uid, start_log_revision))
                            self.__fts_db.commit()
                except IndexError:
                    pass

                # Create existing entry revision list (considering window size)
                entry_revisions = self.__svnclients[i].log(self.__entry_paths[i] + "/" + entry_uid + "/" + entry_uid, start_log_revision)

                cutted_revision = None
                # if revision list size exceeds window size
                if len(entry_revisions) > self.__fts_db_window_size:
                    # mark as cutted the last revision that fits the window
                    cutted_revision = entry_revisions[self.__fts_db_window_size - 1].revision.number
                else:
                    # update uid revisions as not cutted
                    c.execute("UPDATE " + self.__fts_table_names[i] + " SET cutted='false' WHERE uid=? AND rev=(SELECT rev FROM " + self.__fts_table_names[i] + " WHERE cutted='true' AND uid=?)", (entry_uid, entry_uid))
                    self.__fts_db.commit()
                # limits revision number to default window size
                entry_revisions = entry_revisions[:self.__fts_db_window_size]

                # Create indexed entry revision list (revision numbers as strings)
                c.execute("SELECT rev FROM " + self.__fts_table_names[i] + " WHERE uid=? ORDER BY rev DESC", (entry_uid,))
                indexed_entry_revisions = []
                for row in c:
                    indexed_entry_revisions.append(str(row[0]))

                # entry_revisions - indexed_entry_revisions = to-be-indexed revisions
                to_be_indexed_revisions = [rev for rev in entry_revisions if not str(rev.revision.number) in indexed_entry_revisions]

                # Check sizes
                to_be_indexed_size = len(to_be_indexed_revisions)
                indexed_size = len(indexed_entry_revisions)
                actual_window_size = to_be_indexed_size + indexed_size

                # if actual window size exceeds default window size
                if actual_window_size > int(self.__fts_db_window_size):
                    # remove the oldest indexed revisions exceeding the window
                    to_be_removed_index = indexed_size - (actual_window_size - self.__fts_db_window_size)
                    to_be_removed_entry_revisions = indexed_entry_revisions[to_be_removed_index:]
                    # update as cutted the last revision kept inside the window.
                    # BUGFIX: indexed_entry_revisions holds plain revision-number
                    # strings, so bind the numeric value itself; the original
                    # accessed .revision.number on a str, raising AttributeError
                    c.execute("UPDATE " + self.__fts_table_names[i] + " SET cutted='true' WHERE uid=? AND rev=?", (entry_uid, int(indexed_entry_revisions[to_be_removed_index - 1])))
                    self.__fts_db.commit()
                    # BUGFIX: bind the revision numbers directly instead of the
                    # IterList wrapper, which also dereferenced .revision.number
                    # on the same plain strings
                    c.executemany("DELETE FROM " + self.__fts_table_names[i] + " WHERE rev IN (?) AND uid=?", [(int(rev), entry_uid) for rev in to_be_removed_entry_revisions])
                    self.__fts_db.commit()

                # For each to-be-indexed revision
                for entry_rev in to_be_indexed_revisions:
                    # fetch the entry body from the repository at that revision
                    entry_body = self.__svnclients[i].repoCat(self.__entry_paths[i] + "/" + entry_uid + "/" + entry_uid, entry_rev.revision.number)
                    if start_log_revision == entry_rev.revision.number:
                        # deleted entry
                        state = 'd'
                    else:
                        # modified entry
                        state = 'm'
                    if entry_rev.revision.number == cutted_revision:
                        cutted = 'true'
                    else:
                        cutted = 'false'

                    # insert entry into fts virtual table
                    c.execute("INSERT INTO " + self.__fts_table_names[i] + "(uid,rev,state,body,cutted,timestamp,removed_timestamp) VALUES(?,?,?,?,?,?,?)", (entry_uid, entry_rev.revision.number, unicode(state, errors='ignore'), entry_body.decode('utf-8'), cutted, entries_timestamps[j], rem_timestamp))
                    self.__fts_db.commit()

                j = j + 1
            # corrects fake removed entries: rows marked deleted whose uid/rev
            # no longer appears in the removed_* table are reverted to modified
            c.execute("select uid as entry,rev from " + self.__fts_table_names[i] + " where state='d' and not exists (select uid from " + self.__removed_db_table_names[i] + " where uid=entry and revision=rev)")
            for row in c:
                c.execute("update " + self.__fts_table_names[i] + " set removed_timestamp='null',state='m' where uid=? and rev=?", (row[0], row[1],))
            self.__fts_db.commit()
            self.__fts_db.close()
            
            
## @brief sqlite iterator
class IterList:
    '''
    Iterator for sqlite executemany query: yields one (revision, entry_uid)
    parameter tuple per revision in the wrapped list.
    '''
    ##@param list revisions of a single entry; indexing() builds this from
    # the fts index as plain revision-number strings
    ##@param entry_uid uid of the entry whose revisions are being deleted
    def __init__(self, list, entry_uid):
        self.list = list
        self.entry_uid = entry_uid
        self.i = 0

    def __iter__(self):
        return self

    ## Python 2 iterator protocol
    def next(self):
        if self.i > len(self.list) - 1:
            raise StopIteration
        self.i = self.i + 1
        # BUGFIX: yield the list element itself. The caller fills the list
        # with plain revision values (see indexed_entry_revisions), so the
        # original access to .revision.number raised AttributeError on str.
        return (self.list[self.i - 1], self.entry_uid)

    # Python 3 iterator protocol: alias keeps the class usable on either
    __next__ = next

