"""
Copyright 2009 Texas A&M University
 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use 
this file except in compliance with the License. You may obtain a copy of the 
License at

http://www.apache.org/licenses/LICENSE-2.0
  
Unless required by applicable law or agreed to in writing, software distributed 
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 
CONDITIONS OF ANY KIND, either express or implied. See the License for the 
specific language governing permissions and limitations under the License. 
"""
from logging import getLogger
from datetime import datetime

from solr import SolrConnection, SolrException

from zope.interface import implements

from twisted.internet.defer import DeferredList

from meercat.base.metadata import Metadata
from meercat.job.incremental import IncrementalJob
from meercat.job.scheduled import ScheduledJob
from meercat.storage.interfaces import IStorage
from meercat.solr.job import getLocalDictionary
from meercat.solr.interfaces import ISolrDedupeJob



__all__ = ['IncrementalSolrDedupeJob','ScheduledSolrDedupeJob']

log = getLogger('meercat.solr.job.dedupe')

def handleInitialSolrResults(job, result):
    """Scan a page of Solr documents for duplicates and merge them.

    For each document in *result*, builds a Solr query over the job's
    identifier fields (ISBN/ISSN style values, normalised by dropping
    dashes and trimming colons), finds other documents sharing one of
    those identifiers, and merges the matches into the document's
    resource via the job's storage.  Returns the number of resources
    merged.
    """
    log.debug('handleInitialSolrResults(result=%s, job=%s)' % (str(result),str(job),))
    job = ISolrDedupeJob(job)

    conn = job.getSolrConnection()
    storage = job.getStorage()

    docs = 0
    idFields = list(job.getIdentifierFields())
    idField = job.getIdentifierField()

    try:
        for doc in result.results:
            log.debug('Finding matches for %s' % (str(doc[idField]),))
            queries = []

            for field in idFields:
                if field not in doc:
                    continue

                # Coerce a scalar field value into a one-element list so the
                # code below can treat everything uniformly.
                if isinstance(doc[field], basestring):
                    doc[field] = [doc[field]]

                # Normalise identifiers in place: drop dashes, trim stray
                # colons.  The normalised values are reused when verifying
                # candidate matches below.
                doc[field] = [value.replace('-','').strip(':')
                              for value in doc[field]]

                # Very short values (<= 4 chars) are too ambiguous to query on.
                queries.extend('%s:"%s"' % (field,str(value),)
                               for value in doc[field] if len(value) > 4)

            if not queries:
                log.debug('No identifier fields found for %s' % (str(doc[idField]),))
                continue

            # Exclude the document itself, match any shared identifier.
            query = '-%s:%s AND (%s)' % (idField, str(doc[idField]),
                                        ' OR '.join(queries))

            # First a count-only probe (rows=0), then the real fetch.
            r = conn.query(q=query, rows=0)

            # NOTE: numFound is compared as a string — solrpy appears to
            # return it unconverted from the XML response.
            if r.numFound == '0':
                log.debug('No matches found for %s' % (query,))
                continue

            r = conn.query(q=query,
                           rows=r.numFound,
                           score=False,
                           fields=[idField] + idFields)

            matches = []

            # Re-verify each candidate exactly: Solr text matching can be
            # looser than the literal identifier comparison we want.
            for match in r.results:
                matchFound = False

                for field in idFields:
                    if matchFound or field not in match or field not in doc:
                        continue

                    if isinstance(match[field], basestring):
                        match[field] = [match[field]]

                    for value in match[field]:
                        if value.replace('-','').strip(':') in doc[field]:
                            matches.append(match[idField])
                            matchFound = True
                            break

            if not matches:
                log.warning('No exact matches found for %s: %s checked against %s' % (str(doc[idField]),str(doc),str(r.results)))
                continue

            # The Solr index may lag behind storage; skip stale identifiers.
            if len(storage.getResources(identifier=doc[idField])) != 1:
                log.warning('%s is no longer a valid resource identifier' % (doc[idField],))
                continue

            log.debug('Merging %s into %s' % (str(matches),str(doc[idField])))
            docs += len(matches)

            storage.merge(doc[idField],matches)

        return docs
    finally:
        log.info('handleInitialSolrResults(job=%s,result=%s) finished: %s resources merged.' % (str(job),str(result),str(docs),))

class IncrementalSolrDedupeJob(IncrementalJob):
    """Incremental job that pages through Solr looking for resources whose
    identifier fields (ISBN/ISSN by default) match other resources, and
    merges the duplicates in storage."""
    implements(ISolrDedupeJob)
    
    def __init__(self, storage, limit=None, offset=0, step=1000,
                 deferredFactory=None, max_workers=None, identifierField=u'id',
                 identifierFields=None,
                 localFactory=getLocalDictionary, *args, **kwargs):
        """ 
        localFactory should create an object that is thread/process local 
        depending on the architecture being used.
        
        identifierFields defaults to ['isbn', 'issn'] when not given.
        
        Any remaining positional/keyword arguments are handed to
        SolrConnection when a per-worker connection is created. """
        self._storage = IStorage(storage)
        
        # A mutable default argument would be shared between every instance;
        # the real default is applied here instead.
        if identifierFields is None:
            identifierFields = ['isbn', 'issn']
        
        if limit is None:
            # A callable so the resource count is taken at run time,
            # not at construction time.
            limit = lambda: storage.getResourceCount() - offset
        
        # BUG FIX: the original passed `self` explicitly as the first
        # positional argument of the already-bound super().__init__,
        # which shifted every other argument by one position.
        super(IncrementalSolrDedupeJob, self).__init__(limit=limit,
                                                       offset=offset, step=step,
                                                       max_workers=max_workers,
                                                       deferredFactory=deferredFactory)
        self._identifierField = identifierField
        self._identifierFields = identifierFields
        self._localFactory = localFactory
        self._solrArgs = (args, kwargs)
        self._generateLocalStorage()
        # Verify that the connection arguments are valid by attempting to connect
        self.getSolrConnection()
    
    def __getstate__(self):
        """Drop the worker-local storage before pickling; it is rebuilt
        in __setstate__."""
        log.debug('%s.__getstate__()' % (str(self),))
        state = super(IncrementalSolrDedupeJob,self).__getstate__()
        if '_local' in state:
            del state['_local']
        return state
    
    def __setstate__(self,state):
        log.debug('%s.__setstate__(state=%s)' % (str(self),str(state)))
        super(IncrementalSolrDedupeJob,self).__setstate__(state)
        self._generateLocalStorage()
    
    def _generateLocalStorage(self):
        # Create the thread/process-local mapping used to cache per-worker
        # Solr connections.
        log.debug('%s._generateLocalStorage()' % (str(self),))
        self._local = self._localFactory()
    
    def getIdentifierField(self):
        """Return the Solr field holding the unique resource identifier."""
        return self._identifierField
    
    def getIdentifierFields(self):
        """Return the Solr fields checked for duplicate identifiers."""
        return self._identifierFields
    
    def getSolrConnection(self):
        """Return the worker-local SolrConnection, creating it on first use."""
        log.debug('%s.getSolrConnection()' % (str(self),))
        if 'connection' not in self._local:
            self._local['connection'] = SolrConnection(*self._solrArgs[0],
                                                       **self._solrArgs[1])
        
        return self._local['connection']
    
    def getStorage(self):
        return self._storage
    
    def _generateDeferred(self, *args, **kwargs):
        log.debug('%s._generateDeferred(*%s, **%s)' % (str(self), str(args),
                                                       str(kwargs)))
        d = super(IncrementalSolrDedupeJob,self)._generateDeferred(*args, **kwargs)
        #TODO: Add callback for getting identifiers from SOLR
        #TODO: Add callback that searches for matches based on the ID's returned
        return d
    
    def __call__(self, limit, offset):
        """Fetch one page of documents that have an identifier field and
        dedupe them; returns the number of resources merged."""
        log.info('%s.__call__(limit=%s,offset=%s)' % (str(self),str(limit),
                                                      str(offset)))
        try:
            results = self.getSolrConnection().query(q='%s:[* TO *]' % (self.getIdentifierField(),),
                                                     sort=self.getIdentifierField(),
                                                     rows=limit,
                                                     start=offset,
                                                     score=False,
                                                     fields=[self.getIdentifierField()] + 
                                                            list(self.getIdentifierFields()))
            return self.handleInitialSolrResults(results)
        finally:
            log.info('%s.__call__(limit=%s,offset=%s) finished' % (str(self),
                                                                   str(limit),
                                                                   str(offset),))
    
    # Module-level implementation bound as a method: when __call__ invokes it,
    # `self` becomes the `job` argument of handleInitialSolrResults.
    handleInitialSolrResults = handleInitialSolrResults

class ScheduledSolrDedupeJob(ScheduledJob):
    """Scheduled job that periodically fetches the resources changed since
    the previous run so they can be deduped."""
    implements(ISolrDedupeJob)
    
    def __init__(self, storage, schedule, localFactory=getLocalDictionary, 
                 from_date=None, identifierField=u'id',
                 identifierFields=None, *args, **kwargs):
        """
        from_date defaults to the time of construction.  (The original code
        used ``from_date=datetime.now()`` as a default argument, which froze
        the default at module import time.)
        
        identifierFields defaults to ['isbn', 'issn'] when not given; a
        mutable default argument would be shared between instances.
        
        Remaining positional/keyword arguments are handed to SolrConnection;
        a `deferredFactory` keyword is forwarded to ScheduledJob.
        """
        if from_date is None:
            from_date = datetime.now()
        if identifierFields is None:
            identifierFields = ['isbn', 'issn']
        
        initKwargs = {}
        
        if 'deferredFactory' in kwargs:
            initKwargs['deferredFactory'] = kwargs.pop('deferredFactory')
        
        self._storage = IStorage(storage)
        
        if not isinstance(from_date,datetime):
            raise ValueError('from_date must be a datetime.datetime object')
        
        self._called = [from_date]
        self._runs = []
        self._active = False
        self._local = localFactory()
        self._solrArgs = (args, kwargs)
        self._identifierField = identifierField
        self._identifierFields = identifierFields
        
        # BUG FIX: a stray ``self._mapReduceArgs['targetFormat'] = targetFormat``
        # line (copy/paste residue referencing undefined names, guaranteed
        # NameError) was removed here.
        
        # Verify that the connection arguments are valid by attempting to connect
        self.getSolrConnection()
        
        super(ScheduledSolrDedupeJob, self).__init__(schedule, self, **initKwargs)
    
    def getIdentifierField(self):
        """Return the Solr field holding the unique resource identifier."""
        return self._identifierField
    
    def getIdentifierFields(self):
        """Return the Solr fields checked for duplicate identifiers."""
        return self._identifierFields
    
    def getSolrConnection(self):
        """Return the worker-local SolrConnection, creating it on first use."""
        log.info('%s.getSolrConnection()' % (str(self),))
        if 'connection' not in self._local:
            log.info('Creating local solr connection: %s' % (str(self._solrArgs),))
            self._local['connection'] = SolrConnection(*self._solrArgs[0],
                                                       **self._solrArgs[1])
        
        return self._local['connection']
    
    def getStorage(self):
        return self._storage
    
    def _generateDeferred(self, *args, **kwargs):
        log.info('%s._generateDeferred(*%s, **%s)' % (str(self),str(args),str(kwargs)))
        # BUG FIX: the original named the nonexistent class ScheduledSolrJob
        # in super() and chained an undefined `mapReduceResources` callback
        # (copy/paste residue from another job class) — both raised NameError.
        d = super(ScheduledSolrDedupeJob,self)._generateDeferred(*args, **kwargs)
        #TODO: chain the dedupe callback(s) over the resources returned by __call__
        d.addCallback(self.addStatistics)
        d.addCallback(self._deactivate)
        return d
    
    def _deactivate(self, *args, **kwargs):
        # Final callback in the chain: mark the run finished.
        self._active = False
    
    def __call__(self, *args, **kwargs):
        """Record the run time and return the resources changed since the
        previous run; re-entrant calls while a run is active are ignored."""
        log.info('%s.__call__(*%s, **%s)' % (str(self),str(args),str(kwargs)))
        if self._active:
            return
        self._active = True
        
        last = self._called[-1]
        
        self._called.append(datetime.now())
        # Bound the history so a long-lived job does not grow without limit.
        if len(self._called) > 256:
            self._called = self._called[-32:]
        
        return self._storage.getResources(from_date=last,inactive=True)
    
    def addStatistics(self, results):
        """Record (added/updated, removed) counts for a run; best-effort."""
        log.info('%s.addStatistics()' % (str(self),))
        try:
            self._runs.append((len(results[0]),len(results[1]),))
        except Exception:
            # Best-effort only: malformed results must not break the
            # callback chain.  (Was a bare ``except:``.)
            log.error('Error adding statistics')
        
        if len(self._runs) > 256:
            self._runs = self._runs[-32:]
    
    def start(self):
        if not self.isRunning():
            self._active = False
        # BUG FIX: the original referenced the undefined name ScheduledSolrJob.
        return super(ScheduledSolrDedupeJob,self).start()
    
    def getStatus(self):
        """Return a human-readable one-line status summary."""
        if not self.isRunning():
            active = u'stopped'
        elif self._active:
            active = u'active'
        else:
            active = u'waiting'
        
        return u'Currently %s. %d items added/updated and %d items removed in ' \
               'the last %d runs (latest at %s)' % (
                    active,
                    sum(map(lambda r: r[0],self._runs)),
                    sum(map(lambda r: r[1], self._runs)),
                    len(self._runs),
                    unicode(self._called[-1]),)
