from logging import getLogger
from datetime import datetime

from solr import SolrConnection, SolrException

from zope.interface import implements

from twisted.internet.defer import DeferredList

from meercat.interfaces import IPickleable
from meercat.base.metadata import Metadata
from meercat.mapreduce.inline import mapreduce
from meercat.job.incremental import IncrementalJob
from meercat.job.scheduled import ScheduledJob
from meercat.storage.interfaces import IStorage
from meercat.solr.job import getLocalDictionary
from meercat.solr.interfaces import ISolrPushJob

try:
    from multiprocessing import current_process

    def local_name():
        """Return a '<process name> (<pid>) ' prefix for log messages."""
        proc = current_process()
        return '%s (%s) ' % (proc.name, str(proc.pid))
except ImportError:
    def local_name():
        """Fallback when multiprocessing is unavailable: empty prefix."""
        return ''



# Public API of this module.
__all__ = ['IncrementalSolrJob','ScheduledSolrJob']

log = getLogger('meercat.solr.job.push')

# Extra Solr <doc> appended to each resource's metadata by
# extractMetadataFromResource: carries the job's identifier field and the
# resource's last-modified timestamp (formatted as UTC-style ISO 8601).
additional_metadata = u"""
<doc>
    <field name="%(identifierField)s">%(identifier)s</field>
    <field name="updated">%(lastModified)s</field>
</doc> """

def listCallback(results, job, removes=None):
    """Push mapreduced documents to Solr and delete inactive ones.

    results -- iterable of mapreduce results; each item exposes getValue()
               returning the serialized <doc> payload (str or unicode).
    job     -- adaptable to ISolrPushJob; supplies the Solr connection.
    removes -- identifiers of documents to delete from the index
               (default: none).

    Returns (len(results), len(removes)).  NOTE(review): the first element
    counts all incoming results, including empty ones that were skipped --
    confirm this is the intended statistic for addStatistics/getStatus.
    """
    if removes is None:  # avoid the shared mutable-default pitfall
        removes = []
    log.debug('%slistCallback(job=%s)' % (local_name(), str(job),))
    job = ISolrPushJob(job)
    
    docs = []
    for result in results:
        if result is not None and result.getValue():
            value = result.getValue()
            
            if isinstance(value, unicode):
                docs.append(value)
            else:
                try:
                    docs.append(unicode(value, encoding='utf8',
                                        errors='strict'))
                except ValueError as e:
                    # UnicodeDecodeError is a ValueError; retry leniently so
                    # one bad document does not abort the whole batch.
                    log.error('Invalid unicode encountered: %s' % (unicode(e)))
                    docs.append(unicode(value, encoding='utf8',
                                        errors='replace'))
        else:
            log.error('Empty result in Solr Job list callback')
    
    if docs or removes:
        conn = job.getSolrConnection()
        
        if docs:
            log.debug('%d documents to be added to solr' % 
                      (len(docs),))
            try:
                conn._update(u'<add>%s</add>' % (u'\n'.join(docs),))
                log.debug('documents added to solr')
            except SolrException as e:
                log.error('SOLR exception encountered: %s' % (str(e),))
                raise  # bare raise preserves the original traceback
            except Exception as e:
                # Unexpected failures are logged with a traceback but not
                # re-raised, so removals below still get a chance to run.
                log.error('Other exception encountered: %s' % (str(e),))
                import sys, traceback
                for line in traceback.format_exception(*sys.exc_info()):
                    log.error(line)
        
        if removes:
            log.debug('%d documents to be removed from solr' % 
                      (len(removes),))
            try:
                conn.delete_many(removes)
                log.debug('documents removed from solr')
            except SolrException as e:
                log.error('SOLR exception encountered: %s' % (str(e)))
    return len(results), len(removes)

def extractMetadataFromResource(resource, job):
    """Collect a resource's metadata plus a synthetic Solr bookkeeping doc.

    The appended Metadata record (built from the additional_metadata
    template) carries the job's identifier field and the resource's
    last-modified timestamp.
    """
    log.debug('extractMetadataFromResource(%s,%s)' % (str(resource), str(job)))
    job = ISolrPushJob(job)
    
    stamp = resource.getLastModified().strftime('%Y-%m-%dT%H:%M:%SZ')
    extra = Metadata(source='solrJob', format='solr',
                     value=additional_metadata % {
                         'identifierField': job.getIdentifierField(),
                         'identifier': resource.getIdentifier(),
                         'lastModified': stamp})
    collected = list(resource.getMetadata())
    collected.append(extra)
    return collected

def mapReduceResources(resources, job):
    """Mapreduce every active resource and push the batch to Solr.

    Inactive resources are not indexed; their identifiers are handed to
    listCallback for deletion instead.  Returns listCallback's
    (result-count, remove-count) tuple.
    """
    log.debug('%smapReduceResources(%s, %s)' % (local_name(), str(len(resources)), str(job)))
    job = ISolrPushJob(job)
    
    mapped = []
    inactive_ids = []
    for resource in resources:
        if not resource.isActive():
            inactive_ids.append(resource.getIdentifier())
            continue
        reduced = mapreduce(input=extractMetadataFromResource(resource, job),
                            **job.getMapReduceArguments())
        log.debug('%s mapreduced' % (str(reduced),))
        if reduced is not None:
            mapped.append(reduced)
    
    return listCallback(mapped, job, inactive_ids)

class IncrementalSolrJob(IncrementalJob):
    """Job that pushes a storage's resources into Solr in incremental
    (limit/offset) batches."""
    implements(ISolrPushJob)
    
    def __init__(self, storage, limit=None, offset=0, step=1000, source=None,
                 deferredFactory=None, max_workers=None, localFactory=getLocalDictionary, 
                 mapReduceArguments=None, targetFormat='solr',
                 identifierField=u'id', *args, **kwargs):
        """ 
        localFactory should create an object that is thread/process local 
        depending on the architecture being used.
        
        source will be passed on to the storage when determining which resources
        to push over to SOLR
        
        mapReduceArguments is copied before use so neither a caller's dict
        nor a shared default is ever mutated (the previous default of {} in
        the signature was one object shared across all instances and was
        mutated below -- a latent bug).
        
        Remaining positional/keyword arguments are stored and used to open
        SolrConnection objects on demand. """
        self._storage = IStorage(storage)
        
        if limit is None:
            # NOTE(review): closes over the raw `storage` argument rather
            # than self._storage -- presumably equivalent after IStorage
            # adaptation; confirm.
            limit = lambda: storage.getResourceCount() - offset
        
        # NOTE(review): `self` is passed as the first positional argument --
        # presumably the job callable, mirroring ScheduledSolrJob; confirm
        # against IncrementalJob.__init__.
        super(IncrementalSolrJob, self).__init__(self, limit=limit, offset=offset,
                                                 step=step, max_workers=max_workers,
                                                 deferredFactory=deferredFactory)
        self._source = source
        
        self._localFactory = localFactory
        self._solrArgs = (args, kwargs)
        # Copy so the adjustments below never leak into the caller's dict.
        self._mapReduceArgs = dict(mapReduceArguments) if mapReduceArguments else {}
        self._mapReduceArgs['targetFormat'] = targetFormat
        self._identifierField = identifierField
        
        # 'input' is supplied per-resource by mapReduceResources; never let
        # a caller-provided value leak through.
        self._mapReduceArgs.pop('input', None)
        
        self._generateLocalStorage()
        # Verify that the connection arguments are valid by attempting to connect
        self.getSolrConnection()
    
    def __getstate__(self):
        """Exclude the unpicklable thread/process-local object."""
        state = super(IncrementalSolrJob, self).__getstate__()
        state.pop('_local', None)
        return state
    
    def __setstate__(self, state):
        """Recreate the thread/process-local object after unpickling."""
        super(IncrementalSolrJob, self).__setstate__(state)
        self._generateLocalStorage()
    
    def _generateLocalStorage(self):
        # Rebuilt rather than pickled -- see __getstate__/__setstate__.
        self._local = self._localFactory()
    
    def getSolrConnection(self):
        """Open a fresh SolrConnection from the stored constructor args."""
        return SolrConnection(*self._solrArgs[0], **self._solrArgs[1])
    
    def getIdentifierField(self):
        return self._identifierField
    
    def getMapReduceArguments(self):
        return self._mapReduceArgs
    
    def getStorage(self):
        return self._storage
    
    def __call__(self, limit, offset):
        """Index one batch of resources (active adds, inactive removes)."""
        log.debug('%s%s.__call__(limit=%s,offset=%s)' % (local_name(), str(self), str(limit), str(offset)))
        return mapReduceResources(map(IPickleable,
                                      self._storage.getResources(limit=limit,
                                                                 offset=offset,
                                                                 source=self._source,
                                                                 inactive=True)),
                                  self)

class ScheduledSolrJob(ScheduledJob):
    """Scheduled job that periodically pushes resources modified since the
    previous run from a storage into a Solr index."""
    implements(ISolrPushJob)
    
    def __init__(self, storage, schedule, localFactory=getLocalDictionary, 
                 from_date=None, mapReduceArguments=None,
                 targetFormat='solr', identifierField=u'id', *args, **kwargs):
        """
        storage -- adaptable to IStorage; source of resources to index.
        schedule -- passed through to ScheduledJob.
        from_date -- datetime the first run starts from; defaults to the
            moment this job is constructed.  (The previous default of
            datetime.now() in the signature was evaluated once at import
            time and shared by every instance -- a latent bug.)
        mapReduceArguments -- extra keyword arguments for mapreduce(); the
            dict is copied so the caller's object is never mutated.
        Remaining positional/keyword arguments are stored and used to open
        SolrConnection objects on demand. """
        initKwargs = {}
        
        if 'deferredFactory' in kwargs:
            initKwargs['deferredFactory'] = kwargs.pop('deferredFactory')
        
        self._storage = IStorage(storage)
        
        if from_date is None:
            from_date = datetime.now()
        if not isinstance(from_date, datetime):
            raise ValueError('from_date must be a datetime.datetime object')
        
        self._called = [from_date]
        self._runs = []
        self._active = False
        self._local = localFactory()
        self._solrArgs = (args, kwargs)
        # Copy to avoid mutating a caller-owned (or shared default) dict.
        self._mapReduceArgs = dict(mapReduceArguments) if mapReduceArguments else {}
        self._identifierField = identifierField
        
        self._mapReduceArgs['targetFormat'] = targetFormat
        
        # Verify that the connection arguments are valid by attempting to connect
        self.getSolrConnection()
        
        super(ScheduledSolrJob, self).__init__(schedule, self, **initKwargs)
    
    def getSolrConnection(self):
        """Open a fresh SolrConnection from the stored constructor args."""
        log.debug('%s%s.getSolrConnection()' % (local_name(), str(self),))
        log.debug('%s solr arguments %s' % (local_name(),
                                            str(self._solrArgs),))
        
        return SolrConnection(*self._solrArgs[0], **self._solrArgs[1])
    
    def getIdentifierField(self):
        return self._identifierField
    
    def getMapReduceArguments(self):
        log.debug('%s.getMapReduceArguments: %s' % (str(self),
                                                    str(self._mapReduceArgs)))
        return self._mapReduceArgs
    
    def getStorage(self):
        return self._storage
    
    def _generateDeferred(self, *args, **kwargs):
        """Kick off one run, guarding against overlapping executions.

        Returns the chained Deferred, or None when a run is still active.
        """
        log.debug('%s%s._generateDeferred(*%s, **%s)' % (local_name(), str(self), str(args), str(kwargs)))
        if self._active:
            log.warning('%s%s._generateDeferred() job currently active, not restarting' % (local_name(), str(self),))
            return
        try:
            self._active = True
            last = self._called[-1]
            self._called.append(datetime.now())
            # Bound the call history; keep only the most recent entries.
            if len(self._called) > 256:
                self._called = self._called[-32:]
            
            d = super(ScheduledSolrJob, self)._generateDeferred(from_date=last, *args, **kwargs)
            d.addCallback(self.addStatistics)
            d.addCallback(self._commit)
            d.addCallback(self._deactivate)
            d.addErrback(self._deactivate)
            return d
        except Exception as e:
            log.error(str(e))
            self.stop()
            raise  # bare raise keeps the original traceback
    
    def _deactivate(self, results):
        """Callback/errback: clear the active flag, pass results through."""
        if not self._active:
            log.warning('%s%s deactivating an inactive job' % (local_name(), str(self),))
        else:
            self._active = False
        
        return results
    
    def _commit(self, results):
        """Commit to Solr when the run added/updated or removed anything.

        Commit failures are logged, not raised, so the callback chain
        continues and the job stays scheduled.
        """
        try:
            if results[0] or results[1]:
                self.getSolrConnection().commit(wait_flush=False,
                                                wait_searcher=False)
        except Exception as e:
            log.error('Exception while committing: %s' % (str(e),))
        
        return results
    
    def __call__(self, from_date):
        """Index every resource modified since from_date (active adds,
        inactive removes)."""
        log.debug('%s%s.__call__(from_date=%s)' % (local_name(), str(self), str(from_date)))
        return mapReduceResources(map(IPickleable,
                                      self._storage.getResources(from_date=from_date,
                                                                 inactive=True)),
                                  self)
    
    def addStatistics(self, results):
        """Record (timestamp, results) for getStatus; history is bounded."""
        log.debug('%s%s.addStatistics()' % (local_name(), str(self),))
        try:
            self._runs.append((datetime.now(), results,))
        except Exception as e:
            log.error('%sError adding statistics: %s' % (local_name(), str(e),))
        
        if len(self._runs) > 256:
            self._runs = self._runs[-32:]
        
        return results
    
    def start(self):
        # Reset the active flag on a clean (re)start so a stale True from a
        # crashed run cannot block scheduling.
        if not self.isRunning():
            self._active = False
        return super(ScheduledSolrJob, self).start()
    
    def getStatus(self):
        """Return a one-line human-readable status summary."""
        if self.getError():
            return 'Error encountered; %s' % (str(self.getError()),)
        
        if not self.isRunning():
            active = u'stopped'
        elif self._active:
            active = u'active'
        else:
            active = u'waiting'
        
        if len(self._runs):
            last_finish = self._runs[-1][0]
        else:
            last_finish = '...'
        
        return u'Currently %s. %d items added/updated and %d items removed in ' \
               'the last %d runs (latest at %s-%s)' % (
                    active,
                    sum(map(lambda r: r[1][0], self._runs)),
                    sum(map(lambda r: r[1][1], self._runs)),
                    len(self._runs),
                    unicode(self._called[-1]),
                    unicode(last_finish),)
