#!/usr/bin/env python
import os, threading, sys, time
from lucene import SimpleAnalyzer, StandardAnalyzer, WhitespaceAnalyzer, \
                   KeywordAnalyzer, StopAnalyzer
from lucene import StringReader
from lucene import QueryParser, IndexSearcher, IndexReader, FSDirectory
from lucene import IndexWriter, Field, Document, RAMDirectory

class Ticker(object):
    """Print a progress character to stdout until told to stop.

    From the py-lucene examples.  Intended to be run in a background
    thread (``threading.Thread(target=ticker.run)``) while a long
    operation such as index optimization is in progress; set
    ``ticker.tick = False`` from the controlling thread to stop it.
    """

    def __init__(self, interval=0.5, char='.'):
        # Generalized: interval/char were hard-coded (0.5, '.'); the
        # defaults preserve the original behavior for existing callers.
        self.tick = True          # flip to False (from another thread) to stop run()
        self._interval = interval # seconds between progress characters
        self._char = char         # character emitted on each tick

    def run(self):
        """Emit one progress character per interval while ``tick`` is True."""
        while self.tick:
            sys.stdout.write(self._char)
            sys.stdout.flush()    # unbuffered so progress is visible immediately
            time.sleep(self._interval)

class Pluke:
    """$Id$"""

    VERSION = 0.3

    def __init__(self, indexDir=None):
        self._searcher = None
        self._reader = None
        self._writer = None
        self._directory = None
        self._analyzers = {}
        self._defaultAnalyzerName = 'Simple'
        self._fields = []
        self._opened = False
        self._indexPath = None
        self.initialAnalyzers()
        if indexDir:
            self.openIndexDir(indexDir)

    def __del__(self):
        self.finishIndexDir()

    def openIndexDir(self, indexDir):
        """Open a lucene index directory."""
        if self._opened:
            self.finishIndexDir()
        indexPath = os.path.abspath(indexDir)
        try:
            directory = FSDirectory.getDirectory(indexPath, False)
        except Exception:
            print "openIndexDir: %s Not found." % indexPath
            return
        try:
            self._searcher = IndexSearcher(directory)
        except Exception:
            print "openIndexDir: '%s' is not a valid lucene index directory." % indexPath
            return
        self._directory = directory
        self._indexPath = indexPath
        self._reader = IndexReader.open(self._directory)
        self._fields = self.getFieldNames()
        self._opened = True
        return True

    def createIndexDir(self, indexDir, sfx=".txt", analyzerName=None):
        """Create a new lucene index from the directory."""
        if self._opened:
            self.finishIndexDir()
        directory = os.path.abspath(indexDir)
        if not os.path.exists(directory):
            print "createIndexDir: %s Not found." % directory
            return
        try:
            indexPath = directory + ".index"
            if not os.path.exists(indexPath):
                os.mkdir(indexPath)
            indexPath = FSDirectory.getDirectory(indexPath, False)
        except Exception:
            print "createIndexDir: %s Not found." % indexPath
            return
        self._directory = directory
        self._indexPath = indexPath
        self._writer = IndexWriter(self._indexPath, self.getAnalyzer(analyzerName))
        self._indexDocs(sfx)
        ticker = Ticker()
        print 'optimizing index',
        threading.Thread(target=ticker.run).start()
        self._writer.optimize()
        self._writer.close()
        ticker.tick = False
        return True

    def _indexDocs(self, sfx=".txt"):
        writer = self._writer
        for root, dirnames, filenames in os.walk(self._directory):
            for filename in filenames:
                if not filename.endswith(sfx):
                    continue
                print "adding", filename
                try:
                    path = os.path.join(root, filename)
                    file = open(path)
                    contents = unicode(file.read(), 'utf-8')
                    file.close()
                    doc = Document()
                    doc.add(Field("name", filename,
                                         Field.Store.YES,
                                         Field.Index.UN_TOKENIZED))
                    doc.add(Field("path", path,
                                         Field.Store.YES,
                                         Field.Index.UN_TOKENIZED))
                    if len(contents) > 0:
                        doc.add(Field("contents", contents,
                                             Field.Store.NO,
                                             Field.Index.TOKENIZED))
                    else:
                        print "warning: no content in %s" % filename
                    writer.addDocument(doc)
                except Exception, e:
                    print "Failed in _indexDocs:", e

    def connected(self):
        return self._opened

    def finishIndexDir(self):
        """Close a lucene index."""
        if self._searcher is not None:
            self._searcher.close()
            self._searcher = None
        if self._reader is not None:
            self._reader.close()
            self._reader = None
        self._directory = None
        self._fields = []
        self._indexPath = None
        self._opened = False

    def getDirectory(self):
        return self._directory

    def getVersion(self):
        return self._reader.getVersion()

    def isLocked(self):
        return self._reader.isLocked(self._directory)

    def hasDeletions(self):
        return self._reader.hasDeletions()

    def numDocs(self):
        return self._reader.numDocs()

    def maxDoc(self):
        return self._reader.maxDoc()

    def getFieldNames(self):
        fldOption = IndexReader.FieldOption.ALL
        return self._reader.getFieldNames(fldOption)

    def getFields(self, docNum=None):
        if docNum is None:
            docNum = self._reader.maxDoc() - 1
        doc = self._reader.document(docNum)
        return doc.fields()

    def getDoc(self, docNum=None):
        if docNum is None:
            docNum = self._reader.maxDoc() - 1
        return self._reader.document(docNum)

    def getFieldInfos(self, docNum=None):
        fields = []
        doc = self.getDoc(docNum)
        for name in self._fields:
            mfields= doc.getFields(name)
            if not mfields:
                fields.append((name, False, False, False, False, 'N/A'))
                continue
            for field in mfields:
                fields.append((field.name(),
                               field.isIndexed(),
                               field.isTokenized(), field.isBinary(),
                               field.isCompressed(), field.stringValue()))

        return fields

    def search(self, query, fieldName="index", analyzerName=None):
        """Do the lucene search."""
        analyzer = self.getAnalyzer(analyzerName)
        try:
            query = QueryParser(fieldName, analyzer).parse(query)
            return self._searcher.search(query)
        except Exception, e:
            print "search: QueryParser.parse fail", e
        return None

    def initialAnalyzers(self):
        """Initial analyzers"""
        self._analyzers['Keyword'] = KeywordAnalyzer()
        self._analyzers['Simple'] = SimpleAnalyzer()
        self._analyzers['Standard'] = StandardAnalyzer()
        self._analyzers['Stop'] = StopAnalyzer()
        self._analyzers['Whitespace'] = WhitespaceAnalyzer()

    def getAnalyzer(self, analyzerName=None):
        """Return an analyzer by name or default analyzer"""
        try:
            if analyzerName is None:
                analyzerName = self._defaultAnalyzerName
            return self._analyzers.get(analyzerName)
        except Exception, e:
            print "getAnalyzer: ",e

    def getAvailableAnalyzerNames(self):
        return self._analyzers.keys()

    def getTokensByAnalyzer(self, text, fieldName, analyzerName=None):
        """Print analyzed tokens."""
        analyzer = self.getAnalyzer(analyzerName)
        tokens = []
        if analyzerName is None:
            analyzerName = self._defaultAnalyzerName,
        try:
            tokens = [token.termText() for token in analyzer.tokenStream(
                fieldName, StringReader(text))]
            print "  %s analyzer tokens: %s" % ( analyzerName, ", ".join(tokens) )
        except Exception, e:
            print "getTokensByAnalyzer(analyzer:%s): %s" % (analyzerName, e)
        return tokens
