import org.codehaus.groovy.grails.commons.ConfigurationHolder

import org.springframework.beans.factory.InitializingBean
import org.springframework.beans.factory.DisposableBean

import org.apache.lucene.analysis.th.ThaiAnalyzer
import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexReader
import org.apache.lucene.index.Term
import org.apache.lucene.queryParser.QueryParser
import org.apache.lucene.search.HitCollector
import org.apache.lucene.search.IndexSearcher
import org.apache.lucene.search.TermQuery
import org.apache.lucene.search.BooleanQuery
import org.apache.lucene.search.BooleanClause
import org.apache.lucene.search.similar.MoreLikeThis
import org.apache.lucene.search.QueryFilter
import org.apache.lucene.store.FSDirectory

class IndexService implements InitializingBean, DisposableBean {

    // Non-transactional Grails service: Lucene manages its own storage,
    // there is nothing for Hibernate to wrap in a transaction.
    static transactional = false

    def directory   // Lucene FSDirectory backing the index
    def analyzer    // ThaiAnalyzer shared by indexing and querying
    def stopwords   // optional stop-word set, see initStopWords()

    /**
     * Opens (or creates) the filesystem index configured at
     * roti.index.directory and initialises the analyzer. When the index
     * directory did not exist yet, one dummy document is indexed so the
     * physical Lucene index structure gets created.
     */
    void afterPropertiesSet() {
        def config = ConfigurationHolder.config
        def indexDir = new java.io.File(config.roti.index.directory)
        def noIndex = false
        if (! indexDir.exists()) {
            indexDir.mkdirs()
            noIndex = true
        }
        directory = FSDirectory.getDirectory(indexDir)
        analyzer = new ThaiAnalyzer()
        if (noIndex) {
            // create index structure by indexing a dummy document
            def doc = new Document()
            doc.add(new Field("dummy", "", Field.Store.NO, Field.Index.UN_TOKENIZED))
            index(doc)
        }
    }

    /**
     * Experimental feature, not integrated yet.
     * Loads /mltstopwords.txt (UTF-8, one word per line) from the
     * classpath into the {@code stopwords} set.
     */
    def initStopWords() {
        def sws = getClass().getResourceAsStream("/mltstopwords.txt")
        def input = new java.io.BufferedReader(new InputStreamReader(sws, 'UTF-8'))
        stopwords = new java.util.HashSet()
        try {
            def line
            // FIX: compare against null explicitly. Groovy truth evaluates
            // an empty line ("") as false, so the old form
            // `while (line = input.readLine())` silently stopped reading at
            // the first blank line in the file.
            while ((line = input.readLine()) != null) {
                stopwords.add(line)
            }
        } finally {
            // FIX: close the reader (which also closes the underlying
            // stream) even if readLine() throws; previously only the raw
            // stream was closed and only on the happy path.
            input.close()
        }
    }


    void destroy() {
        // Nothing to dispose: readers/writers are opened and closed per
        // operation, and FSDirectory needs no explicit shutdown here.
    }

    /**
     * Deletes every indexed document whose "id" field equals entryId.
     *
     * @param entryId entry identifier (coerced to String)
     * @return true on success, false when deletion failed (error is logged)
     */
    def removeIndex(entryId) {
        def r = IndexReader.open(directory)
        def flag = true
        try {
            r.deleteDocuments(new Term("id", entryId as String))
        } catch (e) {
            flag = false
            log.error(e, e)
        } finally {
            // FIX: always release the reader; it previously stayed open if
            // an unexpected error escaped the catch.
            r.close()
        }
        return flag
    }

    /**
     * Adds a single Lucene document to the index.
     * Failures are logged, not rethrown.
     */
    def index(doc) {
        def writer = new IndexWriter(directory, analyzer)
        try {
            writer.addDocument(doc)
        } catch (e) {
            log.error(e, e)
        } finally {
            // FIX: close in finally so the index write lock is always
            // released, even on unexpected errors.
            writer.close()
        }
    }

    /**
     * Similarity search over the "content" field.
     *
     * NOTE(review): the IndexSearcher is deliberately left open — the
     * returned Hits object fetches documents lazily from it, so closing it
     * here would break the result. This leaks one searcher per call;
     * consider materialising the results instead. TODO confirm callers.
     *
     * @return Lucene Hits for the similarity query
     */
    def search(str) {
        def query = createQuerySimilarity('content', str)
        def searcher = new IndexSearcher(directory)
        def hits = searcher.search(query)
        return hits
    }

    /**
     * Builds a BooleanQuery from the account's search-control list:
     * excluded resources become MUST_NOT clauses, included ones MUST.
     *
     * @param code account code to look up
     * @return the query, or null when no account matches the code
     */
    def createExcludeIncludeQuery(code) {
        def account = Account.findByCode(code)
        if (account) {
            def query = new BooleanQuery()
            account.searchControlList.each {
                def termQuery = new TermQuery(new Term('resourceId', it.resource.id.toString()))
                def flag = it.isExclude() ? BooleanClause.Occur.MUST_NOT  : BooleanClause.Occur.MUST
                query.add(termQuery, flag)
            }
            return query
        } else {
            // FIX: typo in the error message ("can'" -> "can't").
            log.error("can't find account entity for code ${code}")
        }
        return null
    }

    /**
     * Parses free-form query text against the given field with the shared
     * analyzer, applying an optional boost.
     *
     * @param boost boost applied to the parsed query when > 0
     */
    def createQuery4Str(field, text, boost = 0) {
        def parser = new QueryParser(field, analyzer)
        def query =  parser.parse(text)
        if (boost > 0) {
            query.setBoost(boost)
        }
        return query
    }

    /**
     * Builds a term-frequency-ranked OR query from the analyzed tokens of
     * the given text: each distinct token becomes a SHOULD TermQuery,
     * added most-frequent first.
     *
     * @param boost boost applied to the whole query when > 0
     */
    def createQuerySimilarity(field, text, boost = 0) {
        def tokens = AnalyzerUtils.tokenFromAnalysis(analyzer, text)
        def map = [:]
        tokens.each {
            def word = new String(it.termBuffer(), 0, it.termLength())
            def val = map[word] ?: 0
            map[word] = val + 1
        }
        def query = new BooleanQuery()
        // TODO take only the N most frequent tokens
        map.entrySet().sort{it.value}.reverse().each {
            def termQuery = new TermQuery(new Term(field, it.key))
            query.add(termQuery, BooleanClause.Occur.SHOULD)
        }
        // FIX: the boost parameter was accepted but never applied, so the
        // 1.5f title boost passed from createSearchForQuery had no effect.
        if (boost > 0) {
            query.setBoost(boost)
        }
        return query
    }

    /**
     * Searches for entries matching the given content/title/tags. When an
     * account code is supplied, its include/exclude restrictions are OR-ed
     * into the query.
     *
     * @return up to cnt matching Entry instances (empty list on error)
     */
    def searchFor(content, title, tags, code=null, cnt=5) {
        def reader =  IndexReader.open(directory)
        def searcher = new IndexSearcher(reader)
        def ret = []
        try {
            def query = createSearchForQuery(content, title, tags)
            def filter = null
            if (code) {
                def tmpQuery = createExcludeIncludeQuery(code)
                if (tmpQuery) {
                    query.add(tmpQuery, BooleanClause.Occur.SHOULD)
                }
            }

            log.debug("Query: ${query}")
            log.debug("Filter: ${filter}")

            def topDocs = searcher.search(query, filter, cnt)
            ret = getEntryForTopDocs(reader, topDocs)

        } catch (e) {
            log.error(e, e)
        } finally {
            // FIX: close in finally so the reader is released even when an
            // error is thrown outside the inner try (e.g. while building
            // the query).
            reader.close()
        }
        return ret
    }

    /**
     * Same search as searchFor, but returns score explanations for
     * debugging/tuning relevance.
     *
     * @return list of [Explanation, Entry, lucene doc id] triples
     */
    def explainSearchFor(content, title, tags, code=null, cnt=5) {
        def reader =  IndexReader.open(directory)
        def searcher = new IndexSearcher(reader)
        def ret = []
        try {
            def query = createSearchForQuery(content, title, tags)
            def filter = null
            if (code) {
                def tmpQuery = createExcludeIncludeQuery(code)
                if (tmpQuery) {
                    query.add(tmpQuery, BooleanClause.Occur.SHOULD)
                }
            }
            def topDocs = searcher.search(query, filter, cnt)

            for (it in topDocs.scoreDocs) {
                def doc = reader.document(it.doc)
                ret.add([searcher.explain(query, it.doc),
                         Entry.get(doc.get('id')),
                         it.doc
                        ])
            }
        } catch (e) {
            log.error(e, e)
        } finally {
            // FIX: release the reader even on unexpected errors.
            reader.close()
        }
        return ret
    }

    /**
     * Combines per-field similarity queries into one OR query. Empty
     * fields are skipped; title is boosted 1.5, tags 2 (tags are parsed as
     * space-separated terms).
     */
    def createSearchForQuery(content, title, tags) {
        def query = new BooleanQuery()
        if (content) {
            query.add(createQuerySimilarity('content', content), BooleanClause.Occur.SHOULD)
        }
        if (title) {
            query.add(createQuerySimilarity('title', title, 1.5f), BooleanClause.Occur.SHOULD)
        }
        if (tags) {
            query.add(createQuery4Str('tags', tags.split(',').join(' '), 2), BooleanClause.Occur.SHOULD)
        }
        return query
    }

    /**
     * Finds entries similar to the given entry via Lucene's MoreLikeThis,
     * optionally restricted by the account's include/exclude list.
     *
     * @return up to cnt similar Entry instances (empty list on error)
     */
    def moreLikeThis(entry, code = null, cnt = 5) {
        def reader = IndexReader.open(directory)
        def searcher = new IndexSearcher(reader)
        def docNo = getDocumentNo(searcher, entry.id)
        def ret = []
        try {

            def query = createMoreLikeThisQuery(entry, docNo, reader)
            def filter = null

            if (code) {
                def tmpQuery = createExcludeIncludeQuery(code)
                if (tmpQuery) {
                    query.add(tmpQuery, BooleanClause.Occur.SHOULD)
                }
            }

            log.debug("Query: ${query}")

            def topDocs = searcher.search(query, filter, cnt)
            ret = getEntryForTopDocs(reader, topDocs)

        } catch (e) {
            log.error(e, e)
        } finally {
            // FIX: release the reader even on unexpected errors.
            reader.close()
        }
        return ret
    }

    /**
     * Like moreLikeThis, but returns score explanations for debugging.
     *
     * @return list of [Explanation, Entry, lucene doc id] triples
     */
    def explainMoreLikeThis(entry, cnt = 5) {
        def reader = IndexReader.open(directory)
        def searcher = new IndexSearcher(reader)
        def docNo = getDocumentNo(searcher, entry.id)
        def ret = []
        try {

            def query = createMoreLikeThisQuery(entry, docNo, reader)

            def topDocs = searcher.search(query, null, cnt)

            for (it in topDocs.scoreDocs) {
                def doc = reader.document(it.doc)
                ret.add([searcher.explain(query, it.doc),
                         Entry.get(doc.get('id')),
                         it.doc
                        ])
            }

        } catch (e) {
            log.error(e, e)
        } finally {
            // FIX: release the reader even on unexpected errors.
            reader.close()
        }
        return ret
    }

    /**
     * Builds a MoreLikeThis query for the document at docNo, excluding the
     * entry itself from the results via a MUST_NOT clause on its id.
     */
    def createMoreLikeThisQuery(entry, docNo, reader) {
        def mlt = new MoreLikeThis(reader)
        mlt.setAnalyzer(analyzer)
        // NOTE(review): "tag" here vs the "tags" field queried in
        // createSearchForQuery — verify against the indexing code which
        // field name is actually stored; a mismatch would make the tag
        // field contribute nothing to MLT.
        mlt.setFieldNames((String[]) ["content", "title", "tag"])
        mlt.setMinWordLen(3)
        mlt.setMinTermFreq(3)
        mlt.setBoost(true)

        def mltQuery = mlt.like(docNo)

        def query = new BooleanQuery()
        query.add(mltQuery, BooleanClause.Occur.MUST)
        query.add(new TermQuery(new Term("id", entry.id.toString())), BooleanClause.Occur.MUST_NOT)
        return query
    }

    /**
     * Resolves the Entry domain object for each scored document by its
     * stored "id" field.
     */
    def getEntryForTopDocs(reader, topDocs) {
        def ret = []

        for (tmp in topDocs.scoreDocs) {
            def doc = reader.document(tmp.doc)
            ret.add(Entry.get(doc.get('id')))
        }

        return ret
    }

    /**
     * Returns the Lucene document number for the entry with the given id.
     * NOTE(review): assumes the id is indexed — hits.id(0) throws when
     * there is no match; confirm callers only pass indexed entries.
     */
    def getDocumentNo(searcher, id) {
        def qry = new TermQuery(new Term("id", id.toString()))
        def hits = searcher.search(qry)
        return hits.id(0)
    }

}
