package com.ve.searcher.lucutil

import org.apache.lucene.analysis.*
import org.apache.lucene.analysis.core.StopAnalyzer
import org.apache.lucene.analysis.core.WhitespaceAnalyzer
import org.apache.lucene.analysis.en.PorterStemFilter
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper
import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.analysis.standard.StandardFilter
import org.apache.lucene.analysis.standard.StandardTokenizer
import org.apache.lucene.analysis.synonym.SynonymGraphFilter
import org.apache.lucene.analysis.synonym.SynonymMap
import org.apache.lucene.analysis.synonym.WordnetSynonymParser

import java.io.FileReader
import java.io.Reader
import java.util.HashMap

object MyAnalyzer {

    /**
     * Builds the per-field analyzer used for indexing/querying.
     *
     * The "body" field gets the stemming pipeline from [stemAnalyzer];
     * every other field falls back to a plain [WhitespaceAnalyzer].
     */
    fun wrapperAnalyzer(): PerFieldAnalyzerWrapper {
        // NOTE: dedicated whitespace+lowercase analyzers for the "tags" and
        // "groupCodes" fields were previously sketched here (commented-out
        // Java); both fields currently fall through to the
        // WhitespaceAnalyzer default below.
        val analyzerMap = HashMap<String, Analyzer>()
        analyzerMap["body"] = stemAnalyzer()

        return PerFieldAnalyzerWrapper(
                WhitespaceAnalyzer(),
                analyzerMap
        )
    }

    /**
     * Experimental analyzer: standard tokenization, lower-casing, optional
     * WordNet synonym-graph expansion, Porter stemming, and English
     * stop-word removal.
     *
     * Synonym expansion is best-effort: if the WordNet prolog file
     * ("wn/wn_s.pl") cannot be read or parsed, the exception is printed and
     * the pipeline continues without synonyms.
     */
    fun testAnalyzer(): Analyzer {
        return object : Analyzer() {
            override fun createComponents(fieldName: String): Analyzer.TokenStreamComponents {

                val source = StandardTokenizer()
                var result: TokenStream = StandardFilter(source)

                result = LowerCaseFilter(result)

                try {
                    val parser = WordnetSynonymParser(true, false, StandardAnalyzer(CharArraySet.EMPTY_SET))
                    // use { } guarantees the reader is closed even if parsing throws.
                    FileReader("wn/wn_s.pl").use { reader -> parser.parse(reader) }
                    val synonymMap: SynonymMap = parser.build()

                    // BUG FIX: wrap the current `result` (the lower-cased stream),
                    // not the raw tokenizer `source`. The original dropped the
                    // StandardFilter/LowerCaseFilter stages whenever synonym
                    // parsing succeeded, making synonym lookup case-sensitive.
                    result = SynonymGraphFilter(result, synonymMap, true)
                } catch (e: Exception) {
                    // Best-effort: report and continue without synonym expansion.
                    println(e)
                }

                result = PorterStemFilter(result)
                result = StopFilter(result, StopAnalyzer.ENGLISH_STOP_WORDS_SET)

                return Analyzer.TokenStreamComponents(source, result)
            }
        }
    }

    /**
     * Production stemming analyzer: standard tokenization, lower-casing,
     * Porter stemming, and English stop-word removal. Used for the "body"
     * field via [wrapperAnalyzer].
     */
    fun stemAnalyzer(): Analyzer {
        return object : Analyzer() {
            override fun createComponents(fieldName: String): Analyzer.TokenStreamComponents {

                val source = StandardTokenizer()
                var result: TokenStream = StandardFilter(source)

                result = LowerCaseFilter(result)

                result = PorterStemFilter(result)
                result = StopFilter(result, StopAnalyzer.ENGLISH_STOP_WORDS_SET)

                return Analyzer.TokenStreamComponents(source, result)
            }
        }
    }
}
