package org.elasticsearch.plugin.analysis.word;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.ChineseWordAnalyzerProvider;
import org.elasticsearch.index.analysis.ChineseWordTokenizerFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

import java.util.HashMap;
import java.util.Map;

/**
 * Elasticsearch plugin that registers the "word" Chinese segmentation
 * tokenizers and analyzers, one entry per supported matching algorithm.
 *
 * @author Kenn
 * @since 2017/9/1
 */
public class AnalysisWordPlugin extends Plugin implements AnalysisPlugin {

    /** Plugin name as registered with Elasticsearch. Declared final: this is a constant, not a mutable field. */
    public static final String PLUGIN_NAME = "analysis-word";

    /**
     * Registers one tokenizer per segmentation algorithm. The plain "word"
     * key is an alias for the bidirectional maximum matching algorithm.
     *
     * @return map from tokenizer name to its factory provider
     */
    @Override
    public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
        Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> extra = new HashMap<>();
        // Default: bidirectional maximum matching algorithm
        extra.put("word", ChineseWordTokenizerFactory::getBidirectionalMaximumMatchingTokenizerFactory);
        // Forward maximum matching algorithm
        extra.put("word_maximum_matching", ChineseWordTokenizerFactory::getMaximumMatchingTokenizerFactory);
        // Forward minimum matching algorithm
        extra.put("word_minimum_matching", ChineseWordTokenizerFactory::getMinimumMatchingTokenizerFactory);
        // Reverse maximum matching algorithm
        extra.put("word_reverse_maximum_matching", ChineseWordTokenizerFactory::getReverseMaximumMatchingTokenizerFactory);
        // Reverse minimum matching algorithm
        extra.put("word_reverse_minimum_matching", ChineseWordTokenizerFactory::getReverseMinimumMatchingTokenizerFactory);
        // Bidirectional maximum matching algorithm
        extra.put("word_bidirectional_maximum_matching", ChineseWordTokenizerFactory::getBidirectionalMaximumMatchingTokenizerFactory);
        // Bidirectional minimum matching algorithm
        extra.put("word_bidirectional_minimum_matching", ChineseWordTokenizerFactory::getBidirectionalMinimumMatchingTokenizerFactory);
        // Bidirectional maximum-minimum matching algorithm
        extra.put("word_bidirectional_maximum_minimum_matching", ChineseWordTokenizerFactory::getBidirectionalMaximumMinimumMatchingTokenizerFactory);
        // Full segmentation algorithm
        extra.put("word_full_segmentation", ChineseWordTokenizerFactory::getFullSegmentationTokenizerFactory);
        // Minimal word count algorithm
        extra.put("word_minimal_word_count", ChineseWordTokenizerFactory::getMinimalWordCountTokenizerFactory);
        // Maximum n-gram score algorithm
        extra.put("word_max_Ngram_score", ChineseWordTokenizerFactory::getMaxNgramScoreTokenizerFactory);

        return extra;
    }

    /**
     * Registers one analyzer per segmentation algorithm, mirroring the
     * tokenizer names in {@link #getTokenizers()}. The plain "word" key is an
     * alias for the bidirectional maximum matching algorithm.
     *
     * @return map from analyzer name to its provider
     */
    @Override
    public Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
        Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> extra = new HashMap<>();

        // Default: bidirectional maximum matching algorithm
        extra.put("word", ChineseWordAnalyzerProvider::getBidirectionalMaximumMatchingAnalyzerProvider);
        // Forward maximum matching algorithm
        extra.put("word_maximum_matching", ChineseWordAnalyzerProvider::getMaximumMatchingAnalyzerProvider);
        // Forward minimum matching algorithm
        extra.put("word_minimum_matching", ChineseWordAnalyzerProvider::getMinimumMatchingAnalyzerProvider);
        // Reverse maximum matching algorithm
        extra.put("word_reverse_maximum_matching", ChineseWordAnalyzerProvider::getReverseMaximumMatchingAnalyzerProvider);
        // Reverse minimum matching algorithm
        extra.put("word_reverse_minimum_matching", ChineseWordAnalyzerProvider::getReverseMinimumMatchingAnalyzerProvider);
        // Bidirectional maximum matching algorithm
        extra.put("word_bidirectional_maximum_matching", ChineseWordAnalyzerProvider::getBidirectionalMaximumMatchingAnalyzerProvider);
        // Bidirectional minimum matching algorithm
        extra.put("word_bidirectional_minimum_matching", ChineseWordAnalyzerProvider::getBidirectionalMinimumMatchingAnalyzerProvider);
        // Bidirectional maximum-minimum matching algorithm
        extra.put("word_bidirectional_maximum_minimum_matching", ChineseWordAnalyzerProvider::getBidirectionalMaximumMinimumMatchingAnalyzerProvider);
        // Full segmentation algorithm
        extra.put("word_full_segmentation", ChineseWordAnalyzerProvider::getFullSegmentationAnalyzerProvider);
        // Minimal word count algorithm
        extra.put("word_minimal_word_count", ChineseWordAnalyzerProvider::getMinimalWordCountAnalyzerProvider);
        // Maximum n-gram score algorithm
        extra.put("word_max_Ngram_score", ChineseWordAnalyzerProvider::getMaxNgramScoreAnalyzerProvider);

        return extra;
    }

}
