package com.wujunshen.plugin;

import com.wujunshen.enumation.SegmentationType;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

/**
 * @author frank woo(吴峻申) <br>
 * email:<a href="mailto:frank_wjs@hotmail.com">frank_wjs@hotmail.com</a> <br>
 * @date 2022/8/24 09:41<br>
 */
/**
 * Elasticsearch analysis plugin that registers one analyzer and one tokenizer
 * per {@link SegmentationType}, keyed by the type's title.
 */
public class AsyncAnalysisPlugin extends Plugin implements AnalysisPlugin {
    public static final String PLUGIN_NAME = "hanlp-analyzer";

    /**
     * Registers an {@link AnalyzerProvider} for every segmentation type.
     *
     * @return map from segmentation-type title to its analyzer provider
     */
    @Override
    public Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
        return providersByTitle(element ->
                (indexSettings, environment, name, settings) ->
                        new MyAnalyzerProvider(environment, name, settings, element));
    }

    /**
     * Registers a {@link TokenizerFactory} for every segmentation type.
     *
     * @return map from segmentation-type title to its tokenizer-factory provider
     */
    @Override
    public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
        return providersByTitle(element ->
                (indexSettings, environment, s, settings) ->
                        new MyTokenizerFactory(indexSettings, environment, s, settings, element));
    }

    /**
     * Builds a registration map with one entry per {@link SegmentationType},
     * keyed by {@link SegmentationType#getTitle()}. Shared by both registration
     * methods to avoid duplicating the iteration logic.
     *
     * @param providerFactory creates the analysis provider for a given segmentation type
     * @param <T> the kind of analysis component being registered
     * @return map from segmentation-type title to its provider
     */
    private static <T> Map<String, AnalysisModule.AnalysisProvider<T>> providersByTitle(
            Function<SegmentationType, AnalysisModule.AnalysisProvider<T>> providerFactory) {
        // Cache values(): each call clones the enum constants array.
        SegmentationType[] types = SegmentationType.values();
        Map<String, AnalysisModule.AnalysisProvider<T>> result = new HashMap<>(types.length);
        for (SegmentationType type : types) {
            // Later duplicates overwrite earlier ones, matching Map.put semantics.
            result.put(type.getTitle(), providerFactory.apply(type));
        }
        return result;
    }
}
