package com.wurd.wurdAnalysis;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;

public class WurdAnalyzer extends Analyzer {

    /** Tokenizer back-ends this analyzer can dispatch to. */
    public enum type {
        thulac, ik
    }

    // Selected back-end; when null the fallback logic in createComponents applies.
    private type _type;

    // Pre-built components returned verbatim when no back-end type was chosen.
    // NOTE(review): Lucene caches TokenStreamComponents per thread, so handing
    // one shared instance to multiple threads may be unsafe — confirm callers.
    private TokenStreamComponents tokenStreamComponents;

    // Option for the IK back-end.
    private boolean ik_useSmart = false;

    // Options for the THULAC back-end.
    private String thulac_user_specified_dict_name = null;
    private Character thulac_separator = '_';
    private boolean thulac_useT2S = false;
    private boolean thulac_seg_only = true;
    private boolean thulac_useFilter = false;
    private String thulac_prefix = null;

    /** Returns whether the IK tokenizer is created in smart mode. */
    public boolean useSmart() {
        return ik_useSmart;
    }

    /** Enables or disables smart mode for the IK tokenizer. */
    public void setUseSmart(boolean ik_useSmart) {
        this.ik_useSmart = ik_useSmart;
    }

    /**
     * Sets every THULAC option in one call.
     *
     * @param user_specified_dict_name name of an extra user dictionary, or null
     * @param separator                character joining segment and tag
     * @param useT2S                   traditional-to-simplified conversion flag
     * @param seg_only                 segment without part-of-speech tags
     * @param useFilter                filtering flag passed to the tokenizer
     * @param prefix                   resource path prefix, or null
     */
    public void setThulac(String user_specified_dict_name, Character separator,
                          boolean useT2S, boolean seg_only,
                          boolean useFilter, String prefix) {
        thulac_user_specified_dict_name = user_specified_dict_name;
        thulac_separator = separator;
        thulac_useT2S = useT2S;
        thulac_seg_only = seg_only;
        thulac_useFilter = useFilter;
        thulac_prefix = prefix;
    }

    /** Sets only the THULAC resource path prefix. */
    public void setThulacPrefix(String prefix) {
        thulac_prefix = prefix;
    }

    /** Sets only the THULAC seg-only flag. */
    public void setThulacSegOnly(boolean segOnly) {
        thulac_seg_only = segOnly;
    }

    /** Creates an analyzer that falls back to a default IK tokenizer. */
    public WurdAnalyzer() {
    }

    /** Creates an analyzer using the given back-end type. */
    public WurdAnalyzer(WurdAnalyzer.type type) {
        this._type = type;
    }

    /** Creates an analyzer that always returns the supplied components. */
    public WurdAnalyzer(TokenStreamComponents tokenStreamComponents) {
        this.tokenStreamComponents = tokenStreamComponents;
    }

    /**
     * Builds the token stream for a field: THULAC or IK when a type was
     * chosen, otherwise the injected components if any, otherwise a
     * default IK tokenizer.
     */
    @Override
    protected TokenStreamComponents createComponents(String s) {
        if (type.thulac == _type) {
            return wrap(new WurdTokenizer(thulac_user_specified_dict_name,
                    thulac_separator, thulac_useT2S, thulac_seg_only,
                    thulac_useFilter, thulac_prefix));
        }
        if (type.ik == _type) {
            return wrap(new WurdIkTokenizer(useSmart()));
        }
        if (tokenStreamComponents != null) {
            return tokenStreamComponents;
        }
        return wrap(new WurdIkTokenizer());
    }

    // Wraps a tokenizer into components whose sink is the tokenizer itself.
    private static TokenStreamComponents wrap(Tokenizer tokenizer) {
        return new TokenStreamComponents(tokenizer, tokenizer);
    }
}
