/*
 *
 *  *  Copyright (C) 2018  Wanghaobin<463540703@qq.com>
 *
 *  *  AG-Enterprise 企业版源码
 *  *  郑重声明:
 *  *  如果你从其他途径获取到，请告知老A传播人，奖励1000。
 *  *  老A将追究授予人和传播人的法律责任!
 *
 *  *  This program is free software; you can redistribute it and/or modify
 *  *  it under the terms of the GNU General Public License as published by
 *  *  the Free Software Foundation; either version 2 of the License, or
 *  *  (at your option) any later version.
 *
 *  *  This program is distributed in the hope that it will be useful,
 *  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  *  GNU General Public License for more details.
 *
 *  *  You should have received a copy of the GNU General Public License along
 *  *  with this program; if not, write to the Free Software Foundation, Inc.,
 *  *  51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 */

package com.github.wxiaoqi.search.lucene.util;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;

/**
 * @author dzl
 */
/**
 * Lucene {@link Analyzer} implementation backed by the IK Chinese tokenizer.
 *
 * <p>Defaults to fine-grained segmentation; smart (coarse-grained) segmentation
 * can be enabled via the constructor or {@link #setUseSmart(boolean)}.
 *
 * @author dzl
 */
public class IKAnalyzer5x extends Analyzer
{
    
    /** When {@code true}, the tokenizer performs smart (coarse-grained) segmentation. */
    private boolean useSmart;
    
    /**
     * Creates an analyzer using the default fine-grained segmentation algorithm.
     */
    public IKAnalyzer5x()
    {
        this(false);
    }
    
    /**
     * Creates an analyzer with the given segmentation mode.
     * 
     * @param useSmart {@code true} to enable smart segmentation
     */
    public IKAnalyzer5x(boolean useSmart)
    {
        this.useSmart = useSmart;
    }
    
    /**
     * @return whether smart segmentation is enabled
     */
    public boolean useSmart()
    {
        return useSmart;
    }
    
    /**
     * @param useSmart {@code true} to enable smart segmentation
     */
    public void setUseSmart(boolean useSmart)
    {
        this.useSmart = useSmart;
    }
    
    /**
     * Builds the tokenization pipeline for a field, implementing the
     * single-argument {@code createComponents} contract of modern Lucene.
     */
    @Override
    protected TokenStreamComponents createComponents(String fieldName)
    {
        final Tokenizer source = new IKTokenizer5x(useSmart());
        return new TokenStreamComponents(source);
    }
}
