package com.fengcone.lucene.analyzer;

import java.io.IOException;
import java.io.InputStream;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenFilter;

/**
 * Lucene {@link Analyzer} that tokenizes text with a {@code JiebaTokenizer}
 * and removes stop words via a {@code JiebaStopTokenFilter}.
 *
 * <p>An optional user dictionary may be supplied as an {@link InputStream};
 * when present it is loaded into the tokenizer at component-creation time.
 */
public class JiebaAnalyzer extends Analyzer {

	/** Optional user-dictionary stream; {@code null} means no user dictionary. */
	private InputStream userDictIn;

	/** Creates an analyzer without a user dictionary. */
	public JiebaAnalyzer() {
	}

	/**
	 * Creates an analyzer that loads the given user dictionary.
	 *
	 * @param userDictIn user-dictionary stream, must not be {@code null}
	 * @throws IllegalArgumentException if {@code userDictIn} is {@code null}
	 */
	public JiebaAnalyzer(InputStream userDictIn) {
		setUserDictIn(userDictIn);
	}

	// NOTE(review): Lucene calls createComponents once per thread, but an
	// InputStream can only be consumed once — a second invocation with a
	// non-null userDictIn would read an exhausted stream. Consider accepting
	// a dictionary path or byte[] instead; left as-is to keep the API stable.
	@SuppressWarnings("resource")
	@Override
	protected TokenStreamComponents createComponents(String fieldName) {
		JiebaTokenizer tokenizer = new JiebaTokenizer();
		if (userDictIn != null) {
			try {
				tokenizer.loadUserDict(userDictIn);
			} catch (IOException e) {
				// Chain the cause so the original stack trace is preserved.
				throw new RuntimeException("load user dict error", e);
			}
		}
		TokenFilter stopFilter = new JiebaStopTokenFilter(tokenizer);
		return new TokenStreamComponents(tokenizer, stopFilter);
	}

	/**
	 * Sets the user-dictionary stream used by {@link #createComponents(String)}.
	 *
	 * @param userDictIn user-dictionary stream, must not be {@code null}
	 * @throws IllegalArgumentException if {@code userDictIn} is {@code null}
	 */
	public void setUserDictIn(InputStream userDictIn) {
		if (userDictIn == null) {
			throw new IllegalArgumentException("userDictIn is null");
		}
		this.userDictIn = userDictIn;
	}

}
