package org.nlp.algo.classifier;

import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;

import org.apache.lucene.util.Version;
import org.nlp.lucene.BloomAnalyzer;

import com.aliasi.tokenizer.Tokenizer;
import com.aliasi.tokenizer.TokenizerFactory;
import com.aliasi.util.AbstractExternalizable;

/**
 * A LingPipe {@link TokenizerFactory} that delegates tokenization to a shared
 * Lucene analyzer ({@link BloomAnalyzer}). Stateless singleton: use
 * {@link #INSTANCE} rather than constructing new objects; serialization is
 * routed through a proxy so deserialization also yields {@code INSTANCE}.
 *
 * @author longkeyy
 */
public class LuceneTokenizerFactory implements TokenizerFactory, Serializable {

	private static final long serialVersionUID = 5819149904918759541L;

	/** Shared singleton instance; the factory holds no per-instance state. */
	public static final LuceneTokenizerFactory INSTANCE = new LuceneTokenizerFactory();

	/** Package-visible alias for {@link #INSTANCE}, kept for compatibility. */
	static final LuceneTokenizerFactory FACTORY = INSTANCE;

	// To use a different tokenizer, change the analyzer constructed on this line.
	// NOTE(review): field name "anaylzer" is a typo for "analyzer"; left unchanged
	// because the field is package-visible and may be referenced elsewhere.
	static final BloomAnalyzer anaylzer = new BloomAnalyzer(Version.LUCENE_40);

	/**
	 * Returns a tokenizer over the supplied characters, backed by the shared
	 * Lucene analyzer.
	 *
	 * <p>NOTE(review): {@code start} and {@code length} are currently ignored —
	 * the full {@code ch} array is handed to {@link LuceneTokenizer}. Verify
	 * whether {@code LuceneTokenizer} supports slicing; callers passing a
	 * sub-range may get unexpected tokens.
	 *
	 * @param ch Characters to tokenize.
	 * @param start Index of the first character of the slice.
	 * @param length Number of characters in the slice.
	 * @return A tokenizer over the supplied characters.
	 */
	@Override
	public Tokenizer tokenizer(char[] ch, int start, int length) {
		return new LuceneTokenizer(ch, anaylzer);
	}

	/**
	 * Replaces this factory with a serialization proxy so that deserialization
	 * resolves back to the shared {@link #INSTANCE}.
	 *
	 * @return Serialization proxy for this factory.
	 */
	Object writeReplace() {
		return new Externalizer();
	}

	/**
	 * Returns the name of this class.
	 * 
	 * @return The name of this class.
	 */
	@Override
	public String toString() {
		return getClass().getName();
	}

	/**
	 * Serialization proxy: writes no state on serialization and resolves to
	 * {@link LuceneTokenizerFactory#INSTANCE} on deserialization.
	 */
	private static class Externalizer extends AbstractExternalizable {

		private static final long serialVersionUID = 3826670589236636230L;

		public Externalizer() {
			/* do nothing */
		}

		/** Writes nothing; the enclosing factory has no serializable state. */
		@Override
		public void writeExternal(ObjectOutput objOut) {
			/* do nothing */
		}

		/**
		 * Resolves the proxy back to the shared singleton.
		 *
		 * @param objIn Input stream (unused; no state was written).
		 * @return The shared {@link LuceneTokenizerFactory#INSTANCE}.
		 */
		@Override
		public Object read(ObjectInput objIn) {
			return INSTANCE;
		}
	}

}
