package org.apache.lucene.analysis.ngram;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;

import com.libapml.util.Arrays;

/**
 * A {@link TokenFilter} that joins runs of {@code ngramSize} consecutive
 * input tokens into a single space-separated token (a word n-gram, often
 * called a "shingle"). Each call to {@link #next()} advances the window by
 * one input token, so for input {@code a b c} with {@code ngramSize = 2}
 * the filter emits {@code "a b"}, then {@code "b c"}.
 *
 * <p>Offsets of the emitted token span from the start offset of the first
 * token in the window to the end offset of the last.</p>
 */
public class NGramTokenFilter extends TokenFilter {

	/** Number of consecutive input tokens joined into each emitted n-gram. */
	private int ngramSize = 1;

	/**
	 * Sliding window: the last {@code ngramSize - 1} tokens of the previous
	 * n-gram, awaiting one fresh token from the input to form the next one.
	 * Empty until the first successful call to {@link #next()}.
	 */
	private List<Token> previousTokens;

	/**
	 * Creates a filter that emits n-grams of the given size.
	 *
	 * @param input     the upstream token stream
	 * @param ngramSize number of tokens per emitted n-gram; values below 1
	 *                  cause the filter to emit nothing (stream appears empty)
	 */
	public NGramTokenFilter(TokenStream input, int ngramSize) {
		super(input);
		this.ngramSize = ngramSize;
		previousTokens = new ArrayList<Token>();
	}

	/**
	 * Returns the next n-gram token, or {@code null} when the input stream
	 * can no longer supply enough tokens to advance the window.
	 *
	 * @throws IOException if the underlying stream fails
	 */
	public final Token next() throws IOException {

		List<Token> ngramTokens = new ArrayList<Token>();

		Token token;
		if (previousTokens.isEmpty()) {
			// First call: fill the window with up to ngramSize tokens.
			int count = 0;
			while (count++ < ngramSize && (token = input.next()) != null)
				ngramTokens.add(token);
		} else if ((token = input.next()) != null) {
			// Slide: keep the previous ngramSize - 1 tokens, add one new one.
			ngramTokens.addAll(previousTokens);
			ngramTokens.add(token);
		}

		if (ngramTokens.isEmpty())
			return null;

		// Join terms with single spaces. StringBuilder avoids the O(n^2)
		// cost of String +=, and appending (buffer, 0, termLength) uses only
		// the valid portion of the term buffer via the standard library,
		// replacing the non-standard Arrays.subarray helper.
		StringBuilder ngramTerm = new StringBuilder();
		for (Token ngramToken : ngramTokens) {
			if (ngramTerm.length() > 0)
				ngramTerm.append(' ');
			ngramTerm.append(ngramToken.termBuffer(), 0, ngramToken.termLength());
		}

		// The n-gram spans from the first token's start to the last's end.
		Token ngram = new Token(
				ngramTerm.toString(),
				ngramTokens.get(0).startOffset(),
				ngramTokens.get(ngramTokens.size() - 1).endOffset());

		// Advance the window by dropping the oldest token; ngramTokens is a
		// dead local after this, so no defensive copy is needed.
		ngramTokens.remove(0);
		previousTokens = ngramTokens;

		return ngram;
	}

}
