/*
 * Copyright 2008 Christopher Davies
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.googlecode.textish.tokenizer;

import java.util.ArrayList;
import java.util.List;

/**
 * A lexical analyzer that takes a {@link CharSequence} and pushes it through a
 * sequence of {@link ITokenizer}s to build a {@link DocumentToken}.
 * 
 * @author cgdavies
 * 
 */
public class TextishLexicalAnalyzer {

	/**
	 * Produces a {@link DocumentToken} from a {@link CharSequence} by pushing
	 * the source through the full tokenizer pipeline: words, then lines, then
	 * paragraphs, and finally the whole document.
	 * 
	 * @param source the plaintext to create a document token from
	 * @return a {@link DocumentToken} representing the source
	 */
	public DocumentToken analyze(CharSequence source) {
		// Seed the pipeline with the per-character tokens of the source.
		List input = CharToken.fromCharSequence(source);

		// Each stage consumes the previous stage's output; the final stage
		// (DocumentTokenizer) is expected to emit the single DocumentToken
		// read below — TODO confirm it always produces at least one token.
		List output = runTokenizerSequence(source, input, new ITokenizer[] { new WordTokenizer(), new LineTokenizer(),
				new ParagraphTokenizer(), new DocumentTokenizer() });

		return (DocumentToken) output.get(0);
	}

	/**
	 * Runs a sequence of tokenizers on some input. After each tokenizer is
	 * run, its output becomes the input of the next tokenizer. If
	 * <code>tokenizers</code> is empty, <code>input</code> is returned
	 * unchanged rather than an empty list, so no tokens are silently
	 * discarded.
	 * 
	 * @param context the {@link CharSequence} context shared by every stage
	 * @param input the original input of {@link IToken}s
	 * @param tokenizers the tokenizers to run, in order
	 * @return the output of the last tokenizer, or <code>input</code> when
	 *         there are no tokenizers
	 */
	private List runTokenizerSequence(CharSequence context, List input, ITokenizer[] tokenizers) {
		List output = input;

		for (ITokenizer tokenizer : tokenizers) {
			// Every stage writes into a fresh list; the previous stage's
			// output is treated as read-only input from here on.
			List stage = new ArrayList();
			tokenizer.tokenize(context, output, stage);
			output = stage;
		}

		return output;
	}

}
