import java.util.*;

/**
 * Implements the LexicalAnalyzer interface by Josh Dehlinger (2013).
 * Chops a string of Markdown into lexemes for a syntax analyzer.
 * @author Colin Murray
 *
 */
public class MyLexicalAnalyzer implements LexicalAnalyzer {

    /** The index of the start of the current token. */
    private int pos = 0;

    /** The current possible token. */
    private String tok = "not_a_token";

    /** The input .mkd file as a string. */
    private String inStr;

    /** The length of {@code inStr}. */
    private int flen;

    /** List of legal lexemes; membership is checked in upper case. */
    private List<String> validTokens;

    /**
     * Constructs a MyLexicalAnalyzer over the given input.
     *
     * @param a the input .mkd file as a string
     * @param b the list of legal lexemes
     */
    public MyLexicalAnalyzer(String a, ArrayList<String> b) {
        inStr = a;
        flen = a.length();
        validTokens = b;
    }

    /**
     * Advances past the current token and stores the next one in {@code tok}.
     * A lexeme starting with {@code '#'} is grown one character at a time
     * until it matches an entry in {@code validTokens}; any other lexeme is
     * the run of text up to (but not including) the next {@code '#'} or the
     * end of input.
     *
     * @throws CompilerException if a {@code '#'}-lexeme never matches a valid
     *         token, or if called after the end of the input was reached
     */
    public void getNextToken() throws CompilerException {
        if (pos >= flen) {
            throw new CompilerException( "Next token requested after end of file reached" );
        }
        tok = "";
        if (inStr.charAt(pos) == '#') {
            // Grow the candidate lexeme character by character until it is a
            // known token or the input runs out.
            int i = 0;
            while ((pos + i < flen) && (!lookupToken())) {
                tok = tok + inStr.charAt(pos + i);
                i++;
            }
            pos = pos + i;
            // Reached end of input without ever matching a valid token.
            if ((pos == flen) && (!lookupToken())) {
                throw new CompilerException( "Lexical error! unknown lexeme" );
            }
        } else {
            // Plain text: take everything up to the next '#' (or end of
            // input) in one substring instead of an O(n^2) concat loop.
            int next = inStr.indexOf('#', pos);
            if (next < 0) {
                next = flen;
            }
            tok = inStr.substring(pos, next);
            pos = next;
        }
    }

    /**
     * Reports whether the given string contains only whitespace (or is empty).
     *
     * @param str the string to test
     * @return true if {@code str} trims to the empty string
     */
    public boolean isSpace(String str) {
        return str.trim().isEmpty();
    }

    /**
     * Checks whether the current token is a legal lexeme.
     * Uses {@link Locale#ROOT} so the comparison is locale-independent
     * (the default-locale form breaks under e.g. the Turkish locale).
     *
     * @return true if the upper-cased current token is in {@code validTokens}
     */
    public boolean lookupToken() {
        return validTokens.contains(tok.toUpperCase(Locale.ROOT));
    }

    /**
     * Accessor for the current token.
     * @return tok the current token as a string
     */
    public String getTok() {
        return tok;
    }

    /**
     * Accessor for the current position of the lexer.
     * @return pos the current position as int
     */
    public int getPos() {
        return pos;
    }
}