

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

import IC.Parser.Lexer;
import IC.Parser.LexicalError;
import IC.Parser.Token;
import IC.Parser.sym;

/**
 * Test driver for the IC lexer: tokenizes an input file and compares each
 * produced token (via {@code Token.toString()}) against the corresponding
 * line of an expected-tokens file.
 *
 * Usage: LexerTester &lt;input-file&gt; &lt;expected-tokens-file&gt;
 *
 * Prints "Test passed, yay!" on success, or a "Test failed" diagnostic on
 * the first mismatch. An expected line containing "lexical error"
 * (case-insensitive) matches a thrown {@link LexicalError}.
 */
public class LexerTester {
	public static void main(String[] args) throws IOException {
		if (args.length != 2) {
			System.err.println("Invalid number of arguments");
			return;
		}
		// try-with-resources: both readers are closed on every exit path
		// (the original leaked them on early return and on exception).
		try (FileReader txtFile = new FileReader(args[0]);
				BufferedReader in = new BufferedReader(new FileReader(args[1]))) {

			/* connects the Lexer to the input file */
			Lexer scanner = new Lexer(txtFile);

			String expected;
			do {
				expected = in.readLine();  // expected token; null once the file is exhausted
				try {
					Token actual = scanner.next_token();  // actual token generated
					if (actual.sym == sym.EOF) {
						System.out.println("Test passed, yay!");
						return;
					}
					// compares between the actual and expected on-the-fly;
					// equals(null) is false, so a too-short expected file is
					// reported as a mismatch rather than silently ignored
					if (!actual.toString().equals(expected)) {
						System.err.println("Test failed: expected token: [" + expected
								+ "], current token: [" + actual + "]");
						return;
					}
				} catch (LexicalError e) {
					// A lexical error is acceptable only if the expected line
					// announces one. Null-check first: the expected file may have
					// ended early (the original NPE'd here on toLowerCase()).
					if (expected == null || !expected.toLowerCase().contains("lexical error")) {
						System.err.println("Test failed: expected token: [" + expected
								+ "], current token: [" + e + "]");
						return;  // bug fix: previously kept scanning after reporting failure
					}
					// expected lexical error — keep comparing subsequent tokens
				}
			} while (expected != null);
		}
	}
}
