package core.testes.lexical;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

import java.io.EOFException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Scanner;

import org.junit.Test;

import core.lexical.LexicalTokenizer;

public class LexicalTokenizerTest {

	/**
	 * Tokenizes the given .ford source file and asserts that each token produced
	 * matches the corresponding line of the expected-output file, in order.
	 *
	 * @param sourcePath   path to the .ford source file to tokenize
	 * @param expectedPath path to a file containing one expected token per line
	 * @throws IOException if either file cannot be read or tokenizing fails;
	 *                     letting this propagate (instead of catch-and-fail)
	 *                     preserves the real stack trace in the test report
	 */
	private void assertTokensMatchExpectedOutput( String sourcePath, String expectedPath ) throws IOException {
		LexicalTokenizer tokenizer = new LexicalTokenizer( new FileReader( sourcePath ) );
		// try-with-resources closes the Scanner (and its underlying FileReader),
		// fixing the resource leak in the original tests.
		try ( Scanner outputFile = new Scanner( new FileReader( expectedPath ) ) ) {
			while ( outputFile.hasNext() ) {
				assertEquals( outputFile.nextLine(), tokenizer.nextToken().getToken() );
			}
		}
	}

	@Test
	public void testNextTokenExample1() throws IOException {
		assertTokensMatchExpectedOutput( "examples/example1.ford", "examples/output1" );
	}

	@Test
	public void testNextTokenExample2() throws IOException {
		assertTokensMatchExpectedOutput( "examples/example2.ford", "examples/output2" );
	}

	@Test
	public void testNextTokenExample3() throws IOException {
		assertTokensMatchExpectedOutput( "examples/example3.ford", "examples/output3" );
	}

	/**
	 * Verifies the tokenizer's end-of-input contract: every token produced is
	 * non-null, and once the input is exhausted nextToken() must throw
	 * {@link EOFException}, which terminates the loop and satisfies the
	 * {@code expected} attribute.
	 */
	@Test(expected=EOFException.class)
	public void testEndOfFileExceptionLaunch() throws EOFException, IOException {
		LexicalTokenizer tokenizer = new LexicalTokenizer( new FileReader( "examples/example3.ford" ) );
		while ( true ) {
			assertNotNull( tokenizer.nextToken().getToken() );
		}
	}

}
