package inseniarlingua.tokenizer;

import static org.junit.Assert.*;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Unit tests for {@code TokenizerBase}: feeds space-joined token values
 * back through the tokenizer and asserts the original token list is
 * reproduced.
 */
class TokenizerBaseTest
{
	// Class under test; a fresh instance is created before each test in setUp().
	TokenizerBase cut

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
	}

	@Before
	public void setUp() throws Exception {
		cut = new TokenizerBase()
	}

	@After
	public void tearDown() throws Exception {
	}

	/**
	 * Round-trip helper shared by both tests: joins the expected tokens'
	 * {@code value}s with single spaces, tokenizes the resulting text via
	 * {@code cut}, and asserts the tokenizer reproduces the expected list.
	 *
	 * @param expTokens expected token list ({@code Word} instances)
	 */
	private void assertTokenizes(List expTokens) {
		def text = expTokens*.value.join(' ')
		println "text=$text"
		cut.text = text
		def gotTokens = cut.tokenize()
		println "gotTokens = $gotTokens"
		assertEquals(expTokens, gotTokens)
	}

	@Test
	public void testGetToken() {
		// Plain whitespace-separated sentence with word tokens only.
		assertTokenizes([new Word('This'), new Word('is'), new Word('a'), new Word('sentence')])
	}

	@Test
	public void testGetTokenSentences() {
		// Sentences that include punctuation tokens (comma, apostrophe,
		// period, exclamation mark). Each token is space-separated in the
		// generated input text, so only whitespace splitting is exercised.
		def expTokensList = [
			[new Word('Hello'), new Word(','), new Word('how'), new Word('are'), new Word('you')],
			[new Word('I'), new Word('didn'), new Word("'"), new Word("t"), new Word('do'), new Word('it')],
			[new Word('Mr'), new Word('.')],
			[new Word('Come'), new Word('!')],
			[new Word('Peter'), new Word(',')]
		]
		for (expTokens in expTokensList) {
			assertTokenizes(expTokens)
		}
	}

}
