package inseniarlingua.lang.en;

import static org.junit.Assert.*;
import inseniarlingua.tokenizer.Word;
import inseniarlingua.trans.en_ia.Translator;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * JUnit 4 tests for {@link TokenizerEN}.
 *
 * Covers contraction expansion (e.g. "didn't" -> "did" + "not") and a set of
 * round-trip sentences where the expected words are joined back into text,
 * tokenized, and compared against the expectation.
 */
class TokenizerENTest {
	/** Class under test; a fresh instance is created before each test in {@link #setUp()}. */
	TokenizerEN cut
	/** Shared translator, built once per class; its transMap is injected into each tokenizer. */
	static Translator translator

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
		translator = new Translator()
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
		// nothing to clean up
	}

	@Before
	public void setUp() throws Exception {
		cut = new TokenizerEN()
		// the tokenizer needs the translation map (e.g. to resolve contractions)
		cut.transMap = translator.transMap
	}

	@After
	public void tearDown() throws Exception {
		// nothing to clean up
	}

	/** "didn't" must be expanded into the two separate words "did" and "not". */
	@Test
	public void testTokenizeWithQuote() {
		def expTokens = [new Word('I'), new Word('did'), new Word('not'), new Word('do'), new Word('it')]
		def text = "I didn't do it"
		cut.text = text
		def gotTokens = cut.tokenize()
		assertEquals(expTokens, gotTokens)
	}

	/**
	 * Round-trips several sentences: the expected words are joined with spaces
	 * (re-attaching "n '" -> "n'" so contractions survive the join), tokenized,
	 * and the resulting tokens must equal the expected word list.
	 */
	@Test
	public void testTokenize() {
		def expTokensSentences = [
			[new Word('This'), new Word('is'), new Word('Joe'), new Word("'s"), new Word('house')],
			[new Word('mr.')],
			[new Word('seven'), new Word("o'clock")],
			[new Word('living room')],
			[new Word('gets up from')],
		]
		for (expTokens in expTokensSentences) {
			// rebuild the source text from the expected words
			def text = expTokens*.value.join(' ').replaceAll('n \'', 'n\'')
			cut.text = text
			def gotTokens = cut.tokenize()
			assertEquals(expTokens, gotTokens)
		}
	}
}
