import java.io.*;
import java.util.*;

import junit.framework.TestCase;

public class LexiconTrieTest extends TestCase {

  /** Number of repetitions used when averaging a timing measurement. */
  private static final int TRIALS = 10000;

  /** Every word contained in smalltestwords.txt. */
  private static final String[] SMALL_WORDS = {
      "a", "big", "cat", "dated", "every", "fat", "gore", "gorilla", "duck"};

  /** A sample of words expected to be in bogwords.txt. */
  private static final String[] REGULAR_WORDS = {
      "aback", "abacus", "able", "added", "add", "circumlocution", "flaw", "flax",
      "flexible", "zygote", "zombie", "zig", "zigging", "zigzagging", "zero",
      "zeroes", "vaccinate", "vacillate"};

  /** A sample of words expected to be in ospd3.txt. */
  private static final String[] ORDERED_WORDS = {
      "aa", "ab", "ad", "ax", "eh", "if", "ka", "no", "nu", "xi", "xu", "ya",
      "ye", "yo", "aah", "axe", "ash", "arc", "fad", "fen", "fez",
      "backdropping", "chainsawing", "outslicked", "prebooking", "windsurfing",
      "pinwheeling", "kibbitzing", "dipnetting", "slobbiest"};

  /** Scanner over smalltestwords.txt, reopened before each test. */
  private Scanner smallWords;
  /** Scanner over bogwords.txt, reopened before each test. */
  private Scanner regularWords;
  /** Scanner over ospd3.txt (alphabetically ordered), reopened before each test. */
  private Scanner orderedWords;
  /** Scanner over empty.txt, reopened before each test. */
  private Scanner emptyWords;

  /**
   * Opens the named file for scanning, failing the current test if the file
   * cannot be found.
   *
   * @param filename path of the word file to open
   * @return a Scanner over the file's contents
   */
  private static Scanner open(String filename) {
    try {
      return new Scanner(new File(filename));
    } catch (FileNotFoundException e) {
      // fail() is the idiomatic JUnit replacement for assertTrue(false),
      // and it carries a diagnostic message.
      fail("Could not open '" + filename + "': " + e);
      return null; // unreachable: fail() always throws
    }
  }

  /**
   * Opens the four word files used by the tests. Run automatically before
   * each test.
   */
  public void setUp() {
    smallWords = open("smalltestwords.txt");
    regularWords = open("bogwords.txt");
    orderedWords = open("ospd3.txt");
    emptyWords = open("empty.txt");
  }

  /**
   * Closes smalltestwords.txt, bogwords.txt, ospd3.txt, and empty.txt after
   * each test. Null checks guard against a setUp that failed before opening
   * every file, which would otherwise turn one failure into a cascade of
   * NullPointerExceptions.
   */
  public void tearDown() {
    if (smallWords != null) smallWords.close();
    if (regularWords != null) regularWords.close();
    if (orderedWords != null) orderedWords.close();
    if (emptyWords != null) emptyWords.close();
  }

  // ---------------------------------------------------------------------
  // Assertion helpers: the same word lists are checked before and after
  // loading, so express each check once, data-driven.
  // ---------------------------------------------------------------------

  /** Asserts that the lexicon contains every given word. */
  private static void assertContainsAll(LexiconTrie l, String... words) {
    for (String w : words) {
      assertTrue("expected lexicon to contain '" + w + "'", l.contains(w));
    }
  }

  /** Asserts that the lexicon contains none of the given words. */
  private static void assertContainsNone(LexiconTrie l, String... words) {
    for (String w : words) {
      assertFalse("expected lexicon NOT to contain '" + w + "'", l.contains(w));
    }
  }

  /** Asserts that every given string is a prefix of some word in the lexicon. */
  private static void assertPrefixAll(LexiconTrie l, String... prefixes) {
    for (String p : prefixes) {
      assertTrue("expected prefix '" + p + "' to be present", l.containsPrefix(p));
    }
  }

  /** Asserts that none of the given strings prefix any word in the lexicon. */
  private static void assertPrefixNone(LexiconTrie l, String... prefixes) {
    for (String p : prefixes) {
      assertFalse("expected prefix '" + p + "' to be absent", l.containsPrefix(p));
    }
  }

  /**
   * Tests all inner private methods in the TrieLexicon and inner class
   * implementation.
   */
  public void testHelpers() {
    assertTrue(LexiconTrie.privateMethodsWork());
  }

  /**
   * A basic test for the lexicon. You should not assume that passing this test
   * means that your code works.
   */
  public void testLexicon() {
    LexiconTrie l = new LexiconTrie();

    // Nothing is present before loading.
    assertContainsNone(l, SMALL_WORDS);

    l.load(smallWords);

    // Every word of the small dictionary is present after loading.
    assertContainsAll(l, SMALL_WORDS);

    // Whole words and proper prefixes are valid prefixes.
    assertPrefixAll(l, SMALL_WORDS);
    assertPrefixAll(l, "bi", "ca", "dat", "ev", "fa", "gor", "goril", "d");

    // Near-miss strings are neither prefixes nor words.
    assertPrefixNone(l, "bg", "ct", "dted", "evry", "ft", "gr", "grilla", "dck",
        "birty", "cq", "dt", "grl");
    assertContainsNone(l, "aaa", "axay", "dade", "eses", "innn", "lole",
        "penning", "dor", "onna", "uule", "xxxxx", "armin", "abig", "gorill",
        "catcat", "evry", "abigcat", "gated", "at", "xgorex", "");
  }

  /**
   * Tests the TrieLexicon implementation's ability to handle loading and
   * searching a normal sized text file.
   */
  public void testRegularWordsLexicon() {
    LexiconTrie l = new LexiconTrie();

    assertContainsNone(l, REGULAR_WORDS);

    l.load(regularWords);

    assertContainsAll(l, REGULAR_WORDS);

    assertPrefixAll(l, "aba", "able", "adde", "ad", "circuml", "f", "fla",
        "flexibl", "zygote", "zombi", "zig", "zi", "zigzaggin", "zer", "zeroes",
        "vaccina", "v");

    // Misspellings of real words are absent.
    assertContainsNone(l, "abck", "abcus", "blex", "addd", "qw",
        "cirumlocution", "flw", "flx", "flxible", "zygte", "zombe", "zg",
        "zgging", "zgzagging", "zro", "zroes", "vccinate", "vaclla");
    assertContainsNone(l, "ck", "abcus", "ae", "ad", "crcumlocution", "flw",
        "flx", "flxible", "zygte", "zmbe", "zg", "zgging", "zigzgging", "zer",
        "zrs", "vccinate", "vcillate");

    // Assorted non-words, single letters, and the empty string are absent.
    assertContainsNone(l, "aaa", "axay", "dade", "innn", "lole", "onna", "uule",
        "xxxxx", "a", "e", "i", "o", "u", "armin", "abig", "gorill", "catcat",
        "evry", "abigcat", "xgorex", "");
  }

  /**
   * Tests the TrieLexicon implementation's ability to handle loading and
   * searching an ordered and large sized text file.
   */
  public void testOrderedWordsLexicon() {
    LexiconTrie l = new LexiconTrie();

    assertContainsNone(l, ORDERED_WORDS);
    assertPrefixNone(l, "x");

    l.load(orderedWords);

    assertContainsAll(l, ORDERED_WORDS);

    // Single letters, non-words, and the empty string are absent.
    assertContainsNone(l, "a", "e", "i", "o", "u", "armin", "abig", "gorill",
        "catcat", "evry", "abigcat", "xgorex", "aaa", "axay", "dade", "innn",
        "lole", "onna", "uule", "xxxxx", "abigsedn", "");
  }

  /**
   * Tests the TrieLexicon implementation's ability to handle an empty text
   * file: no word and no prefix is ever found.
   */
  public void testEmptyWordsLexicon() {
    LexiconTrie l = new LexiconTrie();
    l.load(emptyWords);

    assertContainsNone(l, SMALL_WORDS);
    assertContainsNone(l, ORDERED_WORDS);

    assertPrefixNone(l, SMALL_WORDS);
    assertPrefixNone(l, ORDERED_WORDS);
  }

  /**
   * Tests the TrieLexicon implementation's ability to fail correctly in the
   * instance that it should throw an exception.
   */
  public void testThrowsIllegalArgumentException() {
    LexiconTrie l = new LexiconTrie();
    l.load(regularWords);

    try {
      l.contains("  ");
      fail("expected IllegalArgumentException for non-letter input");
    } catch (IllegalArgumentException e) {
      // expected
    }
  }

  /**
   * Returns the average time, in nanoseconds, of a call to contains(word).
   */
  private static long averageContainsNanos(LexiconTrie l, String word) {
    long total = 0;
    // BUG FIX: the original looped with i <= TRIALS (TRIALS + 1 iterations)
    // but divided the total by TRIALS.
    for (int i = 0; i < TRIALS; i++) {
      long start = System.nanoTime();
      l.contains(word);
      total += System.nanoTime() - start;
    }
    return total / TRIALS;
  }

  /**
   * Prints out various timing experiments of the TrieLexicon implementation.
   *
   * @throws FileNotFoundException if bogwords.txt cannot be opened
   */
  public void testTrieTime() throws FileNotFoundException {
    Scanner s = new Scanner(new BufferedReader(new FileReader("bogwords.txt")));
    LexiconTrie test = new LexiconTrie();

    // BUG FIX: the original timed load() 10001 times on the same Scanner, but
    // a Scanner is exhausted after its first pass, so every iteration after
    // the first timed an empty load and the "average" was meaningless. Time
    // the single meaningful load instead, and close the Scanner (the original
    // leaked the FileReader).
    long loadStart = System.nanoTime();
    test.load(s);
    long loadNanos = System.nanoTime() - loadStart;
    s.close();

    String thinBar =
        "===============================================================================";
    String dashBar =
        "--------------------------------------------------------------------------------------------------";
    String starBar =
        "-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*";

    System.out.println("*TRIE TIMES*");
    System.out.println("-------------");
    System.out.println(thinBar);
    System.out.println("Loading the 'bogwords.txt' takes: " + loadNanos
        + " nanoseconds.");
    System.out.println(thinBar);
    System.out.println("");
    System.out.println("");

    // Average lookup time for words that ARE in the lexicon.
    System.out.println(dashBar);
    String[] present = {"quasistationary", "parapsychology", "recriminatory",
        "socioeconomic", "workmanlike"};
    for (String word : present) {
      long avg = averageContainsNanos(test, word);
      assertTrue(test.contains(word));
      System.out.println("Finding the word '" + word
          + "' in 'bogwords.txt' takes an average of: " + avg + " nanoseconds.");
      System.out.println(dashBar);
    }

    System.out.println("");
    System.out.println("");

    // Average lookup time for words that are NOT in the lexicon.
    System.out.println(starBar);
    String[] absent = {"quasistationaryx", "parapsychologyx", "recriminatoryx",
        "socioeconomicx", "workmanlikex"};
    for (String word : absent) {
      long avg = averageContainsNanos(test, word);
      assertFalse(test.contains(word));
      System.out.println("Failing to find the word '" + word
          + "' in 'bogwords.txt' takes an average of: " + avg + " nanoseconds");
      System.out.println(starBar);
    }
    System.out.println("");
    System.out.println("");
    System.out.println("");
  }

}
