/*
 * File: TestTokenization.c
 * Author: Matt Keeler
 *
 * The main function tests whether unsigned long longs, written to files with
 * whitespace (spaces or newlines) in between, are tokenized properly by the
 * getNextToken function.
 *
 */
 #include <stdio.h>
 #include "tokenize.h"
 
/*
 * Exercise getNextToken() against two temporary files: one with the test
 * numbers separated by spaces, one with them separated by newlines.
 * Returns 0 when every token read back matches the number written,
 * non-zero otherwise (also non-zero if a temporary file cannot be created).
 */
int main(void)
{
	enum { NUM_COUNT = 10 };	/* number of test values in nums[] */
	char name[L_tmpnam];		/* tmpnam() requires at least L_tmpnam bytes */
	unsigned long long nums[NUM_COUNT] = {10002345, 210347, 23432978, 3924398, 938201,
											 19234983, 92349320, 1293843, 109238, 489302};
	FILE *fp;
	int counter;
	int error1 = 0;	/* set if the space-separated test mismatches */
	int error2 = 0;	/* set if the newline-separated test mismatches */
	
	printf("\n+- Testing input tokenization\n|\n");
	
	/* Test 1: numbers separated by spaces. */
	printf("|\t+- Testing Tokenization with spaces.\n");
	printf("|\t|\n");
	if (tmpnam(name) == NULL || (fp = fopen(name, "w")) == NULL)
	{
		fprintf(stderr, "+- ERROR: could not create temporary file.\n");
		return 1;
	}
	printf("|\t|\ttmpfile is : ");
	for (counter = 0; counter < NUM_COUNT; counter++)
	{
		fprintf(fp, "%llu ", nums[counter]);
		printf("%llu ", nums[counter]);
	}
	printf("\n");
	fclose(fp);
	
	if ((fp = fopen(name, "r")) == NULL)
	{
		fprintf(stderr, "+- ERROR: could not reopen temporary file.\n");
		remove(name);
		return 1;
	}
	printf("|\t|\ttokens are : ");
	for (counter = 0; counter < NUM_COUNT; counter++)
	{
		unsigned long long token = getNextToken(fp);
		if (token != nums[counter])
		{
			error1 = 1;
		}
		printf("%llu ", token);
	}
	printf("\n|\t|\n");
	fclose(fp);
	printf("|\t+- Finished Tokenization with spaces.\n|\n");
	
	remove(name);
	
	/* Test 2: numbers separated by newlines. */
	printf("|\t+- Testing Tokenization with newlines\n");
	printf("|\t|\n");
	if (tmpnam(name) == NULL || (fp = fopen(name, "w")) == NULL)
	{
		fprintf(stderr, "+- ERROR: could not create temporary file.\n");
		return 1;
	}
	printf("|\t|\ttmpfile is :\n");
	for (counter = 0; counter < NUM_COUNT; counter++)
	{
		printf("|\t|\t\t");
		fprintf(fp, "%llu\n", nums[counter]);
		printf("%llu\n", nums[counter]);
	}
	printf("|\t|\n|");
	fclose(fp);
	
	if ((fp = fopen(name, "r")) == NULL)
	{
		fprintf(stderr, "+- ERROR: could not reopen temporary file.\n");
		remove(name);
		return 1;
	}
	printf("\t|\ttokens are : ");
	for (counter = 0; counter < NUM_COUNT; counter++)
	{
		unsigned long long token = getNextToken(fp);
		if (token != nums[counter])
		{
			error2 = 1;
		}
		printf("%llu ", token);
	}
	printf("\n|\t|\n");
	fclose(fp);
	printf("|\t+- Finished Tokenization with newlines.\n|\n");
	
	remove(name);
	
	/* Report which of the two inputs (if any) tokenized incorrectly. */
	if (error1 && error2)
	{
		fprintf(stderr, "+- ERROR: Incorrect tokenization of both inputs.\n\n");
	}
	else if (error1)
	{
		fprintf(stderr, "+- ERROR: Incorrect tokenization of first input.\n\n");
	}
	else if (error2)
	{
		fprintf(stderr, "+- ERROR: Incorrect tokenization of second input.\n\n");
	}
	else
	{
		printf("+- Tokenization is correct.\n\n");
	}
	
	return error1 | error2;
}