#include "Tokenizer.h"
#include <iostream>
#include <set>

using namespace std;

#include<string.h>
#include"StringOps.h"
#include"Keywords.h"

// Nothing to initialise here: the delimiter set is a static member and
// the token container default-constructs empty.
Tokenizer::Tokenizer()
{
}

// No owned resources; member destructors handle all cleanup.
Tokenizer::~Tokenizer()
{
}

/**
 * Splits inStream into tokens, appending them to m_asTokens.
 *
 * The stream is scanned as alternating runs of delimiter characters
 * (any char in m_sDelimiters) and non-delimiter text.  Each delimiter
 * run is handed to tokenizeDelimiterStream(), which breaks it into
 * individual operator tokens; each text run is emitted as a single
 * token via emitToken().
 *
 * Fixes over the previous version:
 *  - delimiter runs are extracted with one substr() instead of a
 *    char-by-char concatenation loop (the old TODOs),
 *  - the trailing token is routed through emitToken() so it gets the
 *    same keyword/delimiter classification as every other token (the
 *    old code pushed a raw Token and skipped the keyword check),
 *  - the bNonDelimiterFound warning-suppression flag is gone.
 *
 * @param inStream  raw character stream to tokenize
 */
void Tokenizer::tokenize(std::string inStream)
{
    string::size_type nDelimiterPos = 0; // start of the current delimiter run
    string::size_type nTokenPos = 0;     // start of the current token run

    for (;;)
    {
        // Find where the next token (non-delimiter run) begins.
        nTokenPos = inStream.find_first_not_of(m_sDelimiters, nDelimiterPos);
        if (string::npos == nTokenPos)
        {
            // Only delimiters remain (possibly an empty run) -- split and stop.
            tokenizeDelimiterStream(inStream.substr(nDelimiterPos));
            return;
        }

        // Everything between nDelimiterPos and nTokenPos is one continuous
        // stream of delimiters; split it into individual operator tokens.
        tokenizeDelimiterStream(inStream.substr(nDelimiterPos, nTokenPos - nDelimiterPos));

        // Find where the token ends (the next delimiter).
        nDelimiterPos = inStream.find_first_of(m_sDelimiters, nTokenPos);
        if (string::npos == nDelimiterPos)
        {
            // The rest of the stream is one final token.
            emitToken(inStream.substr(nTokenPos));
            return;
        }

        // Straight-forward token bounded by delimiters on both sides.
        emitToken(inStream.substr(nTokenPos, nDelimiterPos - nTokenPos));
    }
    // TODO(review): upstream report of an occasional extra line at end of
    // file -- likely a trailing "\n" delimiter token; verify against output.
}

void Tokenizer::emitToken(std::string insToken)
{
    Token xToken;

	xToken.m_sValue = insToken; 
	if(Keywords::isKeyword(insToken))			xToken.m_bIsKeyword = true;
	if(Keywords::isDelimiterValid(insToken))	xToken.m_bIsDelimiter = true;
	m_asTokens.push_back(xToken);

	//if(insToken != "\n")
	//	cout<<"~"<<insToken<<"~ "/*<<endl*/;
	//else
	//	cout<<"~\\n~ "/*<<endl*/;
}

// Every character treated as a token delimiter: whitespace plus the
// punctuation characters the language's operators are built from.
std::string Tokenizer::m_sDelimiters("\n\t\r ~!#%^&*()-=+{}\";:,<>/\\?.'[]|");

// Splits a continuous run of delimiter characters (e.g. "+=-") into the
// longest valid operator tokens recognised by Keywords, emitting each via
// emitToken().  Greedy longest-match: grow a candidate prefix while it is
// a valid delimiter that Keywords flags as ambiguous (a longer operator
// could still match); otherwise emit the last confirmed match and rescan
// the unconsumed character.
void Tokenizer::tokenizeDelimiterStream(std::string inDelimiters)
{
	std::string					sMatched = ""; // longest prefix confirmed valid so far
	std::string					sCheck ="";    // candidate prefix currently being grown
	char						cPopper = '\0'; // most recently consumed character
	unsigned int				i=0;
	unsigned int				nLength= inDelimiters.length();

	//Optimization: no need to get into loop if sequence is 1
	if(nLength == 1)
	{
		emitToken(inDelimiters);
		return;
	}

	//We pop each character in string and build a delimiter-string. We emit it when unambiguous
	while(i<nLength)
	{
		cPopper = inDelimiters[i++];
		sCheck += cPopper;

		if(Keywords::isDelimiterValid(sCheck)) //Is a valid operator so far
		{
			sMatched = sCheck;
			if(Keywords::isDelimiterAmbiguous(sMatched)) //There could be more chars to match
				continue;
		}

		// NOTE(review): if a lone character is NOT a valid delimiter per
		// Keywords, sMatched is still "" here, so an empty token is emitted
		// and the i-- below rescans the same character indefinitely.  This
		// presumably cannot happen because every char in m_sDelimiters is
		// individually valid -- confirm against the Keywords tables.
		emitToken(sMatched);
		if(sMatched != sCheck) //Max difference can be one character, hence start again with same cPopper
			i--;

		sMatched = "";
		sCheck="";
	} //while loop

	//For left-overs: sMatched is non-empty here only when the final
	//character left an ambiguous-but-valid match pending (the `continue`
	//path above), in which case sMatched == sCheck by construction.
	if(sMatched !="")
	{
		emitToken(sMatched);
		// NOTE(review): on the `continue` path sMatched == sCheck always,
		// so this branch looks unreachable; kept as defensive code.
		if(sMatched!=sCheck)
		{
			sMatched="";
			sMatched+=cPopper;
			emitToken(sMatched);
		}
	}
}

/// Writes every stored token's text to stdout, in order, with no
/// separators (delimiters are themselves tokens, so this reproduces
/// the original stream layout).
void Tokenizer::print()
{
    const size_t nCount = m_asTokens.size();
    for (size_t nIdx = 0; nIdx < nCount; ++nIdx)
        cout << m_asTokens[nIdx].m_sValue;
}