#include "llToken.hpp"
#include "llReporter.hpp"
using namespace llmake;

#include <iostream>
#include <fstream>
#include <sstream>
using namespace std;

#include <boost/tokenizer.hpp>
using namespace boost;

// File-wide counter used to hand out a unique m_Id to every token node
// created by any llTokenEngine instance (NOTE(review): not thread-safe).
static int gTokenUnifiedId = 0;



// "yinHao" is Chinese for "quotation mark": the double-quote character that
// delimits grouped tokens in the input.
static const string yinHao("\"");
// Splits 'var' at the quote located at index 'found', appending the pieces
// (each quote as its own "\"" element) to 'myToken'.  Defined at file end.
static void spilitYinHao(string var ,size_t found,vector<string> &myToken);



// Builds the engine's working stream.  When 'type' is true, 'input' names a
// file whose contents are loaded into m_Stream; otherwise 'input' itself is
// the source text.  After loading, every line whose first token is "#" (a
// comment line) is replaced by a bare "#" so later tokenizing never sees the
// comment text.
llTokenEngine::llTokenEngine(string input,bool type)
	:m_Input(input)
	,m_Type(type)
	,m_SpilitFirst("\t\n\r|")
	,m_SpilitSecond(" #$(){}")
{
	// read stream from file
	if ( true == type )
	{
		ifstream file(m_Input.c_str());
		if ( file.is_open() )
		{
			m_Stream << file.rdbuf();
			file.close();
		}
		else
		{
			// Report the failure; the engine is left with an empty stream.
			std::ostringstream stm;
			stm << "llmake source file->" << __FILE__ << " line->" << __LINE__ <<":";
			string errorInfo( stm.str() + "Can not open file ");
			errorInfo += m_Input;
			llReporter reporter(errorInfo);
			reporter.Error();
		}
	}
	else
	{
		m_Stream.str(m_Input);
	}
	// Replace every comment line (first token "#") with a bare "#".
	stringstream tmpStringStream;
	string lineStr;
	// FIX: loop on getline success instead of "while(!eof)".  The old form
	// ran the body once more after the last line, appending a phantom empty
	// line (and emitted a bogus "\n" even when the stream was empty because
	// the file failed to open).
	while ( std::getline(m_Stream,lineStr) )
	{
		typedef boost::tokenizer<boost::char_separator<char> >
			tokenizer;
		// Split on spaces, keep "#" as its own token, so "#comment" and
		// "# comment" both yield "#" as the first token.
		boost::char_separator<char> sep(" ","#",boost::drop_empty_tokens);
		tokenizer tokens(lineStr, sep);
		tokenizer::iterator tok_iter = tokens.begin();
		if( tokens.end() != tok_iter && "#" == *tok_iter )
		{
			tmpStringStream << "#\n";
		}
		else
		{
			tmpStringStream << lineStr <<"\n";
		}
	}
	// Swap the filtered text back into the member stream.
	m_Stream.clear();
	m_Stream.str(tmpStringStream.str());
}
// Tokenizes the whole prepared stream into 'tokenList'.  First pass: split
// the text on double quotes (each quote kept as its own element).  Second
// pass: a lone quote toggles "inside quotes"; quoted text becomes one group
// node whose sub-tokens live in node.m_Group, unquoted text is tokenized
// directly into tokenList.  Aborts on an unterminated quoted group.
void llTokenEngine::getToken(llTokenList &tokenList)
{
    typedef boost::tokenizer<boost::char_separator<char> > 
        tokenizer;
	// Keep each '"' as a standalone token; keep empty tokens so adjacent
	// quotes are not merged.
	boost::char_separator<char> sep("","\"",boost::keep_empty_tokens);
	int lineNumber = 0;
	string totalString = m_Stream.str();
	tokenizer tokens(totalString, sep);
	vector<string> myToken;
	tokenizer::iterator tok_iter = tokens.begin();
	for (;tok_iter != tokens.end(); ++tok_iter)
	{
		string var(*tok_iter);
		size_t found = var.find(yinHao);
		if ( string::npos == found )
		{
			myToken.push_back(var);
		}
		else
		{
			// The token still embeds quote characters: split it so every
			// quote ends up as its own element of myToken.
			spilitYinHao(var,found,myToken);
		}
	}
	bool yinHaoField = false;
	for (vector<string>::iterator str_iter = myToken.begin();str_iter != myToken.end(); ++str_iter)
	{
		if( yinHao == *str_iter)
		{
			// A lone quote flips the inside/outside-quotes state.
			yinHaoField = !yinHaoField;
		}
		else
		{
			if ( true == yinHaoField )
			{
				// Quoted segment: one group node carrying the re-quoted
				// text, with its content tokenized into node.m_Group.
				llTokenNode node;
				string groupToken("\"");
				groupToken += *str_iter;
				groupToken += "\"";
				node.m_Token = groupToken;
				node.m_IsGroup = true;
				node.m_Joint = false;
				node.m_Id = gTokenUnifiedId++;
				if(m_Type)
				{
					// m_Input is a file name only when built from a file.
					node.m_Loc.file=m_Input;
				}
				// NOTE(review): stamped before getTokenInner advances
				// lineNumber over the group's own lines, so the group is
				// located at the line where it starts.
				node.m_Loc.line = lineNumber;
				getTokenInner(node.m_Group,*str_iter,true,lineNumber);
				tokenList.push_back(node);
			}
			else
			{
				getTokenInner(tokenList,*str_iter,false,lineNumber);
			}
		}
	}
	if ( true == yinHaoField )
	{
		// An odd number of quotes means an unterminated quoted group.
		// FIX: the message used to read "is in pairs" — the opposite of
		// the condition actually being reported.
		cout<<" [\"] is not in pairs "<<endl;
		abort();
	}
}


// Tokenizes 'src' line by line, appending one llTokenNode per token to
// 'tokenList'.  'lineCounter' is advanced for every line consumed and is
// stamped into each node's source location.  'isGroup' selects how the
// boost separator is configured (see below).
void llTokenEngine::getTokenInner( llTokenList &tokenList,
	const string &src,const bool isGroup,int &lineCounter)
{
    typedef boost::tokenizer<boost::char_separator<char> > 
        tokenizer;
	const char *pSpilit;
	const char *pKeep;
	boost::empty_token_policy policy;
	if ( isGroup )
	{
		// Quoted-group content: drop nothing, keep whitespace and $(){}
		// as single-character tokens, and keep empty tokens, so the quoted
		// text is preserved verbatim.
		pSpilit = "";
		pKeep = " \t\n\r$(){}";
		policy = keep_empty_tokens;
	}
	else
	{
		// Normal text: drop "\t\n\r|", keep " #$(){}" as single-character
		// tokens, and discard empty tokens.
		pSpilit = m_SpilitFirst.c_str();
		pKeep = m_SpilitSecond.c_str();
		policy = drop_empty_tokens;
	}
	boost::char_separator<char> sep(pSpilit,pKeep,policy);
//	int lineNumber = lineCounter;
	stringstream tmpStringStream;
	tmpStringStream.str(src);
	// NOTE(review): looping on eof() makes one extra pass after the last
	// line, so lineCounter ends one higher than the number of real lines;
	// the phantom empty line produces no tokens, though.
	while ( false == tmpStringStream.eof() )
	{
		string lineStr;
		std::getline(tmpStringStream,lineStr);
		lineCounter++;	// count every line, including comments and blanks
//		cout<<"line number["<<lineNumber<<"]"<<lineStr<<endl;
		tokenizer tokens(lineStr, sep);
		vector<string> myToken;
		tokenizer::iterator tok_iter = tokens.begin();
		if ( tokens.end() != tok_iter && "#" == *tok_iter )
		{
			// Whole line is a comment (first token "#"): skip it.
			continue;
		}
		for (;tok_iter != tokens.end(); ++tok_iter)
		{
			string var(*tok_iter);
			size_t found = var.find(yinHao);
			if ( string::npos == found )
			{
				myToken.push_back(var);
			}
			else
			{
				// Token embeds a double quote: split it so each quote is
				// its own element.
				spilitYinHao(var,found,myToken);
			}
		}
		for (vector<string>::iterator str_iter = myToken.begin();str_iter != myToken.end(); ++str_iter)
		{
			if ( false == isGroup)
			{
				if ( " " == *str_iter )
				{
					// Outside groups, bare space tokens carry no meaning.
					continue;
				}
			}
			llTokenNode node;
			node.m_Token= *str_iter;
			node.m_Id = gTokenUnifiedId++;
			if(m_Type)
			{
				// m_Input is a file name only when the engine was built
				// from a file (m_Type == true).
				node.m_Loc.file=m_Input;
			}
			node.m_Loc.line=lineCounter;
			node.m_IsGroup = false;
			node.m_Joint = false;
			if ( "}" == *str_iter )
			{
//				cout<<"} jont settings"<<endl;
				// A "}" immediately followed by a non-space token is marked
				// m_Joint — presumably meaning it is glued to the following
				// token with no separating space; confirm against the
				// consumers of m_Joint.
				vector<string>::iterator next = str_iter;
				next++;
				if ( myToken.end() != next && " " != *next )
				{
//					cout<<"} jont settings true true true true"<<endl;
					node.m_Joint = true;
				}
			}
			tokenList.push_back(node);
		}
	}
}


// Splits 'var' around every double-quote character, appending the pieces to
// 'myToken'.  Each quote becomes its own "\"" element; the text around the
// quotes is appended as-is, with empty pieces skipped.  'found' is the index
// of the first quote in 'var' (the caller has already located it).
//
// Iterative rewrite of the original tail recursion: avoids deep call stacks
// and a pass-by-value string copy per quote on quote-heavy input.  The
// signature is unchanged to match the forward declaration above.
static void spilitYinHao(std::string var, std::size_t found, std::vector<std::string> &myToken)
{
	for (;;)
	{
		// Text before the quote, if any.
		std::string before = var.substr(0, found);
		if (!before.empty())
		{
			myToken.push_back(before);
		}
		// The quote itself is always emitted as a standalone element.
		myToken.push_back("\"");
		// Text after the quote; nothing left means we are done.
		std::string after = var.substr(found + 1);
		if (after.empty())
		{
			return;
		}
		std::size_t next = after.find('"');
		if (std::string::npos == next)
		{
			myToken.push_back(after);
			return;
		}
		// Another quote remains: continue with the tail of the string.
		var = after;
		found = next;
	}
}
