////////////////////////////////////////////////////////////////////////////////
// Includes
////////////////////////////////////////////////////////////////////////////////

#include "SCCompiler.hpp"

#include <exception>
#include <utility>
#include "Exception.hpp"
#include "IOFileException.hpp"
#include "ParsingException.hpp"
#include "AssertionException.hpp"
#include "Convert.hpp"
#include "Quadruple.hpp"
#include "Arithmetics.hpp"

////////////////////////////////////////////////////////////////////////////////
// Namespaces
////////////////////////////////////////////////////////////////////////////////

namespace SC {

////////////////////////////////////////////////////////////////////////////////
// Implementation
////////////////////////////////////////////////////////////////////////////////

using namespace std;
using namespace Types;
using namespace Toolkit;
using namespace Exceptions;

void SCCompiler::ReadProgram(
	const string & name,
	SCProgram & prog,
	const string & map_dir
) {
	// Compile the program file "name" (and, recursively, its imports) into
	// "prog". "map_dir" is forwarded to the character-map lookups.
	// Throws ParsingException when the program is incomplete or when
	// compilation errors accumulated.
	// Reset compiler data
	this->labels.clear();
	this->nb_comp_errors = 0;
	this->encoding_section = this->program_section = Empty;
	this->char_map = this->func_libs = this->word_length = this->func_offset = false;
	// Read main file
	this->readFile(name,prog,map_dir);
	// Check that program is complete : both the encoding section and the
	// program section must have been seen somewhere in the input
	string msg = "";
	if (this->encoding_section == Empty)
		msg += "Encoding Section ";
	if (this->program_section == Empty) {
		if (msg != "") msg += "and ";
		msg += "Program Section ";
	}
	if (msg != "") {
		this->ostream << "Syntax error @ End of program : Missing " << msg << endl;
		throw ParsingException("Compilation errors");
	}
	// Remove unused system declarations : drop every declaration that no
	// instance refers to, then re-index the surviving instance references
	vector< Couple<string,string> >::iterator s_itor = prog.sys_decl.begin();
	unsigned int sys_idx = 0;
	while (s_itor != prog.sys_decl.end()) {
		bool used = false;
		for (unsigned int i=0; (i<prog.sys_inst.size()) && (!used); ++i)
			if (prog.sys_inst[i].GetFirst() == sys_idx) used = true;
		if (!used) {
			s_itor = prog.sys_decl.erase(s_itor);
			// Shift down the declaration index of every later instance.
			// ">= sys_idx" behaves like "> sys_idx" here : no instance refers
			// to exactly sys_idx, since that declaration was just found unused.
			for (unsigned int i=0; i<prog.sys_inst.size(); ++i)
				if (prog.sys_inst[i].GetFirst() >= sys_idx)
					--prog.sys_inst[i].First();
		}
		else {
			++s_itor;
			++sys_idx;
		}
	}
}

void SCCompiler::readFile(
	const string & name,
	SCProgram & prog,
	const string & map_dir
) {
	// Compile one source file into "prog". Called recursively through
	// "import" statements ; "map_dir" is forwarded for char_map lookups.
	// Stack of the files currently being compiled, used to detect recursive
	// imports. NOTE(review): a function-local static makes this method
	// non-reentrant and not thread-safe - confirm single-threaded use.
	static vector<string> file_pile;
	// If file is already in the pile, error
	if (find(file_pile.begin(), file_pile.end(), name) != file_pile.end()) {
		string msg = "Recursive file import : ";
		for (unsigned int f=0; f<file_pile.size(); ++f)
			msg += file_pile[f] + " -> ";
		msg += name;
		throw IOFileException(msg);
	}
	// Pile up file. From here on, EVERY exit path (normal return or thrown
	// exception) must pop the file back : previously the pop was skipped when
	// an exception escaped, so a failed compilation left a stale entry behind
	// and any later import of the same file was wrongly reported as recursive.
	file_pile.push_back(name);
	// Input stream
	ifstream file(name.c_str());
	if (!file.is_open()) {
		file_pile.pop_back();
		throw IOFileException("Program file \"" + name + "\" doesn't exist");
	}
	// Tokeniser
	Tokeniser tokeniser(file,name);
	// Set up rules
	tokeniser.AddSkipSymbol(" ");
	tokeniser.AddSkipSymbol("\t");
	tokeniser.AddSkipSymbol("\n");
	tokeniser.AddSpecialSymbol(";");
	tokeniser.AddSpecialSymbol(",");
	tokeniser.AddSpecialSymbol(":");
	tokeniser.AddSpecialSymbol("[");
	tokeniser.AddSpecialSymbol("]");
	tokeniser.AddSpecialSymbol("{");
	tokeniser.AddSpecialSymbol("}");
	tokeniser.AddSpecialSymbol("<");
	tokeniser.AddSpecialSymbol(">");
	tokeniser.AddSpecialSymbol("|");
	tokeniser.AddCommentLineSymbol("//");
	tokeniser.AddCommentBlockSymbol(pair<string,string>("/*","*/"));
	tokeniser.AddStringSymbol("\"");
	// Working data
	string token;
	// Get token : needs to be a first level one
	try {
		// While ! EOF
		while (!file.eof() || !tokeniser.PreviewNextToken(false).empty()) {
			// If encoding section is being done, forward to encoding processing
			if (this->encoding_section == Open)
				this->readEncoding(tokeniser,prog,map_dir);
			// If program section is being done
			else if (this->program_section == Open)
				this->readProgram(tokeniser,prog);
			// Otherwise process stream from here
			else {
				token = tokeniser.GetNextToken(false);
				// File import
				if (token == "import")
					this->readImport(tokeniser,prog,map_dir);
				// Macros setting
				else if (token == "set")
					this->readMacro(tokeniser);
				// If encoding is not done
				else if ((this->encoding_section == Empty) && (token == "encoding")) {
					// Set state
					this->encoding_section = Open;
					// Check opening bracket
					tokeniser.AssertNextToken("{");
					// Read section
					this->readEncoding(tokeniser,prog,map_dir);
					// Check closing bracket
					tokeniser.AssertNextToken("}");
					// Assert all the obligatory encoding fields were defined
					if (!this->char_map || !this->func_libs || !this->word_length || !this->func_offset) {
						string msg = "";
						if (!this->char_map) msg += "\"char_map\"";
						if (!this->func_libs) {
							if (msg != "") msg += ", ";
							msg += "\"func_libs\"";
						}
						if (!this->word_length) {
							if (msg != "") msg += ", ";
							msg += "\"word_length\"";
						}
						if (!this->func_offset) {
							if (msg != "") msg += ", ";
							msg += "\"func_offset\"";
						}
						if (msg != "") {
							this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " Encoding section closed without defining the obligatory fields : " << msg << endl;
							this->addError();
						}
					}
					// Set state
					this->encoding_section = Closed;
				}
				// Once encoding is done
				else if (this->encoding_section == Closed) {
					// Label
					if (token == "label")
						this->readLabel(tokeniser,prog);
					// Function
					else if (token == "function")
						this->readFunction(tokeniser,prog);
					// System definition
					else if (token == "system")
						this->readSystem(tokeniser,prog);
					// Program section
					else if (token == "program") {
						// Set state
						this->program_section = Open;
						// Check opening bracket
						tokeniser.AssertNextToken("{");
						// Read section
						this->readProgram(tokeniser,prog);
						// Check closing bracket
						tokeniser.AssertNextToken("}");
						// Check that a universe (unique system containing everything) does exist
						// i.e there is only one system with no super system
						int universe = -1;
						for (unsigned int i=0; i<prog.sys_inst.size(); ++i) {
							vector<unsigned int> super_sys = prog.GetSuperSystems(i);
							if (super_sys.empty()) {
								if (universe == -1) universe = (int)i;
								else {
									this->ostream << "Multiple universes : \"" << prog.sys_inst[universe].GetSecond() << "\" and \"" << prog.sys_inst[i].GetSecond() << "\"" << endl;
									throw AssertionException("The program cannot have more than one universe (system containing everything and contained by nothing)");
								}
							}
						}
						if (universe == -1) throw AssertionException("The program must have a universe (system containing everything and contained by nothing)");
						/*
						// Check that the graph starting from the universe is directed acyclic
						vector<unsigned int> path;
						path.push_back((unsigned int)universe);
						vector< vector<unsigned int> > graph;
						for (unsigned int i=0; i<prog.sys_inst.size(); ++i)
							graph.push_back(prog.sys_inst[i].GetThird());
						if (!this->checkDAC(path,graph)) {
							string msg = "";
							for (unsigned int i=0; i<path.size()-1; ++i)
								msg += prog.sys_inst[path[i]].GetSecond() + " -> ";
							msg += prog.sys_inst[path[path.size()-1]].GetSecond();
							throw AssertionException("The system hierarchy defined by the program scopes cannot contain a directed acyclic graph (DAT)\nHierarchy [ " + msg + " ] contains a DAT");
						}*/
						// Set state
						this->program_section = Closed;
					}
					else if (!token.empty()) {
						this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" was found where expecting \"set\", \"import\", \"label\", \"function\", \"system\" or \"program\"" << endl;
						this->addError();
					}
				}
				// Error
				else {
					this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" is not an appropriate token or should occur after encoding section" << endl;
					this->addError();
				}
			}
		}
		if (this->nb_comp_errors > 0)
			throw ParsingException("Accumulating errors");
		file.close();
	}
	// If any exception occured then the code can't be compiled : close the
	// file, pop it from the import pile and propagate the exception.
	// catch(...) (instead of catch(const exception &)) also covers throws
	// that do not derive from std::exception.
	catch (...) {
		file.close();
		file_pile.pop_back();
		throw;
	}
	// Pop up file
	file_pile.pop_back();
}

////////////////////////////////////////////////////////////////////////////////
// Section functions
////////////////////////////////////////////////////////////////////////////////

void SCCompiler::readImport(
	Tokeniser & tokeniser,
	SCProgram & prog,
	const string & map_dir
) {
	// The imported file name must be given as a quoted string token
	const string token = tokeniser.GetNextToken(true);
	if (!tokeniser.IsString(token)) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " \"import\" : token \"" << token << "\" found where expecting a string" << endl;
		this->addError();
	}
	// Strip the surrounding quotes (when present) to get the bare file name
	string name = token;
	if (!name.empty() && (name[0] == '"')) name.erase(0,1);
	if (!name.empty() && (name[name.size()-1] == '"')) name.erase(name.size()-1,1);
	// Recursively compile the imported file into the same program
	this->readFile(name,prog,map_dir);
}

void SCCompiler::readMacro(Tokeniser & tokeniser) {
	// A macro is "set <name> <integer expression>" : evaluate the expression
	// (read up to the end of the line) and record it under <name>
	const string macro_name = tokeniser.GetNextToken(true);
	try {
		this->macros[macro_name] = ParseEvalIntExp(this->getStringExpression(tokeniser),this->macros);
	}
	catch (Exception & err ) {
		// Report the evaluation failure and keep compiling
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << err.Comment() << endl;
		this->addError();
	}
}

void SCCompiler::readLabel(Tokeniser & tokeniser, SCProgram & prog) {
	// A label is "label <name> <value>" : read the value word and record it
	// under <name> in the label table
	const string label_name = tokeniser.GetNextToken(true);
	try {
		this->labels[label_name] = this->readLabelValue(tokeniser,prog);
	}
	catch (Exception & err ) {
		// Report the parsing failure and keep compiling
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << err.Comment() << endl;
		this->addError();
	}
}

void SCCompiler::readEncoding(
	Tokeniser & tokeniser,
	SCProgram & prog,
	const string & map_dir
) {
	// Parse the body of the "encoding" section : the four fields char_map,
	// func_libs, word_length and func_offset, in any order, until the closing
	// bracket "}". Each field sets the matching boolean flag so duplicates
	// (and, in the caller, missing fields) can be reported.
	string token;
	// Read all sections
	bool read = true;
	while (read) {
		// Read name (previewed only : each branch consumes it itself)
		token = tokeniser.PreviewNextToken(true);
		// Character map : a quoted file name resolved against map_dir
		if (token == "char_map") {
			if (this->char_map) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " token \"" << token << "\" found whereas \"char_map\" section has already been defined" << endl;
				this->addError();
			}
			tokeniser.GetNextToken(true);
			token = tokeniser.GetNextToken(true);
			if (!tokeniser.IsString(token)) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " \"char_map\" : token \"" << token << "\" found where expecting a string" << endl;
				this->addError();
			}
			else
				prog.codes = this->readCharMap(token,map_dir);
			// Marked as defined even on error, so parsing can continue
			this->char_map = true;
		}
		// Function library : "{ "lib1" , "lib2" , ... }" - a comma-separated
		// list of quoted module names
		else if (token == "func_libs") {
			if (this->func_libs) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " token \"" << token << "\" found whereas \"func_libs\" section has already been defined" << endl;
				this->addError();
			}
			tokeniser.GetNextToken(true);
			tokeniser.AssertNextToken("{");
			token = tokeniser.GetNextToken(true);
			// isSep alternates : string, ',', string, ',' ...
			bool isSep = false;
			while (token != "}") {
				if (isSep) {
					if (token != ",") {
						this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " \"func_libs\" : token \"" << token << "\" found where expecting separator ','" << endl;
						this->addError();
					}
				}
				else {
					if (!tokeniser.IsString(token)) {
						this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " \"func_libs\" : token \"" << token << "\" found where expecting a string" << endl;
						this->addError();
					}
					else
						prog.func_modules.push_back(tokeniser.GetTrimmedString(token));
				}
				token = tokeniser.GetNextToken(true);
				isSep = !isSep;
			}
			this->func_libs = true;
		}
		// Word length : an integer expression (macros allowed)
		else if (token == "word_length") {
			if (this->word_length) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " token \"" << token << "\" found whereas \"word_length\" section has already been defined" << endl;
				this->addError();
			}
			tokeniser.GetNextToken(true);
			// Get expression, parse it and set the result
			try {
				prog.word_length = ParseEvalIntExp(this->getStringExpression(tokeniser),this->macros);
				this->word_length = true;
			}
			catch (Exception & e ) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << e.Comment() << endl;
				this->addError();
			}
		}
		// Function offset : a range "<exp>" or "<exp> : <exp>" giving the
		// begin/end positions of the function code inside a word
		else if (token == "func_offset") {
			if (this->func_offset) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " token \"" << token << "\" found whereas \"func_offset\" section has already been defined" << endl;
				this->addError();
			}
			tokeniser.GetNextToken(true);
			Range<string> range = this->getStringRange(tokeniser);
			try {
				prog.func_offset.Begin() = ParseEvalIntExp(range.GetBegin(),this->macros);
				prog.func_offset.End() = ParseEvalIntExp(range.GetEnd(),this->macros);
				if (prog.func_offset.GetBegin() > prog.func_offset.GetEnd()) {
					this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Function offset " << prog.func_offset.GetBegin() << " is above the end offset " << prog.func_offset.GetEnd() << endl;
					this->addError();
				}
				this->func_offset = true;	
			}
			catch (Exception & e ) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << e.Comment() << endl;
				this->addError();
			}
		}
		// Check closing bracket : leave it in the stream (only previewed) so
		// the caller's AssertNextToken("}") can consume it
		else if (token ==  "}") read = false;
		// Incorrect token : report and skip it
		else {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" is not an encoding feature nor a closing bracket" << endl;
			this->addError();
			tokeniser.GetNextToken(true);
		}
	}
}
		
void SCCompiler::readFunction(Tokeniser & tokeniser, SCProgram & prog) {
	// Read a "function <name> <value>" declaration. The value is a word whose
	// non-wildcard part must exactly fill the program's function code range,
	// and that part must be a pure binary string. The full value is stored
	// both in prog.functions and as a label under <name>.
	// Read name
	string name = tokeniser.GetNextToken(true);
	// Read value
	string value = this->readLabelValue(tokeniser,prog);
	// Remove wildcards : after the two loops, idx1/idx2 are the positions of
	// the first and last non-wildcard characters of the value
	string code = value;
	int idx1 = 0, idx2 = value.size() - 1;
	while ((!code.empty()) && (code[0] == WILDCARD)) {
		code.erase(0,1);
		++idx1;
	}
	while ((!code.empty()) && (code[code.size()-1] == WILDCARD)) {
		code.erase(code.size()-1,1);
		--idx2;
	}
	// Check code length. The condition rejects ANY length mismatch, so the
	// message must not claim the code is "longer than" the range : it may
	// just as well be shorter.
	if ((int)code.size() != (prog.GetFuncOffset().GetEnd() - prog.GetFuncOffset().GetBegin() + 1)) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " :  function code value \"" << code << "\" of length " << code.size() << " does not match the length of the function code range [ " << prog.GetFuncOffset().GetBegin() << " , " << prog.GetFuncOffset().GetEnd() << " ]" << endl;
		this->addError();
	}
	// Check code location : the non-wildcard part must sit exactly on the
	// function code range
	else if ((idx1 != prog.GetFuncOffset().GetBegin()) || (idx2 != prog.GetFuncOffset().GetEnd())) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " :  function code \"" << code << "\" is out of the bounds of the function code range [ " << prog.GetFuncOffset().GetBegin() << " , " << prog.GetFuncOffset().GetEnd() << " ]" << endl;
		this->addError();
	}
	// Check code validity : the in-range slice must be binary {0,1}*
	else {
		string f_code = value.substr(prog.GetFuncOffset().GetBegin(),prog.GetFuncOffset().GetEnd()-prog.GetFuncOffset().GetBegin()+1);
		if (!this->isBinaryWord(f_code)) {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Function code \"" << f_code << "\" should be a binary string {0,1}* of length " << (prog.GetFuncOffset().GetEnd()-prog.GetFuncOffset().GetBegin()+1) << endl;
			this->addError();
		}
	}
	// Store (bytecode -> name)
	prog.functions.push_back(Couple<string,string>(value,name));
	// Update label list
	this->labels[name] = value;
}
		
void SCCompiler::readSystem(Tokeniser & tokeniser, SCProgram & prog) {
	// A system declaration is "system <name> { <schema> , <word> , <schema> }".
	// The three parts are concatenated into the system's description string.
	Couple<string,string> system;
	// The system name comes first
	system.First() = tokeniser.GetNextToken(true);
	// Reject duplicate declarations
	for (unsigned int d=0; d<prog.sys_decl.size(); ++d) {
		if (prog.sys_decl[d].GetFirst() == system.GetFirst()) {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : System name \"" << system.GetFirst() << "\" is already used" << endl;
			this->addError();
		}
	}
	// Body : "{ left-schema , kernel-word , right-schema }"
	tokeniser.AssertNextToken("{");
	// Left schema
	system.Second() = readSchema(tokeniser,prog);
	tokeniser.AssertNextToken(",");
	// Kernel word
	system.Second() += readSystemWord(tokeniser,prog);
	tokeniser.AssertNextToken(",");
	// Right schema
	system.Second() += readSchema(tokeniser,prog);
	tokeniser.AssertNextToken("}");
	// Register the declaration (bytecode -> name)
	prog.sys_decl.push_back(system);
}

void SCCompiler::readProgram(Tokeniser & tokeniser, SCProgram & prog) {
	// Parse the body of the "program" section : imports, macro settings,
	// system instantiations and scope declarations, until the closing "}"
	// (which is previewed only and left for the caller to consume).
	for (;;) {
		try {
			const string token = tokeniser.PreviewNextToken(false);
			// End of input : nothing more to read
			if (token.empty()) break;
			// Closing bracket "}" : end of the program declaration
			if (token == "}") break;
			if (token == "import") {
				// File import : read the imported declarations
				tokeniser.GetNextToken(true);
				this->readImport(tokeniser,prog,"");
			}
			else if (token == "set") {
				// Macro setting
				tokeniser.GetNextToken(false);
				this->readMacro(tokeniser);
			}
			// A declared system name starts an instantiation
			else if (this->getDeclaredSystemIdx(token,prog) >= 0)
				this->readInstantiation(tokeniser,prog);
			// Anything else is read as a scope declaration
			else
				this->readScope(tokeniser,prog);
		}
		catch (Exception & err ) {
			// Report and keep reading the rest of the section
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << err.Comment() << endl;
			this->addError();
		}
	}
}

void SCCompiler::readInstantiation(Tokeniser & tokeniser, SCProgram & prog) {
	// Read "<Type> name[, name2, ...] ;" where each name may carry an array
	// suffix "[min:max]" expanding to one instance per index ("name[i]").
	// Read instance type
	string type = tokeniser.GetNextToken(true);
	// No array can be defined on the type itself
	if (!this->getStringArray(tokeniser).GetBegin().empty()) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : System instanciation \"" << type << "\" cannot have array brackets" << endl;
		this->addError();
	}
	// Assert it's an instance declaration and get the index
	int type_idx;
	if ((type_idx = this->getDeclaredSystemIdx(type,prog)) < 0) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << type << "\" is not a defined system" << endl;
		this->addError();
	}
	// Read instances
	bool exit = false;
	do {
		string name = tokeniser.GetNextToken(true);
		// Create instance (declaration index, instance name, sub instances)
		Triple<unsigned int, string, vector<unsigned int> > instance;
		// NOTE(review): when type_idx is negative (error reported above) the
		// value wraps around in the unsigned field - confirm intended
		instance.First()= type_idx;
		// Get the number of instances (in case of array) ; both bounds stay
		// -1 when no array suffix was given, yielding a single instance
		Range<string> range = getStringArray(tokeniser);
		int min = (range.GetBegin().empty()?-1:ParseEvalIntExp(range.GetBegin(),this->macros)),
			max = (range.GetEnd().empty()?-1:ParseEvalIntExp(range.GetEnd(),this->macros));
		if (min > max) {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Range starting index " << min << " is above the last index " << max << endl;
			this->addError();
		}
		for (int i=min; i<=max; ++i) {
			if (i != -1) {
				stringstream ss; ss << name << "[" << i << "]";
				instance.Second() = ss.str();
			}
			else instance.Second() = name;
			// Check that the name is not already used. The inner index is
			// deliberately NOT named "i" : the previous code shadowed the
			// outer loop counter, which was harmless here but bugprone.
			for (unsigned int k=0; k<prog.sys_inst.size(); ++k) {
				if (prog.sys_inst[k].GetSecond() == instance.GetSecond()) {
					this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Instance name \"" << instance.GetSecond() << "\" is already used" << endl;
					this->addError();
					break;
				}
			}
			// Add instance
			prog.sys_inst.push_back(instance);
		}
		string token = tokeniser.GetNextToken(true);
		// If end of instruction, exit loop
		if (token == ";") exit = true;
		// Else if another instance is not coming, error
		else if (token != ",") {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" found where expecting [ \"[\" , \";\" , \",\" ]" << endl;
			this->addError();
		}
	}
	while (!exit);
}

void SCCompiler::readScope(Tokeniser & tokeniser, SCProgram & prog) {
	// Read a scope declaration "super[range] { sub1[range] , sub2 , ... }" :
	// every listed sub instance is added to the sub-scope of every matching
	// super instance. An instance range "[min:max]" expands to "name[i]" for
	// each index ; both bounds stay -1 (single unsuffixed name) when absent.
	// Get instance name
	string super_instance = tokeniser.GetNextToken(true);
	// Get eventual array
	Range<string> loop_range = this->getStringArray(tokeniser);
	const int loop_min = (loop_range.GetBegin().empty()?-1:ParseEvalIntExp(loop_range.GetBegin(),this->macros)),
			  loop_max = (loop_range.GetEnd().empty()?-1:ParseEvalIntExp(loop_range.GetEnd(),this->macros));
	if (loop_min > loop_max) {
		// NOTE(review): the message below contains a stray escaped quote
		// after "line N : " - looks like a typo in the literal; confirm
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \" Range starting index " << loop_min << " is above the last index " << loop_max << endl;
		this->addError();
	}
	// Check names correctness : every expanded super name must be a known
	// system instance
	for (int i=loop_min; i<=loop_max; ++i) {
		stringstream ss_super;
		ss_super << super_instance;
		if (i != -1) ss_super << "[" << i << "]";
		if (this->getSystemInstanceIdx(ss_super.str(),prog) < 0) {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << ss_super.str() << "\" is not a known identifier" << endl;
			this->addError();
		}
	}
	// Assert that the next character is a "{"
	tokeniser.AssertNextToken("{");
	// Read systems to encompass
	bool exit = false;
	do {
		// Get sub instance
		string sub_instance = tokeniser.GetNextToken(true);
		// Check that the name is correct (no special characters)
		if (!tokeniser.ContainsNoSymbol(sub_instance)) {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : sub-instance name \"" << sub_instance << "\" contains forbidden characters" << endl;
			this->addError();
		}
		// Get eventual range (kept as strings : it is re-evaluated for each
		// super index so it can reference the "idx" macro set below)
		Range<string> sub_range = this->getStringArray(tokeniser);
		// Add these sub instances to all the super systems
		for (int i=loop_min; i<=loop_max; ++i) {
			stringstream ss_super;
			ss_super << super_instance;
			if (i != -1) {
				ss_super << "[" << i << "]";
				// Expose the current super index to the sub range expressions
				// through the temporary "idx" macro
				this->macros["idx"] = i;
			}
			int super_idx;
			if ((super_idx = this->getSystemInstanceIdx(ss_super.str(),prog)) < 0) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << ss_super.str() << "\" is not a system instance" << endl;
				this->addError();
			}
			else {
				const int sub_min = (sub_range.GetBegin().empty()?-1:ParseEvalIntExp(sub_range.GetBegin(),this->macros)),
				  		  sub_max = (sub_range.GetEnd().empty()?-1:ParseEvalIntExp(sub_range.GetEnd(),this->macros));
				if (sub_min > sub_max) {
					this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \" Range starting index " << sub_min << " is above the last index " << sub_max << endl;
					this->addError();
				}
				for (int j=sub_min; j<=sub_max; ++j) {
					stringstream ss_sub; ss_sub << sub_instance;
					if (j != -1) ss_sub << "[" << j << "]";
					int sub_idx;
					if ((sub_idx = this->getSystemInstanceIdx(ss_sub.str(),prog)) < 0) {
						this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << ss_sub.str() << "\" is not a system instance" << endl;
						this->addError();
					}
					else {
						// Add relationship (rejecting duplicate scope entries)
						vector<unsigned int> & super_sub = prog.sys_inst[super_idx].Third();
						if (std::find(super_sub.begin(),super_sub.end(),(unsigned int)sub_idx) == super_sub.end())
							super_sub.push_back(sub_idx);
						else {
							this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Double scope declarations - " << ss_super.str() << " already has \"" << ss_sub.str() << "\" in its sub scope" << endl;
							this->addError();
						}
					}
				}
				// If "idx" macro was created, remove it
				if (this->macros.find("idx") != this->macros.end())
					this->macros.erase("idx");
			}
		}
		string token = tokeniser.GetNextToken(true);
		// If end of scope declaration, exit loop
		if (token == "}") exit = true;
		// Else if another instance is not coming, error
		else if (token != ",") {
			this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" found where expecting [ \",\" , \"}\" ]" << endl;
			this->addError();
		}
	}
	while (!exit);
}

////////////////////////////////////////////////////////////////////////////////
// Tool functions
////////////////////////////////////////////////////////////////////////////////

const string SCCompiler::readLabelValue(Tokeniser & tokeniser, const SCProgram & prog) {
	// Read a one-line word value, expanding "<seq>" / "<seq,n>" operators :
	// "<seq,n>" appends n copies of seq, "<seq>" repeats seq until the value
	// reaches the program word length. The value must end up exactly
	// word-length characters long. Reading stops (without consuming the
	// terminator) at end of line, ',' or '|'.
	// Remove skip on carriage return to only read on one line
	tokeniser.RemoveSkipSymbol("\n");
	tokeniser.AddSpecialSymbol("\n");
	// Read value on one line
	string token, value;
	while (((token = tokeniser.PreviewNextToken(true)) != "\n") && (token != ",") && (token != "|")) {
		tokeniser.GetNextToken(true);
		// If an expansion operator is starting
		if (token == "<") {
			// Get the sequence to copy
			string seq = tokeniser.GetNextToken(true);
			// Preview next token
			token = tokeniser.PreviewNextToken(true);
			// If a number is indicated
			int nb_copies = -1;
			if (token == ",") {
				// Delete token is ','
				tokeniser.GetNextToken(true);
				// Read the number of times the sequence need to be copied
				vector<string> delimiters;
				delimiters.push_back(">");
				try {
					nb_copies = ParseEvalIntExp(this->getStringExpression(tokeniser,delimiters),this->macros);
				}
				catch (Exception & e) {
					this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << e.Comment() << endl;
					this->addError();
				}
			}
			// Check next token is '>'
			tokeniser.AssertNextToken(">");
			// Add to value nb_copies of seq
			if (nb_copies > 0) for (int i=0; i<nb_copies; ++i) value += seq;
			// Else copy until the label has the word size
			// NOTE(review): if seq were empty this loop would not terminate -
			// presumably the tokeniser never yields an empty token here; verify
			else while (value.size() < prog.GetWordLength()) value += seq;
		}
		// Otherwise just add the token to the value
		else value += token;
	}
	// Put back the carriage return skip symbol
	tokeniser.RemoveSpecialSymbol("\n");
	tokeniser.AddSkipSymbol("\n");
	// Check length
	if (value.size() != prog.GetWordLength()) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : Declaration string \"" << value << "\"'s size (" << value.size() <<") does not match word size (" << prog.word_length << ")" << endl;
		this->addError();
	}
	// Return value
	return value;
}

const Range<string> SCCompiler::getStringRange(Tokeniser & tokeniser) {
	// Read a range written "<exp>" or "<exp> : <exp>" on a single line and
	// return both bounds as unevaluated expression strings. For the single
	// value form, both bounds are identical.
	// Make '\n' a visible token instead of a skipped one : the range must
	// not span several lines
	tokeniser.RemoveSkipSymbol("\n");
	tokeniser.AddSpecialSymbol("\n");
	Range<string> result("","");
	// The lower bound stops at end of line or at the ':' separator
	vector<string> stops;
	stops.push_back("\n");
	stops.push_back(":");
	result.Begin() = this->getStringExpression(tokeniser,stops);
	if (tokeniser.PreviewNextToken(true) == ":") {
		// Consume the ':' and read the upper bound (stops at end of line)
		tokeniser.GetNextToken(true);
		stops.pop_back();
		result.End() = this->getStringExpression(tokeniser,stops);
	}
	else
		// Single value : degenerate range
		result.End() = result.Begin();
	tokeniser.AssertNextToken("\n");
	// Restore '\n' as a skip symbol
	tokeniser.RemoveSpecialSymbol("\n");
	tokeniser.AddSkipSymbol("\n");
	return result;
}

const Range<string> SCCompiler::getStringArray(Tokeniser & tokeniser) {
	// Read an optional array suffix "[<exp>]" or "[<exp> : <exp>]" and
	// return the bounds as unevaluated expression strings. When no '['
	// follows, an empty range is returned and nothing is consumed.
	Range<string> result("","");
	if (tokeniser.PreviewNextToken(true) == "[") {
		// Consume the '['
		tokeniser.GetNextToken(true);
		// The lower bound stops at ']' or at the ':' separator
		vector<string> stops;
		stops.push_back("]");
		stops.push_back(":");
		result.Begin() = this->getStringExpression(tokeniser,stops);
		if (tokeniser.PreviewNextToken(true) == ":") {
			// Consume the ':' and read the upper bound (stops at ']')
			tokeniser.GetNextToken(true);
			stops.pop_back();
			result.End() = this->getStringExpression(tokeniser,stops);
		}
		else
			// Single index : degenerate range
			result.End() = result.Begin();
		tokeniser.AssertNextToken("]");
	}
	return result;
}

const string SCCompiler::getStringExpression(Tokeniser & tokeniser, const vector<string> & delimiters_init) {
	// Concatenate tokens into an expression string until one of the given
	// delimiters shows up (the delimiter itself is previewed but never
	// consumed). With no delimiter given, the expression runs to the end of
	// the current line.
	vector<string> stops = delimiters_init;
	const bool line_mode = stops.empty();
	if (line_mode) {
		// Make '\n' a visible token instead of a skipped one, and use it as
		// the only delimiter
		tokeniser.RemoveSkipSymbol("\n");
		tokeniser.AddSpecialSymbol("\n");
		stops.push_back("\n");
	}
	// Accumulate tokens up to (excluding) the first delimiter
	string result = "";
	for (string token = tokeniser.PreviewNextToken(true);
		 find(stops.begin(), stops.end(), token) == stops.end();
		 token = tokeniser.PreviewNextToken(true))
		result += tokeniser.GetNextToken(true);
	if (line_mode) {
		// Restore '\n' as a skip symbol
		tokeniser.RemoveSpecialSymbol("\n");
		tokeniser.AddSkipSymbol("\n");
	}
	return result;
}

const string SCCompiler::readSchema(Tokeniser & tokeniser, const SCProgram & prog) {
	// A schema is either a plain system word, or a compression of three
	// words written "[ w1 , w2 , w3 ]".
	// If no '[' follows, this is the plain word case
	if (tokeniser.PreviewNextToken(true) != "[")
		return readSystemWord(tokeniser, prog);
	// Compression case : consume the '['
	tokeniser.GetNextToken(true);
	// Read the three words, checking the separators between them
	string to_compress = "", word;
	for (unsigned int part=1; part<=3; ++part) {
		word = readSystemWord(tokeniser,prog);
		if (part < 3) {
			// The first two words are each followed by a ',' separator
			const string sep = tokeniser.GetNextToken(true);
			if (sep != ",") {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << sep << "\" found where expecting separator \",\"" << endl;
				this->addError();
			}
		}
		// The last word is followed by the closing bracket
		else tokeniser.AssertNextToken("]");
		to_compress += word;
	}
	return this->compress(to_compress,prog.codes,tokeniser.GetLine());
}

const string SCCompiler::readSystemWord(Tokeniser & tokeniser, const SCProgram & prog) {
	// Reads a complete system word: one or more sub-words separated by the
	// OR symbol "|". Each sub-word is either a known label (replaced by its
	// stored value) or a label value read in place; successive sub-words are
	// melted together character-wise through meltWords(). The "sep" flag
	// alternates the parser between expecting a word and expecting a "|".
	bool read = true, sep = false;
	string word = "", token;
	while (read) {
		token = tokeniser.PreviewNextToken(true);
		// If a separator is expected
		if (sep) {
			// If OR
			if (token == "|") {
				// Delete "|" from buffer
				tokeniser.GetNextToken(true);
				// Next token should be a word
				sep = false;
			}
			// If special symbol, end of word, thus end of reading
			else if (tokeniser.IsSpecialSymbol(token))
				read = false;
			// Else exception: report and retry on the same (unconsumed) token
			else {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : OR separator \"|\" expected" << endl;
				this->addError();
			}
		}
		// Else if a word is expected
		else {
			// If the token is a special character then exception
			// NOTE(review): after reporting, execution still falls through to
			// the label lookup / readLabelValue below — presumably deliberate
			// error recovery, but worth confirming against readLabelValue.
			if (tokeniser.IsSpecialSymbol(token)) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << token << "\" found where expecting a word" << endl;
				this->addError();
			}
			// If the token is a label, substitute its recorded value
			if (this->labels.find(token) != this->labels.end()) {
				// Delete token from buffer
				tokeniser.GetNextToken(true);
				// Replace by its value
				token = this->labels[token];
			}
			// Otherwise read a label value
			else token = this->readLabelValue(tokeniser, prog);
			// Disabled ternary-string validation, kept for reference:
			/*
			// Check it is a ternary string
			if (!this->isTernaryWord(token)) {
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : " << token << " found where expecting a ternary string {0,1," << WILDCARD << "}*" << endl;
				this->addError();
			}*/	
			// Sub-Word melting with the whole word (OR operation)
			if (word == "") word = token;
			else word = this->meltWords(word,token,tokeniser);
			// Next token should be a separator
			sep = true;
		}
	}
	return word;
}

const int SCCompiler::getDeclaredSystemIdx(const string & word, const SCProgram & prog) const {
	// Returns the index of the system declaration named "word", or -1 when
	// no declaration matches.
	int found = -1;
	for (unsigned int i=0; (i<prog.sys_decl.size()) && (found < 0); ++i)
		if (prog.sys_decl[i].GetFirst() == word)
			found = i;
	return found;
}

const int SCCompiler::getSystemInstanceIdx(const string & word, const SCProgram & prog) const {
	// Returns the index of the system instance named "word", or -1 when
	// no instance matches.
	int found = -1;
	for (unsigned int i=0; (i<prog.sys_inst.size()) && (found < 0); ++i)
		if (prog.sys_inst[i].GetSecond() == word)
			found = i;
	return found;
}

const string SCCompiler::compress(const string & word, const vector< Couple<char,string> > & codes, const unsigned int line) {
	// Compresses "word" by replacing each fixed-length sequence with the
	// single character that encodes it in "codes". The sequence length is
	// taken from the first code entry. Errors are reported on this->ostream
	// and counted through addError() (which throws past the error limit).
	// Check there is at least one code
	if (codes.size() == 0) {
		this->ostream << "Code error @ line " << line << " : No code is defined to compress \"" << word << "\"" << endl;
		this->addError();
		// Bail out: codes[0] below would be an out-of-bounds access
		return "";
	}
	// Check word size is compatible with the code sequence length
	// (bug fix: the message used to print word.size() instead of size)
	unsigned int size = codes[0].GetSecond().size();
	if (word.size()%size != 0) {
		this->ostream << "Compression error @ line " << line << " : The code is using " << size << " character(s) long sequences, any word to compress must be a multiple of " << size << endl;
		this->addError();
	}
	// Compress, sequence by sequence
	string result = "", seq;
	for (unsigned int i=0; i<word.size(); i+=size) {
		// Extract the next sequence (bug fix: was hard-coded substr(i,3))
		seq = word.substr(i,size);
		unsigned int idx = 0;
		while ((idx < codes.size()) && (codes[idx].GetSecond() != seq)) ++idx;
		if (idx == codes.size()) {
			this->ostream << "Compression error @ line " << line << " : No code defined to code the sequence \"" << seq << "\" in the word \"" << word << "\"" << endl;
			this->addError();
			// Skip this sequence: codes[idx] would be out of bounds
			continue;
		}
		result += codes[idx].GetFirst();
	}
	return result;
}

const string SCCompiler::meltWords(const string & word1, const string & word2, const Tokeniser & tokeniser) {
	// Melts two equally sized ternary words (OR operation): each position
	// keeps the defined character; both words defining the same position is
	// an error, as is a size mismatch. Errors go through addError().
	// Check size
	if (word1.size() != word2.size()) {
		this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << word1 << "\" and \"" << word2 << "\" must be same size" << endl;
		this->addError();
		// Bail out: indexing word2 below would read past its end when
		// word1 is the longer of the two
		return word1;
	}
	// Melt
	string result = word1;
	for (unsigned int i=0; i<word1.size(); ++i) {
		if (word2[i] != WILDCARD) {
			if (word1[i] == WILDCARD)
				// Only word2 defines this position
				result[i] = word2[i];
			else {
				// Both words define the position: conflict
				this->ostream << tokeniser.GetName() << " : Syntax error @ line " << tokeniser.GetLine() << " : \"" << word1 << "\" and \"" << word2 << "\" both define the character at position " << (i+1) << endl;
				this->addError();
			}
		}
	}
	// Return
	return result;
}

const vector< Couple<char,string> > SCCompiler::readCharMap(
	const string & name,
	const string & map_dir
) {
	// Loads a character map file from "map_dir": each entry consists of a
	// one-character encoding token followed by the string it encodes.
	vector< Couple<char,string> > result;
	// Strip surrounding double quotes from the given file name, if any
	string file_name = name;
	if ((file_name.size() > 0) && (file_name[0] == '"'))
		file_name = file_name.substr(1,file_name.size()-1);
	if ((file_name.size() > 0) && (file_name[file_name.size()-1] == '"'))
		file_name = file_name.substr(0,file_name.size()-1);
	// Open the input stream
	ifstream file(string(map_dir + "/" + file_name).c_str());
	if (!file.is_open())
		throw IOFileException("Cannot find or open character map \"" + file_name + "\" in the directory \"" + map_dir + "\"");
	// Tokeniser rules: whitespace is skipped, "//" starts a comment line
	Tokeniser tokeniser(file);
	tokeniser.AddSkipSymbol(" ");
	tokeniser.AddSkipSymbol("\t");
	tokeniser.AddSkipSymbol("\n");
	tokeniser.AddCommentLineSymbol("//");
	// Read the entries until the end of the file
	Couple<char,std::string> entry;
	try {
		while (!file.eof()) {
			string token = tokeniser.GetNextToken(false);
			// A token read at end of file is discarded
			if (file.eof()) continue;
			// The encoding token must be exactly one character long
			if (token.size() != 1) {
				this->ostream << "Char map reading error : Encoding token must be one character long but \"" << token << "\" is " << token.size() << " long" << endl;
				this->addError();
			}
			entry.First() = token[0];
			entry.Second() = tokeniser.GetNextToken(true);
			result.push_back(entry);
		}
		file.close();
	}
	catch(exception & e) {
		// Close the stream before propagating the error
		file.close();
		throw;
	}
	return result;
}

void SCCompiler::addError() {
	// Records one more compilation error and aborts the compilation once
	// the configured maximum number of errors is reached.
	++(this->nb_comp_errors);
	if (this->nb_comp_errors < this->max_comp_errors)
		return;
	throw ParsingException("Too many errors, compilation aborting.");
}

const bool SCCompiler::checkDAC(vector<unsigned int> & path, const vector< vector<unsigned int> > & graph) const {
	// Depth-first search checking that "graph" (adjacency lists indexed by
	// node) is acyclic when explored from the node at the back of "path".
	// Returns false as soon as a cycle is found; in that case the offending
	// node is left pushed on "path" so the caller can report the full cycle.
	// (Fix: removed an unused iterator declared on every loop iteration.)
	unsigned int idx = path[path.size()-1];
	const vector<unsigned int> & sub = graph[idx];
	for (unsigned int i=0; i<sub.size(); ++i) {
		// A successor already present on the current path closes a cycle
		if (find(path.begin(),path.end(),sub[i]) != path.end()) {
			// Push current item before returning to give the cycle in the path
			path.push_back(sub[i]);
			return false;
		}
		// Recurse through this successor
		path.push_back(sub[i]);
		if (!this->checkDAC(path,graph))
			return false;
		path.pop_back();
	}
	return true;
}

const bool SCCompiler::isTernaryWord(const std::string & w) const {
	for (unsigned int i=0; i<w.size(); ++i)
		if ((w[i] != '0') && (w[i] != '1') && (w[i] != WILDCARD))
			return false;
	return true;
}

const bool SCCompiler::isBinaryWord(const std::string & w) const {
	for (unsigned int i=0; i<w.size(); ++i)
		if ((w[i] != '0') && (w[i] != '1'))
			return false;
	return true;
}

void SCCompiler::WriteCMacros(const string & filename) const {
	ofstream file(filename.c_str(),ios::out|ios::binary);
	if (!file.is_open())
		throw IOFileException("Cannot write to file \"" + filename + "\"");
	try {
		string name = filename;
		if (name.find_last_of('/') != string::npos)
			name = name.substr(name.find_last_of('/')+1);
		if (name.find_last_of('\\') != string::npos)
			name = name.substr(name.find_last_of('\\')+1);
		if (name.find_first_of('.') != string::npos)
			name = name.substr(0,name.find_first_of('.'));
		file << "/*" << endl;
		file << " * This header file provides the macros corresponding to the labels defined in" << endl;
		file << " * SC source file: " << name.substr(0,name.find_last_of('_')) << " ." << endl;
		file << " */" << endl << endl;
		file << "#ifndef __" << name << "_H__" << endl;
		file << "\t#define __" << name << "_H__" << endl << endl;
		map<string,string>::const_iterator itor;
		for (itor = this->labels.begin(); itor != this->labels.end(); ++itor) {
			string label = itor->second;
			unsigned int offset = 0;
			while (!label.empty() && (label[0] == WILDCARD)) {
				label.erase(label.begin());
				++offset;
			}
			label = label.substr(0,label.find_first_of(WILDCARD));
			if (!label.empty()) {
				file << "\t/* " << itor->first << " */" << endl;
				//file << "\t#define " << itor->first << "_STR \"" << label << "\"" << endl;
				file << "\t#define " << itor->first << "_OFF " << offset << endl;
				file << "\t#define " << itor->first << "_LGH " << label.length() << endl;
				file << endl;
			}
		}
		file << "#endif" << endl;
		// Close file
		file.close();
	}
	catch (exception & e) {
		file.close();
		throw;
	}
}

////////////////////////////////////////////////////////////////////////////////
// Closing Namespaces
////////////////////////////////////////////////////////////////////////////////

}

////////////////////////////////////////////////////////////////////////////////
