/**
 *  Project Vorpal
 *  \file Lexer.h
 *  \author Russ Webb
 *
 *  \brief Contains the Lexer and Tokenizer classes used to segment the language to be interpreted by the compiler.
 *
 */

#ifndef LEXER_H
#define LEXER_H

#include <string.h>
#include <ctype.h>
#include <string>
#include <vector>
#include <iostream>

#include "Common.h"

namespace Vorpal{

	/// Maps a fixed spelling (e.g. a keyword) to the token type it should
	/// receive. A plain named struct -- the original C-style
	/// `typedef struct{...} UniqueToken;` is unnecessary in C++.
	struct UniqueToken{
		const char *str; // keyword text (not owned; typically a string literal)
		int type;        // token type assigned when `str` is matched
	};

	class LexerState; // forward declaration: StateRule stores LexerState pointers

	// Signature of the predicates StateRule evaluates against the next input
	// character; `data` is rule-specific context supplied when the rule was
	// registered (e.g. a set of characters to test membership in).
	typedef bool (*MatchFunction)(const char *data, char nextChar);

	// Built-in match predicates (defined in the implementation file).
	// NOTE(review): semantics inferred from the names -- confirm in the .cpp.
	bool contains_char(const char *str, char nextChar); // presumably: nextChar appears in str
	bool alpha(const char *str, char nextChar);         // presumably: alphabetic-character test
	bool otherwise(const char *str, char nextChar);     // presumably: catch-all fallback rule
	bool end_of_file(const char *str, char nextChar);   // presumably: end-of-input test
	
/**  \class StateRule 
 *
 *   \brief StateRules are loaded onto each of Vorpal's LexerStates' rule stacks. They define the tokenizer's behaviour depending on the input it is reading.
 */
    
	class StateRule{
	public:
		/**
		 *  \brief Binds a match predicate to the action taken when it fires.
		 *
		 *  \param match        Predicate deciding whether the rule applies to the next character.
		 *  \param data         Opaque context forwarded to the predicate (not owned).
		 *  \param nextState    State the lexer moves to when the rule matches (may be NULL).
		 *  \param tokenTag     Tag reported for the token being built.
		 *  \param actionFlags  Bitwise OR of the flag enum below.
		 */
		StateRule(MatchFunction match, const char *data, LexerState *nextState, int tokenTag, int actionFlags)
		: match(match), data(data), nextState(nextState), tokenTag(tokenTag), actionFlags(actionFlags){ 
			Assert( (actionFlags & defer) == 0 || (actionFlags & discard) == 0 ); // defer and discard are mutually exclusive options
		}
		
		// Rule of Zero: the class owns no resources, so the previously declared
		// empty destructor was removed -- the compiler-generated special members
		// are correct (and implicit moves are no longer suppressed).
		
		/// Evaluates the predicate on the next character; the `invert` flag
		/// negates the result.
		bool Match(char c) const{
			if(actionFlags & invert){ return !match(data, c); }
			else{                     return  match(data, c); }
		}

		/// Fires the rule: writes its tag and flags through the out-pointers
		/// and returns the state to continue lexing in.
		LexerState * operator()(int *currentTag, int *actionFlagsPtr) const{
			*actionFlagsPtr = actionFlags;
			*currentTag = tokenTag;
			return nextState;
		}
		
		int Tag() const{ return tokenTag; }
		
		// Action flags (bit mask). Only `invert` is consumed inside this class;
		// the others are interpreted by the tokenizer -- presumed meanings
		// below, confirm against the Tokenizer implementation.
		enum{
			defer = 1,    // presumably: character belongs to the next token
			discard = 2,  // presumably: drop the character entirely
			done = 4,     // presumably: current token is complete
			invert = 8,   // negate the match predicate's result (see Match)
		};
		
	private:
		MatchFunction match;    // predicate evaluated by Match()
		const char *data;       // predicate context (not owned)
		LexerState *nextState;  // state entered on match (not owned)
		int tokenTag;
		int actionFlags;
	};

/**  \class Token 
 *
 *    \brief  Wrapper to contain information about a single piece of parsed code.
 */
    
	class Token{
	public:
		/**
		 *  \brief Records one lexed piece of source text.
		 *
		 *  \param lineNumber  Source line the token was read from.
		 *  \param s           Start of the token's characters (copied; need not be NUL-terminated).
		 *  \param length      Number of characters to copy from `s`.
		 *  \param type        Token tag assigned by the matching StateRule.
		 */
		Token(uint32_t lineNumber, const char *s = "", uint32_t length = 0, int type = 0) 
		: str(s, length), type(type), lineNumber(lineNumber){ }
		
		/// Writes just the token's text to stdout.
		void Print() const{
			cout << str << endl;
		}

		/// Dumps type, length, text and line number for debugging.
		void Debug() const{
			printf("%i[%i] = '", type, int(str.length()));
			cout << str;
			printf("' on line %u\n", lineNumber); // %u: lineNumber is uint32_t (was %i, a signed/unsigned format mismatch)
		}
		
		/// True when both the type tag and the exact spelling match.
		bool Is(int t, const char *s) const{
			return type == t && str == s;
		}

		string str;           // the token's characters (owned copy)
		int type;             // tag from the StateRule that produced the token
		uint32_t lineNumber;  // source line the token came from
	};

/**  \class LexerState 
 *   
 *   \brief LexerStates are defined for any type of token that the compiler may encounter (see compiler.cpp)
 */
    
	class LexerState{
	public:
		// `name` is stored by pointer only -- the caller must keep it alive
		// (typically a string literal). No unique-token table until installed.
		LexerState(const char *name) : uniques(0), name(name){ }
		virtual ~LexerState(){ }
		
		// Rule registration (definitions in the implementation file). Each
		// overload presumably appends a StateRule to `rules`: matching either a
		// character-set string or a MatchFunction predicate, and on success
		// either transitioning to `nextState` or tagging the token `tokenTag`,
		// modified by `actionFlags` (see StateRule's flag enum).
		void operator()(const char *str, LexerState &nextState, int actionFlags = 0);
		void operator()(const char *str, int tokenTag, int actionFlags = 0);
		void operator()(MatchFunction m, LexerState &nextState, int actionFlags = 0);
		void operator()(MatchFunction m, int tokenTag, int actionFlags = 0);
		
		// Installs the unique-token (keyword) table used by ApplyUnique.
		// The table is not owned; the caller must keep it alive.
		void operator()(const UniqueToken *uniques){ this->uniques = uniques; }

		// Consumes one character: reports the fired rule's tag and action flags
		// through the out-pointers and returns the state to continue in
		// (defined in the implementation file).
		const LexerState *Step(char c, int *currentTag, int *actionFlags, bool trace = false) const;
			
		// Presumably rewrites t's type when t->str matches an entry of
		// `uniques` (identifier -> keyword promotion) -- confirm in the .cpp.
		void ApplyUnique(Token *t) const;
		
	private:	
		const UniqueToken *uniques;  // optional keyword table (not owned)
		vector< StateRule > rules;   // rules registered via the operator() overloads
		const char *name;            // state name, for tracing/debugging (not owned)
	};


/**  
 * \class Tokenizer
 *
 * \brief Parses the language into tokens to be interpreted by the compiler (variable names and operands).
 */
    
	class Tokenizer{
	public:
		Tokenizer(){ }
		
		/// Discards every token collected so far.
		void Clear(){
			tokens.clear();
		}

/**
 * \fn Process()
 *
 * \brief Tokenizes an entire source file, pointed to by *source
 */		
		void Process(const char *source, uint32_t length, const LexerState &startState, bool trace = false);
		
		/// Deletes the token at `index`; Asserts that `index` is in range.
		void Remove(uint32_t index){
			Assert(index < tokens.size());
			tokens.erase(tokens.begin() + index);
		}
		
		/// Inserts `t` before `index`; `index` may equal Size() to append.
		void Insert(uint32_t index, const Token &t){
			Assert(index <= tokens.size());
			tokens.insert(tokens.begin() + index, t);
		}
		
		/// Number of collected tokens (note: size_t truncated to uint32_t).
		uint32_t Size() const{ return tokens.size(); }
		
		/// Read-only access to token `index`; bounds-checked via Assert.
		const Token & operator[](uint32_t index) const{
			Assert(index < tokens.size());
			return tokens[index];
		}
		
	private:
		vector< Token > tokens;

		// Lexes a single token beginning at *start -- presumably advancing
		// *start past it; the definition lives in the implementation file.
		Token Tokenize(const char **start, const char *end, const LexerState &startState, uint32_t lineNumber, bool trace);
	};
}

#endif

