/*
 *  Lexer.cpp
 *  vorpal
 *
 *  Created by rwebb on 26/03/08.
 *
 */

#include "Lexer.h"

#include <cctype>
#include <cstdio>
#include <cstring>

namespace Vorpal{

	// True when nextChar occurs in str.  Mirrors strchr() semantics: the
	// terminating NUL is considered part of the string, so '\0' always matches.
	bool contains_char(const char *str, char nextChar){
		for(const char *p = str; ; ++p){
			if(*p == nextChar){
				return true;
			}
			if(*p == '\0'){
				return false;
			}
		}
	}

	// True when nextChar is an alphabetic character.  `str` is unused; it is
	// present only so all match functions share the MatchFunction signature.
	bool alpha(const char *str, char nextChar){
		(void)str;
		// The lexer feeds EOF through a char; treat it explicitly as non-alpha.
		if(nextChar == (char)EOF){
			return false;
		}
		// Cast through unsigned char: passing a negative char value (other
		// than EOF) to isalpha() is undefined behavior.
		return isalpha((unsigned char)nextChar) != 0;
	}

	// Catch-all match function: accepts every character unconditionally.
	// Used as the final rule of a state so Step() always finds a match.
	bool otherwise(const char *str, char nextChar){
		(void)str;
		(void)nextChar;
		return true;
	}

	// Matches the EOF sentinel the tokenizer feeds in once input is exhausted.
	// NOTE(review): relies on plain char being signed so (char)EOF promotes
	// back to EOF — confirm on platforms where char is unsigned.
	bool end_of_file(const char *str, char nextChar){
		(void)str;
		return EOF == nextChar;
	}

// Register a transition rule: any character contained in `str` moves the
// lexer to `nextState` (no token is emitted by this rule itself).
void LexerState::operator()(const char *str, LexerState &nextState, int actionFlags){
	rules.push_back(StateRule(contains_char, str, &nextState, 0, actionFlags));
}

// Register a terminating rule: any character contained in `str` completes
// the current token with `tokenTag` and keeps the lexer in this state.
void LexerState::operator()(const char *str, int tokenTag, int actionFlags){
	rules.push_back(StateRule(contains_char, str, this, tokenTag, actionFlags | StateRule::done));
}

// Register a transition rule driven by an arbitrary match function
// (e.g. alpha / otherwise / end_of_file) instead of a character set.
void LexerState::operator()(MatchFunction m, LexerState &nextState, int actionFlags){
	rules.push_back(StateRule(m, 0, &nextState, 0, actionFlags));
}

// Register a terminating rule driven by an arbitrary match function:
// completes the current token with `tokenTag`, staying in this state.
void LexerState::operator()(MatchFunction m, int tokenTag, int actionFlags){
	rules.push_back(StateRule(m, 0, this, tokenTag, actionFlags | StateRule::done));
}

// Runs `c` against this state's rules in registration order.  The first rule
// that matches fires: it writes the token tag and action flags through the
// out-params and returns the next state to move to.
const LexerState *LexerState::Step(char c, int *currentTag, int *actionFlags, bool trace) const{
	// `loop` is a project macro — presumably for(i = 0; i < n; ++i); confirm in Lexer.h.
	loop(i, 0, rules.size()){
		if(rules[i].Match(c)){
			if(trace){
				printf("%s[%i] rule matches for '%c'\n", name ?  name : "??", i, c);
			}
			return rules[i](currentTag, actionFlags);
		}
	}
	// No rule matched `c`: report the error and remain in this state.
	// NOTE(review): *currentTag / *actionFlags are left unwritten on this path —
	// confirm Failure() does not return, otherwise the caller reads stale flags.
	Failure(ERR_TOKEN_MATCHED_NO_RULES);
	return this;
}
	
// Upgrade the token's generic type to a specific one when its text exactly
// matches an entry in this state's `uniques` table (NULL-`str` terminated).
void LexerState::ApplyUnique(Token *t) const{
	if(!uniques){
		return;
	}
	for(int i = 0; uniques[i].str; ++i){
		// compare() == 0 is an exact whole-string match, same as the
		// length check + strncmp it replaces.
		if(t->str.compare(uniques[i].str) == 0){
			t->type = uniques[i].type;
			break;
		}
	}
}

// Tokenizes `length` bytes of `source` starting from `startState`, appending
// each token to `tokens`.  Stops early if the state machine makes no progress.
void Tokenizer::Process(const char *source, uint32_t length, const LexerState &startState, bool trace){
	uint32_t lineNumber = 1;
	const char *here = source;
	// Honor the caller-supplied length.  Previously strlen() was used, which
	// ignored `length` entirely and required a NUL-terminated buffer.
	const char *end = source + length;
	
	while(here < end){
		const char *start = here;
		Token t = Tokenize(&here, end, startState, lineNumber, trace);
		
		// no input consumed == no progress => infinite loop
		if(here == start){
			break;
		}

		// token found
		tokens.push_back(t);
		if(trace){
			t.Debug();
			// Show a bounded preview of the remaining input, without
			// reading past `end` (the buffer may not be NUL-terminated).
			char buffer[40];
			size_t preview = (size_t)(end - here);
			if(preview > sizeof(buffer) - 1){
				preview = sizeof(buffer) - 1;
			}
			memcpy(buffer, here, preview);
			buffer[preview] = 0;
			printf(">> %s\n", buffer);
		}
		// Count every newline in the token — a multi-line token (block
		// comment, string) previously advanced lineNumber by only 1.
		for(size_t i = 0; i < t.str.length(); ++i){
			if(t.str[i] == '\n'){
				lineNumber += 1;
			}
		}
	}
}

// Extracts a single token beginning at *start, driving the state machine one
// character at a time until a rule carrying the `done` flag fires.  On return
// *start has been advanced past everything consumed.
Token Tokenizer::Tokenize(const char **start, const char *end, const LexerState &startState, uint32_t lineNumber, bool trace){
	const char *here = *start;
	Token result(lineNumber);
	const LexerState *active = &startState;
	int currentTag = 0;
	bool halt = false;
	const char *first = 0;	// first character belonging to the token (0 = none yet)
	const char *last = 0;	// last character belonging to the token
	int actionFlags;	// presumably written by the matched rule inside Step();
				// NOTE(review): stays uninitialized if Step() matches no
				// rule and Failure() returns — confirm Failure() aborts.
	
	while(!halt){
		// Feed EOF as a sentinel once input is exhausted so a rule (e.g.
		// end_of_file) can terminate the final token.
		char c;
		if(here < end){ c = here[0]; }
		else{ c = EOF;      }
		
		active = active->Step(c, &currentTag, &actionFlags, trace);
		halt = actionFlags & StateRule::done;
		
		if( (actionFlags & StateRule::defer) || (actionFlags & StateRule::discard) ){
			// defer: leave `c` unconsumed for the next token (one-char
			// lookahead).  discard: consume `c` but keep it out of the
			// token text.
			Assert(first == 0 || halt); // do not ignore characters after start unless we are halting
			if( (actionFlags & StateRule::discard) && (here < end) && (c != EOF) ){
				here += 1;
			}
		}
		else{
			// Accumulate `c` into the token span [first, last] and consume it.
			if(!first){ 
				first = here; 
			}
			last = here;
			if(here < end){
				here += 1;
			}
		}
	}
	if(first){ // might have a zero length token
		result.str = string(first, last - first + 1);
	}
	result.type = currentTag;
	// Let the terminal state upgrade generic tokens (e.g. identifier -> keyword).
	active->ApplyUnique(&result);
	*start = here; 
	return result;
}

}


