#include "Lexer.h"
#include <sstream>

using namespace std;

//Default token: no type/lexeme yet. Zero the position so callers never read
//indeterminate row/column values (they were left uninitialized before).
Token::Token() : row(0), column(0) {}
//Type-only token (lexeme left empty); position zeroed for the same reason.
Token::Token(string type) : type(type), row(0), column(0) {}
//Position-stamped token; type/lexeme are filled in by the lexer afterwards.
Token::Token(unsigned int row, unsigned int col) : row(row), column(col) {}

//Opens the program file and builds the character DFA from its description
//file. Position bookkeeping starts at row 1, column 0.
//Throws ProgramFileNotFound when the program file cannot be opened; the DFA
//file is handed straight to the DFA's constructor.
Lexer::Lexer(const char *programfilename, const char *dfaFilename)
	: currentRow(1),
	  currentColumn(0),
	  programStream(programfilename),
	  characterDfa(dfaFilename) {
	if (programStream.fail()) {
		throw ProgramFileNotFound();
	}
}
//Closes the program stream explicitly (the ifstream destructor would also
//release the file, so this is belt-and-braces).
Lexer::~Lexer() {
	programStream.close();
}

//A token ends a line when the DFA halted in either terminator state:
//the plain one or the carriage-return flavoured one.
//NOTE(review): pass-by-value copies the whole Token (including its lexeme);
//a const reference would avoid that, but the signature is fixed by Lexer.h.
bool Lexer::IsLineTerminator(Token t) {
	const string &kind = t.type;
	if (kind == "lineterminator") return true;
	return kind == "lineterminatorcarrier";
}

//A token is valid only when every byte of its lexeme is 7-bit ASCII; a byte
//above 127 means the source contained a character outside the lexer's
//accepted alphabet. Range-for and static_cast replace the index loop and
//C-style cast of the original — behavior is unchanged.
bool Lexer::IsTokenValid(const Token &t) {
	for (char c : t.lexeme) {
		if (static_cast<unsigned char>(c) > 127) return false;
	}

	return true;
}

//Reads and returns the next token from the program stream, advancing the
//stream and the row/column bookkeeping as it goes. The token's `type` is the
//name of the DFA state the lexeme ended in, "EOF" at end of input, or
//"invalidtoken" when the lexeme contains a non-ASCII byte.
Token Lexer::PopNext() {
	//Stamp the token with the position where it starts.
	Token result = Token(currentRow, currentColumn);

	//If file is empty (ie. EOF is next, but not seen) must do an operation for the stream to realise it's at EOF
	programStream.peek();
	if (programStream.eof()) {
		result.type = "EOF";
	} else {
		//Loop until when it's in an accepting state and can't find anymore valid transitions
		//NOTE(review): peek() returns int; narrowing it to char relies on the
		//eof() check to keep EOF (-1) out of the DFA — verify behaviour for
		//bytes >= 0x80, which become negative chars here.
		string &lexref = result.lexeme;
		for (char input = programStream.peek(); !programStream.eof() && characterDfa.TransitionOn(string(1, input)); input = programStream.peek()) {
			lexref.push_back(programStream.get());
			++currentColumn;
		}
		//The DFA state we stopped in doubles as the token's type name.
		//NOTE(review): if the very first character admits no transition, the
		//lexeme stays empty and the stream does not advance — confirm callers
		//handle that case, otherwise repeated PopNext() calls make no progress.
		//Also assumes TransitionOn/CurrentStateName reset the DFA between
		//tokens — verify against the DFA class.
		result.type = characterDfa.CurrentStateName();

		//A line terminator resets the column and moves to the next row.
		if (IsLineTerminator(result)) {
			++currentRow;
			currentColumn = 0;
		}
	}

	//Does an initial look at the token to see if all the characters are in range and some simple weeding
	if (!IsTokenValid(result)) {
		result.type = "invalidtoken";
		return result;
	}

	return result;
}

