
#include "lexer.hpp"
#include "regex_prefix_matcher.hpp"
#include <string>
#include <list>

#include <iostream>

using namespace std;

TokenStream Lexer::getTokenStream(const string &input_line)
{
	// Tokenize one input line by repeatedly matching lexeme patterns against
	// the head of the remaining text. The first pattern in LEXEMES that
	// matches wins, except for two tie-breaks handled below.
	//
	// Throws Lexer::UnknownLexemeException(pos) when no pattern matches,
	// where pos is the offset of the offending character in input_line.
	TokenStream tokens;
	string str(input_line);
	size_t curr_pos = 0;   // offset of str's head within input_line (for error reporting)
	bool found = false;

	// Append a token of `type` covering the first `len` chars of str,
	// then consume those chars and advance curr_pos.
	auto emit = [&](auto type, size_t len) {
		Token t;
		t.type = type;
		t.lexeme = str.substr(0, len);
		tokens.push_back(t);
		curr_pos += len;
		str.erase(0, len);
		found = true;
	};

	// Tie-break: when the pattern at `alt_idx` matches exactly the same span
	// as the current candidate, the more specific alternative lexeme wins
	// (e.g. an integer-shaped float literal, or a keyword-shaped identifier).
	// Returns true if the alternative token was emitted.
	auto prefer_alt = [&](size_t alt_idx, size_t len) {
		if (!Matcher::prefixMatch(LEXEMES[alt_idx].pattern, str)) {
			return false;
		}
		// size_t throughout — the original stored this in an int, mixing
		// signed/unsigned in the comparison below.
		if (Matcher::getLastMatchLength() != len) {
			return false;   // alternative matched a different (shorter/longer) span
		}
		emit(LEXEMES[alt_idx].type, len);
		return true;
	};

	while (!str.empty()) {
		found = false;

		// Leading whitespace produces no token; just consume it.
		if (Matcher::prefixMatch(WHITESPACE_PATTERN, str)) {
			size_t l = Matcher::getLastMatchLength();
			str.erase(0, l);
			curr_pos += l;
		}

		if (str.empty()) {
			break;
		}

		for (int i = 0; i < LEX_NUMBER; i++) {
			if (!Matcher::prefixMatch(LEXEMES[i].pattern, str)) {
				continue;
			}
			// Save the length now: the prefer_alt() calls below overwrite
			// Matcher's last-match state.
			const size_t l = Matcher::getLastMatchLength();

			// NOTE(review): 4 and 2 are the historical positions of the
			// alternative lexemes in LEXEMES — confirm against the table's
			// definition if it is ever reordered.
			if (LEXEMES[i].type == FLOAT_NUMBER && prefer_alt(4, l)) {
				break;
			}
			if (LEXEMES[i].type == IDENTIFIER && prefer_alt(2, l)) {
				break;
			}

			emit(LEXEMES[i].type, l);
			break;
		}

		if (!found) {
			throw Lexer::UnknownLexemeException(curr_pos);
		}
	}
	return tokens;
}