/*  This file is part of -_-.

    -_- is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    -_- is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with -_-.  If not, see <http://www.gnu.org/licenses/>.

    Copyright 2012-2013 Oliver Katz */

#ifndef __OPAL_LEXER_H
#define __OPAL_LEXER_H

#include <iostream>
#include <string>
#include <vector>

using namespace std;

//namespace sleepyface::opal
namespace sleepyface
{
	namespace opal
	{
		//class Token: a single token with line and column metadata included,
		//so that later stages can report errors against the original source.
		//Instances are produced by Lexer::lex.
		//Types are std::-qualified so this declaration does not depend on the
		//header-scope using-directive above.
		class Token
		{
		private:
			int line; //line number of the token within the source page (base not shown here -- TODO confirm 0- vs 1-based)
			int col; //column number of the token within its line
			std::string content; //the literal text of the token

		public:
			Token(); //methodof Token: initializer
			Token(std::string s, int l, int c); //methodof Token: initializer with content s located at line l, column c
			int getLine(); //methodof Token: gets line number
			int getColumn(); //methodof Token: gets column number
			std::string get(); //methodof Token: gets the string value of the token
		};

		//class Lexer: splits a string up into tokens based on rules
		class Lexer
		{
		private:
			string delims;
			string ignores;
			string beginQuote;
			string endQuote;

		public:
			Lexer(); //methodof Lexer: initializer
			Lexer(string d, string ig, string bq, string eq); //methodof Lexer: initializer with rules
			vector<Token> lex(string page, string file); //methodof Lexer: run lexer
		};
	}
}

/// The Opal lexer has four rules: delimiters, ignorables, beginning quotes, and ending quotes. If you want to tokenize a page such as:
/// {{string x = "hello, world";}}
/// you need a few delimiters (characters that divide tokens). For this page we need the delimiters " ", "=", and ";". You also need the ignorables: by default, a delimiter does not simply separate tokens — it is also emitted as a token of its own. Any delimiter that also appears in the ignorables list is dropped instead. To tokenize this page we also need to handle quotes; text between a beginning quote character and an ending quote character is kept as a single token. Here the beginning quote character is '"' and the ending quote character is '"'. The result should be:
/// {{tokens: 'string', 'x', '=', '"hello, world"', ';'}}

#endif