
#ifndef TOKZRGENTR_PARSING_PARSER_H
#define TOKZRGENTR_PARSING_PARSER_H
#include "tokenizer.h"
#include "syntax_trees.h"
#include "utils.h"
#include <cstdio>
#include <cstdlib>
#include <memory>
#include <string>
#include <vector>
//#	Parser for .tgs(tokenizer generator script)
//#	.tgs file format is as follows (UTF-8 only):
//#		[:Tokenizer_Name_1:] 
//#		{
//#			[Token Identifier_1] -> |=Regex_1=| [Rank_1];
//#			[Token Identifier_2] -> |=Regex_2=|;	//this is ok.
//#					...
//#			[Token Identifier_n] -> |=Regex_n=| [Rank_n];
//#		}
//#	Grammar is
//#		[decl] -> [token_identifier] : [regex]
//#		[stmt] -> [decl] ([Rank])?;
//#		[stmts] -> [stmt]+
//#		[roots] -> [Tokenizer_Name]{[stmts]}
namespace TokzrGentr
{
	//#	Recursive-descent parser for .tgs scripts following the grammar above.
	//#	NOTE: on a syntax error it reports to stderr and terminates the
	//#	process (see match()), so Parse() only returns on success.
	class TokzrGentrParser
	{
	public:
		//#	Parse the whole script text and return the syntax-tree root.
		std::shared_ptr<root> Parse(const std::u16string &text)
		{
			_tokzr.SetStr(text);
			return do_root();
		}
		//#	True when the underlying tokenizer reported a failure.
		bool IsFail() const
		{
			return _is_fail;
		}
		TokzrGentrParser()
			:_is_fail(false) {}
	private:
		//#	[decl] -> [token_identifier] : [regex]
		//#	Strips the "[...]" brackets from the identifier and the "|=...=|"
		//#	fences from the regex before building the tree nodes.
		std::shared_ptr<decl> do_decl()
		{
			auto tokenid_contents = match(TOKEN_IDENTIFIER);
			match(ARROW_OP);
			auto regex_contents = match(REGULAR_EXPRESSION);

			//#	"[name]" -> "name"
			auto token_idstr = tokenid_contents.substr(1, tokenid_contents.length() - 2);
			//#	"|=re=|" -> "re"
			auto regex_str = regex_contents.substr(2, regex_contents.length() - 4);
			auto tokid = std::make_shared<token_identifier>(token_idstr);
			auto reg = std::make_shared<regex>(regex_str);
			return std::make_shared<decl>(tokid, reg);
		}
		//#	[stmt] -> [decl] ([Rank])? ;
		std::shared_ptr<stmt> do_stmt()
		{
			auto dcl = do_decl();
			std::u16string rank;
			//#	The rank token is optional; a one-case switch was overkill.
			if (_current_tok._id == RANK)
				rank = match(RANK);
			match(DELIMITER);
			if (!rank.empty())
			{
				//#	"[n]" -> "n"
				auto num_str = rank.substr(1, rank.length() - 2);
				auto rank_num = to_num(num_str);
				return std::make_shared<stmt>(dcl, rank_num);
			}
			//#	A missing rank defaults to 0.
			return std::make_shared<stmt>(dcl, 0);
		}
		//#	[stmts] -> [stmt]+  (reads statements until the closing brace)
		std::shared_ptr<stmts> do_stmts()
		{
			auto stms = std::make_shared<stmts>();
			while (_current_tok._id != RIGHT_BRACE)
				stms->add_stmt(do_stmt());
			return stms;
		}
		//#	[roots] -> [Tokenizer_Name] { [stmts] }
		std::shared_ptr<root> do_root()
		{
			//#	Prime _current_tok with the first non-comment token.
			while ((_current_tok = _tokzr.NextTok())._id == COMMENT)
			{
				//#	skip the comments
			}
			auto name = match(TOKENIZER_NAME);
			//#	"[:Name:]" -> "Name"
			auto name_str = name.substr(2, name.length() - 4);
			match(LEFT_BRACE);
			auto all_stmt = do_stmts();
			match(RIGHT_BRACE);
			return std::make_shared<root>(name_str, all_stmt);
		}
		//#	Consume the current token if it carries `tag`, advance past any
		//#	comment tokens, and return the consumed token's contents.
		//#	On a mismatch this prints a diagnostic and exits the process, so
		//#	the recursive-descent routines above never see a bad token.
		inline std::u16string match(TokzrGentrTokenTag tag)
		{
			auto old_tok = _current_tok;
			if (!IsFail() && old_tok._id == tag)
			{
				while ((_current_tok = _tokzr.NextTok())._id == COMMENT)
				{
					//#	skip the comment tokens
				}
				_is_fail = _tokzr.IsFail();
			}
			else
			{
				_is_fail = true;
				//#	Diagnostics go to stderr, newline-terminated, so the
				//#	message is flushed before exit(1).
				fprintf(stderr, "Parsing error!\n");
				exit(1);
			}
			return old_tok._contents;
		}
		TokzrGentrTokenizer _tokzr;						//#	lexer for the script text
		TokzrGentrTokenizer::TokzrGentrToken _current_tok;	//#	one-token lookahead
		bool _is_fail;
	};
}

#endif // !TOKZRGENTR_PARSING_PARSER_H
