#ifndef _PARSE_IMPL
#define _PARSE_IMPL
#include "Dlex_parse.h"

using namespace dlex_parse;
/* implement class Parse {
		class Err_input  {};
		class Err_reg {};
		class Out_of_mem {};
		class Undef_err {};
*/
namespace parse_token {
	// Reduction-production ids for the LR parse tables.  Each name encodes
	// the grammar production it reduces; the letters abbreviate the symbols
	// on the right-hand side (L=lex, D=def, R=rules/reg_def, C=code,
	// P=%% partition, U=usr_def, N=new_item, I=id, A=action).  For example
	// L_D_R_C is "lex -> def %% rules %% code" and A_ is "action -> { }".
	// The numeric values (starting at 1) must match the negated entries in
	// parse_tab::actions — do not reorder.
	enum Red_prod {
		L_D_R_C=1, L_D_R_P, L_D_R, L_D, L_R_C, L_R_P, L_R, L_P, D_U_N_R, D_U_N, D_N_R, D_U_R, D_U, D_N, D_R,
		R_R_P_A, R_P_A, U_U_C, U_C, U_U, U_, N_I_C, N_I, N_N_I_C, N_N_I, R_R_I_P, R_I_P, A_C, A_
	};
	// Grammar non-terminals; used to index the columns of parse_tab::gotos.
	enum Red_var { Lex, Def, Rules, Usr_def, New_item, Reg_def, Action };
}
#include <stack>
#include <fstream>
#include "Dfa.h"
#include "Parse_code.h"
#include "Dfa_base_loc.h"
#include "Algorithm.h"
#include "Buf_token.h"
#include "Parse_token.h"
#include "Parse_tab.h"
#include "Build_dfa_tree.h"
#include "Write_dlex_header.h"

namespace parse_loc {
	// Reads the scanner's return type from 'from' into 'name': skips leading
	// whitespace, then copies characters up to (but not including) the first
	// ';' or newline.  Defined at the bottom of this file.
	void get_ret_type(mylib::Buffer &from, std::string &name);
}
// Parses a dlex specification from 'in_file', driving the LR tables in
// parse_tab.  Generated scanner code is written to 'out', diagnostics to
// 'err'.  Two side files are also produced: a DFA data dump and the public
// "dlex.h" header (deleted again if parsing fails).
// Throws: Err_build if the output files cannot be opened; Parse::Err_input
// on a malformed specification; Parse::Err_reg on a bad regular expression;
// Parse::Out_of_mem when allocation fails; Parse::Undef_err otherwise.
Parse::Parse(std::istream &in_file, std::ostream &out, std::ostream &err){
	using namespace parse_token;
	using namespace parse_code;
	using namespace std;
	int err_num=0;			// 0 means "no error recorded"
	stack<string> back_id;		// identifiers saved until their production reduces
	stack<automata::Dfa *> back_dfa;	// DFAs awaiting their action code
	Item_code item_code_pos;	// id -> position of its ":{ code }" block
	stack<size_t> states;		// LR state stack
	std::string empty_str;
	int par_count=0;				// to count the partition (%%) number
	int dfa_num=1;					// to count dfa number (1-based)
	dlex::Reg_def defs;				// named regular definitions
	//	//////////////////////////////////////////////////////////
	std::string ret_type="int";				// the default return type is 'int'
	parse_code::Code_pos end_code_pos, usr_data_pos;		// the end code position
	bool end_code_exist=false;				// whether an END_ACTION block was seen
	bool next_token_finished=false;
	////////////////////build output file//////////////////////////////////////////////////////////
	// TODO(review): data-file path is hard-coded to a Windows location.
	const char _data_file_name[] = "c:\\dlex\\code\\data.txt";
	std::ofstream data(_data_file_name);
	const char _dlex_header_file[] = "dlex.h";
	std::ofstream out_head_file(_dlex_header_file);
	if( !data || !out_head_file ) {
		err << "Build dlex.h file failed.\n";
		throw Err_build();
	}
	////////////////////////////////////////////////////////////////////////////
	try {
		parse_loc::init_dlex_header(out_head_file);				// write to dlex.h file
		mylib::Buffer in(in_file);
		mylib::Buf_token<parse_token::Token, Parse_token> token(in);
		parse_token::Parse_token token_id = token.get();
		states.push(0);
		for( ; ; ) {
			// Tokens that are never valid here are rejected before the table lookup.
			if( token_id == parse_token::Undef ) {
				err << "Undefined character: " << token.token() << '\n';
				throw Err_input();
			} else if( token_id == parse_token::RETURN_TYPE || token_id == parse_token::END_ACTION ) {
				err << token.token();
				err << ": in a wrong position.\n";
				throw Err_input();
			}	
			size_t cur_state = states.top();
			int action = parse_tab::actions[cur_state][token_id];		// get action
			if( action == parse_tab::Acc ) {	// accept: flush remaining output and finish
				if( !next_token_finished ) {
					parse_code::write_next_token_left(in, out, ((end_code_exist) ? &end_code_pos : 0));
					next_token_finished=true;
				}
				data.close();
				std::ifstream data_file(_data_file_name);
				parse_code::write_data_init(data_file, out, dfa_num-1);	//write left code
				parse_loc::write_dlex_header(out_head_file, ret_type);	// write dlex.h file
				return;
			} else if( action == parse_tab::Err ) {	// error
				err << "Some error in your input file at \'" << token.token() << "\'\n";
				throw Err_input();
			} else if ( action >= 0 ) {	// shift
				switch(token_id) {
				case parse_token::Par_sym : 	// the %%
					par_count++;			/// add the number of partition
					if( par_count == 1 ) {		// %% rules;
						//////////////////////////////////////////////////////////////////////////////////////////
						usr_data_pos = parse_code::get_usr_data_pos(in);				// get data position
						token_id = token.get();				// exists return_type?
						if( token_id == parse_token::RETURN_TYPE ) {		// get the return type
							ret_type.clear();
							parse_loc::get_ret_type(in, ret_type);
						} else {
							token.push(token.token(), token_id);
						}
						token_id = token.get();					// exists end action ?
						if( token_id == parse_token::END_ACTION ) {
							end_code_exist=true;
							end_code_pos = parse_code::get_end_code_pos(in);
							token_id = token.get();			// get the next token
						}
						if( token_id != parse_token::End ) {		// %% rules
							in.ret(token.token().length());		// unget
							std::string name="dfa";
							mylib::utos(name, dfa_num);
							automata::Dfa *dfa = new automata::Dfa(in, defs, std::cerr, dfa_num);
							dfa_num++;
							dfa->write(data, name);
							back_dfa.push(dfa);
							token.push(empty_str, parse_token::Pattern);	// push a pattern 
						} else				// %% end
							token.push(token.token(), token_id);
						parse_code::write_static_code(out);			// write dlex_loc
						if( usr_data_pos.start >= usr_data_pos.end )
							parse_code::write_next_token(in, out, ret_type);		// write next_token function
						else
							parse_code::write_next_token(in, out, ret_type, &usr_data_pos);
					} else if (par_count == 2) {			// %% rules %%
						parse_code::write_next_token_left(in, out, ((end_code_exist) ? &end_code_pos : 0));
						next_token_finished=true;
						parse_code::copycode(in, out);			// copy code
						token.push(empty_str, parse_token::Code);		// %% rules %% code
					} else {
						err << "Error: too much partition in your file.\n";
						throw Err_input();
					}
					states.push(action);			// push action
					break;
				case parse_token::Ucode_start:			// %{
					parse_code::copycode(in, out_head_file, "%}");		// %{ code %}
					token.push(empty_str, parse_token::Code);
					states.push(action);
					break;
				case parse_token::Item_start:	{	// id :{ code }
					std::string &id = back_id.top();			
					parse_code::Code_pos pos = parse_code::item_code(in);
					item_code_pos[id] = pos;				// set position
					token.push(empty_str, parse_token::Code);
					back_id.pop();			// delete it (id was used above, before the pop)
					states.push(action);
					break;				}
				case parse_token::Lef_big :	{	//action -> { code }
					automata::Dfa *dfa = back_dfa.top();
					parse_code::action_code(in, out, dfa->attr(), item_code_pos);
					token.push(empty_str, parse_token::Code);
					back_dfa.pop();
					delete dfa;
					states.push(action);
					break;				}	
				case parse_token::ID:			// ID : :{ CODE } || reg_def : id pattern
					back_id.push(token.token());		// save the identifier
					token_id = token.get();			// to detect new item or regular definition
					if( token_id == parse_token::Item_start ) {		/// id :{ code }
						token.push(token.token(), parse_token::Item_start);
					} else {			// id 'pattern'
						in.ret(token.token().length());				// return back
						dfa_base_str::Dfa_tree &tree = automata::build_dfa_tree(in, defs, err);
						// NOTE(review): if either throw below fires, the tree node
						// allocated by build_dfa_tree is never freed — confirm and fix.
						if(tree.start || tree.start_line) {
							err << "The regular definition can't have start condition.\n";
							throw Err_input();
						}
						if( tree.suffix ) {
							err << "The regular definition can't have suffix.\n";
							throw Err_input();
						}
						defs[back_id.top()] = tree.entity;
						back_id.pop();			// delete it
						delete &tree;		// delete the block, not the entire tree
						token.push(empty_str, parse_token::Pattern);
					}
					states.push(action);
					break;
				case parse_token::Rig_big :				// to test whether need another 'pattern'
					if( par_count == 1 ) {
						token_id = token.get();
						if( token_id == parse_token::END_ACTION ) {// exists end action ?
							end_code_exist=true;
							end_code_pos = parse_code::get_end_code_pos(in);
							token_id = token.get();		//get the next token
						}
						if( token_id == parse_token::End || token_id == parse_token::Par_sym )	// needn't
							token.push(empty_str, token_id);
						else {
							in.ret(token.token().length());
							std::string name="dfa";
							mylib::utos(name, dfa_num);
							automata::Dfa *dfa = new automata::Dfa(in, defs, cerr, dfa_num);
							dfa_num++;
							dfa->write(data, name);
							back_dfa.push(dfa);
							token.push(empty_str, parse_token::Pattern);
						}
					}
					states.push(action);
					break;
				default :		// normal shift
					states.push(action);
					break;
				}
				token_id = token.get();			// get next token
			} else if ( action < 0 ) {	// reduction
				int red_prod = -action;
				int red_var;
				int del_state_num = 0;			// states to pop = length of the RHS
				switch(red_prod) {
				case parse_token::L_D_R_C :		//lex -> def %% rules %% code
					red_var = parse_token::Lex;
					del_state_num = 5;
					break;
				case parse_token::L_D_R_P :		// lex -> def %% rules %%
					red_var = parse_token::Lex;
					del_state_num = 4;
					break;
				case parse_token::L_D_R :			// lex -> def %% rules
					red_var = parse_token::Lex;
					del_state_num = 3;
					break;
				case parse_token::L_D :			// lex -> def %%
					red_var = parse_token::Lex;
					del_state_num = 2;
					break;
				case parse_token::L_R_C :			// lex -> %% rules %% code
					red_var = parse_token::Lex;
					del_state_num = 4;
					break;
				case parse_token::L_R_P :			// lex -> %% rules %%
					red_var = parse_token::Lex;
					del_state_num = 3;
					break;
				case parse_token::L_R :			// lex -> %% rules
					red_var = parse_token::Lex;
					del_state_num = 2;
					break;
				case parse_token::L_P :			// lex -> %%
					red_var = parse_token::Lex;
					del_state_num = 1;
					break;
				case parse_token::D_U_N_R :		// def : usr_def new_item reg_def
					red_var = parse_token::Def;
					del_state_num = 3;
					break;
				case parse_token::D_U_N :			// def : usr_def new_item
					red_var = parse_token::Def;
					del_state_num = 2;
					break;
				case parse_token::D_N_R:			// def : new_item reg_def
					red_var = parse_token::Def;
					del_state_num = 2;
					break;
				case parse_token::D_U_R :			// def : user_def reg_def
					red_var = parse_token::Def;
					del_state_num = 2;
					break;
				case parse_token::D_U:			// def : user_def
					red_var = parse_token::Def;
					del_state_num = 1;
					break;
				case parse_token::D_N :			// def : new_item
					red_var = parse_token::Def;
					del_state_num = 1;
					break;
				case parse_token::D_R :			// def : reg_def
					red_var = parse_token::Def;
					del_state_num = 1;
					break;
				case parse_token::R_R_P_A: case parse_token::R_P_A://rules : rules pattern action || rules : pattern action
					red_var = parse_token::Rules;
					if( red_prod == parse_token::R_R_P_A )
						del_state_num = 3;
					else
						del_state_num = 2;
					break;
				case parse_token::U_U_C:			// usr_def : usr_def %{ code %}
					red_var = parse_token::Usr_def;
					del_state_num = 4;
					break;
				case parse_token::U_C :			// usr_def : %{ code %}
					red_var = parse_token::Usr_def;
					del_state_num = 3;
					break;
				case parse_token::U_U :			// usr_def : user_def %{ %}
					red_var = parse_token::Usr_def;
					del_state_num = 3;
					break;
				case parse_token::U_ :			// usr_def : %{ %}
					red_var = parse_token::Usr_def;
					del_state_num = 2;
					break;
				case parse_token::N_I_C :			// new_item : id :{ code }
					red_var = parse_token::New_item;
					del_state_num = 4;
					break;
				case parse_token::N_I :			// new_item : id :{ }
					red_var = parse_token::New_item;
					del_state_num = 3;
					break;
				case parse_token::N_N_I_C:		// new_item : new_item id :{ code }
					red_var = parse_token::New_item;
					del_state_num = 5;
					break;
				case parse_token::N_N_I :			// new_item : new_item id :{ }
					red_var = parse_token::New_item;
					del_state_num = 4;
					break;
				case parse_token::R_R_I_P:		// reg_def : reg_def id pattern
					red_var = parse_token::Reg_def;
					del_state_num = 3;
					break;
				case parse_token::R_I_P :			// reg_def : id pattern
					red_var = parse_token::Reg_def;
					del_state_num = 2;
					break;
				case parse_token::A_C:			// action -> { code }
					red_var = parse_token::Action;
					del_state_num = 3;
					break;
				case parse_token::A_ :			// action : { }
					red_var = parse_token::Action;
					del_state_num = 2;
					break;
				default :
					err << "dlex build error\n";
					throw Undef_err();
				}
				while(del_state_num-- > 0)
					states.pop();
				states.push(parse_tab::gotos[states.top()][red_var]);
			}
		}
	} catch(const Err_input&) {
		err_num=1;
	} catch(const Err_reg&) {
		err_num = 2;
	} catch(const Err_copy&) {
		err << "Miss needed file for dlex.\n";
		err_num = 3;
	} catch(const parse_token::Token::Err_input&) {
		err << "Input error.\n";
		err_num = 1;
	} catch(const std::bad_alloc&) {
		err << "run out of memory.\n";
		err_num = 5;
	} catch(...) {
		err_num=4;
	}
	// Cleanup on any exit through the handlers above: release the DFAs and
	// regular definitions that were still pending when parsing stopped.
	data.close();			// close the data file
	while(!back_dfa.empty()) {
		automata::Dfa *dfa = back_dfa.top();
		delete dfa;
		back_dfa.pop();
	}
	for(dlex::Reg_def::iterator I=defs.begin(); I!=defs.end(); ) {
		delete I->second;
		dlex::Reg_def::iterator T=I;
		I++;
		defs.erase(T);
	}
	if( err_num ) {			// remove the partially written header on failure
		out_head_file.close();
		DeleteFile(_dlex_header_file);
	}
	// Re-raise the recorded failure as the public exception type.
	if( err_num == 1 || err_num == 3 )
		throw Parse::Err_input();
	else if( err_num == 2 )
		throw Parse::Err_reg();
	else if( err_num == 5 ) 
		throw Parse::Out_of_mem();
	else if( err_num == 4 )
		throw Parse::Undef_err();
}
// Copies the scanner return type out of 'in' into 'name': leading
// whitespace is skipped, then characters are appended until the first
// ';' or newline (which is consumed but not appended) or end of input.
void parse_loc::get_ret_type(mylib::Buffer &in, std::string &name) {
	mylib::skip_ws(in);
	for( char ch; in.get(ch); ) {
		if( ch == ';' || ch == '\n' )		// terminator reached
			break;
		name += ch;
	}
}
#endif
