﻿using System;
using System.Collections.Generic;
using Godot.core.io;
using Godot.core;

namespace Godot.modules.gdscript
{
    public partial class GDParser
    {
        /// <summary>
        /// Attempts to constant-fold a unary operator whose single operand is a
        /// constant node. On success a fresh <c>ConstantNode</c> holding the folded
        /// value is returned; on failure the parser error is set and the original
        /// <paramref name="p_node"/> is returned unchanged.
        /// </summary>
        /// <param name="p_node">Node to hand back when folding fails.</param>
        /// <param name="op">Operator node whose first argument is the constant operand.</param>
        /// <param name="m_vop">Variant-level operator to evaluate.</param>
        private Node _reduce_unary(Node p_node, OperatorNode op, Variant.Operator m_vop)
        {
            bool valid = false;
            Variant result = new Variant();
            Variant operand = ((ConstantNode)op.arguments[0]).value;

            // Unary evaluation: the second operand slot is an empty Variant.
            Variant.evaluate(m_vop, operand, new Variant(), ref result, ref valid);

            if (!valid)
            {
                _set_error("Invalid operand for unary operator");
                return p_node;
            }

            ConstantNode folded = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
            folded.value = result;
            return folded;
        }

        /// <summary>
        /// Attempts to constant-fold a binary operator whose two operands are both
        /// constant nodes. On success a fresh <c>ConstantNode</c> holding the folded
        /// value is returned; on failure the parser error is set and the original
        /// <paramref name="p_node"/> is returned unchanged.
        /// </summary>
        /// <param name="p_node">Node to hand back when folding fails.</param>
        /// <param name="op">Operator node whose first two arguments are the constant operands.</param>
        /// <param name="m_vop">Variant-level operator to evaluate.</param>
        private Node _reduce_binary(Node p_node, OperatorNode op, Variant.Operator m_vop)
        {
            bool valid = false;
            Variant result = new Variant();
            Variant left = ((ConstantNode)op.arguments[0]).value;
            Variant right = ((ConstantNode)op.arguments[1]).value;

            Variant.evaluate(m_vop, left, right, ref result, ref valid);

            if (!valid)
            {
                _set_error("Invalid operands for operator");
                return p_node;
            }

            ConstantNode folded = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
            folded.value = result;
            return folded;
        }

        /// <summary>
        /// Parses the statements of an indented block (function body, if/while/for
        /// body, ...) and appends the resulting nodes to <paramref name="p_block"/>.
        /// Returns when indentation drops below the level the block started at, on
        /// end-of-file, or when a parse error has been set.
        /// </summary>
        /// <param name="p_block">Block node receiving statements, sub-blocks and local variables.</param>
        /// <param name="p_static">True when parsing the body of a static function (forwarded to expression parsing).</param>
        private void _parse_block(BlockNode p_block, bool p_static)
        {
            // Indent level this block was entered at; a dedent below it ends the block.
            int indent_level = _tab_level[_tab_level.Count - 1];

            while (true)
            {
                GDTokenizer.Token token = _tokenizer.get_token();
                if (_error_set)
                    return;

                if (indent_level > _tab_level[_tab_level.Count - 1])
                {
                    p_block.end_line = _tokenizer.get_token_line();
                    return; //go back a level
                }

                switch (token)
                {
                    #region ____code_block_cases____

                    case GDTokenizer.Token.TK_EOF: {
                        // End of file: record where the block ends, then share the
                        // return path with TK_ERROR below.
                        p_block.end_line = _tokenizer.get_token_line();
                    } goto case GDTokenizer.Token.TK_ERROR;

                    case GDTokenizer.Token.TK_ERROR: {
                        return; //go back
                        //end of file!
                    } break;

                    case GDTokenizer.Token.TK_NEWLINE: {
                        // Keep newlines as statements so the compiler can track line numbers.
                        NewLineNode nl = (NewLineNode) _alloc_node(Node.Type.TYPE_NEWLINE);
                        nl.line = _tokenizer.get_token_line();
                        p_block.statements.Add(nl);

                        if (!_parse_newline())
                        {
                            if (!_error_set)
                            {
                                p_block.end_line = _tokenizer.get_token_line();
                            }
                            return;
                        }
                    } break;

                    case GDTokenizer.Token.TK_CF_PASS: {
                        // 'pass' is a no-op; only validate it is properly terminated.
                        if (_tokenizer.get_token(1) != GDTokenizer.Token.TK_SEMICOLON
                        && _tokenizer.get_token(1) != GDTokenizer.Token.TK_NEWLINE )
                        {
                            _set_error("Expected ';' or <NewLine>.");
                            return;
                        }
                        _tokenizer.advance();
                    } break;

                    case GDTokenizer.Token.TK_PR_VAR: {
                        //variable declaration and (eventual) initialization
                        _tokenizer.advance();
                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER)
                        {
                            _set_error("Expected identifier for local variable name.");
                            return;
                        }

                        StringName n = _tokenizer.get_token_identifier();
                        _tokenizer.advance();

                        p_block.variables.Add(n); //line?
                        p_block.variable_lines.Add(_tokenizer.get_token_line());

                        //must know when the local variable is declared
                        LocalVarNode lv = (LocalVarNode) _alloc_node(Node.Type.TYPE_LOCAL_VAR);
                        lv.name = n;
                        p_block.statements.Add(lv);

                        Node assigned = null;

                        if (_tokenizer.get_token() == GDTokenizer.Token.TK_OP_ASSIGN)
                        {
                            // Explicit initializer: 'var x = <expr>'.
                            _tokenizer.advance();
                            Node subexpr = _parse_and_reduce_expression(p_block, p_static);
                            if (subexpr == null)
                                return;

                            lv.assign = subexpr;
                            assigned = subexpr;
                        }
                        else
                        {
                            // No initializer: default to a nil constant.
                            ConstantNode c = (ConstantNode) _alloc_node(Node.Type.TYPE_CONSTANT);
                            c.value = new Variant();
                            assigned = c;
                        }

                        // Emit an explicit assignment statement so the variable is
                        // always written before first use.
                        IdentifierNode id = (IdentifierNode) _alloc_node(Node.Type.TYPE_IDENTIFIER);
                        id.name = n;

                        OperatorNode op = (OperatorNode) _alloc_node(Node.Type.TYPE_OPERATOR);
                        op.op = OperatorNode.Operator.OP_ASSIGN;
                        op.arguments.Add(id);
                        op.arguments.Add(assigned);
                        p_block.statements.Add(op);

                        _end_statement();
                    } break;

                    case GDTokenizer.Token.TK_CF_IF: {
                        _tokenizer.advance();
                        Node condition = _parse_and_reduce_expression(p_block, p_static);
                        if (condition == null)
                            return;

                        ControlFlowNode cf_if = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);

                        cf_if.cf_type = ControlFlowNode.CFType.CF_IF;
                        cf_if.arguments.Add(condition);

                        cf_if.body = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
                        p_block.sub_blocks.Add(cf_if.body);

                        if (!_enter_indent_block(cf_if.body))
                        {
                            p_block.end_line = _tokenizer.get_token_line();
                            return;
                        }

                        _parse_block(cf_if.body, p_static);
                        if (_error_set)
                            return;

                        p_block.statements.Add(cf_if);

                        // Chain of elif/else clauses: each elif becomes a nested CF_IF
                        // stored in the previous clause's body_else.
                        while(true)
                        {
                            while(_tokenizer.get_token() == GDTokenizer.Token.TK_NEWLINE)
                            {
                                _tokenizer.advance();
                            }

                            if (_tab_level[_tab_level.Count -1] < indent_level) //not at current indent level
                            {
                                p_block.end_line = _tokenizer.get_token_line();
                                return;
                            }

                            if (_tokenizer.get_token() == GDTokenizer.Token.TK_CF_ELIF)
                            {
                                if (_tab_level[_tab_level.Count -1] > indent_level)
                                {
                                    _set_error("Invalid indent");
                                    return;
                                }

                                _tokenizer.advance();

                                cf_if.body_else = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
                                p_block.sub_blocks.Add(cf_if.body_else);

                                ControlFlowNode cf_else = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);
                                cf_else.cf_type = ControlFlowNode.CFType.CF_IF;

                                //condition
                                condition = _parse_and_reduce_expression(p_block,p_static);
                                if (condition == null)
                                    return;

                                cf_else.arguments.Add(condition);
                                cf_else.cf_type = ControlFlowNode.CFType.CF_IF;

                                cf_if.body_else.statements.Add(cf_else);
                                // The new clause becomes the tail of the chain.
                                cf_if = cf_else;
                                cf_if.body = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);

                                p_block.sub_blocks.Add(cf_if.body);

                                if (!_enter_indent_block(cf_if.body))
                                {
                                    p_block.end_line = _tokenizer.get_token_line();
                                    return;
                                }

                                // cf_else.body is the same block as cf_if.body here
                                // (cf_if was just re-pointed to cf_else).
                                _parse_block(cf_else.body, p_static);
                                if (_error_set)
                                    return;
                            }
                            else if (_tokenizer.get_token() == GDTokenizer.Token.TK_CF_ELSE)
                            {
                                if (_tab_level[_tab_level.Count - 1] > indent_level)
                                {
                                    _set_error("Invalid indent");
                                    return;
                                }

                                _tokenizer.advance();
                                cf_if.body_else = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
                                p_block.sub_blocks.Add(cf_if.body_else);

                                if (!_enter_indent_block(cf_if.body_else))
                                {
                                    p_block.end_line = _tokenizer.get_token_line();
                                    return;
                                }

                                _parse_block(cf_if.body_else, p_static);
                                if (_error_set)
                                    return;

                                break; //after else, exit
                            } else
                                break;
                        }
                    } break;

                    case GDTokenizer.Token.TK_CF_WHILE: {
                        _tokenizer.advance();
                        Node condition = _parse_and_reduce_expression(p_block, p_static);
                        if (condition == null)
                            return;

                        ControlFlowNode cf_while = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);

                        cf_while.cf_type = ControlFlowNode.CFType.CF_WHILE;
                        cf_while.arguments.Add(condition);

                        cf_while.body = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
                        p_block.sub_blocks.Add(cf_while.body);

                        if (!_enter_indent_block(cf_while.body))
                        {
                            p_block.end_line = _tokenizer.get_token_line();
                            return;
                        }

                        _parse_block(cf_while.body, p_static);
                        if (_error_set)
                            return;
                        p_block.statements.Add(cf_while);
                    } break;

                    case GDTokenizer.Token.TK_CF_FOR: {
                        _tokenizer.advance();

                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER)
                        {
                            _set_error("identifier expected after 'for'");
                            // FIX: bail out instead of reading an identifier from a
                            // token that is not one.
                            return;
                        }

                        IdentifierNode id = (IdentifierNode) _alloc_node(Node.Type.TYPE_IDENTIFIER);
                        id.name = _tokenizer.get_token_identifier();

                        _tokenizer.advance();

                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_OP_IN)
                        {
                            _set_error("'in' expected after identifier");
                            return;
                        }

                        _tokenizer.advance();

                        Node container = _parse_and_reduce_expression(p_block,p_static);
                        if (container == null)
                            return;

                        ControlFlowNode cf_for = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);

                        cf_for.cf_type = ControlFlowNode.CFType.CF_FOR;
                        cf_for.arguments.Add(id);
                        cf_for.arguments.Add(container);

                        cf_for.body = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
                        p_block.sub_blocks.Add(cf_for.body);

                        if (!_enter_indent_block(cf_for.body))
                        {
                            p_block.end_line = _tokenizer.get_token_line();
                            return;
                        }

                        _parse_block(cf_for.body,p_static);
                        // FIX: the condition was inverted ('if (!_error_set) return;'),
                        // which aborted on success and appended cf_for only on error.
                        // Every other control-flow case returns when _error_set is true.
                        if (_error_set)
                            return;
                        p_block.statements.Add(cf_for);
                    } break;

                    case GDTokenizer.Token.TK_CF_CONTINUE: {
                        _tokenizer.advance();
                        ControlFlowNode cf_continue = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);
                        cf_continue.cf_type = ControlFlowNode.CFType.CF_CONTINUE;
                        p_block.statements.Add(cf_continue);
                        if (!_end_statement())
                        {
                            _set_error("Expected end of statement (continue)");
                            return;
                        }
                    } break;

                    case GDTokenizer.Token.TK_CF_BREAK: {
                        _tokenizer.advance();
                        ControlFlowNode cf_break = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);
                        cf_break.cf_type = ControlFlowNode.CFType.CF_BREAK;
                        p_block.statements.Add(cf_break);
                        if (!_end_statement())
                        {
                            _set_error("Expected end of statement (break)");
                            return;
                        }
                    } break;

                    case GDTokenizer.Token.TK_CF_RETURN: {
                        _tokenizer.advance();
                        ControlFlowNode cf_return = (ControlFlowNode) _alloc_node(Node.Type.TYPE_CONTROL_FLOW);
                        cf_return.cf_type = ControlFlowNode.CFType.CF_RETURN;

                        if (_tokenizer.get_token() == GDTokenizer.Token.TK_SEMICOLON
                        || _tokenizer.get_token() == GDTokenizer.Token.TK_NEWLINE
                        || _tokenizer.get_token() == GDTokenizer.Token.TK_EOF)
                        {
                            // Bare 'return' with no value; expect end of statement.
                            p_block.statements.Add(cf_return);
                            if (!_end_statement())
                            {
                                return;
                            }
                        }
                        else
                        {
                            //expect expression
                            Node retexpr = _parse_and_reduce_expression(p_block,p_static);
                            if (retexpr == null)
                                return;
                            cf_return.arguments.Add(retexpr);
                            p_block.statements.Add(cf_return);
                            if (!_end_statement())
                            {
                                _set_error("Expected end of statement after return expression.");
                                return;
                            }
                        }
                    } break;

                    case GDTokenizer.Token.TK_PR_ASSERT: {
                        _tokenizer.advance();
                        Node condition = _parse_and_reduce_expression(p_block,p_static);
                        if (condition == null)
                            return;
                        AssertNode an = (AssertNode) _alloc_node(Node.Type.TYPE_ASSERT);
                        an.condition = condition;
                        p_block.statements.Add(an);

                        if (!_end_statement())
                        {
                            _set_error("Expected end of statement after assert.");
                            return;
                        }
                    } break;

                    default: {
                        // Anything else must be a free-standing expression statement.
                        Node expression = _parse_and_reduce_expression(p_block, p_static, false, true);
                        if (expression == null)
                            return;

                        p_block.statements.Add(expression);
                        if (!_end_statement())
                        {
                            _set_error("Expected end of statement after expression.");
                            return;
                        }
                    } break;

                    #endregion
                }
            }
        }

        private void _parse_class(ClassNode p_class)
        {
            int indent_level = _tab_level[_tab_level.Count - 1];
            while (true)
            {
                GDTokenizer.Token token = _tokenizer.get_token();
                if (_error_set)
                    return;

                if (indent_level > _tab_level[_tab_level.Count - 1])
                {
                    p_class.end_line = _tokenizer.get_token_line();
                    return; //go back a level
                }

                switch (token)
                {
                    case GDTokenizer.Token.TK_EOF: {
				        p_class.end_line = _tokenizer.get_token_line();
                    } goto case GDTokenizer.Token.TK_ERROR;

			        case GDTokenizer.Token.TK_ERROR: {
				        return; //go back
				        //end of file!
			        } break;

			        case GDTokenizer.Token.TK_NEWLINE: {
				        if (!_parse_newline()) {
					        if (!_error_set) {
						        p_class.end_line = _tokenizer.get_token_line();
					        }
					        return;
				        }
			        } break;

			        case GDTokenizer.Token.TK_PR_EXTENDS: {
				        _parse_extends(p_class);
				        if (_error_set)
					        return;
				        _end_statement();
			        } break;

                    case GDTokenizer.Token.TK_PR_TOOL: {
				        if (p_class.tool) {
					        _set_error("tool used more than once");
					        return;
				        }
				        p_class.tool=true;
				        _tokenizer.advance();
			        } break;

			        case GDTokenizer.Token.TK_PR_CLASS: {
				        //class inside class :D

				        StringName name;
				        StringName extends;

				        if (_tokenizer.get_token(1) != GDTokenizer.Token.TK_IDENTIFIER) 
                        {
					        _set_error("'class' syntax: 'class <Name>:' or 'class <Name> extends <BaseClass>:'");
					        return;
				        }
				        name = _tokenizer.get_token_identifier(1);
				        _tokenizer.advance(2);

				        ClassNode newclass = (ClassNode) _alloc_node(Node.Type.TYPE_CLASS);
				        newclass.initializer = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);
				        newclass.name = name;

				        p_class.subclasses.Add(newclass);

				        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PR_EXTENDS) 
                        {
					        _parse_extends(newclass);
					        if (_error_set)
						        return;
				        }

				        if (!_enter_indent_block()) 
                        {
					        _set_error("Indented block expected.");
					        return;
				        }
				        _parse_class(newclass);

			        } break;
	
			        case GDTokenizer.Token.TK_PR_STATIC: {
				        _tokenizer.advance();
				        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PR_FUNCTION) 
                        {
					        _set_error("Expected 'func'.");
					        return;
				        }
			        }; goto case GDTokenizer.Token.TK_PR_FUNCTION; 
                    //fallthrough to function

                    case GDTokenizer.Token.TK_PR_FUNCTION: {
                        bool _static = false;

				        if (_tokenizer.get_token(-1) == GDTokenizer.Token.TK_PR_STATIC) 
                        {
					        _static=true;
				        }


				        if (_tokenizer.get_token(1) != GDTokenizer.Token.TK_IDENTIFIER) 
                        {
					        _set_error("Expected identifier after 'func' (syntax: 'func <identifier>([arguments]):' ).");
					        return;
				        }

				        StringName name = _tokenizer.get_token_identifier(1);

				        for(int i = 0; i < p_class.functions.Count; i++) 
                        {
					        if (p_class.functions[i].name == name) 
                            {
						        _set_error("Function '"+ ((string)name) + "' already exists in this class (at line: "+ p_class.functions[i].line + ").");
					        }
				        }

				        for(int i = 0; i < p_class.static_functions.Count; i++) 
                        {
					        if (p_class.static_functions[i].name == name) 
                            {
						        _set_error("Function '" + (string)name + "' already exists in this class (at line: " + p_class.static_functions[i].line + ").");
					        }
				        }
				        
                        _tokenizer.advance(2);

				        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_OPEN) 
                        {
					        _set_error("Expected '(' after identifier (syntax: 'func <identifier>([arguments]):' ).");
					        return;
				        }

				        _tokenizer.advance();

				        List<StringName> arguments = new List<StringName>();
				        List<Node> default_values = new List<Node>();

				        int fnline = _tokenizer.get_token_line();

                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                        {
					        //has arguments
					        bool defaulting = false;
					        while(true) 
                            {
						        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PR_VAR) 
                                {
							        _tokenizer.advance(); //var before the identifier is allowed
						        }


						        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER) 
                                {
							        _set_error("Expected identifier for argument.");
							        return;
						        }

						        StringName argname = _tokenizer.get_token_identifier();
						        arguments.Add(argname);

						        _tokenizer.advance();

						        if (defaulting && _tokenizer.get_token() != GDTokenizer.Token.TK_OP_ASSIGN) 
                                {
							        _set_error("Default parameter expected.");
							        return;
						        }

						        //tokenizer->advance();


						        if (_tokenizer.get_token() == GDTokenizer.Token.TK_OP_ASSIGN) 
                                {
							        defaulting = true;
							        _tokenizer.advance(1);
							        Node defval = null;

							        defval = _parse_and_reduce_expression(p_class,_static);
							        if (defval == null || _error_set)
								        return;

							        OperatorNode on = (OperatorNode) _alloc_node(Node.Type.TYPE_OPERATOR);
							        on.op = OperatorNode.Operator.OP_ASSIGN;

							        IdentifierNode in_ = (IdentifierNode) _alloc_node(Node.Type.TYPE_IDENTIFIER);
							        in_.name=argname;

							        on.arguments.Add(in_);
							        on.arguments.Add(defval);
							        /* no ..
							        if (defval->type!=Node::TYPE_CONSTANT) {

								        _set_error("default argument must be constant");
							        }
							        */
							        default_values.Add(on);
						        }

						        if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA) 
                                {
							        _tokenizer.advance();
							        continue;
						        } 
                                else if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                {
							        _set_error("Expected ',' or ')'.");
							        return;
						        }
						        break;
					        }
				        }

                        _tokenizer.advance();

                        BlockNode block = (BlockNode) _alloc_node(Node.Type.TYPE_BLOCK);

                        if (name =="_init") 
                        {
					        if (p_class.extends_used) 
                            {
						        OperatorNode cparent = (OperatorNode) _alloc_node(Node.Type.TYPE_OPERATOR);
						        cparent.op = OperatorNode.Operator.OP_PARENT_CALL;
						        block.statements.Add(cparent);

						        IdentifierNode id = (IdentifierNode) _alloc_node(Node.Type.TYPE_IDENTIFIER);
						        id.name="_init";
						        cparent.arguments.Add(id);

						        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PERIOD) 
                                {
							        _tokenizer.advance();
							        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_OPEN) 
                                    {
								        _set_error("expected '(' for parent constructor arguments.");
							        }
							        _tokenizer.advance();

							        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                    {
								        //has arguments
								        while(true) 
                                        {
									        Node arg = _parse_and_reduce_expression(p_class,_static);
									        cparent.arguments.Add(arg);

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA) 
                                            {
										        _tokenizer.advance();
										        continue;
									        } 
                                            else if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                            {
										        _set_error("Expected ',' or ')'.");
										        return;
									        }

									        break;

								        }
							        }

							        _tokenizer.advance();
						        }
					        } 
                            else 
                            {
						        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PERIOD) 
                                {
							        _set_error("Parent constructor call found for a class without inheritance.");
							        return;
						        }
					        }
				        }
                        if (!_enter_indent_block(block))
                        {

                            _set_error("Indented block expected.");
                            return;
                        }

                        FunctionNode function = (FunctionNode) _alloc_node(Node.Type.TYPE_FUNCTION);
                        function.name = name;
                        function.arguments = arguments;
                        function.default_values = default_values;
                        function.is_static = _static;
                        function.line = fnline;


                        if (_static)
                            p_class.static_functions.Add(function);
                        else
                            p_class.functions.Add(function);


                        _parse_block(block, _static);
                        function.body = block;
                        //arguments
                    } break;

                    case GDTokenizer.Token.TK_PR_EXPORT: {
                        _tokenizer.advance();
                        
                        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_OPEN)
                        {
                            _tokenizer.advance();
                            if (_tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_TYPE)
                            {
                                Variant.Type type = _tokenizer.get_token_type();
						        if (type == Variant.Type.NIL) 
                                {
							        _set_error("Can't export null type.");
							        return;
						        }
						        _current_export.type = type;
						        _tokenizer.advance();

                                if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA) 
                                {
                                    // hint expected next!
                                    _tokenizer.advance();
                                    switch (_current_export.type)
                                    {
                                        case Variant.Type.INT: {
									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_CONSTANT && _tokenizer.get_token_constant().type == Variant.Type.STRING) 
                                            {
										        //enumeration
										        _current_export.hint = PropertyHint.PROPERTY_HINT_ENUM;
										        bool first = true;
										        while(true) 
                                                {
											        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || _tokenizer.get_token_constant().type != Variant.Type.STRING) 
                                                    {
												        _current_export = new PropertyInfo();
												        _set_error("Expected a string constant in enumeration hint.");
											        }

											        string c = _tokenizer.get_token_constant();
											        if (!first)
												        _current_export.hint_string += ",";
											        else
												        first=false;

											        _current_export.hint_string += c.xml_escape();

											        _tokenizer.advance();
											        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_CLOSE)
												        break;

											        if (_tokenizer.get_token()!= GDTokenizer.Token.TK_COMMA) 
                                                    {
												        _current_export = new PropertyInfo();
												        _set_error("Expected ')' or ',' in enumeration hint.");
											        }

											        _tokenizer.advance();

										        }
										        break;
									        }
								        } goto case Variant.Type.REAL;

                                        case Variant.Type.REAL: {
									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || !_tokenizer.get_token_constant().is_num) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Expected a range in numeric hint.");
									        }
										    
                                            //enumeration
									        _current_export.hint = PropertyHint.PROPERTY_HINT_RANGE;

									        _current_export.hint_string = (string)_tokenizer.get_token_constant();
									        _tokenizer.advance();

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                            {
										        _current_export.hint_string = "0," + _current_export.hint_string;
										        break;
									        }

									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_COMMA) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Expected ',' or ')' in numeric range hint.");
									        }

									        _tokenizer.advance();

									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || !_tokenizer.get_token_constant().is_num) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Expected a number as upper bound in numeric range hint.");
									        }

									        _current_export.hint_string += "," + (string)_tokenizer.get_token_constant();
									        _tokenizer.advance();

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_CLOSE)
										        break;

									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_COMMA) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Expected ',' or ')' in numeric range hint.");
									        }

									        _tokenizer.advance();

									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || !_tokenizer.get_token_constant().is_num) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Expected a number as step in numeric range hint.");
									        }

									        _current_export.hint_string += "," + (string)_tokenizer.get_token_constant();
									        _tokenizer.advance();

								        } break;

                                        case Variant.Type.STRING: {

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_CONSTANT && _tokenizer.get_token_constant().type == Variant.Type.STRING) 
                                            {
										        //enumeration
										        _current_export.hint = PropertyHint.PROPERTY_HINT_ENUM;
										        bool first = true;
										        while(true) 
                                                {
											        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || _tokenizer.get_token_constant().type != Variant.Type.STRING) 
                                                    {
												        _current_export = new PropertyInfo();
												        _set_error("Expected a string constant in enumeration hint.");
											        }

											        string c = _tokenizer.get_token_constant();
											        if (!first)
												        _current_export.hint_string += ",";
											        else
												        first = false;

											        _current_export.hint_string += c.xml_escape();
											        _tokenizer.advance();
											        if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_CLOSE)
												        break;

											        if (_tokenizer.get_token() != GDTokenizer.Token.TK_COMMA) 
                                                    {
												        _current_export = new PropertyInfo();
												        _set_error("Expected ')' or ',' in enumeration hint.");
												        return;
											        }
											        _tokenizer.advance();
										        }
										        break;
									        }

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER && _tokenizer.get_token_identifier() == "DIR") 
                                            {
										        _current_export.hint = PropertyHint.PROPERTY_HINT_DIR;
										        _tokenizer.advance();
										        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                                {
											        _set_error("Expected ')' in hint.");
											        return;
										        }
										        break;
									        }

									        if (_tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER && _tokenizer.get_token_identifier() == "FILE") 
                                            {
										        _current_export.hint = PropertyHint.PROPERTY_HINT_FILE;
										        _tokenizer.advance();

										        if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA) 
                                                {
											        _tokenizer.advance();
											        if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || _tokenizer.get_token_constant().type != Variant.Type.STRING) 
                                                    {
												        _set_error("Expected string constant with filter");
												        return;
											        }
											        _current_export.hint_string = _tokenizer.get_token_constant();
											        _tokenizer.advance();

										        }

										        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                                                {
											        _set_error("Expected ')' in hint.");
											        return;
										        }
										        break;
									        }
								        } break;

                                        case Variant.Type.COLOR: {

									        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER ) 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Color type hint expects RGB or RGBA as hints");
										        return;
									        }

									        string identifier = _tokenizer.get_token_identifier();
									        if (identifier == "RGB") 
                                            {
										        _current_export.hint = PropertyHint.PROPERTY_HINT_COLOR_NO_ALPHA;
									        } 
                                            else if (identifier == "RGBA") 
                                            {
										        //none
									        } 
                                            else 
                                            {
										        _current_export = new PropertyInfo();
										        _set_error("Color type hint expects RGB or RGBA as hints");
										        return;
									        }
									        _tokenizer.advance();

								        } break;

								        default: {
									        _current_export = new PropertyInfo();
									        _set_error("Type '" + Variant.get_type_name(type)+ "' can't take hints.");
									        return;
								        } break;
                                    }
                                }
                            }
                            else if (_tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER) 
                            {
						        string identifier = _tokenizer.get_token_identifier();
						        if (!ObjectTypeDB.is_type(identifier,"Resource")) 
                                {

							        _current_export = new PropertyInfo();
							        _set_error("Export hint not a type or resource.");
						        }

						        _current_export.type = Variant.Type.OBJECT;
						        _current_export.hint = PropertyHint.PROPERTY_HINT_RESOURCE_TYPE;
						        _current_export.hint_string = identifier;

						        _tokenizer.advance();
					        }

					        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE) 
                            {
						        _current_export = new PropertyInfo();
						        _set_error("Expected ')' or ',' after export hint.");
						        return;
					        }

					        _tokenizer.advance();
                        }

                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_PR_VAR) 
                        {
					        _current_export = new PropertyInfo();
					        _set_error("Expected 'var'.");
					        return;
				        }
                    } goto case GDTokenizer.Token.TK_PR_VAR;

                    case GDTokenizer.Token.TK_PR_VAR: {
				        //variale declaration and (eventual) initialization

                        ClassNode.Member member = new ClassNode.Member();
				        bool autoexport = _tokenizer.get_token(-1) == GDTokenizer.Token.TK_PR_EXPORT;
				        if (_current_export.type != Variant.Type.NIL) 
                        {
					        member.export = _current_export;
					        _current_export = new PropertyInfo();
				        }

				        _tokenizer.advance();
				        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER) 
                        {
					        _set_error("Expected identifier for member variable name.");
					        return;
				        }

				        member.identifier = _tokenizer.get_token_identifier();
				        member.export.name = member.identifier;
				        _tokenizer.advance();

				        p_class.variables.Add(member);

				        if (_tokenizer.get_token() != GDTokenizer.Token.TK_OP_ASSIGN) 
                        {
					        if (autoexport) 
                            {
						        _set_error("Type-less export needs a constant expression assigned to infer type.");
						        return;
					        }
					        break;
				        }

				        _tokenizer.advance();

				        Node subexpr = null;

				        subexpr = _parse_and_reduce_expression(p_class,false);
				        if (subexpr == null)
					        return;

				        if (autoexport) 
                        {
					        if (subexpr.type == Node.Type.TYPE_ARRAY) 
                            {
						        p_class.variables[p_class.variables.Count - 1].export.type = Variant.Type.ARRAY;
					        } 
                            else if (subexpr.type == Node.Type.TYPE_DICTIONARY) 
                            {
						        p_class.variables[p_class.variables.Count - 1].export.type = Variant.Type.DICTIONARY;
					        } 
                            else 
                            {
						        if (subexpr.type != Node.Type.TYPE_CONSTANT) 
                                {
							        _set_error("Type-less export needs a constant expression assigned to infer type.");
							        return;
						        }

						        ConstantNode cn = (ConstantNode)(subexpr);
						        if (cn.value.type == Variant.Type.NIL) 
                                {
							        _set_error("Can't accept a null constant expression for infering export type.");
							        return;
						        }
						        p_class.variables[p_class.variables.Count - 1].export.type = cn.value.type;
					        }
				        }




				        IdentifierNode id = (IdentifierNode) _alloc_node(Node.Type.TYPE_IDENTIFIER);
				        id.name = member.identifier;

				        OperatorNode op = (OperatorNode) _alloc_node(Node.Type.TYPE_OPERATOR);
				        op.op = OperatorNode.Operator.OP_ASSIGN;
				        op.arguments.Add(id);
				        op.arguments.Add(subexpr);


				        _end_statement();
			        } break;

                    case GDTokenizer.Token.TK_PR_CONST: {
				        //variale declaration and (eventual) initialization

				        ClassNode.Constant constant;

				        _tokenizer.advance();
				        if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER) 
                        {
					        _set_error("Expected name (identifier) for constant.");
					        return;
				        }

				        constant.identifier = _tokenizer.get_token_identifier();
				        _tokenizer.advance();

				        if (_tokenizer.get_token()!= GDTokenizer.Token.TK_OP_ASSIGN) 
                        {
					        _set_error("Constant expects assignment.");
					        return;
				        }

				        _tokenizer.advance();

				        Node subexpr = null;

				        subexpr = _parse_and_reduce_expression(p_class, true, true);
				        if (subexpr == null)
					        return;

				        if (subexpr.type != Node.Type.TYPE_CONSTANT) 
                        {
					        _set_error("Expected constant expression");
				        }
				        constant.expression=subexpr;

				        p_class.constant_expressions.Add(constant);

				        _end_statement();


			        } break;

                    default: {
				        _set_error("Unexpected token: " + GDTokenizer.get_token_name(_tokenizer.get_token()) + ":" + _tokenizer.get_token_identifier());
				        return;
			        } break;
                }//____end switch(token)
            }
        }

        private Node _parse_expression(Node p_parent, bool p_static, bool p_allow_assign = false)
        {
            List<Expression> expression = new List<Expression>();

            Node expr = null;

            while (true)
            {
                /*****************/
                /* Parse Operand */
                /*****************/
                #region ____parse_operand____
                if (_tokenizer.get_token() == GDTokenizer.Token.TK_PARENTHESIS_OPEN)
                {
                    //subexpression ()
                    _tokenizer.advance();
                    Node subexpr = _parse_expression(p_parent, p_static);
                    if (subexpr == null)
                        return null;

                    if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE)
                    {
                        _set_error("Expected ')' in expression");
                        return null;
                    }

                    _tokenizer.advance();
                    expr = subexpr;

                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_CONSTANT)
                {
                    //constant defined by tokenizer
                    ConstantNode constant = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                    constant.value = _tokenizer.get_token_constant();
                    _tokenizer.advance();
                    expr = constant;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_PR_PRELOAD)
                {
                    //constant defined by tokenizer
                    _tokenizer.advance();
                    if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_OPEN)
                    {
                        _set_error("Expected '(' after 'preload'");
                        return null;
                    }
                    _tokenizer.advance();
                    if (_tokenizer.get_token() != GDTokenizer.Token.TK_CONSTANT || _tokenizer.get_token_constant().type != Variant.Type.STRING)
                    {
                        _set_error("Expected string constant as 'preload' argument.");
                        return null;
                    }


                    string path = _tokenizer.get_token_constant();
                    if (!path.is_abs_path() && _base_path != "")
                        path = _base_path + "/" + path;

                    Resource res = ResourceLoader.load(path);
                    if (res == null)
                    {
                        _set_error("Can't preload resource at path: " + path);
                        return null;
                    }

                    _tokenizer.advance();

                    if (_tokenizer.get_token() != GDTokenizer.Token.TK_PARENTHESIS_CLOSE)
                    {
                        _set_error("Expected ')' after 'preload' path");
                        return null;
                    }

                    ConstantNode constant = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                    constant.value = res;
                    _tokenizer.advance();

                    expr = constant;

                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_SELF)
                {
                    if (!p_static)
                    {
                        _set_error("'self'' not allowed in static function or constant expression");
                        return null;
                    }

                    //constant defined by tokenizer
                    SelfNode self = (SelfNode)_alloc_node(Node.Type.TYPE_SELF);
                    _tokenizer.advance();
                    expr = self;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_TYPE && _tokenizer.get_token(1) == GDTokenizer.Token.TK_PERIOD)
                {
                    Variant.Type bi_type = _tokenizer.get_token_type();
                    _tokenizer.advance(2);
                    if (_tokenizer.get_token() != GDTokenizer.Token.TK_IDENTIFIER)
                    {
                        _set_error("Built-in type constant expected after '.'");
                        return null;
                    }
                    StringName identifier = _tokenizer.get_token_identifier();
                    if (!Variant.has_numeric_constant(bi_type, identifier))
                    {
                        _set_error("Static constant  '" + ((string)identifier) + "' not present in built-in type " + Variant.get_type_name(bi_type) + ".");
                        return null;
                    }

                    ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                    cn.value = Variant.get_numeric_constant_value(bi_type, identifier);
                    expr = cn;
                    _tokenizer.advance();

                }
                else if (_tokenizer.get_token(1) == GDTokenizer.Token.TK_PARENTHESIS_OPEN
                && (_tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_TYPE || _tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER || _tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_FUNC))
                {
                    //function or constructor

                    OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                    op.op = OperatorNode.Operator.OP_CALL;

                    if (_tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_TYPE)
                    {
                        TypeNode tn = (TypeNode)_alloc_node(Node.Type.TYPE_TYPE);
                        tn.vtype = _tokenizer.get_token_type();
                        op.arguments.Add(tn);
                    }
                    else if (_tokenizer.get_token() == GDTokenizer.Token.TK_BUILT_IN_FUNC)
                    {
                        BuiltInFunctionNode bn = (BuiltInFunctionNode)_alloc_node(Node.Type.TYPE_BUILT_IN_FUNCTION);
                        bn.function = _tokenizer.get_token_built_in_func();
                        op.arguments.Add(bn);
                    }
                    else
                    {
                        SelfNode self = (SelfNode)_alloc_node(Node.Type.TYPE_SELF);
                        op.arguments.Add(self);

                        IdentifierNode id = (IdentifierNode)_alloc_node(Node.Type.TYPE_IDENTIFIER);
                        id.name = _tokenizer.get_token_identifier();
                        op.arguments.Add(id);
                    }

                    _tokenizer.advance(2);
                    if (!_parse_arguments(op, op.arguments, p_static))
                        return null;

                    expr = op;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER)
                {
                    //identifier (reference)

                    IdentifierNode id = (IdentifierNode)_alloc_node(Node.Type.TYPE_IDENTIFIER);
                    id.name = _tokenizer.get_token_identifier();
                    _tokenizer.advance();
                    expr = id;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_OP_SUB
                || _tokenizer.get_token() == GDTokenizer.Token.TK_OP_NOT
                || _tokenizer.get_token() == GDTokenizer.Token.TK_OP_BIT_INVERT)
                {
                    //single prefix operators like !expr -expr ++expr --expr
                    OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);

                    Expression e = new Expression();

                    e.is_op = true;

                    switch (_tokenizer.get_token())
                    {
                        case GDTokenizer.Token.TK_OP_SUB: e.op = OperatorNode.Operator.OP_NEG; break;
                        case GDTokenizer.Token.TK_OP_NOT: e.op = OperatorNode.Operator.OP_NOT; break;
                        case GDTokenizer.Token.TK_OP_BIT_INVERT: e.op = OperatorNode.Operator.OP_BIT_INVERT; break;
                        default: break;
                    }

                    _tokenizer.advance();

                    if (e.op != OperatorNode.Operator.OP_NOT && _tokenizer.get_token() == GDTokenizer.Token.TK_OP_NOT)
                    {
                        _set_error("Misplaced 'not'.");
                        return null;
                    }

                    expression.Add(e);
                    continue; //only exception, must continue...
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_BRACKET_OPEN)
                {
                    // array
                    _tokenizer.advance();

                    ArrayNode arr = (ArrayNode)_alloc_node(Node.Type.TYPE_ARRAY);
                    bool expecting_comma = false;

                    while (true)
                    {
                        if (_tokenizer.get_token() == GDTokenizer.Token.TK_EOF)
                        {
                            _set_error("Unterminated array");
                            return null;

                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_BRACKET_CLOSE)
                        {
                            _tokenizer.advance();
                            break;
                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_NEWLINE)
                        {
                            _tokenizer.advance(); //ignore newline
                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA)
                        {
                            if (!expecting_comma)
                            {
                                _set_error("expression or ']' expected");
                                return null;
                            }

                            expecting_comma = false;
                            _tokenizer.advance(); //ignore newline
                        }
                        else
                        {
                            //parse expression
                            if (expecting_comma)
                            {
                                _set_error("',' or ']' expected");
                                return null;
                            }

                            Node n = _parse_expression(arr, p_static);
                            if (n == null)
                                return null;
                            arr.elements.Add(n);
                            expecting_comma = true;
                        }
                    }

                    expr = arr;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_CURLY_BRACKET_OPEN)
                {
                    // array
                    _tokenizer.advance();

                    DictionaryNode dict = (DictionaryNode)_alloc_node(Node.Type.TYPE_DICTIONARY);

                    Node key = null;

                    DictExpect expecting = DictExpect.DICT_EXPECT_KEY;

                    while (true)
                    {
                        if (_tokenizer.get_token() == GDTokenizer.Token.TK_EOF)
                        {
                            _set_error("Unterminated dictionary");
                            return null;

                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_CURLY_BRACKET_CLOSE)
                        {
                            if (expecting == DictExpect.DICT_EXPECT_COLON)
                            {
                                _set_error("':' expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_VALUE)
                            {
                                _set_error("value expected");
                                return null;
                            }
                            _tokenizer.advance();
                            break;
                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_NEWLINE)
                        {
                            _tokenizer.advance(); //ignore newline
                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_COMMA)
                        {
                            if (expecting == DictExpect.DICT_EXPECT_KEY)
                            {
                                _set_error("key or '}' expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_VALUE)
                            {
                                _set_error("value expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_COLON)
                            {
                                _set_error("':' expected");
                                return null;
                            }

                            expecting = DictExpect.DICT_EXPECT_KEY;
                            _tokenizer.advance(); //ignore newline
                        }
                        else if (_tokenizer.get_token() == GDTokenizer.Token.TK_COLON)
                        {
                            if (expecting == DictExpect.DICT_EXPECT_KEY)
                            {
                                _set_error("key or '}' expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_VALUE)
                            {
                                _set_error("value expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_COMMA)
                            {
                                _set_error("',' or '}' expected");
                                return null;
                            }

                            expecting = DictExpect.DICT_EXPECT_VALUE;
                            _tokenizer.advance(); //ignore newline
                        }
                        else
                        {
                            if (expecting == DictExpect.DICT_EXPECT_COMMA)
                            {
                                _set_error("',' or '}' expected");
                                return null;
                            }
                            if (expecting == DictExpect.DICT_EXPECT_COLON)
                            {
                                _set_error("':' expected");
                                return null;
                            }

                            if (expecting == DictExpect.DICT_EXPECT_KEY)
                            {
                                if (_tokenizer.get_token() == GDTokenizer.Token.TK_IDENTIFIER
                                && _tokenizer.get_token(1) == GDTokenizer.Token.TK_OP_ASSIGN)
                                {
                                    //lua style identifier, easier to write
                                    ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                                    cn.value = _tokenizer.get_token_identifier();
                                    key = cn;
                                    _tokenizer.advance(2);
                                    expecting = DictExpect.DICT_EXPECT_VALUE;
                                }
                                else
                                {
                                    //python/js style more flexible
                                    key = _parse_expression(dict, p_static);
                                    if (key == null)
                                        return null;
                                    expecting = DictExpect.DICT_EXPECT_COLON;
                                }
                            }

                            if (expecting == DictExpect.DICT_EXPECT_VALUE)
                            {
                                Node value = _parse_expression(dict, p_static);
                                if (value == null)
                                    return null;
                                expecting = DictExpect.DICT_EXPECT_COMMA;

                                KeyValuePair<Node, Node> pair = new KeyValuePair<Node, Node>(key, value);
                                dict.elements.Add(pair);
                                key = null;
                            }

                        }
                    }
                    expr = dict;
                }
                else if (_tokenizer.get_token() == GDTokenizer.Token.TK_PERIOD
                && _tokenizer.get_token(1) == GDTokenizer.Token.TK_IDENTIFIER
                && _tokenizer.get_token(2) == GDTokenizer.Token.TK_PARENTHESIS_OPEN)
                {
                    // parent call

                    _tokenizer.advance(); //goto identifier
                    OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                    op.op = OperatorNode.Operator.OP_PARENT_CALL;


                    IdentifierNode id = (IdentifierNode)_alloc_node(Node.Type.TYPE_IDENTIFIER);
                    id.name = _tokenizer.get_token_identifier();
                    op.arguments.Add(id);

                    _tokenizer.advance(2);
                    if (!_parse_arguments(op, op.arguments, p_static))
                        return null;

                    expr = op;

                }
                else
                {

                    //find list [ or find dictionary {

                    //print_line("found bug?");

                    _set_error("Error parsing expression, misplaced: " + GDTokenizer.get_token_name(_tokenizer.get_token()));
                    return null;		//nothing
                }

                if (expr == null)
                {
                    //ERR_EXPLAIN("GDParser bug, couldn't figure out what expression is..");
                    //ERR_FAIL_COND_V(!expr,NULL);
                    return null;
                }
                #endregion

                /******************/
                /* Parse Indexing */
                /******************/
                #region ____parse_indexing____
                while (true)
                {
                    //expressions can be indexed any number of times

                    if (_tokenizer.get_token() == GDTokenizer.Token.TK_PERIOD)
                    {
                        //indexing using "."

                        if (_tokenizer.get_token(1) != GDTokenizer.Token.TK_IDENTIFIER
                        && _tokenizer.get_token(1) != GDTokenizer.Token.TK_BUILT_IN_FUNC)
                        {
                            _set_error("Expected identifier as member");
                            return null;
                        }
                        else if (_tokenizer.get_token(2) == GDTokenizer.Token.TK_PARENTHESIS_OPEN)
                        {
                            //call!!
                            OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                            op.op = OperatorNode.Operator.OP_CALL;

                            IdentifierNode id = (IdentifierNode)_alloc_node(Node.Type.TYPE_IDENTIFIER);
                            if (_tokenizer.get_token(1) == GDTokenizer.Token.TK_BUILT_IN_FUNC)
                            {
                                //small hack so built in funcs don't obfuscate methods

                                id.name = GDFunctions.get_func_name(_tokenizer.get_token_built_in_func(1));
                            }
                            else
                            {
                                id.name = _tokenizer.get_token_identifier(1);
                            }

                            op.arguments.Add(expr); // call what
                            op.arguments.Add(id); // call func
                            //get arguments
                            _tokenizer.advance(3);
                            if (!_parse_arguments(op, op.arguments, p_static))
                                return null;
                            expr = op;

                        }
                        else
                        {
                            //simple indexing!
                            OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                            op.op = OperatorNode.Operator.OP_INDEX_NAMED;

                            IdentifierNode id = (IdentifierNode)_alloc_node(Node.Type.TYPE_IDENTIFIER);
                            id.name = _tokenizer.get_token_identifier(1);

                            op.arguments.Add(expr);
                            op.arguments.Add(id);

                            expr = op;

                            _tokenizer.advance(2);
                        }
                    }
                    else if (_tokenizer.get_token() == GDTokenizer.Token.TK_BRACKET_OPEN)
                    {
                        //indexing using "[]"
                        OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                        op.op = OperatorNode.Operator.OP_INDEX;

                        _tokenizer.advance(1);

                        Node subexpr = _parse_expression(op, p_static);
                        if (subexpr == null)
                        {
                            return null;
                        }

                        if (_tokenizer.get_token() != GDTokenizer.Token.TK_BRACKET_CLOSE)
                        {
                            _set_error("Expected ']'");
                            return null;
                        }

                        op.arguments.Add(expr);
                        op.arguments.Add(subexpr);
                        _tokenizer.advance(1);
                        expr = op;
                    }
                    else
                    {
                        break;
                    }
                }
                #endregion

                /******************/
                /* Parse Operator */
                /******************/
                #region ____parse_operator___
                {
                    Expression e = new Expression();
                    e.is_op = false;
                    e.node = expr;
                    expression.Add(e);

                    // determine which operator is next

                    OperatorNode.Operator op = (OperatorNode.Operator)0;
                    bool valid = true;

                    switch (_tokenizer.get_token()) //see operator
                    {
                        case GDTokenizer.Token.TK_OP_IN: op = OperatorNode.Operator.OP_IN; break;
                        case GDTokenizer.Token.TK_OP_EQUAL: op = OperatorNode.Operator.OP_EQUAL; break;
                        case GDTokenizer.Token.TK_OP_NOT_EQUAL: op = OperatorNode.Operator.OP_NOT_EQUAL; break;
                        case GDTokenizer.Token.TK_OP_LESS: op = OperatorNode.Operator.OP_LESS; break;
                        case GDTokenizer.Token.TK_OP_LESS_EQUAL: op = OperatorNode.Operator.OP_LESS_EQUAL; break;
                        case GDTokenizer.Token.TK_OP_GREATER: op = OperatorNode.Operator.OP_GREATER; break;
                        case GDTokenizer.Token.TK_OP_GREATER_EQUAL: op = OperatorNode.Operator.OP_GREATER_EQUAL; break;
                        case GDTokenizer.Token.TK_OP_AND: op = OperatorNode.Operator.OP_AND; break;
                        case GDTokenizer.Token.TK_OP_OR: op = OperatorNode.Operator.OP_OR; break;
                        case GDTokenizer.Token.TK_OP_ADD: op = OperatorNode.Operator.OP_ADD; break;
                        case GDTokenizer.Token.TK_OP_SUB: op = OperatorNode.Operator.OP_SUB; break;
                        case GDTokenizer.Token.TK_OP_MUL: op = OperatorNode.Operator.OP_MUL; break;
                        case GDTokenizer.Token.TK_OP_DIV: op = OperatorNode.Operator.OP_DIV; break;
                        case GDTokenizer.Token.TK_OP_MOD: op = OperatorNode.Operator.OP_MOD; break;
                        //case GDTokenizer::TK_OP_NEG: op=OperatorNode::OP_NEG ; break;
                        case GDTokenizer.Token.TK_OP_SHIFT_LEFT: op = OperatorNode.Operator.OP_SHIFT_LEFT; break;
                        case GDTokenizer.Token.TK_OP_SHIFT_RIGHT: op = OperatorNode.Operator.OP_SHIFT_RIGHT; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_ADD: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_ADD; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_SUB: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_SUB; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_MUL: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_MUL; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_DIV: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_DIV; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_MOD: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_MOD; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_SHIFT_LEFT: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_SHIFT_LEFT; ; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_SHIFT_RIGHT: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_SHIFT_RIGHT; ; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_BIT_AND: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_BIT_AND; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_BIT_OR: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_BIT_OR; break;
                        case GDTokenizer.Token.TK_OP_ASSIGN_BIT_XOR: if (!p_allow_assign) { _set_error("Unexpected assign."); return null; } p_allow_assign = false; op = OperatorNode.Operator.OP_ASSIGN_BIT_XOR; break;
                        case GDTokenizer.Token.TK_OP_BIT_AND: op = OperatorNode.Operator.OP_BIT_AND; break;
                        case GDTokenizer.Token.TK_OP_BIT_OR: op = OperatorNode.Operator.OP_BIT_OR; break;
                        case GDTokenizer.Token.TK_OP_BIT_XOR: op = OperatorNode.Operator.OP_BIT_XOR; break;
                        case GDTokenizer.Token.TK_PR_EXTENDS: op = OperatorNode.Operator.OP_EXTENDS; break;
                        default: valid = false; break;
                    }

                    if (valid)
                    {
                        e.is_op = true;
                        e.op = op;
                        expression.Add(e);
                        _tokenizer.advance();
                    }
                    else
                    {
                        break;
                    }
                }
                #endregion
            }

            /* Reduce the set of expressions and place them in an operator tree, respecting precedence */
            while (expression.Count > 1)
            {

                int next_op = -1;
                int min_priority = 0xFFFFF;
                bool is_unary = false;

                for (int i = 0; i < expression.Count; i++)
                {
                    if (!expression[i].is_op)
                    {
                        continue;
                    }

                    int priority;

                    bool unary = false;

                    switch (expression[i].op)
                    {

                        case OperatorNode.Operator.OP_EXTENDS: priority = -1; break; //before anything

                        case OperatorNode.Operator.OP_BIT_INVERT: priority = 0; unary = true; break;
                        case OperatorNode.Operator.OP_NEG: priority = 1; unary = true; break;

                        case OperatorNode.Operator.OP_MUL: priority = 2; break;
                        case OperatorNode.Operator.OP_DIV: priority = 2; break;
                        case OperatorNode.Operator.OP_MOD: priority = 2; break;

                        case OperatorNode.Operator.OP_ADD: priority = 3; break;
                        case OperatorNode.Operator.OP_SUB: priority = 3; break;

                        case OperatorNode.Operator.OP_SHIFT_LEFT: priority = 4; break;
                        case OperatorNode.Operator.OP_SHIFT_RIGHT: priority = 4; break;

                        case OperatorNode.Operator.OP_BIT_AND: priority = 5; break;
                        case OperatorNode.Operator.OP_BIT_XOR: priority = 6; break;
                        case OperatorNode.Operator.OP_BIT_OR: priority = 7; break;

                        case OperatorNode.Operator.OP_LESS: priority = 8; break;
                        case OperatorNode.Operator.OP_LESS_EQUAL: priority = 8; break;
                        case OperatorNode.Operator.OP_GREATER: priority = 8; break;
                        case OperatorNode.Operator.OP_GREATER_EQUAL: priority = 8; break;

                        case OperatorNode.Operator.OP_EQUAL: priority = 8; break;
                        case OperatorNode.Operator.OP_NOT_EQUAL: priority = 8; break;

                        case OperatorNode.Operator.OP_IN: priority = 10; break;

                        case OperatorNode.Operator.OP_NOT: priority = 11; unary = true; break;
                        case OperatorNode.Operator.OP_AND: priority = 12; break;
                        case OperatorNode.Operator.OP_OR: priority = 13; break;

                        // ?:  =  10

                        case OperatorNode.Operator.OP_ASSIGN: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_ADD: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_SUB: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_MUL: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_DIV: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_MOD: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_SHIFT_LEFT: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_SHIFT_RIGHT: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_BIT_AND: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_BIT_OR: priority = 14; break;
                        case OperatorNode.Operator.OP_ASSIGN_BIT_XOR: priority = 14; break;


                        default:
                            {
                                _set_error("GDParser bug, invalid operator in expression: " + expression[i].op.ToString());
                                return null;
                            }

                    }

                    if (priority < min_priority)
                    {
                        // < is used for left to right (default)
                        // <= is used for right to left
                        next_op = i;
                        min_priority = priority;
                        is_unary = unary;
                    }

                }


                if (next_op == -1)
                {
                    _set_error("Yet another parser bug....");
                    return null;
                }


                // OK! create operator..
                if (is_unary)
                {
                    int expr_pos = next_op;
                    while (expression[expr_pos].is_op)
                    {
                        expr_pos++;
                        if (expr_pos == expression.Count)
                        {
                            //can happen..
                            _set_error("Unexpected end of expression..");
                            return null;
                        }
                    }

                    //consecutively apply unary operators
                    for (int i = expr_pos - 1; i >= next_op; i--)
                    {
                        OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                        op.op = expression[i].op;
                        op.arguments.Add(expression[i + 1].node);
                        expression[i].is_op = false;
                        expression[i].node = op;
                        expression.RemoveAt(i + 1);
                    }

                }
                else
                {

                    if (next_op < 1 || next_op >= (expression.Count - 1))
                    {
                        _set_error("Parser bug..");
                        return null;
                    }

                    OperatorNode op = (OperatorNode)_alloc_node(Node.Type.TYPE_OPERATOR);
                    op.op = expression[next_op].op;

                    if (expression[next_op - 1].is_op)
                    {
                        _set_error("Parser bug..");
                        return null;
                    }

                    if (expression[next_op + 1].is_op)
                    {
                        // this is not invalid and can really appear
                        // but it becomes invalid anyway because no binary op
                        // can be followed by an unary op in a valid combination,
                        // due to how precedence works, unaries will always disappear first

                        _set_error("Parser bug..");
                    }


                    op.arguments.Add(expression[next_op - 1].node); //expression goes as left
                    op.arguments.Add(expression[next_op + 1].node); //next expression goes as right

                    //replace all 3 nodes by this operator and make it an expression
                    expression[next_op - 1].node = op;
                    expression.RemoveAt(next_op);
                    expression.RemoveAt(next_op);
                }

            }

            return expression[0].node;
        }

        /// <summary>
        /// Constant-folds the given expression tree where possible. Array and
        /// dictionary literals, calls to built-in type constructors and
        /// deterministic intrinsics, indexing into constants, and unary/binary
        /// operators over constant operands are collapsed into a single
        /// ConstantNode. Anything that cannot be folded is returned unchanged
        /// (possibly with reduced children).
        /// </summary>
        /// <param name="p_node">Root of the (sub)expression to reduce.</param>
        /// <param name="p_to_const">True when the result must be a compile-time
        /// constant (e.g. a 'const' declaration); enables folding of array and
        /// dictionary literals.</param>
        /// <returns>The reduced node, or the original node when reduction fails
        /// (in which case the parser error is set via _set_error).</returns>
        private Node _reduce_expression(Node p_node, bool p_to_const = false)
        {
            switch (p_node.type)
            {
                #region ____node_type_cases____

                case Node.Type.TYPE_BUILT_IN_FUNCTION:
                {
                    // Bare built-in function reference; folding happens at the
                    // OP_CALL operator that wraps it, not here.
                    return p_node;
                }

                case Node.Type.TYPE_ARRAY:
                {
                    ArrayNode an = (ArrayNode)(p_node);
                    bool all_constants = true;

                    // Reduce each element first and track whether they all folded.
                    for (int i = 0; i < an.elements.Count; i++)
                    {
                        an.elements[i] = _reduce_expression(an.elements[i], p_to_const);
                        if (an.elements[i].type != Node.Type.TYPE_CONSTANT)
                            all_constants = false;
                    }

                    if (all_constants && p_to_const)
                    {
                        // Fold the whole literal into one constant array value.
                        ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                        List<Variant> arr = new List<Variant>();
                        for (int i = 0; i < an.elements.Count; i++)
                        {
                            // FIX: cast the already-reduced element (known to be a
                            // ConstantNode because all_constants is true). The old
                            // code allocated a fresh node via _alloc_node and read
                            // its default value, so folded arrays lost their contents.
                            ConstantNode acn = (ConstantNode)(an.elements[i]);
                            arr.Insert(i, acn.value);
                        }
                        cn.value = arr;
                        return cn;
                    }

                    return an;
                }

                case Node.Type.TYPE_DICTIONARY:
                {
                    DictionaryNode dn = (DictionaryNode)(p_node);
                    bool all_constants = true;

                    // Reduce every key and value; track whether all folded.
                    for (int i = 0; i < dn.elements.Count; i++)
                    {
                        Node kn = _reduce_expression(dn.elements[i].Key, p_to_const);
                        if (kn.type != Node.Type.TYPE_CONSTANT)
                            all_constants = false;

                        Node vn = _reduce_expression(dn.elements[i].Value, p_to_const);
                        if (vn.type != Node.Type.TYPE_CONSTANT)
                            all_constants = false;

                        // KeyValuePair is immutable, so store a new pair.
                        dn.elements[i] = new KeyValuePair<Node, Node>(kn, vn);
                    }

                    if (all_constants && p_to_const)
                    {
                        // Fold the whole literal into one constant dictionary value.
                        ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                        Dictionary<Variant, Variant> dict = new Dictionary<Variant, Variant>();
                        for (int i = 0; i < dn.elements.Count; i++)
                        {
                            ConstantNode key_c = (ConstantNode)(dn.elements[i].Key);
                            ConstantNode value_c = (ConstantNode)(dn.elements[i].Value);

                            dict[key_c.value] = value_c.value;
                        }
                        cn.value = dict;
                        return cn;
                    }

                    return dn;
                }

                case Node.Type.TYPE_OPERATOR:
                {
                    OperatorNode op = (OperatorNode)(p_node);

                    bool all_constants = true;
                    int last_not_constant = -1;

                    // Reduce all operands first; remember the last non-constant one
                    // (used below to detect a fully-constant call argument list).
                    for (int i = 0; i < op.arguments.Count; i++)
                    {
                        op.arguments[i] = _reduce_expression(op.arguments[i], p_to_const);
                        if (op.arguments[i].type != Node.Type.TYPE_CONSTANT)
                        {
                            all_constants = false;
                            last_not_constant = i;
                        }
                    }

                    if (op.op == OperatorNode.Operator.OP_EXTENDS)
                    {
                        // 'extends' is resolved later; nothing to fold here.
                        return op;
                    }
                    if (op.op == OperatorNode.Operator.OP_PARENT_CALL)
                    {
                        // Parent calls depend on the runtime base class; not foldable.
                        return op;
                    }
                    else if (op.op == OperatorNode.Operator.OP_CALL)
                    {
                        // A call folds only when the callee is a built-in type
                        // constructor or a deterministic intrinsic AND every actual
                        // argument already folded (last_not_constant == 0 means the
                        // only non-constant node is the callee itself).
                        if ((op.arguments[0].type == Node.Type.TYPE_TYPE || (op.arguments[0].type == Node.Type.TYPE_BUILT_IN_FUNCTION && GDFunctions.is_deterministic(((BuiltInFunctionNode)(op.arguments[0])).function))) && (last_not_constant == 0))
                        {
                            // Collect the constant argument values (arguments[1..]).
                            Variant[] vptr = null;
                            List<Variant> ptrs = new List<Variant>();

                            if (op.arguments.Count > 1)
                            {
                                for (int i = 0; i < op.arguments.Count - 1; i++)
                                {
                                    ConstantNode cn = (ConstantNode)(op.arguments[i + 1]);
                                    ptrs.Add(cn.value);
                                }
                                vptr = ptrs.ToArray();
                            }

                            Variant.CallError ce = new Variant.CallError();
                            Variant v = new Variant();

                            if (op.arguments[0].type == Node.Type.TYPE_TYPE)
                            {
                                // Built-in type constructor, e.g. Vector2(1, 2).
                                TypeNode tn = (TypeNode)op.arguments[0];
                                v = Variant.construct(tn.vtype, vptr, ref ce);
                            }
                            else
                            {
                                // Deterministic intrinsic function, e.g. sin(0.5).
                                GDFunctions.Function func = ((BuiltInFunctionNode)op.arguments[0]).function;
                                GDFunctions.call(func, vptr, ref v, ref ce);
                            }

                            if (ce.error != Variant.CallError.Error.CALL_OK)
                            {
                                // Build a human-readable description of the callee
                                // for the error message.
                                string errwhere;
                                if (op.arguments[0].type == Node.Type.TYPE_TYPE)
                                {
                                    TypeNode tn = (TypeNode)(op.arguments[0]);
                                    // FIX: removed stray double apostrophe ("'' constructor").
                                    errwhere = "'" + Variant.get_type_name(tn.vtype) + "' constructor";
                                }
                                else
                                {
                                    GDFunctions.Function func = ((BuiltInFunctionNode)(op.arguments[0])).function;
                                    // FIX: removed stray double apostrophe ("'' intrinsic function").
                                    errwhere = "'" + GDFunctions.get_func_name(func) + "' intrinsic function";
                                }

                                switch (ce.error)
                                {
                                    case Variant.CallError.Error.CALL_ERROR_INVALID_ARGUMENT:
                                    {
                                        _set_error("Invalid argument (#" + (ce.argument + 1) + ") for " + errwhere + ".");
                                    } break;

                                    case Variant.CallError.Error.CALL_ERROR_TOO_MANY_ARGUMENTS:
                                    {
                                        _set_error("Too many arguments for " + errwhere + ".");
                                    } break;
                                    case Variant.CallError.Error.CALL_ERROR_TOO_FEW_ARGUMENTS:
                                    {
                                        _set_error("Too few arguments for " + errwhere + ".");
                                    } break;
                                    default:
                                    {
                                        _set_error("Invalid arguments for " + errwhere + ".");
                                    } break;
                                }

                                return p_node;
                            }

                            ConstantNode csn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                            csn.value = v;
                            return csn;
                        }

                        return op; // don't reduce yet
                    }
                    else if (op.op == OperatorNode.Operator.OP_INDEX)
                    {
                        // Can reduce indexing into constant arrays or dictionaries.
                        if (all_constants)
                        {
                            ConstantNode ca = (ConstantNode)(op.arguments[0]);
                            ConstantNode cb = (ConstantNode)(op.arguments[1]);

                            bool valid = false;

                            Variant v = ca.value.get(cb.value, ref valid);
                            if (!valid)
                            {
                                _set_error("invalid index in constant expression");
                                return op;
                            }

                            ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                            cn.value = v;
                            return cn;
                        }
                        else if (op.arguments[0].type == Node.Type.TYPE_CONSTANT && op.arguments[1].type == Node.Type.TYPE_IDENTIFIER)
                        {
                            // Named access (constant.member) on a constant value.
                            ConstantNode ca = (ConstantNode)(op.arguments[0]);
                            IdentifierNode ib = (IdentifierNode)(op.arguments[1]);

                            bool valid = false;
                            Variant v = ca.value.get_named(ib.name, ref valid);
                            if (!valid)
                            {
                                _set_error("invalid index '" + (string)ib.name + "' in constant expression");
                                return op;
                            }

                            ConstantNode cn = (ConstantNode)_alloc_node(Node.Type.TYPE_CONSTANT);
                            cn.value = v;
                            return cn;
                        }
                        return op;
                    }

                    // Validate assignment: the left-hand side must not have folded
                    // to a constant expression.
                    switch (op.op)
                    {
                        case OperatorNode.Operator.OP_ASSIGN:
                        case OperatorNode.Operator.OP_ASSIGN_ADD:
                        case OperatorNode.Operator.OP_ASSIGN_SUB:
                        case OperatorNode.Operator.OP_ASSIGN_MUL:
                        case OperatorNode.Operator.OP_ASSIGN_DIV:
                        case OperatorNode.Operator.OP_ASSIGN_MOD:
                        case OperatorNode.Operator.OP_ASSIGN_SHIFT_LEFT:
                        case OperatorNode.Operator.OP_ASSIGN_SHIFT_RIGHT:
                        case OperatorNode.Operator.OP_ASSIGN_BIT_AND:
                        case OperatorNode.Operator.OP_ASSIGN_BIT_OR:
                        case OperatorNode.Operator.OP_ASSIGN_BIT_XOR:
                        {
                            if (op.arguments[0].type == Node.Type.TYPE_CONSTANT)
                            {
                                _set_error("Can't assign to constant");
                                return op;
                            }
                        } break;

                        default: break;
                    }

                    // Only fully-constant operand lists can be folded further.
                    if (!all_constants)
                        return op;

                    switch (op.op)
                    {
                        // unary operators
                        case OperatorNode.Operator.OP_NEG: return _reduce_unary(p_node, op, Variant.Operator.OP_NEGATE);
                        case OperatorNode.Operator.OP_NOT: return _reduce_unary(p_node, op, Variant.Operator.OP_NOT);
                        case OperatorNode.Operator.OP_BIT_INVERT: return _reduce_unary(p_node, op, Variant.Operator.OP_BIT_NEGATE);

                        // binary operators (in precedence order)
                        case OperatorNode.Operator.OP_IN: return _reduce_binary(p_node, op, Variant.Operator.OP_IN);
                        case OperatorNode.Operator.OP_EQUAL: return _reduce_binary(p_node, op, Variant.Operator.OP_EQUAL);
                        case OperatorNode.Operator.OP_NOT_EQUAL: return _reduce_binary(p_node, op, Variant.Operator.OP_NOT_EQUAL);
                        case OperatorNode.Operator.OP_LESS: return _reduce_binary(p_node, op, Variant.Operator.OP_LESS);
                        case OperatorNode.Operator.OP_LESS_EQUAL: return _reduce_binary(p_node, op, Variant.Operator.OP_LESS_EQUAL);
                        case OperatorNode.Operator.OP_GREATER: return _reduce_binary(p_node, op, Variant.Operator.OP_GREATER);
                        case OperatorNode.Operator.OP_GREATER_EQUAL: return _reduce_binary(p_node, op, Variant.Operator.OP_GREATER_EQUAL);
                        case OperatorNode.Operator.OP_AND: return _reduce_binary(p_node, op, Variant.Operator.OP_AND);
                        case OperatorNode.Operator.OP_OR: return _reduce_binary(p_node, op, Variant.Operator.OP_OR);
                        case OperatorNode.Operator.OP_ADD: return _reduce_binary(p_node, op, Variant.Operator.OP_ADD);
                        case OperatorNode.Operator.OP_SUB: return _reduce_binary(p_node, op, Variant.Operator.OP_SUBSTRACT);
                        case OperatorNode.Operator.OP_MUL: return _reduce_binary(p_node, op, Variant.Operator.OP_MULTIPLY);
                        case OperatorNode.Operator.OP_DIV: return _reduce_binary(p_node, op, Variant.Operator.OP_DIVIDE);
                        case OperatorNode.Operator.OP_MOD: return _reduce_binary(p_node, op, Variant.Operator.OP_MODULE);
                        case OperatorNode.Operator.OP_SHIFT_LEFT: return _reduce_binary(p_node, op, Variant.Operator.OP_SHIFT_LEFT);
                        case OperatorNode.Operator.OP_SHIFT_RIGHT: return _reduce_binary(p_node, op, Variant.Operator.OP_SHIFT_RIGHT);
                        case OperatorNode.Operator.OP_BIT_AND: return _reduce_binary(p_node, op, Variant.Operator.OP_BIT_AND);
                        case OperatorNode.Operator.OP_BIT_OR: return _reduce_binary(p_node, op, Variant.Operator.OP_BIT_OR);
                        case OperatorNode.Operator.OP_BIT_XOR: return _reduce_binary(p_node, op, Variant.Operator.OP_BIT_XOR);
                        default: return op;
                    }
                }

                default:
                {
                    // Identifiers, constants, self, etc.: nothing to reduce.
                    return p_node;
                }

                #endregion
            }
        }

        /// <summary>
        /// Convenience wrapper: parses an expression under p_parent and then
        /// constant-folds it via _reduce_expression.
        /// </summary>
        /// <param name="p_parent">Node to attach the parsed expression to.</param>
        /// <param name="p_static">True when parsing inside a static function.</param>
        /// <param name="p_reduce_const">Require the result to fold to a constant.</param>
        /// <param name="p_allow_assign">Permit assignment operators in the expression.</param>
        /// <returns>The reduced expression node, or null when either stage fails
        /// or the parser error flag is set.</returns>
        private Node _parse_and_reduce_expression(Node p_parent, bool p_static, bool p_reduce_const = false, bool p_allow_assign = false)
        {
            Node parsed = _parse_expression(p_parent, p_static, p_allow_assign);
            if (parsed == null || _error_set)
                return null;

            Node reduced = _reduce_expression(parsed, p_reduce_const);
            return (reduced == null || _error_set) ? null : reduced;
        }

        /// <summary>
        /// Top-level parse entry point: resets parser state, parses the whole
        /// script as an implicit main class, and reports any tokenizer or
        /// parser error.
        /// </summary>
        /// <param name="p_base_path">Base path used to resolve relative resources.</param>
        /// <returns>Error.OK on success, Error.ERR_PARSE_ERROR otherwise.</returns>
        private Error _parse(string p_base_path)
        {
            _base_path = p_base_path;
            clear();

            // Every script is treated as the body of an implicit class.
            ClassNode main_class = (ClassNode)_alloc_node(Node.Type.TYPE_CLASS);
            main_class.initializer = (BlockNode)_alloc_node(Node.Type.TYPE_BLOCK);
            _parse_class(main_class);

            if (_tokenizer.get_token() == GDTokenizer.Token.TK_ERROR)
            {
                // A tokenizer error takes priority: clear any earlier parser
                // error so the tokenizer's message is the one reported.
                _error_set = false;
                _set_error("Parse Error: " + _tokenizer.get_token_error());
            }

            return _error_set ? Error.ERR_PARSE_ERROR : Error.OK;
        }
    }
}
