using System;
using System.Collections; 
using Parseq.Core; 
using Parseq.Util; 

namespace Parseq.Compiler
{
    /// <summary>
    /// Adapts a rule-level parser so that it consumes <c>Token</c>s produced by a
    /// token-level parser, instead of raw characters. Tokens whose names appear in
    /// <c>skipList</c> (e.g. whitespace, comments) are discarded by the token reader.
    /// </summary>
    public class TokenReaderParser : BaseParser
    {
        private readonly BaseParser inner;       // rule-level parser, runs over the token stream
        private readonly BaseParser tokenParser; // token-level parser, lexes characters into Tokens
        private readonly Hashtable skipList;     // token names to silently skip (name -> name)

        public TokenReaderParser(BaseParser inner, BaseParser TokenParser2, Hashtable skipList)
            : base()
        {
            this.inner = inner;
            this.tokenParser = TokenParser2;
            this.skipList = skipList;
        }

        /// <summary>
        /// Wraps the incoming reader in a <c>TokenReader2</c> and delegates the
        /// parse to the inner rule parser.
        /// </summary>
        public override Result InnerParse(IReader reader, Location pos)
        {
            // NOTE(review): the cast assumes callers always pass a CharReader here —
            // an InvalidCastException results otherwise. Confirm against call sites.
            TokenReader2 tokenReader = new TokenReader2(this.tokenParser, this.skipList, (CharReader)reader);
            return this.inner.Parse(tokenReader, pos);
        }
    }

    /* The recursive definition of a parser is going to be difficult to come up with — let's give it a shot!
     * rule # 1 - the builder is itself described by a grammar; i.e. if we are going to reuse code, we must ensure the code can be constructed appropriately.
     * rule # 2 - it's difficult to do this with statics; it's better to create an instance.
     * rule # 3 - creating an instance means we'll have to build the code from basic rules.
     * rule # 4 - that means bootstrapping with some basic parsers that are fundamental to ordinary parsing.
     * 
     * Here is what Builder's API looks like:
     * 
     * Builder b = Builder.Instance; // there is exactly one builder (singleton).
     * -> constructing it initializes the basic bootstrap rules.
     * 
     */
    
	public class Builder
	{
        // every token has a single parse.
        // every rule has a multiple parse - i.e. a choiceParser. hence we can add additional unresolved symbol to the parser itself. 
        private ChoiceParser tokenGrammar = Combinator.Choice();
        private Hashtable skipList = new Hashtable(); // we should be able to parse the skip list... hmm... 
        private ChoiceParser ruleGrammar = Combinator.Choice();
        private TokenCombinator tokenCombinator = new TokenCombinator();
        private SymbolTable symbolTable = new SymbolTable(); // this combines quite a few things! 

		private Builder ()
		{
            this.init();
		}

        private static Builder inst;

        static Builder()
        {
            inst = new Builder(); 
        }

        public static Builder Instance
        {
            get { return inst; }
        }

        public ChoiceParser Tokens
        {
            get { return this.tokenGrammar; }
        }

        public BaseParser Rules
        {
            get { return new TokenReaderParser(this.ruleGrammar, this.tokenGrammar, this.skipList); }
        }

        public void AddToken(string name, string exp)
        {
            // this.symbolTable.Add(
        }

        public void AddRule(string name, string exp)
        {

        }

        private void init()
        {
            this.initTokenParser2(); 
            this.initRuleParser(); 
        }

        private const string TK_WHITESPACE = "WHITESPACE";
        private const string TK_INLINE_COMMENT = "INLINE_COMMENT";
        private const string TK_BLOCK_COMMENT = "BLOCK_COMMENT";
        private const string TK_LITERAL = "LITERAL";
        private const string TK_TOKEN_IDENTIFIER = "TOKEN_IDENTIFIER";
        private const string TK_RULE_IDENTIFIER = "RULE_IDENTIFIER"; 
        private const string TK_DEFINE = "DEFINE"; 
        private const string TK_ONE_MANY = "ONE_MANY"; 
        private const string TK_ZERO_MANY = "ZERO_MANY"; 
        private const string TK_ZERO_ONE = "ZERO_ONE"; 
        private const string TK_TERMINATOR = "TERMINATOR";
        private const string TK_OPEN_PAREN = "OPEN_PAREN";
        private const string TK_CLOSE_PAREN = "CLOSE_PAREN";
        private const string TK_OPEN_BRACKET = "OPEN_BRACKET";
        private const string TK_CLOSE_BRACKET = "CLOSE_BRACKET";
        private const string TK_OPEN_SQ_BRACKET = "OPEN_SQ_BRACKET";
        private const string TK_CLOSE_SQ_BRACKET = "CLOSE_SQ_BRACKET";
        private const string TK_NEGATE = "NEGATE";
        private const string TK_CHOICE = "CHOICE";
        private const string TK_DELIM = "DELIM"; 

        private void initTokenParser2()
        {
            //**********************************************************************
            // handling of whitespaces 
            BaseParser whitespace = Combinator.CharIn(' ', '\r', '\n', '\t', '\f');
            BaseParser whitespaces = Combinator.OneMany(whitespace).SetTransformArray(delegate(object[] result)
            {
                return new Token(TK_WHITESPACE, Util.String.ObjectCharsToString(result));
            });
            this.addTokenParser2(TK_WHITESPACE, whitespaces, true);

            //**********************************************************************
            // handling of inline comments. 
            BaseParser newline = Combinator.Choice(Combinator.Chars("\r\n"), Combinator.Chars("\r"), Combinator.Chars("\n")); 
            BaseParser inlineCommentChars = Combinator.ZeroMany(Combinator.CharNotIn('\r', '\n'))
                .SetTransformArray(delegate(object[] result) {
                    return Util.String.ObjectCharsToString(result);
                }); 
            BaseParser inlineComment = Combinator.Sequence(Combinator.Chars("//"), inlineCommentChars, Combinator.ZeroOne(newline, "\r\n"))
                .SetTransformArray(delegate(object[] result) {
                    return new Token(TK_INLINE_COMMENT, result[1]); 
                });
            this.addTokenParser2(TK_INLINE_COMMENT, inlineComment, true);

            //**********************************************************************
            // handling of block comments. this one is interesting... basically we'll take a character if it's not followed by another character. 
            // not sure how to write this just yet... hmm... (this requires a peek). 
            // so a char not followed by another char is required to do the test. 
            //BaseParser blockCommentChars = Combinator.ZeroMany(Combinator.Choice(Combinator.CharNotEquals('*'), Combinator.CharNotFollowed('*', '/')))
            BaseParser blockCommentChars = Combinator.ZeroMany(Combinator.Choice(Combinator.CharNotEquals('*')
                , Combinator.CharFollowedBy(Combinator.CharEquals('*'), Combinator.CharNotEquals('/'))))
            .SetTransformArray(delegate(object[] result)
            {
                return Util.String.ObjectCharsToString(result); 
            });

            BaseParser blockComment = Combinator.Sequence(Combinator.Chars("/*"), blockCommentChars, Combinator.ZeroOne(Combinator.Chars("*/"), "*/"))
                .SetTransformArray(delegate(object[] result)
            {
                return new Token(TK_BLOCK_COMMENT, result[1]);
            });
            this.addTokenParser2(TK_BLOCK_COMMENT, blockComment, true); 

            //**********************************************************************
            // handling of literal strings = single quoted string. 
            BaseParser escapedChar = Combinator.Sequence(Combinator.CharEquals('\\'), Combinator.CharAny())
                .SetTransformArray(delegate(object[] result)
                {
                    return result[1]; 
                }); 
            BaseParser notSQChar = Combinator.CharNotEquals('\'');
            BaseParser SQChar 
                = Combinator.Choice(Combinator.MapCharsTo("\\r", '\r')
                    , Combinator.MapCharsTo("\\n", '\n')
                    , Combinator.MapCharsTo("\\s", ' ')
                    , Combinator.MapCharsTo("\\t", '\t')
                    , Combinator.MapCharsTo("\\f", '\f')
                    , Combinator.MapCharsTo("\\b", '\b')
                    , escapedChar
                    , notSQChar);
            BaseParser SQChars = Combinator.ZeroMany(SQChar)
                .SetTransformArray(delegate(object[] result) {
                    return Util.String.ObjectCharsToString(result);
                });
            BaseParser SQ = Combinator.CharEquals('\'');
            BaseParser SQString = Combinator.Sequence(SQ, SQChars, SQ)
                .SetTransformArray(delegate(object[] result)
            {
                return new Token(TK_LITERAL, result[1]);
            });
            this.addTokenParser2(TK_LITERAL, SQString); 

            //**********************************************************************
            // handling of identifiers
            BaseParser tokenIdentifier = Combinator.Sequence(Combinator.CharIsUpperAlpha()
                , Combinator.ZeroMany(Combinator.Choice(Combinator.CharIsUpperAlpha(), Combinator.CharIsDigit(), Combinator.CharEquals('_'))))
                .SetTransformArray(delegate(object[] result)
                {
                    return new Token(TK_TOKEN_IDENTIFIER, Util.String.ObjectCharsToString(result)); 
                });
            this.addTokenParser2(TK_TOKEN_IDENTIFIER, tokenIdentifier); 

            BaseParser ruleIdentifier = Combinator.Sequence(Combinator.CharIsLowerAlpha()
                , Combinator.ZeroMany(Combinator.Choice(Combinator.CharIsAlphaNumeric(), Combinator.CharEquals('_'))))
                .SetTransformArray(delegate(object[] result)
                {
                    return new Token(TK_RULE_IDENTIFIER, Util.String.ObjectCharsToString(result)); 
                });
            this.addTokenParser2(TK_RULE_IDENTIFIER, ruleIdentifier); 

            //**********************************************************************
            // special characters. 
            this.addTokenChar(TK_DEFINE, ':');
            this.addTokenChar(TK_ONE_MANY, '+');
            this.addTokenChar(TK_ZERO_MANY, '*');
            this.addTokenChar(TK_ZERO_ONE, '?');
            this.addTokenChar(TK_TERMINATOR, ';');
            this.addTokenChar(TK_OPEN_PAREN, '(');
            this.addTokenChar(TK_CLOSE_PAREN, ')');
            this.addTokenChar(TK_OPEN_BRACKET, '{');
            this.addTokenChar(TK_CLOSE_BRACKET, '}');
            this.addTokenChar(TK_OPEN_SQ_BRACKET, '[');
            this.addTokenChar(TK_CLOSE_SQ_BRACKET, ']');
            this.addTokenChar(TK_NEGATE, '~');
            this.addTokenChar(TK_CHOICE, '|');
            this.addTokenChar(TK_DELIM, ','); 
        }

        private void addTokenChar(string name, char c)
        {
            this.addTokenChar(name, c, false); 
        }

        private void addTokenChar(string name, char c, bool skip)
        {
            this.addTokenParser2(name, Combinator.CharEquals(c)
                .SetTransform(delegate(object result)
                {
                    return new Token(name, result); 
                })
                , skip); 
        }

        private void addTokenParser2(string name, BaseParser parser)
        {
            this.addTokenParser2(name, parser, false); 
        }

        private void addTokenParser2(string name, BaseParser parser, bool skip)
        {
            this.tokenGrammar.Add(parser);
            this.tokenCombinator.AddToken(name); 
            if (skip)
                this.skipList.Add(name, name); 
        }

        private void initRuleParser()
        {
            // perhaps it's easier if we just build out a set of Tokens! 
            // time to build up expressions that are based on tokens!.
            // first one - what if we come across a literal or a token? 
            // a token can look like the following: 
            // a sequence of literal or identifiers that makes up the higher 
            // BaseParser tokenTerm = this.tokenCombinator.Choice(TK_LITERAL, TK_TOKEN_IDENTIFIER); 

            // maybe the combinator concept do not work well. 
            BaseParser literalExp = this.tokenCombinator.Token(TK_LITERAL)
                .SetTransform(delegate(object result)
            {
                return new LiteralExp((string)result);
            });

            BaseParser tokenIDExp = this.tokenCombinator.Token(TK_TOKEN_IDENTIFIER)
                .SetTransform(delegate(object result)
            {
                return new IDExp((string)result); // why am I stepping through the TokenParser2 object twice? 
            });

            // this part is interesting... the choice is either driven by parser top-down or driven by the reader bottom up... hmmm!!!! 
            // doesn't seem like the Token-based idea is the right idea! 
            // okay - this means A LOT OF STRIPPING and CHANGIN OF THE CODE. 
            ChoiceParser tokenTermExp = Combinator.Choice(tokenIDExp, literalExp); // this choice broke down with regards to TokenParser2 hmm!

            BaseParser tokenOneDefExp = Combinator.OneMany(tokenTermExp)
                .SetTransformArray(delegate(object[] result)
                {
                    // there are a set of inner Exps. 
                    return new SequenceExp(Array.ConvertAll<object, IExp>(result, delegate(object e) { return (IExp)e; }));
                });
            
            //BaseParser tokenDefExp = Combinator.Delimited(tokenOneDefExp, this.tokenCombinator.Token(TK_CHOICE))
            //    .SetTransformArray(delegate(object[] result) {
            //        // what we have in this case is something that needs to be added together... 
            //        return new ChoiceExp((IExp[])BaseParser.ObjectArrayJoin(result, typeof(IExp)));
            //    });

            BaseParser tokenExp = Combinator.Sequence(tokenIDExp, this.tokenCombinator.Token(TK_DEFINE), tokenOneDefExp 
                    , Combinator.ZeroOne(this.tokenCombinator.Token(TK_TERMINATOR), new Token(TK_TERMINATOR, ';')))
                .SetTransformArray(delegate(object[] result)
                {
                    return new TokenExp((IDExp)result[0], (SequenceExp)result[2]);
                });

            BaseParser nestedTokenExp = Combinator.Sequence(this.tokenCombinator.Token(TK_OPEN_PAREN)
                , Combinator.Delimited(tokenOneDefExp, this.tokenCombinator.Token(TK_CHOICE))
                    .SetTransformArray(delegate(object[] result)
            {
                return BaseParser.ObjectArrayJoin(result, typeof(IExp));
            })
                , this.tokenCombinator.Token(TK_CLOSE_PAREN))
                .SetTransformArray(delegate(object[] result)
            {
                return new ChoiceExp((IExp[])result[1]);
            });

            BaseParser repeatableExp = Combinator.Choice(nestedTokenExp, tokenIDExp, literalExp);

            BaseParser zeroManyTokenTermExp = Combinator.Sequence(repeatableExp, this.tokenCombinator.Token(TK_ZERO_MANY))
                .SetTransformArray(delegate(object[] result) {
                    return new RepeatExp((IExp)result[0], 0, -1); 
                });

            BaseParser zeroOneTokenTermExp = Combinator.Sequence(repeatableExp, this.tokenCombinator.Token(TK_ZERO_ONE))
                .SetTransformArray(delegate(object[] result)
            {
                return new RepeatExp((IExp)result[0], 0, 1); 
            });

            BaseParser oneManyTokenTermExp = Combinator.Sequence(repeatableExp, this.tokenCombinator.Token(TK_ONE_MANY))
                .SetTransformArray(delegate(object[] result)
            {
                return new RepeatExp((IExp)result[0], 1, -1); 
            });

            tokenTermExp.Prepend(zeroManyTokenTermExp);
            tokenTermExp.Prepend(zeroOneTokenTermExp);
            tokenTermExp.Prepend(oneManyTokenTermExp);

            // up to the above point what we have added are the token parsing rule. 
            // from here on out we are going to deal with the parser rules. 
            this.ruleGrammar.Add(tokenExp); 
        }

        public IExp ParseString(string exp)
        {
            // there might be multiple
            // time to think about how to parse multiple different expressions! 
            // also it's time to build out the symbol table appropriately... 
            // symbol table is only going to be resolved when 
            using (CharReader reader = new CharReader(Util.String.StringToStream(exp))) {
                IExp result = this.ParseOne(reader, exp);
                this.symbolTable.Add((TokenExp)result); 
                return result; 
            }
        }

        public IExp ParseOne(CharReader reader, string exp)
        {
            Result result = this.Rules.Parse(reader, new Location(0));
            if (Result.Succeeded(result))
            {
                // we have an exp that's parsed...
                if (result.Inner is TokenExp)
                {
                    string ID = (string)((TokenExp)result.Inner).ID;
                    // ChoiceExp EXP = (ChoiceExp)((TokenExp)result.Inner).Inner;
                    // determine to see if we already have the definition. 
                    // if we already have the definition then we will determine how to parse it here... 
                    return (IExp)result.Inner; 
                }
                else
                {
                    throw new Exception(string.Format("invalid expression: {0}", exp));
                }
            }
            else
            {
                throw new Exception(string.Format("invalid expresison: {0}", exp));
            }
        }
    }
}

