﻿// Copyright (c) 2009, Heiko Wundram (modelnine@h8me.eu).
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
// conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer
//   in the documentation and/or other materials provided with the distribution.
// * Neither the name of Heiko Wundram (h8me.eu) nor the names of its contributors may be used to endorse or promote products
//   derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
// BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
// SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

using System;
using System.Collections.Generic;
using System.Diagnostics;

namespace eu.h8me.Parsing.ParserLib {

    /// <summary>
    /// GLR parser implementation, which executes the state machine specified by a grammar on a sequence of input tokens returned
    /// by a lexer.
    /// </summary>
    public sealed class Parser {

        /// <summary>
        /// Delegate type for retrieving tokens from a lexer. The specified delegate is called exactly once for each lookahead,
        /// and must return the grammar bound EOI token when input runs out.
        /// </summary>
        /// <param name="gram">Grammar that the parser is executing.</param>
        /// <param name="parser">Parser object that is invoking the lexer.</param>
        /// <returns>New token to use as lookahead from input.</returns>
        public delegate Token LexerCallback(Grammar gram, Parser parser);

        // Data.
        private readonly Grammar _Grammar;
        private object _Tag;
        private List<State> _TopMost = new List<State>();
        private readonly ReduceQueue _PathQueue = new ReduceQueue();
        private bool _Started;
        private bool _Finished;
        private object _Result;

        /// <summary>
        /// Initialize a new, independent parser in the start state, bound to the specified grammar.
        /// </summary>
        /// <param name="gram">Grammar to bind the parser to.</param>
        /// <param name="start">Start state for the state machine.</param>
        internal Parser(Grammar gram, int start) {
            // Initialize state.
            _Grammar = gram;
            _TopMost.Add(new State(start));
        }

        /// <summary>
        /// Access the grammar object that the current parser is bound to.
        /// </summary>
        public Grammar Grammar {
            get {
                // Return grammar.
                return _Grammar;
            }
        }

        /// <summary>
        /// Access the tag that is bound to the current parser object. The tag for a parser object may not be reassigned when the
        /// parser has been started by calling Parse().
        /// </summary>
        public object Tag {
            get {
                // Return parser tag.
                return _Tag;
            }
            set {
                // Set parser tag. NOTE(review): InvalidOperationException would arguably be a better fit here, but existing
                // callers may catch ArgumentException, so the exception type is kept as-is.
                if( _Started )
                    throw new ArgumentException("Trying to set parser tag when started");
                _Tag = value;
            }
        }

        /// <summary>
        /// Return the current list of top most nodes in the parse graph.
        /// </summary>
        internal List<State> TopMost {
            get {
                // Return topmost states.
                return _TopMost;
            }
        }

        /// <summary>
        /// Return the queue of currently executing reduction paths.
        /// </summary>
        internal ReduceQueue PathQueue {
            get {
                // Return path queue.
                return _PathQueue;
            }
        }

        /// <summary>
        /// Check whether the parser has been started. A parser is started when Parse() is called.
        /// </summary>
        public bool IsStarted {
            get {
                // Return started state.
                return _Started;
            }
        }

        /// <summary>
        /// Check whether the parser is finished. A parser is finished when Parse() returns successfully (i.e., not with an
        /// exception).
        /// </summary>
        public bool IsFinished {
            get {
                // Return finished state.
                return _Finished;
            }
        }

        /// <summary>
        /// When a parser has completed successfully, the value bound to the starting token is available as Result.
        /// </summary>
        public object Result {
            get {
                // Return result if present.
                if( !_Finished )
                    throw new InvalidOperationException("Result is not available for Parser");
                return _Result;
            }
        }

        /// <summary>
        /// Start up the parsing of an input token stream delivered by the specified lexer callback. In case the parse completes
        /// successfully, the value of the starting token is returned. A parser may not be restarted; instead, create a new parser
        /// object from the grammar.
        /// </summary>
        /// <param name="lexer">Lexer callback to use.</param>
        /// <returns>On successful completion, the value of the starting token.</returns>
        /// <exception cref="ArgumentNullException">The lexer callback is null.</exception>
        /// <exception cref="InvalidOperationException">The parser has already been started.</exception>
        /// <exception cref="ArgumentException">The lexer returned null or a token from a different grammar.</exception>
        /// <exception cref="ParseFailedException">No valid parse tree could be constructed from the input.</exception>
        public object Parse(LexerCallback lexer) {
            // Check arguments and whether parser is started. BUGFIX: the single-string ArgumentNullException constructor
            // interprets its argument as the *parameter name*, not the message; use the (paramName, message) overload.
            if( lexer == null )
                throw new ArgumentNullException("lexer", "Invalid lexer callback passed to parser, is null");
            else if( _Started )
                throw new InvalidOperationException("Trying to restart started parser");
            _Started = true;

            // Loop while stack tops remain.
            StateAction acts;
            Token lex;
            HashSet<Symbol> expected = new HashSet<Symbol>();
            do {
                // Fetch a token from input; it must belong to the grammar this parser executes.
                lex = lexer(_Grammar, this);
                if( lex == null || lex.Symbol.Grammar != _Grammar )
                    throw new ArgumentException("Invalid lexer symbol received");

                // Loop over current stack nodes, enqueuing possible (firstlevel) reduce actions.
                foreach( State top in _TopMost )
                    if( _Grammar.States[top.StateNum].TryGetValue(lex.Symbol, out acts) )
                        foreach( ReduceAction act in acts.ReduceActions )
                            foreach( StateLink[] path in Paths(top, act.ProductionRule.Count, null) )
                                _PathQueue.Enqueue(new ReducePath(top, path, act, path.Length));

                // Process queued reduce actions. Executing a reduction may enqueue further reductions; the queue drains
                // them in priority order (shortest path / highest precedence first, see ReduceQueue).
                ReducePath redpath;
                while( ( redpath = _PathQueue.Dequeue() ) != null )
                    // Fetch a path, and if there are no more paths to dequeue, finish loop.
                    redpath.Execute(this, lex);

                // Perform shifts.
                List<State> newtopmost = new List<State>();
                expected.Clear();
                foreach( State top in _TopMost ) {
                    // Check whether this top has a shift.
                    Dictionary<Symbol, StateAction> stateacts = _Grammar.States[top.StateNum];
                    if( stateacts.TryGetValue(lex.Symbol, out acts) && acts.HasAction ) {
                        // There is a shift action here, try to find an existing state in topmost that references it.
                        StateLink newlink = new StateLink(lex, -1, top);
                        int newstate = acts.Action;
                        bool found = false;
                        foreach( State newtop in newtopmost ) {
                            // Check whether this state matches.
                            if( newtop.StateNum == newstate ) {
                                // Yes, Queue a link, and signal we created a link already.
                                newtop.DownLinks.Add(newlink);
                                found = true;
                                break;
                            }
                        }

                        // Check whether we found a state. If not, create one.
                        if( !found ) {
                            // Create a new node.
                            State newtop = new State(newstate);
                            newtop.DownLinks.Add(newlink);
                            newtopmost.Add(newtop);
                        }
                    } else
                        // No shift from this top; collect its terminals as "expected" for a possible error report.
                        foreach( KeyValuePair<Symbol, StateAction> act in stateacts )
                            // Attach as expected symbol if is a terminal.
                            if( act.Key.IsTerminal )
                                expected.Add(act.Key);
                }

                // Finally, switch topmost arrays.
                _TopMost = newtopmost;
            } while( _TopMost.Count > 0 && lex.Symbol != _Grammar.EOI );

            // Check whether we aborted because no states remained.
            Debug.Assert(_TopMost.Count <= 1, "Got more than one parse tree after parse finish, not possible?!");
            if( _TopMost.Count == 0 )
                throw new ParseFailedException("Parse failed; no valid parse tree found", expected, lex);

            // Okay, we terminated, fetch result (current stack contains S$, so the actual data we're interested in is two down).
            _Finished = true;
            _Result = _TopMost[0].DownLinks[0].Parent.DownLinks[0].Token.Data;

            // Return result to caller.
            return _Result;
        }

        /// <summary>
        /// Enumerate available graph paths from the specified top at the specified depth, possibly only including all paths
        /// containing a specified link.
        /// </summary>
        /// <param name="top">Top to start enumeration from.</param>
        /// <param name="depth">Length of path to enumerate.</param>
        /// <param name="link">Link which causes paths to be included in return when non-null.</param>
        /// <returns>List of paths matching the specified criteria that were found.</returns>
        internal List<StateLink[]> Paths(State top, int depth, StateLink link) {
            // Check whether this is a zero depth path; if so, return (one) empty path.
            List<StateLink[]> paths = new List<StateLink[]>();
            if( depth == 0 ) {
                // Check whether we should find a contained link; if not, add the empty path.
                if( link == null )
                    paths.Add(new StateLink[0]);
                return paths;
            }

            // Initialize state parse list from immediate downlinks of top.
            List<bool> contains = new List<bool>();
            foreach( StateLink downlink in top.DownLinks ) {
                // Create new link state list with only the one link.
                StateLink[] clone = new StateLink[depth];
                clone[0] = downlink;

                // Insert it into the path and update contains.
                paths.Add(clone);
                contains.Add(downlink == link);
            }

            // Loop until depth is reached, walking all states from previous installment and forking to new set.
            for( int i = 1; i < depth; ++i ) {
                // Store count of old paths.
                int maxoldpath = paths.Count;
                for( int j = 0; j < maxoldpath; ) {
                    // Check whether the topmost state in this element has multiple parents; fork a clone per extra parent.
                    List<StateLink> downlinks = paths[j][i-1].Parent.DownLinks;
                    for( int k = 1; k < downlinks.Count; ++k ) {
                        // Clone array and insert new parent.
                        StateLink[] clone = (StateLink[])paths[j].Clone();
                        clone[i] = downlinks[k];

                        // Add new path and create new contains.
                        paths.Add(clone);
                        contains.Add(contains[j] || downlinks[k] == link);
                    }

                    // Check whether element has any parents.
                    if( downlinks.Count > 0 ) {
                        // Set up new element and increment old path count.
                        paths[j][i] = downlinks[0];
                        contains[j] = contains[j] || downlinks[0] == link;
                        ++j;
                    } else {
                        // This path doesn't have enough elements (to satisfy depth), remove it (and decrement max old path count).
                        paths.RemoveAt(j);
                        contains.RemoveAt(j);
                        --maxoldpath;
                    }
                }
            }

            // Filter the links if we have a link set up. Note: ordering of surviving paths is not preserved by this
            // compaction (back elements are swapped forward); callers do not rely on path order.
            if( link != null ) {
                // Set up front and back counters to search for paths that match criteria.
                int i = 0, j = paths.Count;
                while( i < j ) {
                    // Check whether path i is present. If so, keep it.
                    if( contains[i] ) {
                        // Increment path count and keep path.
                        ++i;
                        continue;
                    }

                    do {
                        // Path isn't present; find possible path from end that's contained, and move that ahead (if found).
                        --j;
                    } while( !contains[j] && i < j );

                    // Check whether we found a new present path.
                    if( i < j ) {
                        // Yes, add it, and increment path count.
                        paths[i] = paths[j];
                        ++i;
                    }
                }

                // Remove trailing (now empty) paths.
                paths.RemoveRange(j, paths.Count - j);
            }

            // Return list of enumerated paths.
            return paths;
        }

    }

    /// <summary>
    /// Helper class storing a state of the parser.
    /// </summary>
    internal class State {

        // Immutable machine state number and the (mutable) set of graph downlinks hanging off this node.
        private readonly int _MachineState;
        private readonly List<StateLink> _Links = new List<StateLink>();

        /// <summary>
        /// Initialize a new empty state, bound to the specified state of the parser stack machine.
        /// </summary>
        /// <param name="state">Number of the stack machine state this node represents.</param>
        public State(int state) {
            _MachineState = state;
        }

        /// <summary>
        /// Fetch the current state number this state is in.
        /// </summary>
        public int StateNum {
            get { return _MachineState; }
        }

        /// <summary>
        /// Fetch the list of down links in the parse graph that are bound to this state.
        /// </summary>
        public List<StateLink> DownLinks {
            get { return _Links; }
        }

    }

    /// <summary>
    /// State link representing a downlink from a state to a parent state in the parse graph, carrying the token that was
    /// recognized along this edge and the production index that created it.
    /// </summary>
    internal class StateLink {

        // Token is mutable (it is replaced on ambiguity merges); production index and target state are fixed.
        private Token _LinkToken;
        private readonly int _ProductionIndex;
        private readonly State _Target;

        /// <summary>
        /// Initialize a new state link, bound to the specified token, and pointing to the target parent state.
        /// </summary>
        /// <param name="token">Token that is bound to this link.</param>
        /// <param name="prod">Production of the non-terminal symbol creating this link that reduced this token.</param>
        /// <param name="parent">Parent state the link points to.</param>
        public StateLink(Token token, int prod, State parent) {
            _LinkToken = token;
            _ProductionIndex = prod;
            _Target = parent;
        }

        /// <summary>
        /// Access the token that contains the data of this link.
        /// </summary>
        public Token Token {
            get { return _LinkToken; }
            set { _LinkToken = value; }
        }

        /// <summary>
        /// Access the production of the non-terminal bound to token that created this link.
        /// </summary>
        public int Production {
            get { return _ProductionIndex; }
        }

        /// <summary>
        /// Fetch the actual production rule of the symbol, looked up via the token's symbol and the stored production index.
        /// </summary>
        public Production ProductionRule {
            get { return _LinkToken.Symbol[_ProductionIndex]; }
        }

        /// <summary>
        /// Return the parent state that this link points to.
        /// </summary>
        public State Parent {
            get { return _Target; }
        }

    }

    /// <summary>
    /// Helper class to store a reduce path that has not yet been taken on the list of queued reduction paths.
    /// </summary>
    internal class ReducePath {

        // Data: the topmost graph node the path starts from, the downlink path itself (top to bottom), the reduce
        // action to run, and the path length used for priority ordering in the ReduceQueue.
        private readonly State _Top;
        private readonly StateLink[] _Path;
        private readonly ReduceAction _Action;
        private readonly int _Length;

        /// <summary>
        /// Initialize a new reduction possibility, using the specified path down from the top state, and calling the related
        /// reduction action.
        /// </summary>
        /// <param name="top">Top state to reduce from.</param>
        /// <param name="path">Path to reduce along.</param>
        /// <param name="act">Reduce action to execute.</param>
        /// <param name="len">Length of the reduction path.</param>
        public ReducePath(State top, StateLink[] path, ReduceAction act, int len) {
            // Store data.
            _Top = top;
            _Path = path;
            _Action = act;
            _Length = len;
        }

        /// <summary>
        /// Execute the queued reduction path, executing the reduction action and creating new nodes as appropriate.
        /// Runs the production's semantic action, builds the replacement token, attaches it below the goto target
        /// state, and — when the graph changed — enqueues any newly possible reductions on the parser's path queue.
        /// </summary>
        /// <param name="parser">Parser executing the reduction.</param>
        /// <param name="lex">Lexeme (lookahead) that caused the reduction to be executed.</param>
        public void Execute(Parser parser, Token lex) {
            // Collect symbols on this path (which are present in the state link path, in reverse order).
            Token[] symbols = new Token[_Path.Length];
            for( int i = 0; i < _Path.Length; ++i )
                symbols[i] = _Path[_Path.Length - i - 1].Token;

            // Execute semantic action associated with production. A FailReductionException thrown by the user
            // action is a deliberate veto of this reduction, not an error.
            object data;
            try {
                data = _Action.ProductionRule.Execute(parser, _Action.Symbol, symbols);
            } catch( FailReductionException ) {
                // Ignore this reduction possibility (as requested by user).
                return;
            }

            // Initialize new token for the reduced non-terminal.
            Token newtoken = new Token(_Action.Symbol, data);
            if( symbols.Length > 0 )
                // We have a defined matching position for symbol; use start/end of tree).
                // NOTE(review): assumes Position supports operator+ to combine the first/last spans — confirm in Token.
                newtoken.MatchAt(symbols[0].Position + symbols[symbols.Length - 1].Position);
            else
                // We don't have a defined matching position for the token. Use start of lookahead (basically, it's just before that).
                newtoken.MatchAt(lex.Position);

            // Fetch topmost parent and get goto action associated with the reduction. For an epsilon reduction
            // (empty path) the reduce starts and ends at _Top itself.
            State bottom = _Path.Length > 0 ? _Path[_Path.Length - 1].Parent : _Top;
            StateLink newlink = new StateLink(newtoken, _Action.Production, bottom);
            int newstate = parser.Grammar.States[bottom.StateNum][_Action.Symbol].Action;

            // Check whether one of the currently topmost nodes already is in newstate.
            StateAction acts;
            foreach( State matchtop in parser.TopMost ) {
                if( matchtop.StateNum == newstate ) {
                    // This topmost node is in newstate; check whether it already has a link to bottom.
                    foreach( StateLink downlink in matchtop.DownLinks ) {
                        if( downlink.Parent == bottom ) {
                            // Yes, merge the two values and insert the new value. We're done with this reduction
                            // (this is the GLR ambiguity merge: the symbol decides how the two parses combine).
                            downlink.Token = new Token(_Action.Symbol, _Action.Symbol.Merge(parser, downlink, newlink, lex));
                            return;
                        }
                    }

                    // When we get here, no previous link between the two nodes exists; create one.
                    matchtop.DownLinks.Add(newlink);

                    // Find all paths that go through the newly created link, and add them. Restricting Paths() to
                    // newlink avoids re-enqueuing reductions that were already considered before this link existed.
                    foreach( State top in parser.TopMost )
                        if( parser.Grammar.States[top.StateNum].TryGetValue(lex.Symbol, out acts) )
                            foreach( ReduceAction act in acts.ReduceActions )
                                foreach( StateLink[] path in parser.Paths(top, act.ProductionRule.Count, newlink) )
                                    parser.PathQueue.Enqueue(new ReducePath(top, path, act, path.Length + _Length - 1));

                    // Done.
                    return;
                }
            }

            // When we get here, no current state was in newstate, so we need to create a new state for that.
            State newtop = new State(newstate);
            newtop.DownLinks.Add(newlink);
            parser.TopMost.Add(newtop);

            // Enqueue all new reduction paths that have now been created by the new top. Epsilon paths (length 0)
            // keep priority 0 rather than inheriting this path's length.
            if( parser.Grammar.States[newstate].TryGetValue(lex.Symbol, out acts) )
                foreach( ReduceAction act in acts.ReduceActions )
                    foreach( StateLink[] path in parser.Paths(newtop, act.ProductionRule.Count, null) )
                        parser.PathQueue.Enqueue(new ReducePath(newtop, path, act, path.Length > 0 ? path.Length + _Length - 1 : 0));
        }

        /// <summary>
        /// Fetch the length of the reduction path.
        /// </summary>
        public int Length {
            get {
                // Return rule token count.
                return _Length;
            }
        }

        /// <summary>
        /// Fetch the precedence of this reduction path as defined by the symbol precedence of the reducing symbol.
        /// </summary>
        public int Precedence {
            get {
                // Return symbol precedence.
                return _Action.Symbol.ReducePrecedence;
            }
        }

    }

    /// <summary>
    /// Helper class to store an ordered list of reduction paths and to enqueue and dequeue new paths. Paths are ordered by
    /// length (increasing length, less priority) and reduction precedence (increasing reduction priority, less priority).
    /// </summary>
    internal class ReduceQueue {

        // Bucket list indexed by path length; each bucket maps precedence -> LIFO stack of paths.
        private readonly List<SortedList<int, Stack<ReducePath>>> _Reductions = new List<SortedList<int, Stack<ReducePath>>>();

        /// <summary>
        /// Initialize a new, empty reduction path.
        /// </summary>
        public ReduceQueue() {
            // No specific construction.
        }

        /// <summary>
        /// Enqueue the specified path to the appropriate bucket in the reduction path queue.
        /// </summary>
        /// <param name="path">Path to enqueue.</param>
        public void Enqueue(ReducePath path) {
            // Grow the bucket list until the slot for this path length exists.
            while( _Reductions.Count <= path.Length )
                _Reductions.Add(new SortedList<int, Stack<ReducePath>>());

            // Locate the stack for this precedence level, creating it lazily on first use.
            SortedList<int, Stack<ReducePath>> bucket = _Reductions[path.Length];
            Stack<ReducePath> stack;
            if( !bucket.TryGetValue(path.Precedence, out stack) ) {
                stack = new Stack<ReducePath>();
                bucket.Add(path.Precedence, stack);
            }

            // Push the new path onto its stack.
            stack.Push(path);
        }

        /// <summary>
        /// Fetch the current most prioritized available reduction path and remove it from the reduction queue. Returns null
        /// when no more reduction paths are available.
        /// </summary>
        /// <returns>Reduction path to execute next or null when no more paths are available.</returns>
        public ReducePath Dequeue() {
            // Scan buckets in order of increasing path length and return from the first non-empty one.
            foreach( SortedList<int, Stack<ReducePath>> bucket in _Reductions ) {
                int count = bucket.Count;
                if( count == 0 )
                    continue;

                // Take from the highest precedence key (SortedList keys are ascending, so the last entry).
                Stack<ReducePath> stack = bucket.Values[count - 1];
                ReducePath next = stack.Pop();

                // Drop the precedence key once its stack is exhausted.
                if( stack.Count == 0 )
                    bucket.RemoveAt(count - 1);
                return next;
            }

            // All buckets empty.
            return null;
        }

    }

}
