﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace GameEngine.PDNParser
{
    /// <summary>
    /// Lexical analyzer for PDN 3.0 (Portable Draughts Notation).
    /// Converts the flat stream of scanner tokens into higher-level lexical
    /// tokens (move numbers, squares, separators, comments, game results,
    /// NAGs, ...) that the parser consumes. Malformed input is generally
    /// tolerated silently rather than reported (error throwing is disabled).
    /// </summary>
    public class PDNLexer
    {
        // Single-character markers recognized by the lexer.
        private const char keyMoveSeparator = '-';
        private const char keyLParen = '(';
        private const char keyRParen = ')';
        private const char keyLBracket = '[';
        private const char keyRBracket = ']';
        private const char keyAsterix = '*';
        private const char keyLComment = '{';
        private const char keyRComment = '}';
        private const char keyString = '\"';
        private const char keySetup = '/';
        private const char keyNAG = '$';
        private const char keyCapture = 'x';
        private const char keyDot = '.';
        private const char keyQuestionMark = '?';
        private const char keyExclamationMark = '!';
        private const char keyColon = ':';

        // Output accumulated by AddLexToken during Parse.
        private readonly List<LexToken> _lstLexTokenList;

        /// <summary>
        /// Lexical tokens produced by the last call to <see cref="Parse"/>.
        /// </summary>
        public List<LexToken> LexTokenList
        {
            get
            {
                return _lstLexTokenList;
            }
        }

        // Localizer kept for error messages (throwing is currently disabled).
        private readonly ILocalizer _localizer;

        // Maps each game-result token type to its textual PDN form,
        // e.g. Draw1 -> "1/2-1/2". Filled once by InitGameEndings.
        private readonly Dictionary<enumLexToken, string> _dicGameEndings;

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="localizer">Provider of localized error strings.</param>
        public PDNLexer(ILocalizer localizer)
        {
            _lstLexTokenList = new List<LexToken>();
            _localizer = localizer;
            _dicGameEndings = new Dictionary<enumLexToken, string>();

            InitGameEndings();
        }

        /// <summary>
        /// Main parsing method converting scanner tokens to lexical tokens.
        /// Always appends a trailing EOF token.
        /// </summary>
        /// <param name="lstScannerItemList">Scanner tokens to convert.</param>
        /// <returns>The accumulated list of lexical tokens.</returns>
        public List<LexToken> Parse(List<ScannerToken> lstScannerItemList)
        {
            // init stream
            ScannerTokenListStream stream = new ScannerTokenListStream(lstScannerItemList);

            while (stream.EOF == false)
            {
                ScannerToken token = stream.Read();
                ParseOneScannerToken(token, stream);
            }

            AddLexToken(enumLexToken.EOF, string.Empty, 0);

            return _lstLexTokenList;
        }

        /// <summary>
        /// Parses one scanner token, possibly consuming further tokens from
        /// the stream to build multi-token lexical elements.
        /// </summary>
        private void ParseOneScannerToken(ScannerToken token, ScannerTokenListStream stream)
        {
        // Re-entry point used when a single scanner string contains several
        // lexical elements (e.g. "x12" = capture separator + square).
        startAgain: ;

            // is the first scanner token of the lexical element a sign?
            if (token.enumScannerToken == enumScannerToken.Sign)
            {
                // check for single-character tokens
                switch (token.cSign)
                {
                    case keyColon:
                        AddLexToken(enumLexToken.CaptureSeparator, string.Empty, token.lLineNumber);
                        return;
                    case keyMoveSeparator:
                        AddLexToken(enumLexToken.MoveSeparator, string.Empty, token.lLineNumber);
                        return;
                    case keyRParen:
                        AddLexToken(enumLexToken.RParen, string.Empty, token.lLineNumber);
                        return;
                    case keyLBracket:
                        AddLexToken(enumLexToken.LBracket, string.Empty, token.lLineNumber);
                        return;
                    case keyRBracket:
                        AddLexToken(enumLexToken.RBracket, string.Empty, token.lLineNumber);
                        return;
                    case keyAsterix:
                        AddLexToken(enumLexToken.Asterix, string.Empty, token.lLineNumber);
                        return;
                    case keyDot:
                        // A run of three dots forms an ellipsis ("..."); a lone
                        // dot (or two dots) in sign position yields no token.
                        // Fix: guard every Peek() with an EOF check — a trailing
                        // dot at the end of input previously indexed past the
                        // token list and crashed.
                        if (stream.EOF)
                            break;
                        ScannerToken peekToken = stream.Peek();
                        if (peekToken.enumScannerToken == enumScannerToken.Sign && peekToken.cSign == keyDot)
                        {
                            stream.Read();
                            if (stream.EOF)
                                break;
                            peekToken = stream.Peek();
                            if (peekToken.enumScannerToken == enumScannerToken.Sign && peekToken.cSign == keyDot)
                            {
                                stream.Read();
                                AddLexToken(enumLexToken.Ellipses, string.Empty, token.lLineNumber);
                            }
                        }
                        break;
                }

                // now check for combined tokens

                if (token.cSign == keyLParen || token.cSign == keyQuestionMark || token.cSign == keyExclamationMark)
                {
                    // Fix: if '(' / '?' / '!' is the very last scanner token,
                    // Peek() used to crash. Emit the token the non-EOF path
                    // would have produced and stop.
                    if (stream.EOF)
                    {
                        if (token.cSign == keyLParen)
                            AddLexToken(enumLexToken.LParen, string.Empty, token.lLineNumber);
                        else
                            AddLexToken(enumLexToken.MoveStrength, token.cSign.ToString(), token.lLineNumber);
                        return;
                    }

                    ScannerToken nextToken = stream.Peek();
                    // '(' not followed by '?' or '!' is a plain variation opener.
                    if (token.cSign == keyLParen && (nextToken.cSign != keyQuestionMark && nextToken.cSign != keyExclamationMark))
                    {
                        AddLexToken(enumLexToken.LParen, string.Empty, token.lLineNumber);
                        return;
                    }

                    // Collect a move-strength annotation such as "?", "!!", "(?)".
                    // NOTE(review): accepting ')' in this loop supports forms like
                    // "(?)" but does not stop after the ')' — confirm against the
                    // PDN grammar whether trailing '?'/'!' signs should be merged.
                    string sMoveStrength = token.cSign.ToString();
                    while (nextToken.cSign == keyRParen || nextToken.cSign == keyQuestionMark || nextToken.cSign == keyExclamationMark)
                    {
                        sMoveStrength += nextToken.cSign;
                        stream.Read();
                        if (stream.EOF)
                            break;
                        nextToken = stream.Peek();
                    }

                    AddLexToken(enumLexToken.MoveStrength, sMoveStrength, token.lLineNumber);
                    return;
                }

                // check comment: "{ ... }"
                if (token.cSign == keyLComment)
                {
                    ParseTextBlock(stream, enumLexToken.Comment, keyRComment);
                    return;
                }
                // check string: '"' ... '"'
                if (token.cSign == keyString)
                {
                    ParseTextBlock(stream, enumLexToken.String, keyString);
                    return;
                }
                // check setup block: "/ ... /"
                if (token.cSign == keySetup)
                {
                    ParseTextBlock(stream, enumLexToken.Setup, keySetup);
                    return;
                }
                // check NAG: "$" followed by a number
                if (token.cSign == keyNAG)
                {
                    ParseNAG(stream);
                }

            }
            else if (token.enumScannerToken == enumScannerToken.String)
            {
                // check string values: a leading 'x' is a capture separator,
                // the rest of the string (if any) is re-lexed from the top
                if (token.sString[0] == keyCapture)
                {
                    AddLexToken(enumLexToken.CaptureSeparator, string.Empty, token.lLineNumber);
                    if (token.sString.Length > 1)
                    {
                        token.sString = token.sString.Substring(1);
                        goto startAgain;
                    }

                    return;
                }
                // check for chess coordinates: lowercase letter + digit ("a1");
                // any trailing text is re-lexed from the top.
                // Fix: length guard — a single lowercase character used to throw
                // IndexOutOfRangeException on sString[1].
                if (token.sString.Length >= 2 && char.IsLower(token.sString[0]) && char.IsNumber(token.sString[1]))
                {
                    AddLexToken(enumLexToken.AlphaSquare, token.sString.Substring(0, 2), token.lLineNumber);
                    if (token.sString.Length > 2)
                    {
                        token.sString = token.sString.Substring(2);
                        goto startAgain;
                    }
                    return;
                }
                // check for identifier: first letter uppercase
                if (char.IsUpper(token.sString[0]))
                {
                    AddLexToken(enumLexToken.Identifier, token.sString, token.lLineNumber);
                    return;
                }
                // anything else is silently ignored
            }
            else if (token.enumScannerToken == enumScannerToken.Number)
            {
                // first try game endings ("1-0", "1/2-1/2", ...); push the
                // number back so ParseGameEnding sees the whole sequence
                stream.Unread();
                enumLexToken? tokenGameResult = ParseGameEnding(stream);
                if (tokenGameResult.HasValue)
                {
                    AddLexToken(tokenGameResult.Value, string.Empty, token.lLineNumber);
                    return;
                }
                stream.Read();

                ScannerToken? nextToken = null;
                if (stream.EOF == false)
                     nextToken = stream.Peek();

                if (nextToken != null && nextToken.Value.enumScannerToken == enumScannerToken.Sign
                        && nextToken.Value.cSign == keyDot)
                {
                    // number followed by dot(s) is a move index, e.g. "12." or
                    // "12..." (the latter also emits an ellipsis)
                    string sMoveIndex = token.sString;
                    int dotCount = 0;
                    // read all dots; only the first one becomes part of the text
                    while (stream.EOF == false)
                    {
                        nextToken = stream.Read();
                        if (nextToken.Value.enumScannerToken == enumScannerToken.Sign
                            && nextToken.Value.cSign == keyDot)
                        {
                            if (dotCount == 0)
                                sMoveIndex += nextToken.Value.cSign;
                            dotCount++;
                        }
                        else
                        {
                            stream.Unread();
                            AddLexToken(enumLexToken.MoveNumber, sMoveIndex, token.lLineNumber);
                            if (dotCount > 1)
                            {
                                AddLexToken(enumLexToken.Ellipses, string.Empty, token.lLineNumber);
                            }
                            return;
                        }
                    }

                    // a move index at the very end of input ("12." then EOF) is
                    // dropped; the unexpected-end-of-file error is disabled
                }
                else
                {
                    // plain number: numeric square designation
                    AddLexToken(enumLexToken.NumSquare, token.sString, token.lLineNumber);
                }
            }
        }

        /// <summary>
        /// Parses a NAG value: "$" (already consumed) followed by a number.
        /// </summary>
        private void ParseNAG(ScannerTokenListStream stream)
        {
            // Fix: a trailing '$' with nothing after it used to fall through
            // the (disabled) EOF check and crash in Read(); it is now silently
            // ignored, consistent with the lexer's other tolerance of bad input.
            if (stream.EOF)
            {
                return;
            }

            ScannerToken token = stream.Read();
            if (token.enumScannerToken != enumScannerToken.Number)
            {
                // malformed NAG ('$' not followed by a number) is tolerated:
                // the bad-NAG error is disabled and the token is emitted as-is
            }

            AddLexToken(enumLexToken.Nag, token.sString, token.lLineNumber);
        }

        /// <summary>
        /// Parses a delimited text block (comment, string or setup), joining
        /// all scanner tokens up to the stop character into one string.
        /// An unterminated block at EOF produces no token.
        /// </summary>
        /// <param name="stream">Stream positioned just after the opening sign.</param>
        /// <param name="blockType">Lexical token type to emit.</param>
        /// <param name="stopChar">Sign that closes the block.</param>
        private void ParseTextBlock(ScannerTokenListStream stream, enumLexToken blockType, char stopChar)
        {
            string sTextBlock = string.Empty;
            ScannerToken token = new ScannerToken();

            while (stream.EOF == false)
            {
                 token = stream.Read();

                // check for the end of the block
                if (token.enumScannerToken == enumScannerToken.Sign && token.cSign == stopChar)
                {
                    AddLexToken(blockType, sTextBlock, token.lLineNumber);
                    return;
                }

                // append the token's text to the block
                switch (token.enumScannerToken)
                {
                    case enumScannerToken.Sign:
                        sTextBlock += token.cSign;
                        break;

                    case enumScannerToken.Number:
                        sTextBlock += token.sString;
                        break;

                    case enumScannerToken.String:
                        sTextBlock += token.sString;
                        break;
                }
            }

            // missing closing sign at EOF: the block is dropped silently
            // (the ending-sign-not-found error is disabled)
        }

        /// <summary>
        /// Adds one lexical token to the output list.
        /// </summary>
        /// <param name="tokenType">Type of the lexical token.</param>
        /// <param name="sValue">Textual payload (may be empty).</param>
        /// <param name="lLineNumber">Source line the token came from.</param>
        private void AddLexToken(enumLexToken tokenType, string sValue, int lLineNumber)
        {
            LexToken token = new LexToken();
            token.LexTokenType = tokenType;
            token.sValue = sValue;
            token.lLineNumber = lLineNumber;

            _lstLexTokenList.Add(token);
        }

        /// <summary>
        /// Tries to match a game result ("1-0", "1/2-1/2", ...) starting at
        /// the current stream position. Result texts span several scanner
        /// tokens, so the candidate set is narrowed token by token.
        /// On success the matched tokens stay consumed; on failure every token
        /// read here is pushed back.
        /// </summary>
        /// <returns>The matched result token type, or null when nothing matches.</returns>
        private enumLexToken? ParseGameEnding(ScannerTokenListStream stream)
        {
            List<enumLexToken> possibleTokens = _dicGameEndings.Select(p => p.Key).ToList();
            // position inside the candidate result strings matched so far
            int signIndex = 0;
            // number of scanner tokens consumed (needed for rollback)
            int readTokens = 0;

            while (possibleTokens.Count != 0 && stream.EOF == false)
            {
                ScannerToken token = stream.Read();
                readTokens++;
                string sPeekValue = token.enumScannerToken == enumScannerToken.Sign ? token.cSign.ToString() : token.sString;

                List<enumLexToken> tokensToRemove = new List<enumLexToken>();

                foreach (enumLexToken lexToken in possibleTokens)
                {
                    string sGameResult = _dicGameEndings[lexToken];
                    int startSignIndex = signIndex;
                    bool missed = false;

                    // compare the current token's text against the candidate,
                    // continuing where the previous tokens left off
                    int idx = signIndex;
                    while (idx < sGameResult.Length && (idx - startSignIndex) < sPeekValue.Length)
                    {
                        if (sGameResult[idx] != sPeekValue[idx - startSignIndex])
                        {
                            tokensToRemove.Add(lexToken);
                            missed = true;
                            break;
                        }

                        idx++;
                    }

                    if (missed == true)
                        continue;

                    // full match: the candidate is exhausted exactly at the
                    // end of the current token's text
                    if (idx == sGameResult.Length && (idx - startSignIndex) == sPeekValue.Length)
                    {
                        return lexToken;
                    }

                }

                signIndex += sPeekValue.Length;

                possibleTokens = possibleTokens.Where(t => tokensToRemove.Contains(t) == false).ToList();
            }

            // no candidate matched: roll the stream back to where we started
            for (int i = 0; i < readTokens; i++)
                stream.Unread();

            return null;
        }

        /// <summary>
        /// Fills the table of textual game-result representations.
        /// </summary>
        private void InitGameEndings()
        {
            _dicGameEndings[enumLexToken.Win1] = "1-0";
            _dicGameEndings[enumLexToken.Draw1] = "1/2-1/2";
            _dicGameEndings[enumLexToken.Loss1] = "0-1";
            _dicGameEndings[enumLexToken.Win2] = "2-0";
            _dicGameEndings[enumLexToken.Draw2] = "1-1";
            _dicGameEndings[enumLexToken.Loss2] = "0-2";
            _dicGameEndings[enumLexToken.DoubleForfeit] = "0-0";
        }

        /// <summary>
        /// Forward stream over a list of scanner tokens with one-token-at-a-time
        /// pushback. Read and Peek perform no bounds checking of their own —
        /// callers are expected to test EOF first.
        /// </summary>
        private class ScannerTokenListStream
        {
            private readonly List<ScannerToken> _lstScannerTokenList;
            // index of the next token to be read
            private int _lIndex;

            /// <summary>
            /// Initialization of the stream.
            /// </summary>
            /// <param name="list">Scanner tokens to iterate over.</param>
            public ScannerTokenListStream(List<ScannerToken> list)
            {
                _lstScannerTokenList = list;
                _lIndex = 0;
            }

            /// <summary>
            /// Returns true when all tokens have been read.
            /// </summary>
            public bool EOF
            {
                get
                {
                    return _lIndex >= _lstScannerTokenList.Count;
                }
            }

            /// <summary>
            /// Reads the next token and advances the stream.
            /// </summary>
            /// <returns>The token at the current position.</returns>
            public ScannerToken Read()
            {
                return _lstScannerTokenList[_lIndex++];
            }

            /// <summary>
            /// Returns the next token without consuming it.
            /// </summary>
            /// <returns>The token at the current position.</returns>
            public ScannerToken Peek()
            {
                return _lstScannerTokenList[_lIndex];
            }

            /// <summary>
            /// Moves the stream one token back.
            /// </summary>
            public void Unread()
            {
                _lIndex--;
            }
        }
    }
}
