using System;
using System.Collections.Generic;
using System.Diagnostics;

using RexToy.Compiler.Lexical;

namespace RexToy.ExpressionLanguage
{
    using RexToy.ExpressionLanguage.Tokens;

    /// <summary>
    /// Concrete lexical parser for the expression language. Drives the
    /// status-transition matrix (<see cref="StatusMatrix"/>) over the input
    /// text, producing a flat token list, then post-processes adjacent tokens
    /// (decimal literals, class qualifiers) into combined tokens.
    /// </summary>
    internal class LexicalParser : LexicalParser<TokenType, StatusMatrix>
    {
        /// <summary>
        /// Resets the parser to scan <paramref name="text"/> from the beginning.
        /// Leading/trailing whitespace is stripped before scanning.
        /// </summary>
        /// <param name="text">The expression text to tokenize. Must not be null.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="text"/> is null.</exception>
        public override void SetParseContent(string text)
        {
            if (text == null)
                throw new ArgumentNullException("text");

            _text = text.Trim();
            _index = 0;
            _status = ParserStatus.Start;
            _tokens = new List<Token<TokenType>>();
        }

        /// <summary>
        /// Factory override: creates an expression-language token anchored at
        /// <paramref name="index"/> in the source text.
        /// </summary>
        protected override Token<TokenType> CreateNewToken(int index)
        {
            return new Token(index);
        }

        /// <summary>
        /// Scans the whole input, advancing the status machine one character at
        /// a time, and returns the resulting token list after post-processing.
        /// </summary>
        /// <returns>The list of recognized tokens.</returns>
        public override List<Token<TokenType>> Parse()
        {
            curr = new Token(_index);
            char ch = GetNextChar();
            while (ch != (char)0)
            {
                _status = _matrix.GetStatusTransform(_status, ch);
                if (_status == ParserStatus.Error)
                    ExceptionHelper.ThrowParseError(_index - 1);

                if (_status == ParserStatus.DPeekNext)
                {
                    // Inside a double-quoted string: a quote may be an escaped
                    // ("" -> ") quote or the closing quote; decide by look-ahead.
                    HandleEscapedQuote(ch, CharType.DQuot, ParserStatus.DQuot);
                }
                else if (_status == ParserStatus.SPeekNext)
                {
                    // Same look-ahead handling for single-quoted strings ('' -> ').
                    HandleEscapedQuote(ch, CharType.SQuot, ParserStatus.SQuot);
                }
                else
                {
                    // Note: a terminated token's character belongs to the next
                    // token (or is a separator), so it is not appended here.
                    if (!_matrix.TokenTerminated)
                        curr.Add(ch);
                }

                ch = GetNextChar();
            }

            AcceptLastToken(null, new LexicalParseEventArgs(ch, _status));//Note:Last token.

            PostProcess();

            return _tokens;
        }

        /// <summary>
        /// Shared look-ahead logic for a quote character seen inside a quoted
        /// string. If the next character is the same quote type, the pair is an
        /// escaped quote: the second quote is consumed and scanning stays inside
        /// the string. Otherwise the quote closes the string and the status
        /// returns to <see cref="ParserStatus.Start"/> so the token is accepted
        /// on the next scan step.
        /// </summary>
        /// <param name="ch">The quote character just read.</param>
        /// <param name="quoteType">The character class of the quote (single or double).</param>
        /// <param name="insideStatus">The status to resume when the quote turns out to be escaped.</param>
        private void HandleEscapedQuote(char ch, CharType quoteType, ParserStatus insideStatus)
        {
            char next = PeekNextChar();
            if (_matrix.GetCharType(next) == quoteType)
            {
                GetNextChar();//Note:skip the next quot (it is an escaped pair).
                curr.Add(ch);
                _status = insideStatus;
            }
            else
            {
                curr.Add(ch);
                _status = ParserStatus.Start;//Note:accept the close quot and set to Start,so when scan next char,this token will be accepted.
            }
        }

        /// <summary>
        /// Second pass over the raw token stream that merges multi-token
        /// patterns: Long '.' Long becomes one Decimal token, and ':' ':'
        /// becomes one ClassQualifier token. All other tokens pass through.
        /// </summary>
        private void PostProcess()
        {
            // Result can never exceed the raw token count, so presize.
            List<Token<TokenType>> tokens = new List<Token<TokenType>>(_tokens.Count);
            for (int index = 0; index < _tokens.Count; index++)
            {
                if (index < _tokens.Count - 2
                    && _tokens[index].TokenType == TokenType.Long
                    && _tokens[index + 1].TokenType == TokenType.Dot
                    && _tokens[index + 2].TokenType == TokenType.Long)
                {
                    // Number '.' Number -> a single Decimal literal token.
                    Token t = new Token(_tokens[index].Position);
                    t.Add(_tokens[index].TokenValue + _tokens[index + 1].TokenValue + _tokens[index + 2].TokenValue);
                    t.TokenType = TokenType.Decimal;

                    tokens.Add(t);
                    index += 2; // consumed two extra tokens
                }
                else if (index < _tokens.Count - 1
                    && _tokens[index].TokenType == TokenType.Colon && _tokens[index + 1].TokenType == TokenType.Colon)
                {
                    // ':' ':' -> a single '::' class-qualifier token.
                    Token t = new Token(_tokens[index].Position);
                    t.Add(_tokens[index].TokenValue + _tokens[index + 1].TokenValue);
                    t.TokenType = TokenType.ClassQualifier;
                    tokens.Add(t);
                    index += 1; // consumed one extra token
                }
                else
                {
                    tokens.Add(_tokens[index]);
                }
            }

            _tokens = tokens;
        }
    }
}
