using System;
using System.Collections.Generic;
using System.Text;

using RexToy.Compiler.Lexical;

namespace RexToy.Template
{
    using RexToy.Template.Tokens;

    /// <summary>
    /// Template-specific lexical parser: drives the generic state-matrix parser
    /// over the template text and post-processes the token stream
    /// (merging adjacent text tokens and applying colon-directed trimming).
    /// </summary>
    internal class LexicalParser : LexicalParser<TokenType, StatusMatrix>
    {
        public LexicalParser()
        {
        }

        /// <summary>
        /// Resets parser state so a new template string can be tokenized.
        /// </summary>
        public override void SetParseContent(string text)
        {
            _text = text;
            _index = 0;
            _status = ParserStatus.Start;
            _tokens = new List<Token<TokenType>>(0);
        }

        /// <summary>Creates a fresh token anchored at the given position in the input.</summary>
        protected override Token<TokenType> CreateNewToken(int index)
            => new Token(index);

        /// <summary>
        /// Tokenizes the text set via <see cref="SetParseContent"/> and returns the
        /// post-processed token list. Throws a parse error when the status matrix
        /// reports an invalid transition.
        /// </summary>
        public override List<Token<TokenType>> Parse()
        {
            curr = new Token(_index);

            // (char)0 is the end-of-input sentinel produced by GetNextChar.
            for (char ch = GetNextChar(); ch != (char)0; ch = GetNextChar())
            {
                _status = _matrix.GetStatusTransform(_status, ch);
                if (_status == ParserStatus.Error)
                    ExceptionHelper.ThrowParseError(_index - 1);

                // A terminating character closes the current token (handled by the
                // base-class event machinery) and is not part of the token value.
                if (!_matrix.TokenTerminated)
                    curr.Add(ch);
            }

            // Flush whatever is left in curr as the final token; the character
            // argument is the (char)0 sentinel at this point.
            AcceptLastToken(null, new LexicalParseEventArgs((char)0, _status));

            PostParse();

            return _tokens;
        }

        /// <summary>Runs the token-stream cleanup passes after raw lexing.</summary>
        private void PostParse()
        {
            JoinToken();
            SmartTrim();
        }

        /// <summary>
        /// Merges each adjacent pair of Text tokens into a single Text token,
        /// keeping the position of the first of the pair.
        /// </summary>
        private void JoinToken()
        {
            var merged = new List<Token<TokenType>>();
            int pos = 0;
            while (pos < _tokens.Count)
            {
                bool textPair = pos + 1 < _tokens.Count
                    && _tokens[pos].TokenType == TokenType.Text
                    && _tokens[pos + 1].TokenType == TokenType.Text;

                if (textPair)
                {
                    Token joined = new Token(_tokens[pos].Position);
                    joined.Add(_tokens[pos].TokenValue);
                    joined.Add(_tokens[pos + 1].TokenValue);
                    joined.TokenType = TokenType.Text;

                    merged.Add(joined);
                    pos += 2;   // both halves of the pair are consumed
                }
                else
                {
                    merged.Add(_tokens[pos]);
                    pos += 1;
                }
            }

            _tokens = merged;
        }

        /// <summary>True for tokens that carry script content (anything but None/Text).</summary>
        private bool IsScript(Token<TokenType> t)
            => t.TokenType != TokenType.None && t.TokenType != TokenType.Text;

        /// <summary>
        /// Applies colon-directed whitespace trimming: a leading ':' on a script
        /// token trims trailing whitespace/newlines off the preceding Text token,
        /// a trailing ':' trims leading whitespace/newlines off the following one.
        /// The colons themselves are stripped from the script token.
        /// </summary>
        private void SmartTrim()
        {
            const char marker = ':';
            char[] blanks = new char[] { ' ', '\t' };
            char[] breaks = new char[] { '\r', '\n' };

            for (int pos = 0; pos < _tokens.Count; pos++)
            {
                if (!IsScript(_tokens[pos]))
                    continue;

                var script = _tokens[pos];
                bool trimBefore = script.TokenValue.StartsWith(marker);
                bool trimAfter = script.TokenValue.EndsWith(marker);

                // Rebuild the script token with the marker colons removed.
                Token stripped = new Token(script.Position);
                stripped.Add(script.TokenValue.RemoveBegin(marker).RemoveEnd(marker));
                stripped.TokenType = script.TokenType;
                _tokens[pos] = stripped;

                if (trimBefore && pos > 0 && _tokens[pos - 1].TokenType == TokenType.Text)
                {
                    var before = _tokens[pos - 1];
                    Token trimmed = new Token(before.Position);
                    // Spaces/tabs come off first, then the line break characters.
                    trimmed.Add(before.TokenValue.TrimEnd(blanks).TrimEnd(breaks));
                    trimmed.TokenType = TokenType.Text;
                    _tokens[pos - 1] = trimmed;
                }

                if (trimAfter && pos < _tokens.Count - 1 && _tokens[pos + 1].TokenType == TokenType.Text)
                {
                    var after = _tokens[pos + 1];
                    Token trimmed = new Token(after.Position);
                    trimmed.Add(after.TokenValue.TrimStart(blanks).TrimStart(breaks));
                    trimmed.TokenType = TokenType.Text;
                    _tokens[pos + 1] = trimmed;
                }
            }
        }
    }
}
