﻿using System.Collections.Generic;
using System.Linq;

namespace KxParser.Lexer
{
    /// <summary>
    /// Splits input lines into <see cref="TokenEx"/> instances using a set of
    /// <see cref="TokenDefinition"/> matchers. Characters that no definition
    /// matches are accumulated into error tokens, optionally cut short at one of
    /// the <c>errorDelimiters</c>. The token stream is always terminated by an
    /// ender token.
    /// </summary>
    public class Lexer
    {
        private readonly string[] _inputLines;
        private readonly TokenDefinition[] _tokenDefinitions;
        private readonly string[] _errorDelimiters;

        private readonly List<TokenEx> _tokens = new List<TokenEx>();

        /// <summary>Result of constructor validation and of the last <see cref="Analyze"/> run.</summary>
        public ELexerStatus Status;

        private int _lineNumber;  // zero-based index of the line being scanned
        private int _lineIndex;   // character offset within the current line
        private int _totalIndex;  // character offset across all input lines

        /// <summary>
        /// Stores the input and validates the token definitions. On a validation
        /// failure <see cref="Status"/> is set to the corresponding violation value;
        /// otherwise it stays <see cref="ELexerStatus.NotAnalyzed"/> until
        /// <see cref="Analyze"/> is called.
        /// </summary>
        /// <param name="inputLines">Lines of source text to tokenize.</param>
        /// <param name="tokenDefinitions">Matchers for the recognized tokens.</param>
        /// <param name="errorDelimiters">Optional strings at which an unmatched (error) run is cut short.</param>
        public Lexer(string[] inputLines, TokenDefinition[] tokenDefinitions, string[] errorDelimiters = null)
        {
            Status = ELexerStatus.NotAnalyzed;

            _inputLines = inputLines;
            _errorDelimiters = errorDelimiters;

            // Check for reserved-word usage among the caller's definitions.
            if (tokenDefinitions.Any(def => KxParserGlobal.LexerReservedWords.Contains(def.Token)))
                Status = ELexerStatus.ReservedWordViolation;

            // Prepend the ender as a token definition because it is always required.
            // BUG FIX: the original assigned the extended array only to the local
            // parameter, so _tokenDefinitions never actually contained the ender
            // matcher used by Analyze().
            _tokenDefinitions = new[] { new TokenDefinition(string.Format(@"\{0}", KxParserGlobal.Ender), KxParserGlobal.Ender) }
                .Concat(tokenDefinitions)
                .ToArray();

            foreach (var tokenDefinition in _tokenDefinitions)
            {
                if (string.IsNullOrEmpty(tokenDefinition.Token.ToString()))
                    Status = ELexerStatus.EmptyNameViolation;
            }
        }

        /// <summary>
        /// Tokenizes the input lines. The first definition whose matcher accepts a
        /// prefix of the remaining text determines the token's content; every other
        /// definition matching at the same position only contributes an extra name.
        /// Unmatched characters are collected into error tokens. A trailing ender
        /// token is appended if none was produced by the input itself.
        /// </summary>
        /// <returns><c>true</c> when no error tokens were produced; otherwise <c>false</c>.</returns>
        public bool Analyze()
        {
            string errorInput = string.Empty;

            _lineIndex = 0;
            _lineNumber = 0;
            _totalIndex = 0;
            _tokens.Clear();

            foreach (string inputText in _inputLines)
            {
                string text = inputText;

                while (!string.IsNullOrEmpty(text))
                {
                    TokenEx token = null;
                    for (int i = 0; i < _tokenDefinitions.Length; i++)
                    {
                        TokenDefinition def = _tokenDefinitions[i];

                        var matched = def.Matcher.Match(text);
                        if (matched > 0)
                        {
                            if (token == null)
                            {
                                token = new TokenEx();
                                token.Names.Add(def.Token.ToString());
                                token.Content = text.Substring(0, matched);
                                token.IsIgnored = def.IsIgnored;
                                token.IsError = false;
                                // FIX: regular tokens previously never received a line number
                                // (only error tokens did, in addErrorToken).
                                token.LineNumber = _lineNumber;
                                token.StartIndex = _lineIndex;
                                token.EndIndex = _lineIndex + matched - 1;
                                token.TotalIndex = _totalIndex;
                            }
                            else
                            {
                                token.Names.Add(def.Token.ToString());
                            }
                        }
                    }

                    if (token != null)
                    {
                        _tokens.Add(token);
                        text = text.Substring(token.Content.Length);
                        _lineIndex += token.Content.Length;
                        _totalIndex += token.Content.Length;

                        // A pending error run ends as soon as something matches again.
                        if (!string.IsNullOrEmpty(errorInput))
                            addErrorToken(ref errorInput, false, false);
                    }
                    else
                    {
                        // No definition matched: consume characters one at a time until
                        // an error delimiter is reached or the line runs out.
                        do
                        {
                            errorInput += text[0];
                            text = text.Substring(1);

                            _lineIndex++;
                            _totalIndex++;

                            if (text.Length == 0)
                            {
                                addErrorToken(ref errorInput, false, true);
                                break;
                            }
                            // text.Length >= 1 here, so the delimiter probe below is safe.
                        } while (_errorDelimiters != null && !_errorDelimiters.Contains(text[0].ToString()));
                    }
                }

                _lineIndex = 0;
                _lineNumber++;
            }

            // Defensive: flush any error run left over after the last line.
            if (!string.IsNullOrEmpty(errorInput))
            {
                _lineIndex++;
                _totalIndex++;
                addErrorToken(ref errorInput, true, false);
            }

            // BUG FIX: the original compared the Names *list* itself to the ender
            // string (Names.Equals(...)), which is always false, so a duplicate
            // ender token was appended even when the input already produced one.
            var enderFound = _tokens.Any(mxToken => mxToken.Names.Contains(KxParserGlobal.Ender));

            if (!enderFound)
            {
                TokenEx endToken = new TokenEx();
                endToken.Names.Add(KxParserGlobal.Ender);
                endToken.Content = KxParserGlobal.Ender;
                endToken.IsIgnored = false;
                endToken.IsError = false;
                endToken.LineNumber = _lineNumber; // consistent with all other tokens
                endToken.StartIndex = _lineIndex;
                endToken.EndIndex = _lineIndex;
                endToken.TotalIndex = _totalIndex;

                _tokens.Add(endToken);
            }

            if (_tokens.Any(token => token.IsError))
            {
                Status = ELexerStatus.LexicalAnalysisFail;
                return false;
            }

            Status = ELexerStatus.Ok;
            return true;
        }

        /// <summary>
        /// Converts the accumulated <paramref name="errorInput"/> into an error
        /// token, back-computing its indices from the current scan position, and
        /// clears the accumulator.
        /// </summary>
        /// <param name="errorInput">Run of unmatched characters; reset to empty on return.</param>
        /// <param name="isLast">True when flushing after all input lines were consumed.</param>
        /// <param name="lastInLine">True when the run reached the end of its line.</param>
        void addErrorToken(ref string errorInput, bool isLast, bool lastInLine)
        {
            // When a matched token ended the error run, the scan position has
            // already advanced past that token, so subtract its length back out.
            int tokenShift = lastInLine ? 0 : _tokens.Last().Content.Length;

            TokenEx errorToken = new TokenEx();
            errorToken.Names.Add(KxParserGlobal.ErrorTokenName);
            errorToken.Content = errorInput;
            errorToken.IsIgnored = true;
            errorToken.IsError = true;
            errorToken.LineNumber = _lineNumber;
            errorToken.StartIndex = _lineIndex - errorInput.Length - tokenShift;
            errorToken.EndIndex = _lineIndex - tokenShift - 1;
            errorToken.TotalIndex = _totalIndex - errorInput.Length - tokenShift;

            if (!isLast && !lastInLine)
            {
                // Preserve source order: the error run precedes the token that ended it.
                _tokens.Insert(_tokens.Count - 1, errorToken);
            }
            else
            {
                _tokens.Add(errorToken);
            }

            // Reset the accumulator for the next error run.
            errorInput = "";
        }

        /// <summary>Tokens flagged as errors by the last analysis.</summary>
        public TokenEx[] GetErrorTokens()
        {
            return _tokens.Where(token => token.IsError).ToArray();
        }

        /// <summary>Tokens not marked as ignored (error tokens are ignored).</summary>
        public TokenEx[] GetActiveTokens()
        {
            return _tokens.Where(token => !token.IsIgnored).ToArray();
        }

        /// <summary>Every token produced by the last analysis, in source order.</summary>
        public TokenEx[] GetAllTokens()
        {
            return _tokens.ToArray();
        }

    }

    /// <summary>Validation and analysis states reported via <see cref="Lexer.Status"/>.</summary>
    public enum ELexerStatus
    {
        /// <summary>A token definition uses a lexer-reserved word as its token name.</summary>
        ReservedWordViolation,
        /// <summary>A token definition has an empty token name.</summary>
        EmptyNameViolation,
        /// <summary>Analysis ran and produced at least one error token.</summary>
        LexicalAnalysisFail,
        /// <summary>Constructed, but <see cref="Lexer.Analyze"/> has not run yet.</summary>
        NotAnalyzed,
        /// <summary>Analysis completed without any error tokens.</summary>
        Ok
    }
}
