﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml.Serialization;
using System.Reflection;

namespace Atlepage.Lexical
{    
    /// <summary>
    /// Serializable snapshot of a lexer's configuration: the per-token rules
    /// and the combined master regex built from them. Used by the
    /// <see cref="Lexer{TToken}"/> constructor that restores a persisted lexer.
    /// </summary>
    [Serializable]
    internal class SerializableLexer
    {
        public TokenInfo[] infos;   // one entry per token rule; consumed as Lexer.tokenInfos
        public Regex master;        // combined pattern with one named group per rule; consumed as Lexer.master
        public SerializableLexer(TokenInfo[] infos, Regex master)
        {
            this.infos = infos;
            this.master = master;
        }
    }

    /// <summary>
    /// Describes a single token rule: its regex fragment, its numeric type,
    /// the named capture group it occupies in the master regex, and an
    /// optional handler invoked for each match.
    /// </summary>
    [Serializable]
    internal struct TokenInfo
    {
        public string Expression;   // regex fragment recognizing this token
        public int Type;            // token type code; Lexer.Next discards tokens whose Type is negative
        public string GroupName;    // name of this rule's capture group in the master regex
        public string HandlerName;  // handler method name; presumably used to re-resolve Handler after deserialization — not shown here
        public MethodInfo Handler;  // optional; invoked by Lexer.Next as Handler(lexerHandler, new object[] { Group, token }) and must return the (possibly replaced) token
    }

    /// <summary>
    /// Regex-driven lexer that splits an input string into a stream of tokens,
    /// terminated by a synthetic end-of-file token.
    /// </summary>
    /// <typeparam name="TToken">Concrete token type produced by the lexer.</typeparam>
    public class Lexer<TToken> : IEnumerable<TokenBase>
        where TToken : Token<TToken>, new()
    {
        private string data;            // input currently being lexed; set by Begin()
        internal Regex master;          // combined pattern; one named group per token rule
        private Match masterMatch;      // most recent match against data, or null before the first match
        private object lexerHandler;    // receiver for TokenInfo.Handler invocations; set by Initialize()
        internal TokenInfo[] tokenInfos;
        private int location;           // number of input characters consumed so far
        private int line;               // 0-based line of the next token
        private int column;             // 0-based column of the next token
        private bool done = true;       // true until Begin() is called, and again once input is exhausted
        internal IEnum kind;

        internal Lexer(TokenInfo[] infos, Regex master, IEnum kind)
        {
            this.tokenInfos = infos;
            this.master = master;
            this.kind = kind;
        }

        internal Lexer(SerializableLexer state, IEnum kind)
        {
            this.tokenInfos = state.infos;
            this.master = state.master;
            this.kind = kind;
        }

        /// <summary>
        /// Registers the object on which <see cref="TokenInfo.Handler"/>
        /// methods are invoked for matched tokens.
        /// </summary>
        public void Initialize(object actor)
        {
            this.lexerHandler = actor;
        }

        /// <summary>Creates a new lexer sharing this one's rules and master regex, with no input state.</summary>
        internal Lexer<TToken> Clone()
        {
            return new Lexer<TToken>(tokenInfos, master, kind);
        }

        /// <summary>
        /// Resets the lexer over a new input string; position tracking restarts
        /// at line 0, column 0.
        /// </summary>
        /// <param name="data">Input text to tokenize; must not be null.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="data"/> is null.</exception>
        public void Begin(string data)
        {
            if (data == null)
                throw new ArgumentNullException("data");
            this.data = data;
            location = 0;
            line = 0;
            column = 0;
            masterMatch = null;
            done = false;
        }

        /// <summary>
        /// Returns the next non-discarded token, or null when the input is
        /// exhausted. Tokens whose <see cref="TokenInfo.Type"/> is negative are
        /// consumed silently (their text still advances line/column tracking).
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// When the remaining input cannot be matched by the master regex.
        /// </exception>
        public TToken Next()
        {
            TToken token = null;
            while (!done && token == null)
            {
                if (this.location == data.Length)
                {
                    done = true;
                    break;
                }

                // Match.Match/NextMatch never return null, so Success alone
                // distinguishes "no further token" from a hit.
                masterMatch = (masterMatch == null)
                    ? master.Match(data, 0)
                    : masterMatch.NextMatch();

                if (!masterMatch.Success)
                    InvalidToken(masterMatch);

                // Exactly one rule's named group participates in a successful
                // match; Single() throws if the rule set violates that invariant.
                TokenInfo info = tokenInfos.Single((TokenInfo ti) => masterMatch.Groups[ti.GroupName].Success);
                Group group = masterMatch.Groups[info.GroupName];

                // Initialize token with its text and source position.
                token = new TToken();
                token.Load(this, info.Type, group.Value, group.Index, line, column);

                // Give the registered handler a chance to transform/replace the token.
                if (info.Handler != null)
                    token = (TToken)info.Handler.Invoke(lexerHandler, new object[] { group, token });

                // Negative types mark skip rules (e.g. whitespace): discard and keep scanning.
                if (info.Type < 0)
                    token = null;

                UpdatePositionInfo(group.Value);
            }

            return token;
        }

        /// <summary>
        /// Advances line/column/location past the consumed text
        /// <paramref name="p"/>, counting '\n' as the line separator.
        /// </summary>
        private void UpdatePositionInfo(string p)
        {
            int ixLast = -1;                        // index of the last '\n' in p, or -1 if none
            int ix = p.IndexOf('\n');
            while (ix != -1)
            {
                line++;
                ixLast = ix;
                ix = p.IndexOf('\n', ix + 1);
            }
            if (ixLast != -1)
                // Characters after the last newline, 0-based. (Was
                // p.Length - ixLast, which also counted the newline itself and
                // produced 1-based columns inconsistent with Begin()'s column = 0.)
                column = p.Length - ixLast - 1;
            else
                column += p.Length;
            location += p.Length;
        }

        /// <summary>
        /// Reports input that no token rule matches. (Previously a
        /// NotImplementedException placeholder.)
        /// </summary>
        private void InvalidToken(Match masterMatch)
        {
            throw new InvalidOperationException(string.Format(
                "Invalid token in input at offset {0} (line {1}, column {2}).",
                location, line, column));
        }

        #region IEnumerable<TokenBase> Members

        /// <summary>
        /// Enumerates all tokens of the current input, then yields one synthetic
        /// end-of-file token whose type is kind.MaximumValue + 1.
        /// </summary>
        public IEnumerator<TokenBase> GetEnumerator()
        {
            TokenBase token = this.Next();
            while (token != null)
            {
                yield return token;
                token = this.Next();
            }
            TToken eof = new TToken();
            eof.Load(this, kind.MaximumValue + 1, "", this.data.Length, this.line, this.column);
            yield return eof;
        }

        #endregion

        #region IEnumerable Members

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            // Delegate instead of duplicating the generic enumerator body
            // (the original maintained two identical copies).
            return GetEnumerator();
        }

        #endregion
    }
}
