﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml.Serialization;
using System.Reflection;

namespace Atlepage.Lexical
{
    /// <summary>
    /// A table-driven lexer that builds a single "master" regular expression from
    /// token definitions discovered (via reflection) on a handler object, then
    /// produces tokens by repeatedly matching that expression against the input.
    ///
    /// Token definitions come from two sources on the handler object:
    ///   - string fields named "t_NAME" whose value is the token's regex, and
    ///   - methods returning <typeparamref name="TToken"/> with signature
    ///     (Group, TToken), decorated with one or more <c>TokenAttribute</c>s
    ///     supplying the regex (the token name comes from the attribute, or from
    ///     a "t_NAME" method name when the attribute carries no name).
    ///
    /// A token whose name is unknown to <typeparamref name="TKind"/> maps to
    /// kind -1 and is consumed silently (useful for whitespace/comments).
    /// Enumeration finishes with a synthetic EOF token of kind MaximumValue + 1.
    /// </summary>
    /// <typeparam name="TToken">Concrete token type produced by the lexer.</typeparam>
    /// <typeparam name="TKind">Enumeration mapping token names to integer kinds.</typeparam>
    public class Lexer<TToken,TKind> : IEnumerable<TokenBase>
        where TToken : Token<TToken,TKind>, new()
        where TKind : IEnum, new()
    {
        /// <summary>Metadata for one token definition inside the master regex.</summary>
        struct TokenInfo
        {
            [XmlElement("expression")]
            public string Expression;   // the token's regular expression
            [XmlElement("type")]
            public int Type;            // integer kind, or -1 meaning "consume and skip"
            [XmlElement("group")]
            public string GroupName;    // named capture group in the master regex ("g0", "g1", ...)
            [XmlElement("handler")]
            public string HandlerName;  // name of the post-processing method, if any
            [XmlIgnore]
            public MethodInfo Handler;  // optional post-processing method on the handler object
        }

        private string data;            // input currently being scanned
        private Regex master;           // combined alternation of all token expressions
        private Match masterMatch;      // last match; null before the first Next() after Begin()
        private object lexerHandler;    // object whose fields/methods define the tokens
        private TokenInfo[] tokenInfos;
        private int line;               // 0-based current line
        private int column;             // 0-based current column
        private bool done = true;       // true until Begin() is called, and after input is exhausted
        internal IEnum kind = new TKind();

        /// <summary>
        /// Creates a lexer whose token definitions are reflected from
        /// <paramref name="lexerHandler"/>'s "t_" fields and TokenAttribute methods.
        /// </summary>
        /// <param name="lexerHandler">Object carrying the token definitions.</param>
        /// <exception cref="ArgumentNullException"><paramref name="lexerHandler"/> is null.</exception>
        public Lexer(object lexerHandler)
        {
            if (lexerHandler == null)
                throw new ArgumentNullException("lexerHandler");
            this.lexerHandler = lexerHandler;

            BuildTokenInfo();
            BuildMaster();
        }

        /// <summary>
        /// Resets the lexer to start scanning <paramref name="data"/> from the beginning.
        /// </summary>
        /// <param name="data">The text to tokenize.</param>
        /// <exception cref="ArgumentNullException"><paramref name="data"/> is null.</exception>
        public void Begin(string data)
        {
            if (data == null)
                throw new ArgumentNullException("data");
            this.data = data;
            line = 0;
            column = 0;
            masterMatch = null;
            done = false;
        }

        /// <summary>
        /// Combines all token expressions into one anchored alternation of the form
        /// \G((?&lt;g0&gt;expr0)|(?&lt;g1&gt;expr1)|...). The \G anchor forces every
        /// match to start exactly where the previous one ended, so a gap the grammar
        /// cannot match terminates the scan instead of being silently skipped.
        /// </summary>
        private void BuildMaster()
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(@"\G(");
            for (int i = 0; i < tokenInfos.Length; i++)
                sb.AppendFormat(@"{2}(?<{0}>{1})", tokenInfos[i].GroupName, tokenInfos[i].Expression, i == 0 ? "" : "|");
            sb.Append(")");
            // Compiled: the master expression is built once and matched many times.
            master = new Regex(sb.ToString(), RegexOptions.Compiled);
        }

        /// <summary>
        /// Reflects over the handler object to collect token definitions: handler
        /// methods first (so handled tokens take precedence in the alternation),
        /// then plain "t_" string fields. Each definition receives a unique capture
        /// group name ("g0", "g1", ...).
        /// </summary>
        private void BuildTokenInfo()
        {
            Type type = lexerHandler.GetType();

            var fields = from f in type.GetFields()
                         where f.FieldType == typeof(string)
                         where f.Name.StartsWith("t_")
                         select new { type = f.Name.Substring(2), re = (string)f.GetValue(lexerHandler) };

            var methods = from m in type.GetMethods()
                          // handlers must return TToken...
                          where m.ReturnType == typeof(TToken)
                          // ...and take exactly (Group, TToken)
                          let ps = m.GetParameters()
                          where ps.Length == 2 && ps[0].ParameterType == typeof(Group) && ps[1].ParameterType == typeof(TToken)
                          // one definition per TokenAttribute on the method
                          let attribs = (TokenAttribute[])m.GetCustomAttributes(typeof(TokenAttribute), true)
                          where attribs.Length > 0
                          from a in attribs
                          // token name: attribute name, or the method's "t_" suffix
                          where a.Name != null || m.Name.StartsWith("t_")
                          select new
                          {
                              type = a.Name ?? m.Name.Substring(2),
                              re = a.Expression,
                              handlerName = m.Name,
                              handler = m
                          };

            int i = 0;
            List<TokenInfo> infos = new List<TokenInfo>();
            foreach (var method in methods)
            {
                TokenInfo info = new TokenInfo();
                info.Type = ToTKind(method.type);
                info.GroupName = "g" + i++;
                info.Handler = method.handler;
                info.HandlerName = method.handlerName;
                info.Expression = method.re;
                infos.Add(info);
            }

            foreach (var field in fields)
            {
                TokenInfo info = new TokenInfo();
                info.Type = ToTKind(field.type);
                info.GroupName = "g" + i++;
                info.Expression = field.re;
                infos.Add(info);
            }

            tokenInfos = infos.ToArray();
        }

        /// <summary>
        /// Maps a token name to its integer kind; unknown names map to -1,
        /// which marks the token as "consume and skip".
        /// </summary>
        private int ToTKind(string p)
        {
            int i;
            return kind.TryGetValue(p, out i) ? i : -1;
        }

        /// <summary>
        /// Returns the next token, or null when the input is exhausted (or when the
        /// remaining input matches no token expression — the \G anchor makes any
        /// unmatched gap end the scan). Tokens of unknown kind (-1) are consumed
        /// silently and never returned.
        /// </summary>
        public TToken Next()
        {
            TToken token = null;
            while (!done && token == null)
            {
                masterMatch = masterMatch == null
                    ? master.Match(data, 0)     // first match anchors at position 0
                    : masterMatch.NextMatch();  // subsequent matches anchor at the previous end

                if (masterMatch == null || !masterMatch.Success)
                {
                    done = true;
                    break;
                }

                // Exactly one alternative's named group succeeds per match; find it.
                // A miss here means the regex matched but no definition claimed it,
                // which indicates an internal inconsistency -> InvalidToken throws.
                // (The old `group == null` check was dead: Groups[name] never returns
                // null for a defined group, and Single() threw before it ran.)
                bool found = false;
                TokenInfo info = default(TokenInfo);
                foreach (TokenInfo ti in tokenInfos)
                {
                    if (masterMatch.Groups[ti.GroupName].Success)
                    {
                        info = ti;
                        found = true;
                        break;
                    }
                }
                if (!found)
                    InvalidToken(masterMatch);

                Group group = masterMatch.Groups[info.GroupName];

                // Initialize the token with its kind, text, and position.
                token = new TToken();
                token.Load(this, info.Type, group.Value, group.Index, line, column);

                // Give the handler method, if any, a chance to transform the token.
                if (info.Handler != null)
                    token = (TToken)info.Handler.Invoke(lexerHandler, new object[] { group, token });

                // Unknown kinds (whitespace, comments, ...) are skipped.
                if (info.Type < 0)
                    token = null;

                UpdatePositionInfo(group.Value);
            }

            return token;
        }

        /// <summary>
        /// Advances the 0-based line/column position past the consumed text
        /// <paramref name="p"/>. After a newline the column restarts at 0 for
        /// the character immediately following it.
        /// </summary>
        private void UpdatePositionInfo(string p)
        {
            int lastNewline = -1;
            for (int ix = p.IndexOf('\n'); ix != -1; ix = p.IndexOf('\n', ix + 1))
            {
                line++;
                lastNewline = ix;
            }
            if (lastNewline != -1)
                column = p.Length - lastNewline - 1;  // chars after the final newline (was off by one: counted the '\n' itself)
            else
                column += p.Length;
        }

        /// <summary>
        /// Signals that a master-regex match could not be attributed to any token
        /// definition. Position is reported 1-based for readability.
        /// </summary>
        private void InvalidToken(Match masterMatch)
        {
            throw new InvalidOperationException(string.Format(
                "Invalid token at line {0}, column {1} (index {2}).",
                line + 1, column + 1, masterMatch.Index));
        }

        #region IEnumerable<TokenBase> Members

        /// <summary>
        /// Enumerates all remaining tokens and finishes with a synthetic EOF token
        /// of kind <c>kind.MaximumValue + 1</c> positioned at the end of the input.
        /// </summary>
        public IEnumerator<TokenBase> GetEnumerator()
        {
            for (TokenBase token = this.Next(); token != null; token = this.Next())
                yield return token;

            TToken eof = new TToken();
            eof.Load(this, kind.MaximumValue + 1, "", this.data.Length, this.line, this.column);
            yield return eof;
        }

        #endregion

        #region IEnumerable Members

        // Delegate to the generic enumerator instead of duplicating its body.
        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        #endregion
    }
}
