﻿#region COPYRIGHT© 2005-2013 Phillip Clark. All rights reserved.

// For licensing information see License.txt (MIT style licensing).

#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics.Contracts;

namespace FlitBit.Parse
{
	/// <summary>
	/// Base tokenizer.
	/// </summary>
	/// <typeparam name="TKind">token kind TKind</typeparam>
	/// <summary>
	/// Base tokenizer. Transforms a character stream into a stream of
	/// <see cref="Token{TKind}"/> values according to the token, literal,
	/// identifier, and whitespace specs registered before the first tokenize call.
	/// </summary>
	/// <typeparam name="TKind">enum type identifying the kinds of token produced</typeparam>
	public class Tokenizer<TKind>
		where TKind : struct
	{
		/// <summary>
		/// Special token kind value for EOF.
		/// </summary>
		public const int EofKindValue = 0x7FFFFFFF;
		/// <summary>
		/// Special token kind value for Errors.
		/// </summary>
		public const int ErrorKindValue = 0x7FFFFFFE;

		/// <summary>
		/// Special token kind for EOF (TKind whose underlying value is <see cref="EofKindValue"/>).
		/// </summary>
		public static readonly TKind EofKind = (TKind) Enum.ToObject(typeof(TKind), EofKindValue);
		/// <summary>
		/// Special token kind for Errors (TKind whose underlying value is <see cref="ErrorKindValue"/>).
		/// </summary>
		public static readonly TKind ErrorKind = (TKind) Enum.ToObject(typeof(TKind), ErrorKindValue);

		readonly List<RegisteredToken> _literals = new List<RegisteredToken>();
		readonly Dictionary<string, RegisteredToken> _tokens = new Dictionary<string, RegisteredToken>();
		RegisteredToken _identifier;
		bool _ignoringWhitespace;
		// Lazily built by Prepare(): maps each token's possible first character to
		// a TokenMatch node; longer matches hang off the node (longest-match wins).
		Dictionary<char, TokenMatch> _parseTree;
		// Cached delegates so TokenMatch can re-enter TokenAtCursor for lookahead
		// without allocating a fresh delegate on every match attempt.
		readonly TryMatchCh _tryParseCh;
		readonly TryMatchS _tryParseS;
		RegisteredToken _whitespace;

		/// <summary>
		/// Creates a new instance.
		/// </summary>
		public Tokenizer()
		{
			_tryParseS = TokenAtCursor;
			_tryParseCh = TokenAtCursor;
		}

		/// <summary>
		/// Indicates whether the tokenizer has an identifier token type.
		/// </summary>
		public bool HasIdentifierToken { get { return _identifier.Token != null; } }
		/// <summary>
		/// Indicates whether the tokenizer has a whitespace token type.
		/// </summary>
		public bool HasWhitespaceToken { get { return _whitespace.Token != null; } }

		/// <summary>
		/// Indicates whether the tokenizer has been prepared. Once prepared,
		/// no further specs may be registered.
		/// </summary>
		public bool Prepared { get; private set; }

		/// <summary>
		/// Tokenizes the input.
		/// </summary>
		/// <param name="input">characters to tokenize; must not be null</param>
		/// <returns>a lazy sequence of tokens recognized in <paramref name="input"/></returns>
		public virtual IEnumerable<Token<TKind>> Tokenize(char[] input)
		{
			// Not an iterator method on purpose: an iterator would defer the
			// argument-validation contract until the first MoveNext(). Validate
			// eagerly, then hand back the (lazy) inner enumerable.
			Contract.Requires(input != null, "input cannot be null");

			return InnerTokenize(input);
		}

		/// <summary>
		/// Adds a constant specification for a <paramref name="kind" /> of TKind.
		/// </summary>
		/// <param name="kind">kind associated with the literal</param>
		/// <param name="spec">spec that recognizes the literal</param>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> AddLiteralSpec(TKind kind, TokenSpec spec)
		{
			Contract.Requires(!Prepared, "parser already prepared; all specs must be defined before parsing begins");

			// Only one literal spec per kind; reject duplicates explicitly.
			var comparer = EqualityComparer<TKind>.Default;
			foreach (var c in _literals)
			{
				if (comparer.Equals(c.Kind, kind))
				{
					throw new InvalidOperationException(String.Concat("Parser already contains a literal for kind: ", kind.ToString()));
				}
			}
			_literals.Add(new RegisteredToken
			{
				Kind = kind,
				Token = spec
			});
			return this;
		}

		/// <summary>
		/// Adds a token spec, keyed by the spec's name.
		/// </summary>
		/// <param name="kind">kind associated with the token</param>
		/// <param name="spec">spec that recognizes the token</param>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> AddTokenSpec(TKind kind, TokenSpec spec)
		{
			Contract.Requires(!Prepared, "parser already prepared; all specs must be defined before parsing begins");

			if (_tokens.ContainsKey(spec.Name))
			{
				// Name the offender, consistent with AddLiteralSpec's message.
				throw new InvalidOperationException(String.Concat("Parser already contains a token named: ", spec.Name));
			}

			var name = spec.Name;
			_tokens.Add(name, new RegisteredToken
			{
				Token = spec,
				Kind = kind
			});
			return this;
		}

		/// <summary>
		/// Makes the tokenizer ignore whitespace identified by the <paramref name="kind"/> and <paramref name="spec"/>.
		/// Whitespace tokens are still recognized but are filtered out of the output stream.
		/// </summary>
		/// <param name="kind">kind associated with whitespace</param>
		/// <param name="spec">spec that recognizes whitespace</param>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> IgnoreWhitespace(TKind kind, TokenSpec spec)
		{
			Contract.Requires(!Prepared, "parser already prepared; all specs must be defined before parsing begins");
			Contract.Requires(!HasWhitespaceToken, "cannot reassign whitespace spec");

			// Same registration as SetWhitespaceSpec, plus the suppression flag.
			_ignoringWhitespace = true;
			return SetWhitespaceSpec(kind, spec);
		}

		/// <summary>
		/// Performs the tokenizing logic. Unrecognized input produces tokens of
		/// <see cref="ErrorKind"/> rather than throwing.
		/// </summary>
		/// <param name="input">characters to tokenize</param>
		/// <returns>a lazy sequence of tokens</returns>
		public IEnumerable<Token<TKind>> InnerTokenize(char[] input)
		{
			Prepare();
			var cursor = new Cursor();
			// Tokens recognized during followed-by lookahead are queued here and
			// drained before the cursor is consulted again.
			var lookahead = new Queue<Token<TKind>>();
			while (true)
			{
				Token<TKind> tk;
				if (lookahead.Count > 0)
				{
					tk = lookahead.Dequeue();
				}
				else
				{
					if (cursor >= input.Length)
					{
						break;
					}
					if (!TokenAtCursor(ref cursor, input, out tk, lookahead))
					{
						// Error recovery: nothing matched at the cursor. Try to widen the
						// error capture before emitting an error token.
						// NOTE(review): both loops below advance WHILE positioned at
						// whitespace, yet at this point a whitespace token would normally
						// have matched in TokenAtCursor — so the loops rarely advance and
						// errors are emitted one character at a time. Possibly the intent
						// was to advance UNTIL whitespace (capture the whole bad word);
						// confirm against the test suite before changing.
						var cc = cursor;
						if (_whitespace.Token != null)
						{
							while (cc < input.Length && _whitespace.Token.At(input, cc))
							{
								cc++;
							}
						}
						else
						{
							while (cc < input.Length && input.AtWhitespace(cc))
							{
								cc++;
							}
						}
						// Always capture at least one character so the scan progresses.
						var len = Math.Max(cc.Offset - cursor.Offset, 1);
						var errCapture = new Capture(Match.NullMatch, cursor, input.Slice(cursor, len));
						tk = new Token<TKind>(TokenSpec.ErrorSpec, ErrorKind, errCapture);
						if (cursor == cc)
						{
							cursor++;
						}
						else
						{
							cursor = cc;
						}
					}
				}
				// Suppress whitespace tokens when IgnoreWhitespace was requested.
				if (_ignoringWhitespace && tk.MetaKind == TokenMetaKind.Whitespace)
				{
					continue;
				}

				yield return tk;
			}
		}

		/// <summary>
		/// Prepares the tokenizer by building the first-character parse tree from
		/// the registered token specs. Idempotent; called implicitly by
		/// <see cref="InnerTokenize"/>.
		/// </summary>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> Prepare()
		{
			if (_parseTree == null)
			{
				_parseTree = new Dictionary<char, TokenMatch>();
				foreach (var t in _tokens.Values)
				{
					// Index each token under every character it can start with;
					// TokenMatch.AddToken extends the tree for longer prefixes.
					var cm = t.Token.GetExactCharacterMatchesOnFront();
					foreach (var c in cm[0].SpecificChars)
					{
						TokenMatch match;
						if (!_parseTree.TryGetValue(c, out match))
						{
							_parseTree.Add(c, match = new TokenMatch(1));
						}
						match.AddToken(t, cm);
					}
				}
				Prepared = true;
			}
			return this;
		}

		/// <summary>
		/// Sets the tokenizer's identifier spec. May only be set once.
		/// </summary>
		/// <param name="kind">kind associated with identifiers</param>
		/// <param name="spec">spec that recognizes identifiers</param>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> SetIdentifierSpec(TKind kind, TokenSpec spec)
		{
			Contract.Requires(!Prepared, "parser already prepared; all specs must be defined before parsing begins");
			Contract.Requires(!HasIdentifierToken, "cannot reassign identifier spec");

			_identifier = new RegisteredToken
			{
				Kind = kind,
				Token = spec
			};
			return this;
		}

		/// <summary>
		/// Sets the tokenizer's whitespace spec. May only be set once. Unlike
		/// <see cref="IgnoreWhitespace"/>, whitespace tokens remain in the output.
		/// </summary>
		/// <param name="kind">kind associated with whitespace</param>
		/// <param name="spec">spec that recognizes whitespace</param>
		/// <returns>this tokenizer (fluent)</returns>
		public Tokenizer<TKind> SetWhitespaceSpec(TKind kind, TokenSpec spec)
		{
			Contract.Requires(!Prepared, "parser already prepared; all specs must be defined before parsing begins");
			Contract.Requires(!HasWhitespaceToken, "cannot reassign whitespace spec");

			_whitespace = new RegisteredToken
			{
				Kind = kind,
				Token = spec
			};
			return this;
		}

		/// <summary>
		/// Ensures a token adheres to the specification's followed-by rules
		/// (string input). When the rule is satisfied by lookahead, the tokens
		/// consumed while looking ahead are enqueued so they are not lost.
		/// </summary>
		/// <param name="tt">the token whose followed-by rule is checked</param>
		/// <param name="cursor">cursor positioned after <paramref name="tt"/>; advanced over lookahead</param>
		/// <param name="input">the input being tokenized</param>
		/// <param name="lookahead">queue receiving tokens recognized during lookahead</param>
		/// <returns>true when the followed-by rule is satisfied</returns>
		bool IsAppropriatelyFollowed(Token<TKind> tt, ref Cursor cursor, string input, Queue<Token<TKind>> lookahead)
		{
			// End of input always satisfies followed-by.
			if (cursor >= input.Length)
			{
				return true;
			}
			switch (tt.Spec.FollowedBy)
			{
				case TokenFollowedBy.Not:
				case TokenFollowedBy.Any:
					// Nothing restricted / anything allowed.
					return true;
				default:
					var localLook = new Queue<Token<TKind>>();
					Token<TKind> ll;
					if (!TokenAtCursor(ref cursor, input, out ll, localLook))
					{
						// No recognizable follower; rule cannot be violated.
						return true;
					}

					if ((tt.Spec.FollowedBy & TokenFollowedBy.Not) == TokenFollowedBy.Not)
					{
						// Blacklist mode: the flagged meta-kinds are DISALLOWED followers.
						if (ll.MetaKind == TokenMetaKind.Symbol)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
							{
								return false;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Operator)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
							{
								return false;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Whitespace)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
							{
								return false;
							}
						}

						// Follower allowed; keep it (and anything seen past it) for the caller.
						lookahead.Enqueue(ll);
						foreach (var ttt in localLook)
						{
							lookahead.Enqueue(ttt);
						}
						return true;
					}
					// Whitelist mode: the flagged meta-kinds are the only REQUIRED followers.
					if (ll.MetaKind == TokenMetaKind.Symbol)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					else if (ll.MetaKind == TokenMetaKind.Operator)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					else if (ll.MetaKind == TokenMetaKind.Whitespace)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					break;
			}

			return false;
		}

		/// <summary>
		/// Ensures a token adheres to the specification's followed-by rules
		/// (char[] input). Mirrors the string overload exactly.
		/// </summary>
		/// <param name="tt">the token whose followed-by rule is checked</param>
		/// <param name="cursor">cursor positioned after <paramref name="tt"/>; advanced over lookahead</param>
		/// <param name="input">the input being tokenized</param>
		/// <param name="lookahead">queue receiving tokens recognized during lookahead</param>
		/// <returns>true when the followed-by rule is satisfied</returns>
		bool IsAppropriatelyFollowed(Token<TKind> tt, ref Cursor cursor, char[] input, Queue<Token<TKind>> lookahead)
		{
			// End of input always satisfies followed-by.
			if (cursor >= input.Length)
			{
				return true;
			}
			switch (tt.Spec.FollowedBy)
			{
				case TokenFollowedBy.Not:
				case TokenFollowedBy.Any:
					// Nothing restricted / anything allowed.
					return true;
				default:
					var localLook = new Queue<Token<TKind>>();
					Token<TKind> ll;
					if (!TokenAtCursor(ref cursor, input, out ll, localLook))
					{
						// No recognizable follower; rule cannot be violated.
						return true;
					}

					if ((tt.Spec.FollowedBy & TokenFollowedBy.Not) == TokenFollowedBy.Not)
					{
						// Blacklist mode: the flagged meta-kinds are DISALLOWED followers.
						if (ll.MetaKind == TokenMetaKind.Symbol)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
							{
								return false;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Operator)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
							{
								return false;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Whitespace)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
							{
								return false;
							}
						}

						// Follower allowed; keep it (and anything seen past it) for the caller.
						lookahead.Enqueue(ll);
						foreach (var ttt in localLook)
						{
							lookahead.Enqueue(ttt);
						}
						return true;
					}
					// Whitelist mode: the flagged meta-kinds are the only REQUIRED followers.
					if (ll.MetaKind == TokenMetaKind.Symbol)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					else if (ll.MetaKind == TokenMetaKind.Operator)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					else if (ll.MetaKind == TokenMetaKind.Whitespace)
					{
						if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
						{
							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
					}
					break;
			}

			return false;
		}

		/// <summary>
		/// Attempts to recognize a token at the cursor (string input), trying
		/// whitespace, then symbols/keywords via the parse tree, then identifiers,
		/// then literals. Advances <paramref name="cursor"/> only on success.
		/// </summary>
		bool TokenAtCursor(ref Cursor cursor, string input, out Token<TKind> tk, Queue<Token<TKind>> lookahead)
		{
			var cc = cursor;
			// Whitespace
			if (_whitespace.Token != null && _whitespace.Token.TryMatch(input, ref cc, _whitespace.Kind, out tk))
			{
				cursor = cc;
				return true;
			}

			// Symbols and keywords...
			var c = input[cc];
			TokenMatch match;
			if (_parseTree.TryGetValue(c, out match)
				&& match.TryMatch(input, ref cc, out tk, lookahead, _tryParseS))
			{
				cursor = cc;
				return true;
			}
			// Identifiers
			if (_identifier.Token != null && _identifier.Token.TryMatch(input, ref cc, _identifier.Kind, out tk))
			{
				if (IsAppropriatelyFollowed(tk, ref cc, input, lookahead))
				{
					cursor = cc;
					return true;
				}
			}
			else if (_literals.Count > 0)
			{
				// Constants: first literal whose spec is at the cursor wins;
				// if its followed-by rule fails, the whole attempt fails.
				foreach (var constant in _literals)
				{
					if (constant.Token.At(input, cc))
					{
						constant.Token.TryMatch(input, ref cc, constant.Kind, out tk);
						if (IsAppropriatelyFollowed(tk, ref cc, input, lookahead))
						{
							cursor = cc;
							return true;
						}
						return false;
					}
				}
			}
			tk = default(Token<TKind>);
			return false;
		}

		/// <summary>
		/// Attempts to recognize a token at the cursor (char[] input). Mirrors the
		/// string overload exactly.
		/// </summary>
		bool TokenAtCursor(ref Cursor cursor, char[] input, out Token<TKind> tk, Queue<Token<TKind>> lookahead)
		{
			var cc = cursor;
			// Whitespace
			if (_whitespace.Token != null && _whitespace.Token.TryMatch(input, ref cc, _whitespace.Kind, out tk))
			{
				cursor = cc;
				return true;
			}

			// Symbols and keywords...
			var c = input[cc];
			TokenMatch match;
			if (_parseTree.TryGetValue(c, out match)
				&& match.TryMatch(input, ref cc, out tk, lookahead, _tryParseCh))
			{
				cursor = cc;
				return true;
			}
			// Identifiers
			if (_identifier.Token != null && _identifier.Token.TryMatch(input, ref cc, _identifier.Kind, out tk))
			{
				if (IsAppropriatelyFollowed(tk, ref cc, input, lookahead))
				{
					cursor = cc;
					return true;
				}
			}
			else if (_literals.Count > 0)
			{
				// Literals: first literal whose spec is at the cursor wins;
				// if its followed-by rule fails, the whole attempt fails.
				foreach (var constant in _literals)
				{
					if (constant.Token.At(input, cc))
					{
						constant.Token.TryMatch(input, ref cc, constant.Kind, out tk);
						if (IsAppropriatelyFollowed(tk, ref cc, input, lookahead))
						{
							cursor = cc;
							return true;
						}
						return false;
					}
				}
			}
			tk = default(Token<TKind>);
			return false;
		}

		/// <summary>
		/// Pairs a spec with the kind it produces.
		/// </summary>
		struct RegisteredToken
		{
			public TKind Kind;
			public TokenSpec Token;
		}

		/// <summary>
		/// Node in the first-character parse tree. A node at depth N matches a
		/// token whose first N characters are fixed; deeper matches are preferred
		/// (longest-match semantics).
		/// </summary>
		class TokenMatch
		{
			readonly int _length;
			// Continuations keyed by the character at offset _length.
			readonly Dictionary<char, TokenMatch> _longerTokens;
			RegisteredToken _token;

			internal TokenMatch(int len)
			{
				_length = len;
				_longerTokens = new Dictionary<char, TokenMatch>();
				_token = default(RegisteredToken);
			}

			/// <summary>
			/// Gets the number of fixed leading characters this node represents.
			/// </summary>
			public int Length { get { return _length; } }

			/// <summary>
			/// Adds a token to this node (when its fixed prefix ends here) or to a
			/// deeper node otherwise; rejects two tokens with identical prefixes.
			/// </summary>
			internal void AddToken(RegisteredToken tk, CharMatch[] leadChars)
			{
				if (leadChars.Length == _length)
				{
					if (_token.Token != null)
					{
						throw new InvalidOperationException(
							String.Concat("encountered ambiguous tokens: ", _token.Token.Name, " and ", tk.Token.Name));
					}
					_token = tk;
				}
				else
				{
					TokenMatch match;
					foreach (var c in leadChars[_length].SpecificChars)
					{
						if (!_longerTokens.TryGetValue(c, out match))
						{
							_longerTokens.Add(c, match = new TokenMatch(_length + 1));
						}
						match.AddToken(tk, leadChars);
					}
				}
			}

			/// <summary>
			/// Tries to match a token at the cursor (string input), preferring the
			/// deepest (longest) match in the tree. Advances the cursor only on success.
			/// </summary>
			internal bool TryMatch(string input, ref Cursor cursor, out Token<TKind> tk, Queue<Token<TKind>> lookahead,
				TryMatchS tryMatch)
			{
				// Try for the deepest match
				TokenMatch match;

				if ((cursor + _length) < input.Length && _longerTokens.TryGetValue(input[cursor + _length], out match))
				{
					if (match.TryMatch(input, ref cursor, out tk, lookahead, tryMatch))
					{
						return true;
					}
				}

				// No deeper match, take the current token.
				var cc = cursor;
				if (_token.Token != null && _token.Token.TryMatch(input, ref cc, _token.Kind, out tk)
					&& IsAppropriatelyFollowed(tk, ref cc, input, lookahead, tryMatch))
				{
					cursor = cc;
					return true;
				}
				tk = default(Token<TKind>);
				return false;
			}

			/// <summary>
			/// Tries to match a token at the cursor (char[] input). Mirrors the
			/// string overload exactly.
			/// </summary>
			internal bool TryMatch(char[] input, ref Cursor cursor, out Token<TKind> tk, Queue<Token<TKind>> lookahead,
				TryMatchCh tryMatch)
			{
				// Try for the deepest match
				TokenMatch match;

				if ((cursor + _length) < input.Length && _longerTokens.TryGetValue(input[cursor + _length], out match))
				{
					if (match.TryMatch(input, ref cursor, out tk, lookahead, tryMatch))
					{
						return true;
					}
				}

				// No deeper match, take the current token.
				var cc = cursor;
				if (_token.Token != null && _token.Token.TryMatch(input, ref cc, _token.Kind, out tk)
					&& IsAppropriatelyFollowed(tk, ref cc, input, lookahead, tryMatch))
				{
					cursor = cc;
					return true;
				}
				tk = default(Token<TKind>);
				return false;
			}

			/// <summary>
			/// Followed-by check (string input) using the supplied delegate for
			/// lookahead; same blacklist/whitelist semantics as the outer class.
			/// </summary>
			static bool IsAppropriatelyFollowed(Token<TKind> tt, ref Cursor cursor, string input, Queue<Token<TKind>> lookahead,
				TryMatchS tryMatch)
			{
				if (cursor >= input.Length)
				{
					return true;
				}
				switch (tt.Spec.FollowedBy)
				{
					case TokenFollowedBy.Not:
					case TokenFollowedBy.Any:
						return true;
					default:
						var localLook = new Queue<Token<TKind>>();
						Token<TKind> ll;
						if (!tryMatch(ref cursor, input, out ll, localLook))
						{
							return true;
						}

						if ((tt.Spec.FollowedBy & TokenFollowedBy.Not) == TokenFollowedBy.Not)
						{
							// Blacklist mode: flagged meta-kinds are disallowed followers.
							if (ll.MetaKind == TokenMetaKind.Symbol)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
								{
									return false;
								}
							}
							else if (ll.MetaKind == TokenMetaKind.Operator)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
								{
									return false;
								}
							}
							else if (ll.MetaKind == TokenMetaKind.Whitespace)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
								{
									return false;
								}
							}

							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
						// Whitelist mode: flagged meta-kinds are required followers.
						if (ll.MetaKind == TokenMetaKind.Symbol)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Operator)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Whitespace)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						break;
				}

				return false;
			}

			/// <summary>
			/// Followed-by check (char[] input). Mirrors the string overload exactly.
			/// Static for consistency with its twin; it uses no instance state.
			/// </summary>
			static bool IsAppropriatelyFollowed(Token<TKind> tt, ref Cursor cursor, char[] input, Queue<Token<TKind>> lookahead,
				TryMatchCh tryMatch)
			{
				if (cursor >= input.Length)
				{
					return true;
				}
				switch (tt.Spec.FollowedBy)
				{
					case TokenFollowedBy.Not:
					case TokenFollowedBy.Any:
						return true;
					default:
						var localLook = new Queue<Token<TKind>>();
						Token<TKind> ll;
						if (!tryMatch(ref cursor, input, out ll, localLook))
						{
							return true;
						}

						if ((tt.Spec.FollowedBy & TokenFollowedBy.Not) == TokenFollowedBy.Not)
						{
							// Blacklist mode: flagged meta-kinds are disallowed followers.
							if (ll.MetaKind == TokenMetaKind.Symbol)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
								{
									return false;
								}
							}
							else if (ll.MetaKind == TokenMetaKind.Operator)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
								{
									return false;
								}
							}
							else if (ll.MetaKind == TokenMetaKind.Whitespace)
							{
								if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
								{
									return false;
								}
							}

							lookahead.Enqueue(ll);
							foreach (var ttt in localLook)
							{
								lookahead.Enqueue(ttt);
							}
							return true;
						}
						// Whitelist mode: flagged meta-kinds are required followers.
						if (ll.MetaKind == TokenMetaKind.Symbol)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Symbol) == TokenFollowedBy.Symbol)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Operator)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Operator) == TokenFollowedBy.Operator)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						else if (ll.MetaKind == TokenMetaKind.Whitespace)
						{
							if ((tt.Spec.FollowedBy & TokenFollowedBy.Whitespace) == TokenFollowedBy.Whitespace)
							{
								lookahead.Enqueue(ll);
								foreach (var ttt in localLook)
								{
									lookahead.Enqueue(ttt);
								}
								return true;
							}
						}
						break;
				}

				return false;
			}
		}

		// Lookahead callbacks used by TokenMatch to re-enter TokenAtCursor.
		delegate bool TryMatchCh(ref Cursor c, char[] input, out Token<TKind> tk, Queue<Token<TKind>> lookahead);

		delegate bool TryMatchS(ref Cursor c, string input, out Token<TKind> tk, Queue<Token<TKind>> lookahead);
	}
}