﻿
using System.Collections.Generic;
using System;
namespace Pretext {
	/// <summary>
	/// Class to turn strings of characters into tokens
	/// </summary>
	public class Tokenizer {

		/// <summary>
		/// Human-readable description of the source being tokenized; stamped onto
		/// emitted tokens so errors can be traced back to their origin.
		/// </summary>
		string SourceFileDescription { get; set; }

		#region Tokenizer state variables

		/// <summary>
		/// Whether the processor is currently within the pretext processing brackets or not.
		/// </summary>
		public bool InsidePretextBrackets { get; private set; }

		/// <summary>
		/// Whether the processor is currently within comment brackets or not.
		/// NOTE(review): this flag is toggled by the comment-bracket callbacks but
		/// is never read anywhere in this class — confirm external callers use it.
		/// </summary>
		public bool InsideCommentBrackets { get; private set; }

		/// <summary>
		/// Whether the processor is currently within string quote marks or not.
		/// </summary>
		public bool InsideStringQuotes { get; private set; }

		/// <summary>
		/// The token currently being built; characters are appended as they arrive.
		/// </summary>
		public Token CurrentToken { get; private set; }

		/// <summary>All tokens produced so far, in source order.</summary>
		public List<Token> Tokens { get; private set; }
		// Partially matched special tokens still being tracked against incoming characters.
		public List<TokenWatcherItem> CurrentlyWatchedTokens;
		// Registry of special token strings and how to handle them (see SetupSpecialTokens).
		public Dictionary<string, SpecialTokenInfo> SpecialTokens;
		// Current block-brace nesting depth ("{" increments, "}" decrements).
		private int CurrentTokenLevel { get; set; }
		// 1-based line number of the character currently being processed.
		private int CurrentLineNumber { get; set; }

		// True when the second character of a CR/LF (or LF/CR) pair has just been
		// seen, so the pair is counted as a single line break.
		bool CarriageReturnLineFeedEncountered { get; set; }

		#endregion

		/// <summary>
		/// Creates a tokenizer for a single source, ready to accept characters.
		/// </summary>
		/// <param name="sourceFileDescription">Description stamped onto emitted tokens.</param>
		public Tokenizer(string sourceFileDescription) {

			SourceFileDescription = sourceFileDescription;

			// fresh collections for output tokens, active watchers and the
			// special-token registry
			this.Tokens = new List<Token>();
			this.CurrentlyWatchedTokens = new List<TokenWatcherItem>();
			this.SpecialTokens = new Dictionary<string, SpecialTokenInfo>();

			// populate the special-token registry
			SetupSpecialTokens();

			// line numbering is 1-based
			this.CurrentLineNumber = 1;

		}

		/// <summary>
		/// Registers every special token string with its type, optional matched
		/// callback and the context (inside/outside pretext) where it applies.
		/// Registration order is significant: it determines dictionary insertion
		/// order, which in turn influences the order watchers are checked in.
		/// </summary>
		void SetupSpecialTokens() {

			// pretext processing brackets — each only recognised on its own side
			SpecialTokens.Add("<%", new SpecialTokenInfo(TokenType.StartOfPretextBracket, StartOfPretextBracket_OnMatched, SpecialTokenConditions.OnlyOutsidePretext));
			SpecialTokens.Add("%>", new SpecialTokenInfo(TokenType.EndOfPretextBracket, EndOfPretextBracket_OnMatched, SpecialTokenConditions.OnlyInsidePretext));

			// argument brackets (only meaningful inside pretext)
			SpecialTokens.Add("(", new SpecialTokenInfo(TokenType.OpeningArgumentBracket, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add(")", new SpecialTokenInfo(TokenType.ClosingArgumentBracket, SpecialTokenConditions.OnlyInsidePretext));

			// block braces track nesting depth via their callbacks
			SpecialTokens.Add("{", new SpecialTokenInfo(TokenType.OpeningBlockBracket, OpeningBlockBracket_OnMatched, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add("}", new SpecialTokenInfo(TokenType.ClosingBlockBracket, ClosingBlockBracket_OnMatched, SpecialTokenConditions.OnlyInsidePretext));

			// general punctuation
			SpecialTokens.Add(",", new SpecialTokenInfo(TokenType.GeneralSyntaxGrammer, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add(";", new SpecialTokenInfo(TokenType.GeneralSyntaxGrammer, SpecialTokenConditions.OnlyInsidePretext));

			// string quote marks toggle string state via their callback
			SpecialTokens.Add("\"", new SpecialTokenInfo(TokenType.GeneralSyntaxGrammer, StringQuoteMarks_OnMatched, SpecialTokenConditions.OnlyInsidePretext));

			// operators
			// NOTE(review): "+" is registered with TokenType.AssignmentOperator so
			// that a following "=" merges into "+=" (see AssignmentOperator_OnMatched)
			SpecialTokens.Add("=", new SpecialTokenInfo(TokenType.AssignmentOperator, AssignmentOperator_OnMatched, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add("+", new SpecialTokenInfo(TokenType.AssignmentOperator, SpecialTokenConditions.OnlyInsidePretext));

			// boolean literals
			SpecialTokens.Add("true", new SpecialTokenInfo(TokenType.BooleanValue, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add("false", new SpecialTokenInfo(TokenType.BooleanValue, SpecialTokenConditions.OnlyInsidePretext));

			// comparison operators
			SpecialTokens.Add("==", new SpecialTokenInfo(TokenType.EqualityOperator, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add("!=", new SpecialTokenInfo(TokenType.InequalityOperator, SpecialTokenConditions.OnlyInsidePretext));

			// comment brackets: markup-style outside pretext, C-style inside
			SpecialTokens.Add("<%--", new SpecialTokenInfo(TokenType.StartComment, StartOfCommentBracket_OnMatched, SpecialTokenConditions.OnlyOutsidePretext));
			SpecialTokens.Add("--%>", new SpecialTokenInfo(TokenType.EndComment, EndOfCommentBracket_OnMatched, SpecialTokenConditions.OnlyOutsidePretext));
			SpecialTokens.Add("/*", new SpecialTokenInfo(TokenType.StartComment, SpecialTokenConditions.OnlyInsidePretext));
			SpecialTokens.Add("*/", new SpecialTokenInfo(TokenType.EndComment, SpecialTokenConditions.OnlyInsidePretext));

		}

		/// <summary>
		/// Convenience entry point: tokenizes an entire source string and returns
		/// the token list, validating that every construct was properly closed.
		/// </summary>
		/// <param name="source">The text to tokenize.</param>
		/// <param name="sourceFileDescription">Description stamped onto emitted tokens.</param>
		/// <returns>The tokens produced from <paramref name="source"/>.</returns>
		public static List<Token> GetTokens(string source, string sourceFileDescription) {
			Tokenizer tokenizer = new Tokenizer(sourceFileDescription);
			tokenizer.Process(source);
			tokenizer.ValidateFinished();
			return tokenizer.Tokens;
		}

		#region Processing Methods

		/// <summary>
		/// Processes every character of <paramref name="s"/> in order.
		/// Null or empty input is a no-op.
		/// </summary>
		/// <param name="s">The text to feed through the tokenizer.</param>
		public void Process(string s) {

			if (string.IsNullOrEmpty(s)) {
				return;
			}

#if DEBUG
			if (s.Length == 1)
				throw new PretextException("Wrong overload called for Tokenizer.Process().  Use char instead of single character string.");
#endif

			// a string is already enumerable as characters; iterating it directly
			// avoids copying the whole contents into a char[] first
			foreach (char c in s)
				Process(c);

		}

		/// <summary>
		/// Processes a single character: appends it to the current token (unless
		/// it is insignificant whitespace), updates the special-token watchers,
		/// tracks line numbers, and remembers the character for escape and CR/LF
		/// detection on the next call.  The order of these steps is significant.
		/// </summary>
		public void Process(char c) {

			if (CurrentToken == null) {

				// create the first token and assume normal text 
				CurrentToken = new Token(TokenType.NormalText);

				CurrentToken.LineNumber = this.CurrentLineNumber;

				Tokens.Add(CurrentToken);

			}

			if (CharShouldBeAdded(c))
				// add this to the current token
				CurrentToken.Value += c;

			// check for new tokens to watch (c may start a special token)
			CheckForNewTokensToWatch(c);

			// check the tokens currently being watched; a full match here may
			// finalise the current token and start a new one
			CheckWatchedTokens(c);

			// check for line feeds and update line number
			CheckForLineFeeds(c);

			// store this character as the previous character
			this.PreviousCharacter = c;

		}

		/// <summary>
		/// Verifies the tokenizer finished in a consistent state: no open string,
		/// no open pretext bracket and balanced block braces.
		/// </summary>
		/// <exception cref="SyntaxException">Thrown when a construct was left unterminated or unbalanced.</exception>
		public void ValidateFinished() {

			if (InsideStringQuotes)
				throw new SyntaxException("Unterminated string constant.  Strings are expected to close.");

			if (InsidePretextBrackets)
				throw new SyntaxException("Expected pretext closing brackets tag.");

			if (CurrentTokenLevel != 0) {
				if (CurrentTokenLevel > 0)
					throw new SyntaxException("Unbalanced block braces.  Expected " + CurrentTokenLevel + " more closing bracket(s).");
				else
					// BUG FIX: CurrentTokenLevel is negative here; negate it so the
					// message reports a positive surplus instead of e.g. "-2"
					throw new SyntaxException("Unbalanced block braces.  " + (-CurrentTokenLevel) + " closing bracket(s) too many.");
			}

		}


		/// <summary>
		/// Updates the current line number when <paramref name="c"/> is a carriage
		/// return or line feed.  A CR/LF (or LF/CR) pair is counted as a single
		/// line break: the second character's increment is taken back once.
		/// </summary>
		public void CheckForLineFeeds(char c) {

			// the counterpart that would pair with c to form a two-character line
			// break; char.MinValue means "no pairing possible for this character"
			char otherLineFeedChar = char.MinValue;

			if (c == '\r' || c == '\n') {
				
				// increase the line number
				this.CurrentLineNumber++;

				if (c == '\r')
					otherLineFeedChar = '\n';
				else
					otherLineFeedChar = '\r';
			}

			// second half of a CR/LF pair: undo the extra increment so the pair
			// counts as one line break; the flag stops a third consecutive line
			// feed character from also being folded into the same pair
			if (PreviousCharacter != char.MinValue && PreviousCharacter == otherLineFeedChar && !CarriageReturnLineFeedEncountered) {
				this.CurrentLineNumber--;
				CarriageReturnLineFeedEncountered = true;
			}
			else if (CarriageReturnLineFeedEncountered)
				CarriageReturnLineFeedEncountered = false;

			// set current token line number
			CurrentToken.LineNumber = CurrentLineNumber;

		}

		/// <summary>
		/// Starts watching any registered special token whose first character is
		/// <paramref name="c"/> and whose inside/outside-pretext condition is
		/// currently satisfied.
		/// </summary>
		public void CheckForNewTokensToWatch(char c) {

			// iterate key/value pairs directly instead of Keys plus an indexer
			// lookup per key — same enumeration order, one hash lookup fewer
			foreach (KeyValuePair<string, SpecialTokenInfo> pair in SpecialTokens) {

				SpecialTokenInfo info = pair.Value;

				// make sure we match the conditions specified by the
				// info object
				if (
					(
						info.Conditions == SpecialTokenConditions.OnlyInsidePretext
						&& !InsidePretextBrackets
					) || (
						info.Conditions == SpecialTokenConditions.OnlyOutsidePretext
						&& InsidePretextBrackets
					)
				)
					continue;

				// compare the first character directly rather than allocating a
				// one-character string for StartsWith
				if (pair.Key.Length > 0 && pair.Key[0] == c)
					this.CurrentlyWatchedTokens.Add(new TokenWatcherItem(pair.Key));

			}

		}

		/// <summary>
		/// Advances every watched special token by one character.  Watchers whose
		/// next expected character does not match <paramref name="c"/> are
		/// dropped; the first watcher to match completely is handed to
		/// ProcessSpecialToken.
		/// </summary>
		public void CheckWatchedTokens(char c) {

			List<TokenWatcherItem> tokensToRemove = new List<TokenWatcherItem>();

			bool tokenFullyMatched = false;
			string matchedToken = string.Empty;

			foreach (TokenWatcherItem tokenWatcherItem in CurrentlyWatchedTokens) {

				char s = default(char);

				bool singleCharToken = (tokenWatcherItem.Token.Length == 1);

				// for multi-character tokens, fetch the next character that must
				// match for this watcher to stay alive
				if (!singleCharToken)

					// index into the string directly; no need to allocate a
					// one-character substring and char.Parse it back
					s = tokenWatcherItem.Token[tokenWatcherItem.PositionValidated + 1];

				// does this match - or is it a single character?
				if (singleCharToken || (s == c)) {

					// have we matched the entire token?
					if (singleCharToken ||
						(tokenWatcherItem.PositionValidated + 1 == tokenWatcherItem.Token.Length - 1)) {

						// matched ALL of the token
						tokenFullyMatched = true;
						matchedToken = tokenWatcherItem.Token;

						// remove it
						// NOTE(review): breaking here means watchers later in the
						// list are not advanced for this character — confirm the
						// registration order makes this safe
						tokensToRemove.Add(tokenWatcherItem);

						break;

					} else

						// matched more of it but not all
						// so increase the validated counter
						tokenWatcherItem.IncreaseValidatedPosition();

				} else
					// not matched - remove it from the watch list
					tokensToRemove.Add(tokenWatcherItem);

			}

			// remove all tokens marked to be removed
			foreach (TokenWatcherItem token in tokensToRemove)
				CurrentlyWatchedTokens.Remove(token);

			// did we fully match this token?
			if (tokenFullyMatched)
				ProcessSpecialToken(matchedToken);

		}

		/// <summary>
		/// Finalises the current token once a special token has been fully
		/// matched: trims the matched text off the current token's value, emits a
		/// token for the special string, runs its callback, and (unless the
		/// callback says otherwise) starts a fresh token for the text that follows.
		/// </summary>
		/// <param name="token">The special token string that was matched.</param>
		void ProcessSpecialToken(string token) {

#if DEBUG

			// the special token's characters were appended as they streamed in,
			// so the current token's value must end with it
			if (!this.CurrentToken.Value.EndsWith(token))
				throw new ApplicationException("CurrentToken.Value should end with the token being matched.");

#endif

			int i = ((CurrentToken.Value.Length - token.Length) > -1) ? CurrentToken.Value.Length - token.Length : 0;

			// remove the token from the end
			CurrentToken.Value = CurrentToken.Value.Substring(0, i);

			// get information about this special token
			SpecialTokenInfo specialToken = SpecialTokens[token];

#if DEBUG
			// NOTE(review): the indexer above already throws for unknown keys, so
			// this guard can never fire; kept for parity with the original intent
			if (specialToken == null)
				throw new ArgumentException(string.Format("\"{0}\" is not a special token and can not be passed to ProcessSpecialToken", token));
#endif

			// if the current token is useless then
			// remove it
			if (IsUselessToken(CurrentToken)) {
				// remove the last token - it's useless!
				Tokens.RemoveAt(Tokens.Count - 1);
			}

			// create a new token for the special string itself (reuse the info
			// already looked up instead of a second dictionary lookup)
			CurrentToken = new Token(specialToken.Type, token, CurrentTokenLevel, CurrentLineNumber, SourceFileDescription);
			Tokens.Add(CurrentToken);

			// assume the default result
			TokenMatchedResult result = TokenMatchedResult.DefaultResult;

			// call the callback for this special token
			if (specialToken.OnMatchedCallback != null)
				result = specialToken.OnMatchedCallback();

			if (result.CreateNewToken) {

				// pick the follow-on token's type from the tokenizer's state
				TokenType newTokenType = TokenType.NormalText;

				// are we inside pretext?
				if (InsidePretextBrackets) newTokenType = TokenType.Symbol;

				// are we inside a string?
				if (InsideStringQuotes) newTokenType = TokenType.String;

				CurrentToken = new Token(newTokenType, String.Empty, CurrentTokenLevel, CurrentLineNumber, SourceFileDescription);

				// create the normal one
				Tokens.Add(CurrentToken);

			}

		}

		/// <summary>
		/// Decides whether <paramref name="c"/> should be appended to the current
		/// token.  Outside pretext every character is kept; inside pretext,
		/// whitespace is only kept when it occurs within string quotes.
		/// </summary>
		bool CharShouldBeAdded(Char c) {
			// whitespace inside pretext is insignificant unless quoted
			if (InsidePretextBrackets && Char.IsWhiteSpace(c))
				return InsideStringQuotes;
			return true;
		}

		/// <summary>
		/// Determines whether a token carries no information (an empty NormalText
		/// or Symbol token) and can therefore be discarded.
		/// </summary>
		/// <param name="token">The token to examine.</param>
		/// <returns>True when the token is empty text and safe to drop.</returns>
		bool IsUselessToken(Token token) {

			// BUG FIX: the original inspected CurrentToken and ignored the token
			// parameter entirely; examine the token that was actually passed in
			// (the only visible call site passes CurrentToken, so behavior there
			// is unchanged)
			switch (token.Type) {

				case TokenType.NormalText:
				case TokenType.Symbol:
					// text-bearing tokens are useless only when empty
					return string.IsNullOrEmpty(token.Value);

				default:
					// special tokens always carry meaning
					return false;
			}

		}

		/// <summary>
		/// The token before the current one, or null when fewer than two tokens
		/// have been produced so far.
		/// </summary>
		Token PreviousToken {
			get { return Tokens.Count > 1 ? Tokens[Tokens.Count - 2] : null; }
		}

		/// <summary>
		/// The character processed immediately before the current one; used for
		/// escaped-quote and CR/LF pair detection.
		/// </summary>
		Char PreviousCharacter { get; set; }

		#endregion

		#region Token Matched Delegates

		/// <summary>
		/// Called when the pretext opening tag is encountered; rejects nested
		/// opening tags and flips the inside-pretext flag on.
		/// </summary>
		TokenMatchedResult StartOfPretextBracket_OnMatched() {
			if (InsidePretextBrackets)
				throw new SyntaxException("Can not nest pretext opening bracket tags.");
			InsidePretextBrackets = true;
			return TokenMatchedResult.DefaultResult;
		}

		/// <summary>
		/// Called when the pretext end tag is encountered; flips the
		/// inside-pretext flag off.
		/// </summary>
		TokenMatchedResult EndOfPretextBracket_OnMatched() {
			InsidePretextBrackets = false;
			return TokenMatchedResult.DefaultResult;
		}

		/// <summary>
		/// Called when the comment start tag is encountered; flips the
		/// inside-comment flag on.
		/// </summary>
		TokenMatchedResult StartOfCommentBracket_OnMatched() {
			InsideCommentBrackets = true;
			return TokenMatchedResult.DefaultResult;
		}

		/// <summary>
		/// Called when the comment end tag is encountered; flips the
		/// inside-comment flag off.
		/// </summary>
		TokenMatchedResult EndOfCommentBracket_OnMatched() {
			InsideCommentBrackets = false;
			return TokenMatchedResult.DefaultResult;
		}



		/// <summary>
		/// Called when a double quote is matched inside pretext.  An unescaped
		/// quote toggles the inside-string state; a quote preceded by a backslash
		/// is folded back into the previous token as a literal quote character.
		/// </summary>
		TokenMatchedResult StringQuoteMarks_OnMatched() {

#if DEBUG
			if (!InsidePretextBrackets)
				throw new SyntaxException("String token can not be matched when not within Pretext brackets.");
#endif

			TokenMatchedResult result;

			if (PreviousCharacter != '\\') {

				// toggle the string quotes boolean
				this.InsideStringQuotes = !this.InsideStringQuotes;

				result = TokenMatchedResult.DefaultResult;

			} else {

				// escaped quote: undo the quote token ProcessSpecialToken just
				// emitted and fold the quote into the token before it

				// remove the current token from the end of the tokens
				Tokens.RemoveAt(Tokens.Count - 1);

				// go back to extending the previous token
				CurrentToken = Tokens[Tokens.Count - 1];

				// trim off the escape character
				CurrentToken.Value = CurrentToken.Value.Substring(0, CurrentToken.Value.Length - 1);

				// add a quote
				CurrentToken.Value += "\"";

				// false = do not start a fresh token; keep extending this one
				result = new TokenMatchedResult(false);

			}

			return result;
		}

		/// <summary>
		/// Called when "{" is matched; descends one block-nesting level.
		/// </summary>
		TokenMatchedResult OpeningBlockBracket_OnMatched() {
			CurrentTokenLevel += 1;
			return TokenMatchedResult.DefaultResult;
		}

		/// <summary>
		/// Called when "}" is matched; ascends one block-nesting level.
		/// </summary>
		TokenMatchedResult ClosingBlockBracket_OnMatched() {
#if DEBUG
			// fail fast in debug builds; in release the imbalance is reported
			// later by ValidateFinished()
			if (CurrentTokenLevel < 1)
				throw new SyntaxException("Too many closing block brackets.");
#endif
			CurrentTokenLevel -= 1;
			return TokenMatchedResult.DefaultResult;
		}

		/// <summary>
		/// Called when "=" is matched.  If the token immediately before it is
		/// also typed as an assignment operator (e.g. "=" or "+"), the two are
		/// merged into a single combined operator token.
		/// </summary>
		TokenMatchedResult AssignmentOperator_OnMatched() {

			// get the token before the "=" just emitted; use the null-safe
			// PreviousToken property (consistency with the rest of the class)
			// instead of raw indexing, so a leading "=" cannot throw an
			// ArgumentOutOfRangeException
			Token previousToken = PreviousToken;

			// is the previous token also an assignment operator?
			if (previousToken != null && previousToken.Type == TokenType.AssignmentOperator) {

				// merge the two values (e.g. "=" + "=" -> "==")
				string tokenValue = previousToken.Value + CurrentToken.Value;

				// remove the previous token
				Tokens.RemoveAt(Tokens.Count - 2);

				// change this current token's type to "EqualityOperator"
				// NOTE(review): "+" followed by "=" merges to "+=" but is also
				// typed EqualityOperator here — confirm that is intended
				CurrentToken.Type = TokenType.EqualityOperator;
				CurrentToken.Value = tokenValue;

			}
			return TokenMatchedResult.DefaultResult;

		}

		#endregion

	}
}
