using System;
using System.Collections.Generic;
using System.Text;

namespace HappyTemplate
{
	//Has to be public to satisfy accessibility rules.
	//Identifies the kind of lexeme a Token represents, as produced by TemplateLexer.
	public enum TokenType
	{
		Verbatim,         //Raw template text copied through unchanged
		Identifier,       //A name (possibly multipart, e.g. obj.prop1.prop2)
		KeywordIf,        //"if"
		KeywordEndIf,     //"endif"
		KeywordElse,      //"else"
		KeywordLookup,    //"lookup"
		KeywordDefault,   //"default"
		OpenParen,        //"("
		CloseParen,       //")"
		Bang,             //"!"
		Comma,            //","
		LiteralString,    //Double-quoted string literal (escapes already processed)
		LiteralInt32,     //Integer literal (no decimal point)
		LiteralDouble,    //Numeric literal containing a decimal point
		LiteralChar,      //Single-quoted character literal
		KeywordNext,      //"next"
		KeywordIn,        //"in"
		KeywordForEach,   //"foreach"
		BeginTemplate,    //"<{"
		EndTemplate,      //"}>"
		EndOfInput,       //Synthetic token emitted once the reader is exhausted
		InvalidCharacter, //Synthetic token emitted for an unrecognized character
		OpenBracket,      //"["
		CloseBracket,     //"]"
		OperatorAssign,   //"="
		OpenBrace,        //"{"
		CloseBrace,       //"}"
		KeywordBetween,   //"between"
		LiteralBool,      //"true" or "false"
		KeywordSet,       //"set"
		KeywordGlobal,    //"global"
		KeywordNull       //"null"
	}

	/// <summary>
	/// An immutable lexeme produced by <see cref="TemplateLexer"/>: its kind, the
	/// location in the source it came from, its text, and its dotted parts (for
	/// multipart identifiers such as obj.prop1.prop2).
	/// </summary>
	public class Token
	{
		internal readonly TokenType TokenType;
		internal readonly SourceLocation Location;
		internal readonly string Text;
		internal readonly string[] Parts;

		/// <summary>
		/// Creates a single-part token; Parts contains just the token's text.
		/// </summary>
		internal Token(SourceLocation loc, TokenType tokenType, string text)
			: this(loc, tokenType, text, new[] { text })
		{
		}

		/// <summary>
		/// Creates a token with an explicit part list (used for multipart identifiers).
		/// </summary>
		internal Token(SourceLocation loc, TokenType tokenType, string text, string[] parts)
		{
			this.TokenType = tokenType;
			this.Location = loc;
			this.Text = text;
			this.Parts = parts;
		}

		/// <summary>
		/// Converts this token into an Identifier carrying the same location, text and parts.
		/// </summary>
		internal Identifier ToIdentifier()
		{
			return new Identifier(this.Location, this.Text, this.Parts);
		}
	}

	/// <summary>
	/// Converts template source text into a stream of <see cref="Token"/>s.
	/// The lexer alternates between two states: verbatim text (copied through as
	/// Verbatim tokens) and statement text ("&lt;$ ... $&gt;" sections and
	/// "&lt;{ ... }&gt;" template bodies), switching state when the delimiter
	/// pairs are encountered.
	/// </summary>
	public class TemplateLexer
	{
		private const char CHAR_PAIR_OPEN = '<';
		private const char CHAR_PAIR_INNER = '$';
		private const char CHAR_PAIR_CLOSE = '>';

		//Maps keyword/literal words to their token types; any other word lexes as an Identifier.
		private static readonly Dictionary<string, TokenType> Keywords = new Dictionary<string, TokenType>
		{
			{ "if", TokenType.KeywordIf },
			{ "else", TokenType.KeywordElse },
			{ "endif", TokenType.KeywordEndIf },
			{ "foreach", TokenType.KeywordForEach },
			{ "in", TokenType.KeywordIn },
			{ "between", TokenType.KeywordBetween },
			{ "next", TokenType.KeywordNext },
			{ "lookup", TokenType.KeywordLookup },
			{ "default", TokenType.KeywordDefault },
			{ "true", TokenType.LiteralBool },
			{ "false", TokenType.LiteralBool },
			{ "set", TokenType.KeywordSet },
			{ "global", TokenType.KeywordGlobal },
			{ "null", TokenType.KeywordNull }
		};

		readonly InputReader _reader;
		readonly List<Token> _readAhead = new List<Token>();
		readonly TemplateErrorCollector _errorCollector;
		LexerState _state;
		//Where the current "<$ ... $>" statement group began; used for EOF diagnostics.
		SourceLocation _enteredLastStatementGroupAt;

		private enum LexerState
		{
			LexingVerbatimText,
			LexingStatement
		}

		internal TemplateLexer(InputReader reader, SourceErrorCollection sec)
		{
			_reader = reader;
			_errorCollector = new TemplateErrorCollector(sec);
			ChangeState(LexerState.LexingStatement);
		}

		/// <summary>
		/// True when the next token is EndOfInput or the underlying reader is exhausted.
		/// </summary>
		public bool Eof
		{
			get
			{
				return this.PeekToken().TokenType == TokenType.EndOfInput || _reader.Eof;
			}
		}

		/// <summary>
		/// Switches the lexer state, recording where a statement group begins so
		/// end-of-input errors can point back at it.
		/// </summary>
		private void ChangeState(LexerState ls)
		{
			_state = ls;

			if (ls == LexerState.LexingStatement)
				_enteredLastStatementGroupAt = _reader.CurrentLocation;
		}

		/// <summary>
		/// Returns the next token without consuming it.
		/// </summary>
		public Token PeekToken()
		{
			return PeekToken(0);
		}

		/// <summary>
		/// Returns the token <paramref name="lookAheadDepth"/> positions ahead
		/// (0 = next token) without consuming anything; extracted tokens are buffered.
		/// </summary>
		public Token PeekToken(int lookAheadDepth)
		{
			while (_readAhead.Count <= lookAheadDepth)
				_readAhead.Add(ExtractToken());

			return _readAhead[lookAheadDepth];
		}

		/// <summary>
		/// Consumes and returns the next token, draining the read-ahead buffer first.
		/// </summary>
		public Token NextToken()
		{
			if (_readAhead.Count == 0)
				return ExtractToken();

			Token retval = _readAhead[0];
			_readAhead.RemoveAt(0);
			return retval;
		}

		/// <summary>
		/// Appends letters, digits and underscores to <paramref name="build"/> until
		/// a character outside that set is reached.
		/// </summary>
		private void ReadAppendWord(StringBuilder build)
		{
			while (Char.IsLetterOrDigit(_reader.Peek()) || _reader.Peek() == '_')
				build.Append(_reader.Read());
		}

		/// <summary>
		/// True when the read-ahead buffer still holds a token other than EndOfInput.
		/// </summary>
		private bool HasBufferedNonEofToken()
		{
			foreach (Token t in _readAhead)
				if (t.TokenType != TokenType.EndOfInput)
					return true;

			return false;
		}

		/// <summary>
		/// Reports an unexpected end of input at <paramref name="loc"/> and aborts the
		/// parse, unless unconsumed tokens remain in the read-ahead buffer.
		/// </summary>
		public void FailIfEof(SourceLocation loc)
		{
			if (!_reader.Eof || HasBufferedNonEofToken())
				return;

			_errorCollector.AddUnexpectedEndOfInputWhileParsingSection(loc);
			throw new AbortParseException(_reader.CurrentLocation);
		}

		/// <summary>
		/// Reports an unexpected end of input and aborts the parse, unless unconsumed
		/// tokens remain. Inside a statement group the error points at the group's start.
		/// </summary>
		public void FailIfEof()
		{
			if (!_reader.Eof || HasBufferedNonEofToken())
				return;

			if (_state == LexerState.LexingStatement)
				_errorCollector.AddUnexpectedEndOfInputWhileParsingSection(_enteredLastStatementGroupAt);
			else
				_errorCollector.AddUnexpectedEndOfInput(_reader.CurrentLocation);

			throw new AbortParseException(_reader.CurrentLocation);
		}

		/// <summary>
		/// Extracts the next token directly from the reader (bypassing the read-ahead
		/// buffer), dispatching on the current lexer state.
		/// </summary>
		public Token ExtractToken()
		{
			Token retval;

			if (_reader.Eof)
				retval = new Token(_reader.CurrentLocation, TokenType.EndOfInput, Resources.MiscMessages.EndOfInput);
			else
			{
				switch (_state)
				{
					case LexerState.LexingVerbatimText:
						retval = LexVerbatimText();
						break;
					case LexerState.LexingStatement:
						retval = LexStatement();
						break;
					default:
						throw new UnhandledCaseSourceException(_reader.CurrentLocation);
				}
			}

			DebugAssert.IsFalse(_reader.Peek() == 0xFFFF && !_reader.Eof, "_reader.Peek() returned 0xFFFF yet _reader.Eof is false");
			DebugAssert.IsNotNull(retval, "retval cannot be null");
			return retval;
		}

		/// <summary>
		/// Reads verbatim template text until a statement begin ("&lt;$") or end of
		/// template ("}&gt;") is encountered, switching to statement state.
		/// </summary>
		private Token LexVerbatimText()
		{
			StringBuilder sb = new StringBuilder();
			SourceLocation startLocation = _reader.CurrentLocation;

			while (true)
			{
				if (_reader.Peek(0) == CHAR_PAIR_OPEN && _reader.Peek(1) == CHAR_PAIR_INNER)
				{
					ChangeState(LexerState.LexingStatement);
					//Eat the "<$"
					_reader.Read();
					_reader.Read();
					break;
				}

				if (_reader.Peek(0) == '}' && _reader.Peek(1) == CHAR_PAIR_CLOSE)
				{
					ChangeState(LexerState.LexingStatement);
					//Purposefully do NOT eat "}>" as it will be eaten in the LexingStatement state.
					break;
				}

				if (_reader.Eof)
				{
					_errorCollector.AddUnexpectedEndOfInputWhileParsingSection(startLocation);
					throw new AbortParseException(startLocation);
				}

				sb.Append(_reader.Read());
			}

			//Entirely skip empty verbatim sections; just lex the next token.
			if (sb.Length == 0)
				return ExtractToken();

			return new Token(startLocation, TokenType.Verbatim, sb.ToString());
		}

		/// <summary>
		/// Lexes one token of statement text: a keyword/identifier, a numeric literal,
		/// or punctuation/literals/delimiters.
		/// </summary>
		private Token LexStatement()
		{
			this.EatWhite();

			if (_reader.Eof)
				return new Token(_reader.CurrentLocation, TokenType.EndOfInput, Resources.MiscMessages.EndOfInput);

			//Capture the location BEFORE consuming any characters so every token
			//reports where it starts.
			SourceLocation startLocation = _reader.CurrentLocation;

			if (Char.IsLetter(_reader.Peek()))
				return LexWord(startLocation);

			if (Char.IsNumber(_reader.Peek(0)) || _reader.Peek(0) == '-')
				return LexNumber(startLocation);

			return LexPunctuation(startLocation);
		}

		/// <summary>
		/// Lexes a word starting with a letter: either a keyword/bool/null literal
		/// (per <see cref="Keywords"/>) or an identifier, possibly multipart
		/// (e.g. object.property1.property2, with whitespace allowed around the dots).
		/// </summary>
		private Token LexWord(SourceLocation startLocation)
		{
			StringBuilder sb = new StringBuilder();
			ReadAppendWord(sb);
			string word = sb.ToString();

			TokenType keywordType;
			if (Keywords.TryGetValue(word, out keywordType))
				return new Token(startLocation, keywordType, word);

			//The word read doesn't match any of the keywords, so it must be an identifier.
			this.EatWhite();

			//If the next character after any whitespace is not a dot, it's a plain identifier.
			if (_reader.Peek() != '.')
				return new Token(startLocation, TokenType.Identifier, word);

			//Lex a "multipart identifier", i.e. object.property1.property2
			List<string> parts = new List<string> { word };
			do
			{
				//Eat the dot and any whitespace before the next part.
				_reader.Read();
				this.EatWhite();

				//Read the next part into the (cleared) string builder.
				sb.Length = 0;
				ReadAppendWord(sb);
				parts.Add(sb.ToString());

				this.EatWhite();
				//If the next character after the whitespace is a dot, keep reading parts.
			} while (_reader.Peek() == '.');

			//Build a textual representation of the multipart identifier without any whitespace.
			sb.Length = 0;
			sb.Append(parts[0]);
			for (int i = 1; i < parts.Count; ++i)
			{
				sb.Append('.');
				sb.Append(parts[i]);
			}

			return new Token(startLocation, TokenType.Identifier, sb.ToString(), parts.ToArray());
		}

		/// <summary>
		/// Lexes a numeric literal (optionally starting with '-'); a decimal point
		/// distinguishes LiteralDouble from LiteralInt32.
		/// </summary>
		private Token LexNumber(SourceLocation startLocation)
		{
			StringBuilder sb = new StringBuilder();
			do
			{
				sb.Append(_reader.Read());
			} while (Char.IsNumber(_reader.Peek(0)) || _reader.Peek(0) == '.');

			string aNumber = sb.ToString();

			if (aNumber.IndexOf('.') < 0)
				return new Token(startLocation, TokenType.LiteralInt32, aNumber);

			return new Token(startLocation, TokenType.LiteralDouble, aNumber);
		}

		/// <summary>
		/// Lexes punctuation, char/string literals, and the template/statement
		/// delimiter pairs. BUG FIX: single-character tokens were previously built
		/// with the reader location AFTER the character was consumed; they now use
		/// <paramref name="startLocation"/> like every other token.
		/// </summary>
		private Token LexPunctuation(SourceLocation startLocation)
		{
			switch (_reader.Peek())
			{
				case '(':
					_reader.Read();
					return new Token(startLocation, TokenType.OpenParen, "(");
				case ')':
					_reader.Read();
					return new Token(startLocation, TokenType.CloseParen, ")");
				case '!':
					_reader.Read();
					return new Token(startLocation, TokenType.Bang, "!");
				case ',':
					_reader.Read();
					return new Token(startLocation, TokenType.Comma, ",");
				case '[':
					_reader.Read();
					return new Token(startLocation, TokenType.OpenBracket, "[");
				case ']':
					_reader.Read();
					return new Token(startLocation, TokenType.CloseBracket, "]");
				case '{':
					_reader.Read();
					return new Token(startLocation, TokenType.OpenBrace, "{");
				//CloseBrace lex'd below, in the default case...
				case '=':
					_reader.Read();
					return new Token(startLocation, TokenType.OperatorAssign, "=");
				case '\'':
					return LexCharLiteral(startLocation);
				case '"':
					return LexStringLiteral(startLocation);
				default:
					if (_reader.Peek(0) == '}')
					{
						if (_reader.Peek(1) == CHAR_PAIR_CLOSE)
						{
							_reader.Read();
							_reader.Read();
							return new Token(startLocation, TokenType.EndTemplate, "}>");
						}

						_reader.Read();
						return new Token(startLocation, TokenType.CloseBrace, "}");
					}

					if (_reader.Peek(0) == CHAR_PAIR_OPEN && _reader.Peek(1) == '{')
					{
						_reader.Read();
						_reader.Read();
						ChangeState(LexerState.LexingVerbatimText);
						return new Token(startLocation, TokenType.BeginTemplate, "<{");
					}

					if (_reader.Peek(0) == CHAR_PAIR_INNER && _reader.Peek(1) == CHAR_PAIR_CLOSE)
					{
						//"$>" closes the statement group; it produces no token of its
						//own, so switch back to verbatim text and lex the next token.
						_reader.Read();
						_reader.Read();
						ChangeState(LexerState.LexingVerbatimText);
						return NextToken();
					}

					_errorCollector.AddInvalidCharacter(_reader.CurrentLocation, _reader.Read());
					return new Token(startLocation, TokenType.InvalidCharacter, Resources.MiscMessages.InvalidCharacter);
			}
		}

		/// <summary>
		/// Lexes a single-quoted character literal; reports an error if it is not
		/// exactly one character long.
		/// </summary>
		private Token LexCharLiteral(SourceLocation startLocation)
		{
			//Eat the opening quote.
			_reader.Read();
			Token retval = new Token(startLocation, TokenType.LiteralChar, _reader.Read().ToString());
			if (_reader.Read() != '\'')
				_errorCollector.AddLiteralCharsMustBeOneCharLong(_reader.CurrentLocation);

			return retval;
		}

		/// <summary>
		/// Lexes a double-quoted string literal, processing \t \r \n \" \\ escape
		/// sequences. Reports an error on an embedded newline, and aborts the parse
		/// at end of input (BUG FIX: an unterminated literal previously looped forever).
		/// </summary>
		private Token LexStringLiteral(SourceLocation startLocation)
		{
			StringBuilder sb = new StringBuilder();

			//Eat the opening quote.
			_reader.Read();
			while (_reader.Peek() != '"')
			{
				if (_reader.Eof)
				{
					_errorCollector.AddUnexpectedEndOfInput(startLocation);
					throw new AbortParseException(startLocation);
				}

				if (_reader.Peek() == '\\')
					AppendEscapeSequence(sb);
				else
					sb.Append(_reader.Read());

				if (_reader.Peek() == '\n')
				{
					_errorCollector.AddNewLineInStringLiteral(_reader.CurrentLocation);
					break;
				}
			}
			//Eat the closing quote (or the newline that terminated the literal).
			_reader.Read();

			return new Token(startLocation, TokenType.LiteralString, sb.ToString());
		}

		/// <summary>
		/// Consumes a backslash escape sequence inside a string literal, appending the
		/// decoded character to <paramref name="sb"/>; unknown escapes are reported.
		/// </summary>
		private void AppendEscapeSequence(StringBuilder sb)
		{
			SourceLocation escapeSequenceLocation = _reader.CurrentLocation;
			//Eat the backslash.
			_reader.Read();
			char escapedChar = _reader.Read();
			switch (escapedChar)
			{
				case 't':
					sb.Append('\t');
					break;
				case 'r':
					sb.Append('\r');
					break;
				case 'n':
					sb.Append('\n');
					break;
				case '"':
					sb.Append('"');
					break;
				case '\\':
					sb.Append('\\');
					break;
				default:
					_errorCollector.AddInvalidEscapeSequence(escapeSequenceLocation, escapedChar);
					break;
			}
		}

		/// <summary>
		/// Skips whitespace and /* ... */ comments. BUG FIX: the comment-body loop
		/// previously exited at any lone '*' OR any lone '/' inside the comment
		/// (the two inequalities were joined with &amp;&amp;); it must stop only at
		/// the "*/" pair.
		/// </summary>
		private void EatWhite()
		{
			while (true)
			{
				if (_reader.PeekIsWhite())
				{
					_reader.Read();
					continue;
				}

				//Eat up comments too.
				if (_reader.Peek(0) == '/' && _reader.Peek(1) == '*')
				{
					//Eat the "/*"
					_reader.Read();
					_reader.Read();

					//Eat the text of the comment, stopping only at the closing "*/".
					while (!(_reader.Peek(0) == '*' && _reader.Peek(1) == '/'))
					{
						_reader.Read();
						if (_reader.Eof)
							throw new EofException(_reader.CurrentLocation);
					}

					//Eat the "*/"
					_reader.Read();
					_reader.Read();
					continue;
				}

				//Neither whitespace nor a comment: done.
				return;
			}
		}
	}
}
