using Collections = System.Collections.Generic;
using IO = System.IO;
using Text = System.Text;
using System;
using Microsoft.Scripting;
using Microsoft.Scripting.Hosting;

namespace Nua.Compiler
{
	// Converts program source text into a sequence of tokens
	public sealed class Scanner
	{
        private const int EOF = -1;                     // sentinel returned by TokenizerBuffer.Peek at end of input
        private const int DefaultBufferCapacity = 1024; // default TokenizerBuffer capacity passed to Initialize
        private SourceUnit _sourceUnit;                 // source currently being scanned
        private TokenizerBuffer _buffer;                // character buffer over the source reader
        private ErrorSink _errors;                      // destination for scan errors

	    private Token lookAhead; // NOTE(review): never read or written anywhere in this file — candidate for removal
	    private Token token;     // one-token lookahead cache shared by Peek() and Read()

		private bool pushBackDotDot; // whether there's a pending '..' in the input

		// Creates a scanner for the specified TextReader
        public Scanner(ErrorSink sink, SourceUnit sourceUnit)
        {
            this._errors = sink;
            this._sourceUnit = sourceUnit;
            
            if (_sourceUnit == null) throw new ArgumentNullException("sourceUnit");

            Initialize(null, _sourceUnit.GetReader(), SourceLocation.MinValue, DefaultBufferCapacity);
		}

        // Reserved overload: constructing a scanner without a SourceUnit is not
        // supported; use Scanner(ErrorSink, SourceUnit) or call Initialize.
        public Scanner(ErrorSink sink)
        {
            throw new NotImplementedException();
        }

	    // Re-initializes the scanner to read from the start of the given
	    // source unit, using the default buffer capacity.
	    public void Initialize(SourceUnit sourceUnit) 
        {
            if (sourceUnit == null)
            {
                throw new ArgumentNullException("sourceUnit");
            }

            this.Initialize(null, sourceUnit.GetReader(), SourceLocation.MinValue, DefaultBufferCapacity);
        }

        // Re-initializes the scanner with the default buffer capacity.
        public void Initialize(object state, SourceUnitReader sourceReader, SourceLocation initialLocation) 
        {
            this.Initialize(state, sourceReader, initialLocation, DefaultBufferCapacity);
        }

        // Re-initializes the scanner over the given reader, starting at
        // initialLocation with the given buffer capacity. The `state` argument
        // is currently unused (presumably reserved for tokenizer-service
        // restart state — TODO confirm).
        public void Initialize(object state, SourceUnitReader sourceReader, SourceLocation initialLocation, int bufferCapacity) 
        {
            if (sourceReader == null)
            {
                throw new ArgumentNullException("sourceReader");
            }

            _sourceUnit = sourceReader.SourceUnit;

            // TODO: we can reuse the buffer if there is enough free space:
            _buffer = new TokenizerBuffer(sourceReader, initialLocation, bufferCapacity, !_sourceUnit.DisableLineFeedLineSeparator);
        }

        // The source unit currently being scanned.
        public SourceUnit SourceUnit
        {
            get { return _sourceUnit; }
        }

        // The sink that receives scan errors; may not be set to null.
        public ErrorSink Errors
        {
            get
            {
                return _errors;
            }
            set
            {
                if (value == null)
                {
                    throw new ArgumentNullException("value");
                }

                _errors = value;
            }
        }

        // True when the buffer has no more characters to deliver.
        public bool IsEndOfFile
        {
            get
            {
                return _buffer.Peek() == EOF;
            }
        }

        // Start location of the token currently marked in the buffer.
        public SourceLocation TokenStart
        {
            get
            {
                return _buffer.TokenStart;
            }
        }

        // End location of the token currently marked in the buffer.
        public SourceLocation TokenEnd
        {
            get
            {
                return _buffer.TokenEnd;
            }
        }

        // Full span of the token currently marked in the buffer.
        public SourceSpan TokenSpan
        {
            get
            {
                return _buffer.TokenSpan;
            }
        }

        // Returns the next token without consuming it, filling the one-token
        // lookahead cache on first use.
        public Token Peek()
        {
            if (token.Kind != TokenKind.Nothing)
            {
                return token;
            }

            token = ReadNextToken();
            return token;
        }

        // Consumes and returns the next token, refilling the lookahead cache
        // so a subsequent Peek() sees the token after this one.
        public Token Read()
        {
            // Make sure the cache holds the token we are about to hand out.
            if (token.Kind == TokenKind.Nothing)
            {
                token = ReadNextToken();
            }

            Token current = token;
            token = ReadNextToken();
            return current;
        }

	    // Reads the next token and returns its kind as an unsigned integral
	    // value (tokenizer-service style entry point).
	    public ulong ReadToken() 
        {
            if (_buffer == null)
            {
                throw new InvalidOperationException("Uninitialized");
            }

            return (ulong)Read().Kind;
        }


		// Gets the next token
		// Produces the next token directly from the buffered source text,
		// skipping whitespace and comments. Returns an EndOfFile token when the
		// input is exhausted, and Error tokens (after reporting to the error
		// sink) for malformed input.
		private Token ReadNextToken()
		{
			if (this.pushBackDotDot)
			{
				// Number parsing encountered '..' (e.g. "3..7") and had to
				// consume it; emit the deferred DotDot token now.
				this.pushBackDotDot = false;
				return this.MakeToken(TokenKind.DotDot, false);
			}

			// Skip white space
			while (_buffer.Peek() != EOF && char.IsWhiteSpace((char)_buffer.Peek()))
			{
				_buffer.Read();
				_buffer.DiscardToken();
			}

			if (_buffer.Peek() == EOF)
			{
				// No more characters, so return EOF
				return this.MakeToken(TokenKind.EndOfFile, true);
			}

			char next = (char)_buffer.Peek();

			if (char.IsLetter(next) || next == '_')
			{
				return this.ScanKeywordOrIdentifier();
			}

			if (next == '\'' || next == '\"')
			{
				return this.ScanStringLiteral();
			}

			if (char.IsDigit(next))
			{
				return this.ScanNumericLiteral();
			}

			switch (next)
			{
				case '[':
					// '[' may start a long literal string ("[[") or stand alone.
					_buffer.Read();
					if (_buffer.Peek() == '[')
					{
						return this.ScanLongStringLiteral();
					}
					return this.MakeToken(TokenKind.LeftSquare, false);

				case '-':
					// '-' may stand alone or, doubled, start a comment.
					_buffer.Read();
					if (_buffer.Peek() == '-')
					{
						// Skip the second '-' and the comment body, then drop
						// the consumed comment text from the pending token.
						_buffer.Read();
						this.SkipComment();
						_buffer.DiscardToken();
						// BUG FIX: this previously called this.Read(), which
						// consults the one-token lookahead cache and could
						// return a stale (already-delivered) token, duplicating
						// the token that preceded the comment. Recurse into
						// ReadNextToken to scan the token after the comment.
						return this.ReadNextToken();
					}
					return this.MakeToken(TokenKind.Minus, false);

				case '~':
					// The only token starting with '~' is '~='.
					_buffer.Read();
					if (_buffer.Peek() == '=')
					{
						_buffer.Read();
						return this.MakeToken(TokenKind.TildeEquals, false);
					}
					// Report error and continue
					_errors.Add(_sourceUnit, "expected ~=", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
					return this.MakeToken(TokenKind.Error, false);

				case '<':
				case '>':
				case '=':
				{
					// Any of '<', '>' and '=' can appear on their own, or be
					// followed by '=' (<=, >=, ==).
					_buffer.Read();

					bool hasEquals = _buffer.Peek() == '=';
					if (hasEquals)
					{
						_buffer.Read();
					}

					TokenKind kind;
					if (next == '<')
					{
						kind = hasEquals ? TokenKind.LessEquals : TokenKind.Less;
					}
					else if (next == '>')
					{
						kind = hasEquals ? TokenKind.GreaterEquals : TokenKind.Greater;
					}
					else
					{
						kind = hasEquals ? TokenKind.EqualsEquals : TokenKind.Equals;
					}

					return this.MakeToken(kind, false);
				}

				case '.':
					// May be '.', '..' or '...'.
					_buffer.Read();
					if (_buffer.Peek() != '.')
					{
						return this.MakeToken(TokenKind.Dot, false);
					}
					_buffer.Read();
					if (_buffer.Peek() != '.')
					{
						return this.MakeToken(TokenKind.DotDot, false);
					}
					_buffer.Read();
					return this.MakeToken(TokenKind.DotDotDot, false);

				// Miscellaneous single-character punctuation.
				// Note: '[' is handled above because of long strings.
				case '+':
					_buffer.Read();
					return this.MakeToken(TokenKind.Plus, false);

				case '*':
					_buffer.Read();
					return this.MakeToken(TokenKind.Star, false);

				case '/':
					_buffer.Read();
					return this.MakeToken(TokenKind.Slash, false);

				case '^':
					_buffer.Read();
					return this.MakeToken(TokenKind.Caret, false);

				case '(':
					_buffer.Read();
					return this.MakeToken(TokenKind.LeftRound, false);

				case ')':
					_buffer.Read();
					return this.MakeToken(TokenKind.RightRound, false);

				case '{':
					_buffer.Read();
					return this.MakeToken(TokenKind.LeftCurly, false);

				case '}':
					_buffer.Read();
					return this.MakeToken(TokenKind.RightCurly, false);

				case ']':
					_buffer.Read();
					return this.MakeToken(TokenKind.RightSquare, false);

				case ';':
					_buffer.Read();
					return this.MakeToken(TokenKind.Semi, false);

				case ':':
					_buffer.Read();
					return this.MakeToken(TokenKind.Colon, false);

				case ',':
					_buffer.Read();
					return this.MakeToken(TokenKind.Comma, false);

				default:
					// Invalid character! Report an error and continue.
					_errors.Add(_sourceUnit, "invalid character '" + next + "'", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
					// BUG FIX: consume the offending character *before* marking
					// the token end so the Error token's span covers it
					// (previously the span was marked first and excluded it).
					_buffer.Read();
					return this.MakeToken(TokenKind.Error, false);
			}
		}

		// Finalizes the token currently accumulated in the buffer: marks its
		// end (isEndOfFile selects the TokenizerBuffer EOF marking mode),
		// builds a value-less Token of the given kind over the buffered span,
		// and resets the buffer for the next token.
		private Token MakeToken(TokenKind kind, bool isEndOfFile)
		{
			_buffer.MarkTokenEnd(isEndOfFile);
			Token result = new Token(kind, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), null);
			_buffer.DiscardToken();
			return result;
		}

		// Scans names and classifies them as keywords or identifiers
		// Scans a name and classifies it as a keyword token or a plain
		// identifier. Keyword tokens reuse the identifier's span; only the
		// boolean literals carry a value (true/false), all other keywords are
		// value-less.
		//
		// NOTE: ScanIdentifier has already called MarkTokenEnd/DiscardToken, so
		// the buffer is finalized when we get here. The original repeated a
		// redundant MarkTokenEnd(false)/DiscardToken pair for every keyword,
		// re-marking an empty token; those no-op calls have been removed.
		public Token ScanKeywordOrIdentifier()
		{
			Token identifier = this.ScanIdentifier();

			switch ((string)identifier.Value)
			{
				case "and":      return new Token(TokenKind.And, identifier.Span, null);
				case "break":    return new Token(TokenKind.Break, identifier.Span, null);
				case "do":       return new Token(TokenKind.Do, identifier.Span, null);
				case "else":     return new Token(TokenKind.Else, identifier.Span, null);
				case "elseif":   return new Token(TokenKind.ElseIf, identifier.Span, null);
				case "end":      return new Token(TokenKind.End, identifier.Span, null);
				case "false":    return new Token(TokenKind.False, identifier.Span, false);
				case "for":      return new Token(TokenKind.For, identifier.Span, null);
				case "function": return new Token(TokenKind.Function, identifier.Span, null);
				case "if":       return new Token(TokenKind.If, identifier.Span, null);
				case "in":       return new Token(TokenKind.In, identifier.Span, null);
				case "local":    return new Token(TokenKind.Local, identifier.Span, null);
				case "nil":      return new Token(TokenKind.Nil, identifier.Span, null);
				case "not":      return new Token(TokenKind.Not, identifier.Span, null);
				case "or":       return new Token(TokenKind.Or, identifier.Span, null);
				case "repeat":   return new Token(TokenKind.Repeat, identifier.Span, null);
				case "return":   return new Token(TokenKind.Return, identifier.Span, null);
				case "then":     return new Token(TokenKind.Then, identifier.Span, null);
				case "true":     return new Token(TokenKind.True, identifier.Span, true);
				case "until":    return new Token(TokenKind.Until, identifier.Span, null);
				case "while":    return new Token(TokenKind.While, identifier.Span, null);

				default:
					// Not a keyword: return the identifier token unchanged.
					return identifier;
			}
		}

		// Scans a name and returns it as an identifier
		// Scans a run of letters, digits and underscores and returns it as an
		// Identifier token whose value is the name text. The caller has already
		// verified that the first character is a letter or underscore.
		public Token ScanIdentifier()
		{
			Text.StringBuilder name = new Text.StringBuilder();

			int peeked;
			while ((peeked = _buffer.Peek()) != -1)
			{
				char ch = (char)peeked;

				if (!char.IsLetterOrDigit(ch) && ch != '_')
				{
					break;
				}

				name.Append((char)_buffer.Read());
			}

			_buffer.MarkTokenEnd(false);

			Token result = new Token(TokenKind.Identifier, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), name.ToString());
			_buffer.DiscardToken();
			return result;
		}

		// Scans a simple string literal (i.e. one quoted with ' or ")
		// Scans a short string literal delimited by ' or ". Escape sequences
		// are expanded; the literal may not span a line break.
		public Token ScanStringLiteral()
		{
			// Read has already ensured the next character is ' or ".
			char delimiter = (char)_buffer.Read();
			Text.StringBuilder text = new Text.StringBuilder();

			for (; ; )
			{
				int peeked = _buffer.Peek();

				if (peeked == -1 || peeked == '\r' || peeked == '\n')
				{
					// Error, unterminated string literal
					_errors.Add(_sourceUnit, "unterminated string literal", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
					_buffer.MarkTokenEnd(false);
					Token error = new Token(TokenKind.Error, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), null);
					_buffer.DiscardToken();
					return error;
				}

				if (peeked == '\\')
				{
					// Escape sequence
					text.Append(this.ScanEscapeSequence());
					continue;
				}

				char ch = (char)_buffer.Read();

				if (ch == delimiter)
				{
					// Matching close quote terminates the literal; the other
					// quote character is just ordinary content.
					break;
				}

				text.Append(ch);
			}

			_buffer.MarkTokenEnd(false);
			Token result = new Token(TokenKind.StringLiteral, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), text.ToString());
			_buffer.DiscardToken();
			return result;
		}

		// Scans an escape sequence within a string literal
		// Scans an escape sequence within a string literal and returns the
		// character(s) it denotes. The caller has already verified that the
		// next character is '\'. Unrecognized escapes are reported to the error
		// sink and passed through verbatim.
		public string ScanEscapeSequence()
		{
			// skip \, caller has already ensured next character is \
			_buffer.Read();

			switch (_buffer.Peek())
			{
				case 'a':
					_buffer.Read();
					return "\a";

				case 'b':
					_buffer.Read();
					return "\b";

				case 'f':
					_buffer.Read();
					return "\f";

				case 'n':
					_buffer.Read();
					return "\n";

				case 'r':
					_buffer.Read();
					return "\r";

				case 't':
					_buffer.Read();
					return "\t";

				case 'v':
					_buffer.Read();
					return "\v";

				case '\\':
					_buffer.Read();
					return "\\";

				case '\"':
					_buffer.Read();
					return "\"";

				case '\'':
					_buffer.Read();
					return "'";

				case '[':
					_buffer.Read();
					return "[";

				case ']':
					_buffer.Read();
					return "]";

				case '\r':
				case '\n':
					// An escaped line break embeds the newline in the string.
					char ch = (char)_buffer.Read();

					if (ch == '\r' && _buffer.Peek() == '\n')
					{
						_buffer.Read();
						return "\r\n";
					}
					else
					{
						return new string(ch, 1);
					}

				default:
					if (_buffer.Peek() != -1 && char.IsDigit((char)_buffer.Peek()))
					{
						// Numeric escape: 1-3 decimal digits giving a character code.
						int n = 0;

						// BUG FIX: the loop previously ran while i <= 3 and so
						// consumed up to FOUR digits; the escape allows at most three.
						for (int i = 0; i < 3 && _buffer.Peek() != -1 && char.IsDigit((char)_buffer.Peek()); i++)
						{
							n = n * 10 + _buffer.Read() - '0';
						}

						// NOTE(review): codes above 255 are not range-checked here —
						// confirm whether they should be reported as errors.
						return new string((char)n, 1);
					}
					else
					{
						// Unrecognized escape sequence.
						// BUG FIX: capture the offending character before consuming
						// it; the return value previously used Peek() *after* the
						// Read() and so echoed the character following the escape.
						char unrecognized = (char)_buffer.Peek();
						_errors.Add(_sourceUnit, "unrecognized escape sequence '\\" + unrecognized + "'", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
						_buffer.Read();
						return "\\" + unrecognized;
					}
			}
		}

		// Scans a Lua "here document"; these can contain nested pairs of [[s
		// Scans a Lua "here document" long string literal; the opening "[[" has
		// been detected and its first '[' consumed by the caller. These can
		// contain nested pairs of [[s.
		public Token ScanLongStringLiteral()
		{
			// Skip second '['
			_buffer.Read();

			// A newline (\r, \n or \r\n) immediately after the opening [[ is
			// not part of the literal's content and is skipped.
			if (_buffer.Peek() == '\r')
			{
				// just \r so far
				_buffer.Read();

				if (_buffer.Peek() == '\n')
				{
					// \r\n
					_buffer.Read();
				}
			}
			else if (_buffer.Peek() == '\n')
			{
				// just \n
				_buffer.Read();
			}

			// BUG FIX: scan the body *before* marking the token end. C# evaluates
			// constructor arguments left to right, so the original computed the
			// SourceSpan (and called MarkTokenEnd) before ScanLongStringLiteralBody
			// ran, leaving the token's span excluding the entire literal body.
			string body = this.ScanLongStringLiteralBody();

			_buffer.MarkTokenEnd(false);
			Token result = new Token(TokenKind.StringLiteral, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), body);
			_buffer.DiscardToken();
			return result;
		}

		// Scans a Lua "here document" at a given nesting of brackets
		// Scans the body of a long string literal, tracking nested [[ ]] pairs.
		// Consumes the closing ]] (which is not part of the content) and
		// returns the accumulated text; reports an error if input runs out
		// while the literal is still open.
		public string ScanLongStringLiteralBody()
		{
			Text.StringBuilder content = new Text.StringBuilder();
			int nesting = 1; // open [[ pairs not yet matched by ]]

			while (nesting > 0)
			{
				int peeked = _buffer.Peek();

				if (peeked == -1)
				{
					// Unterminated string literal
					_errors.Add(_sourceUnit, "unterminated string literal", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
					break;
				}

				if (peeked == '[')
				{
					_buffer.Read();

					if (_buffer.Peek() == '[')
					{
						_buffer.Read();
						nesting++;
						content.Append("[[");
					}
					else
					{
						content.Append('[');
					}
				}
				else if (peeked == ']')
				{
					_buffer.Read();

					if (_buffer.Peek() == ']')
					{
						_buffer.Read();
						nesting--;

						// The outermost ]] is the delimiter, not content.
						if (nesting > 0)
						{
							content.Append("]]");
						}
					}
					else
					{
						content.Append(']');
					}
				}
				else
				{
					content.Append((char)_buffer.Read());
				}
			}

			return content.ToString();
		}

		// Scans a numeric literal: digits, an optional fractional part and an
		// optional exponent. If the dot after the integer part turns out to be
		// the '..' range operator (e.g. "3..7"), the operator is remembered via
		// pushBackDotDot and emitted by the next ReadNextToken call.
		public Token ScanNumericLiteral()
		{
			Text.StringBuilder accum = new Text.StringBuilder();

			this.ScanDigits(accum);

			// optional decimal part
			if (_buffer.Peek() == '.')
			{
				_buffer.Read();

				if (_buffer.Peek() == '.')
				{
					// have encountered a range, like 3..7
					_buffer.Read();
					this.pushBackDotDot = true;
				}
				else
				{
					accum.Append('.');
					this.ScanDigits(accum);
				}
			}

			// optional exponent part
			if (_buffer.Peek() == 'E' || _buffer.Peek() == 'e')
			{
				accum.Append((char)_buffer.Read());

				// BUG FIX: also accept an explicit '+' sign (e.g. "1e+5");
				// previously only a '-' exponent sign was consumed, so "1e+5"
				// raised "expected digits" and misparsed.
				if (_buffer.Peek() == '-' || _buffer.Peek() == '+')
				{
					accum.Append((char)_buffer.Read());
				}

				this.ScanDigits(accum);
			}

            _buffer.MarkTokenEnd(false);
			// BUG FIX: parse with the invariant culture; the culture-sensitive
			// double.Parse overload misreads "1.5" under locales that use a
			// comma as the decimal separator.
			Token tt = new Token(TokenKind.NumericLiteral, new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), double.Parse(accum.ToString(), System.Globalization.CultureInfo.InvariantCulture));
            _buffer.DiscardToken();
			return tt;
		}

		// Expects to find digits in the input and appends them to accum
		// Appends a run of decimal digits from the input to accum. When no
		// digit is present (e.g. "3." with nothing after the dot), reports an
		// error and appends "0" as a placeholder so the literal still parses.
		public void ScanDigits(Text.StringBuilder accum)
		{
			int first = _buffer.Peek();

			if (first == -1 || !char.IsDigit((char)first))
			{
				_errors.Add(_sourceUnit, "expected digits", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
				accum.Append("0");
				return;
			}

			int peeked;
			while ((peeked = _buffer.Peek()) != -1 && char.IsDigit((char)peeked))
			{
				accum.Append((char)_buffer.Read());
			}
		}

		// Skips a comment; the leading "--" has already been consumed. A
		// following "[[" makes it a long (possibly nested) comment, otherwise
		// the comment runs to the end of the current line.
		public void SkipComment()
		{
			bool isLong = false;

			// Determine (destructively) whether this is a long or short comment
			if (_buffer.Peek() == '[')
			{
				_buffer.Read();

				if (_buffer.Peek() == '[')
				{
					_buffer.Read();
					isLong = true;
				}
			}

			if (!isLong)
			{
				// Short comment: skip to the end of the line.
				int peeked;
				while ((peeked = _buffer.Peek()) != -1 && peeked != '\r' && peeked != '\n')
				{
					_buffer.Read();
				}
				return;
			}

			// Long comment: skip to the matching ]], honoring nested [[ ]] pairs.
			int depth = 1;

			while (depth > 0)
			{
				switch (_buffer.Peek())
				{
					case -1:
						// Unterminated comment
						_errors.Add(_sourceUnit, "unterminated comment", new SourceSpan(_buffer.TokenStart, _buffer.TokenEnd), 0, Severity.FatalError);
						return;

					case '[':
						_buffer.Read();

						if (_buffer.Peek() == '[')
						{
							_buffer.Read();
							depth++;
						}

						break;

					case ']':
						_buffer.Read();

						if (_buffer.Peek() == ']')
						{
							_buffer.Read();
							depth--;
						}

						break;

					default:
						_buffer.Read();
						break;
				}
			}
		}
	}
}
