package jp.millibit.json
{
	import flash.utils.Dictionary;

	/**
	 * Splits a JSON source string into a stream of tokens for the parser.
	 *
	 * Token-type constants (BEGIN_ARRAY, STRING, NUMBER, ...) are pulled in
	 * from the included "tt.as" file.
	 */
	public class Tokenizer
	{
		include "tt.as";

		// Token type -> Pattern used to recognize it (built once by init()).
		private static var _patterns:Dictionary;

		// Pattern recognizing a run of insignificant whitespace.
		private static var _ws:Pattern;

		/**
		 * Builds the shared token patterns. Called once at class-load time.
		 */
		internal static function init():void
		{
			var p:Dictionary = new Dictionary();
			p[BEGIN_ARRAY] = new Pattern("[");
			p[BEGIN_OBJECT] = new Pattern("{");
			p[END_ARRAY] = new Pattern("]");
			p[END_OBJECT] = new Pattern("}");
			p[FALSE] = new Pattern("false");
			p[NAME_SEPARATOR] = new Pattern(":");
			p[NULL] = new Pattern("null");
			p[TRUE] = new Pattern("true");
			p[VALUE_SEPARATOR] = new Pattern(",");

			// RFC 8259 number: integer part with no leading zeros, optional
			// fraction, optional exponent. The previous pattern used a
			// possessive "++" quantifier (not part of the ECMAScript-style
			// RegExp AS3 supports) and ordered its alternatives so that
			// "1.5e3" matched only "1.5", leaving "e3" in the buffer and
			// triggering a spurious syntax error. A number can only start
			// with '-' or a digit, so the prefix set is narrowed accordingly.
			p[NUMBER] = new Pattern(null, /^-?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+\-]?[0-9]+)?/, "-0123456789");

			// JSON string: quote, then escape sequences or any unescaped
			// character (0x20-0x21, 0x23-0x5B, 0x5D and above), then quote.
			var escapeChar:String = "(\\\\([\"/nbftr\\\\]|u[a-fA-F0-9]{4}))";
			var unescapeChar:String = "([ !#-\\[\\]-" + String.fromCharCode(0xffff) + "])";
			var character:String = "(" + escapeChar + "|" + unescapeChar + ")";
			var quote:String = "\"";
			p[STRING] = new Pattern(null, new RegExp("^" + quote + character + "*" + quote), "\"");

			_patterns = p;
			_ws = new Pattern(null, /^[ \t\n\r]+/, " \t\r\n");
		}

		// 1-based character offset into the input. NOTE(review): it is never
		// reset at newlines, so for multi-line input this is an absolute
		// position rather than a true column -- confirm intended.
		private var _column:int;

		// Remaining, not-yet-consumed portion of the input.
		private var _buf:String;

		// Length of _buf, cached to avoid repeated length lookups.
		private var _len:int;

		/**
		 * @param json the JSON text to tokenize.
		 */
		public function Tokenizer(json:String)
		{
			_buf = json;
			_column = 1;
			_len = _buf.length;
		}

		/**
		 * Current position in the input (see note on _column).
		 */
		public function get column():int
		{
			return _column;
		}

		/**
		 * Wraps a message into a JsonError carrying the current position.
		 */
		public function error(message:String):JsonError
		{
			return new JsonError(message, column);
		}

		/**
		 * Consumes and returns the next token, which must be one of the
		 * given types.
		 *
		 * @param tokenTypes token-type constants acceptable at this point.
		 * @return the matched Token, or null at end of input.
		 * @throws JsonError when none of the expected token types matches.
		 */
		public function next(tokenTypes:Array):Token
		{
			var result:Object,
				chars:String,
				len:int,
				value:String,
				pattern:Pattern,
				ch:String,
				type:int;

			// Skip leading whitespace iteratively (the previous version
			// recursed, which could overflow on pathological inputs).
			while (_len > 0)
			{
				ch = _buf.charAt(0);
				if (!_ws.pre.hasOwnProperty(ch))
				{
					break;
				}
				result = _ws.pattern.exec(_buf);
				len = result[0].length;
				_buf = _buf.substring(len);
				_len -= len;
				_column += len;
			}

			if (_len === 0)
			{
				return null;
			}

			for each (type in tokenTypes)
			{
				pattern = _patterns[type];
				if (pattern.regexp)
				{
					// Cheap first-character screen before running the regexp.
					if (!pattern.pre.hasOwnProperty(ch))
					{
						continue;
					}
					result = pattern.pattern.exec(_buf);
					if (result === null)
					{
						continue;
					}

					value = result[0];
					len = value.length;
				}
				else
				{
					// Fixed literal: compare the first character, then (for
					// multi-character literals) the whole prefix.
					chars = pattern.chars;
					len = pattern.len;
					if (len === 1 && ch !== chars)
					{
						continue;
					}
					else if (_buf.indexOf(chars) !== 0)
					{
						continue;
					}

					value = chars;
				}

				_buf = _buf.substring(len);
				_len -= len;
				_column += len;

				return new Token(type, value);
			}

			throw error("Invalid JSON format.");
		}
	}

	// Build the shared token patterns once; package-body statements like
	// this run when the class is first initialized by the runtime.
	Tokenizer.init();
}

/**
 * A token-matching rule: either a fixed literal string or a regular
 * expression, plus an optional set of characters a match may begin with
 * (used as a cheap screen before running the regexp).
 */
class Pattern
{

	// True when this rule matches via a regular expression.
	public var regexp:Boolean;

	// The regular expression, for regexp rules.
	public var pattern:RegExp;

	// The fixed literal, for literal rules.
	public var chars:String;

	// Lookup table of permitted first characters (keys only; values unused).
	public var pre:Object;

	// Length of the fixed literal; 0 when there is none.
	public var len:int;

	public function Pattern(chars:String, pattern:RegExp = null, pre:String = null)
	{
		this.chars = chars;
		this.pattern = pattern;
		this.regexp = pattern is RegExp;
		this.len = chars ? chars.length : 0;

		if (pre !== null)
		{
			var table:Object = {};
			// Walk the prefix string backwards; order of insertion is
			// irrelevant for a membership table.
			for (var idx:int = pre.length - 1; idx >= 0; idx--)
			{
				table[pre.charAt(idx)] = null;
			}
			this.pre = table;
		}
	}
}
