extends Object

# State filled in by parse(): the decoded sections of a compiled
# GDScript (.gdc) file.
var identifiers = []
var constants = []
var lines = {}
var tokens = []
var path

# Variant binary-serialization constants, mirroring Godot's core/io/marshalls.cpp.
const ENCODE_MASK = 0xFF
const ENCODE_FLAG_64 = 1 << 16
# NOTE: intentionally the same bit as ENCODE_FLAG_64 -- Godot reuses bit 16 for
# TYPE_OBJECT "encoded as instance id" (see marshalls.cpp).
const ENCODE_FLAG_OBJECT_AS_ID = 1 << 16

# Tokenizer-buffer layout constants, mirroring Godot's gdscript_tokenizer.
enum TOKEN{
	TOKEN_BYTE_MASK = 0x80,
	TOKEN_BITS = 8,
	TOKEN_MASK = (1 << 8) - 1,
	TOKEN_LINE_BITS = 24,
	# Fixed: was (1 << 8) - 1, which contradicted TOKEN_LINE_BITS; Godot uses
	# (1 << TOKEN_LINE_BITS) - 1.
	TOKEN_LINE_MASK = (1 << 24) - 1,
}

# Bytecode-version table: [decompiler script, human-readable description,
# bytecode version]. Ordered newest-first; each entry marks the commit that
# changed the GDScript token/function set. The GDScriptDecomp_* symbols are
# assumed to be declared elsewhere in this project -- TODO confirm.
const GDScriptDecompVersion = [
	[GDScriptDecomp_f3f05dc, "     4.0 dev (f3f05dc / 2020-02-13 / Bytecode version: 13) - removed `SYNC` and `SLAVE` tokens",13],
	[GDScriptDecomp_506df14, "     4.0 dev (506df14 / 2020-02-12 / Bytecode version: 13) - removed `decimals` function" ,13],
	[GDScriptDecomp_5565f55, "3.2.0 release (5565f55 / 2019-08-26 / Bytecode version: 13) - added `ord` function" ,13],
	[GDScriptDecomp_6694c11, "     3.2 dev (6694c11 / 2019-07-20 / Bytecode version: 13) - added `lerp_angle` function" ,13],
	[GDScriptDecomp_a60f242, "     3.2 dev (a60f242 / 2019-07-19 / Bytecode version: 13) - added `posmod` function" ,13],
	[GDScriptDecomp_c00427a, "     3.2 dev (c00427a / 2019-06-01 / Bytecode version: 13) - added `move_toward` function" ,13],
	[GDScriptDecomp_620ec47, "     3.2 dev (620ec47 / 2019-05-01 / Bytecode version: 13) - added `step_decimals` function" ,13],
	[GDScriptDecomp_7f7d97f, "     3.2 dev (7f7d97f / 2019-04-29 / Bytecode version: 13) - added `is_equal_approx` and `is_zero_approx` functions" ,13],
	[GDScriptDecomp_514a3fb, "3.1.1 release (514a3fb / 2019-03-19 / Bytecode version: 13) - added `smoothstep` function" ,13],
	[GDScriptDecomp_1a36141, "3.1.0 release (1a36141 / 2019-02-20 / Bytecode version: 13) - removed `DO`, `CASE`, `SWITCH` tokens" ,13],
	[GDScriptDecomp_1ca61a3, "     3.1 beta 1 - beta 5 (1ca61a3 / 2018-10-31 / Bytecode version: 13) - added `push_error`, `push_warning` function" ,13],
	[GDScriptDecomp_d6b31da, "     3.1 dev (d6b31da / 2018-09-15 / Bytecode version: 13) - added `PUPPET` token, token `SLAVESYNC` renamed to `PUPPETSYNC`" ,13],
	[GDScriptDecomp_8aab9a0, "     3.1 dev (8aab9a0 / 2018-07-20 / Bytecode version: 13) - added `AS`, `VOID`, `FORWARD_ARROW` tokens" ,13],
	[GDScriptDecomp_a3f1ee5, "     3.1 dev (a3f1ee5 / 2018-07-15 / Bytecode version: 13) - added `CLASS_NAME` token" ,13],
	[GDScriptDecomp_8e35d93, "     3.1 dev (8e35d93 / 2018-05-29 / Bytecode version: 12) - added `REMOTESYNC`, `MASTERSYNC`, `SLAVESYNC` tokens" ,12],
	[GDScriptDecomp_3ea6d9f, "     3.1 dev (3ea6d9f / 2018-05-28 / Bytecode version: 12) - added `print_debug` function" ,12],
	[GDScriptDecomp_a56d6ff, "     3.1 dev (a56d6ff / 2018-05-17 / Bytecode version: 12) - added `get_stack` function" ,12],
	[GDScriptDecomp_ff1e7cf, "     3.1 dev (ff1e7cf / 2018-05-07 / Bytecode version: 12) - added `is_instance_valid` function" ,12],
	[GDScriptDecomp_054a2ac, "3.0.0 - 3.0.6 release (054a2ac / 2017-11-20 / Bytecode version: 12) - added `polar2cartesian`, `cartesian2polar` functions" ,12],
	[GDScriptDecomp_91ca725, "     3.0 dev (91ca725 / 2017-11-12 / Bytecode version: 12) - added `CONST_TAU` token" ,12],
	[GDScriptDecomp_216a8aa, "     3.0 dev (216a8aa / 2017-10-13 / Bytecode version: 12) - added `wrapi`, `wrapf` functions" ,12],
	[GDScriptDecomp_d28da86, "     3.0 dev (d28da86 / 2017-08-18 / Bytecode version: 12) - added `inverse_lerp`, `range_lerp` functions" ,12],
	[GDScriptDecomp_c6120e7, "     3.0 dev (c6120e7 / 2017-08-07 / Bytecode version: 12) - added `len` function" ,12],
	[GDScriptDecomp_015d36d, "     3.0 dev (015d36d / 2017-05-27 / Bytecode version: 12) - added `IS` token" ,12],
	[GDScriptDecomp_5e938f0, "     3.0 dev (5e938f0 / 2017-02-28 / Bytecode version: 12) - added `CONST_INF`, `CONST_NAN` tokens" ,12],
	[GDScriptDecomp_c24c739, "     3.0 dev (c24c739 / 2017-01-20 / Bytecode version: 12) - added `WILDCARD` token" ,12],
	[GDScriptDecomp_f8a7c46, "     3.0 dev (f8a7c46 / 2017-01-11 / Bytecode version: 12) - added `MATCH` token" ,12],
	[GDScriptDecomp_62273e5, "     3.0 dev (62273e5 / 2017-01-08 / Bytecode version: 12) - added `validate_json`, `parse_json`, `to_json` function" ,12],
	[GDScriptDecomp_8b912d1, "     3.0 dev (8b912d1 / 2017-01-08 / Bytecode version: 11) - added `DOLLAR` token" ,11],
	[GDScriptDecomp_23381a5, "     3.0 dev (23381a5 / 2016-12-17 / Bytecode version: 11) - added `ColorN` function" ,11],
	[GDScriptDecomp_513c026, "     3.0 dev (513c026 / 2016-10-03 / Bytecode version: 11) - added `char` function" ,11],
	[GDScriptDecomp_4ee82a2, "     3.0 dev (4ee82a2 / 2016-08-27 / Bytecode version: 11) - added `ENUM` token" ,11],
	[GDScriptDecomp_1add52b, "     3.0 dev (1add52b / 2016-08-19 / Bytecode version: 11) - added `REMOTE`, `SYNC`, `MASTER`, `SLAVE` tokens" ,11],
	[GDScriptDecomp_ed80f45, "2.1.3 - 2.1.6 release (ed80f45 / 2017-04-06 / Bytecode version: 10) - added `ENUM` token (backport)" ,10],
	[GDScriptDecomp_85585c7, "2.1.2 release (85585c7 / 2017-01-12 / Bytecode version: 10) - added `ColorN` function (backport)" ,10],
	[GDScriptDecomp_7124599, "2.1.0 - 2.1.1 release (7124599 / 2016-06-18 / Bytecode version: 10) - added `type_exists` function" ,10],
	[GDScriptDecomp_23441ec, "2.0.0 - 2.0.4-1 release (23441ec / 2016-01-02 / Bytecode version: 10) - added `var2bytes`, `bytes2var` functions" ,10],
	[GDScriptDecomp_6174585, "     2.0 dev (6174585 / 2016-01-02 / Bytecode version: 9) - added `CONST_PI` token" ,9],
	[GDScriptDecomp_64872ca, "     2.0 dev (64872ca / 2015-12-31 / Bytecode version: 8) - added `Color8` function" ,8],
	[GDScriptDecomp_7d2d144, "     2.0 dev (7d2d144 / 2015-12-29 / Bytecode version: 7) - added `BREAKPOINT` token" ,7],
	[GDScriptDecomp_30c1229, "     2.0 dev (30c1229 / 2015-12-28 / Bytecode version: 6) - added `ONREADY` token" ,6],
	[GDScriptDecomp_48f1d02, "     2.0 dev (48f1d02 / 2015-06-24 / Bytecode version: 5) - added `SIGNAL` token" ,5],
	[GDScriptDecomp_65d48d6, "1.1.0 release (65d48d6 / 2015-05-09 / Bytecode version: 4) - added `prints` function" ,4],
	[GDScriptDecomp_be46be7, "     1.1 dev (be46be7 / 2015-04-18 / Bytecode version: 3) - function `get_inst` renamed to `instance_from_id`" ,3],
	[GDScriptDecomp_97f34a1, "     1.1 dev (97f34a1 / 2015-03-25 / Bytecode version: 3) - added `seed`, `get_inst` function" ,3],
	[GDScriptDecomp_2185c01, "     1.1 dev (2185c01 / 2015-02-15 / Bytecode version: 3) - added `var2str`, `str2var` functions" ,3],
	[GDScriptDecomp_e82dc40, "1.0.0 release (e82dc40 / 2014-10-27 / Bytecode version: 3) - added `SETGET` token" ,3],
	[GDScriptDecomp_8cab401, "     1.0 dev (8cab401 / 2014-09-15 / Bytecode version: 2) - added `YIELD` token" ,2],
	[GDScriptDecomp_703004f, "     1.0 dev (703004f / 2014-06-16 / Bytecode version: 2) - added `hash` function" ,2],
	[GDScriptDecomp_31ce3c5, "     1.0 dev (31ce3c5 / 2014-03-13 / Bytecode version: 2) - added `funcref` function" ,2],
	[GDScriptDecomp_8c1731b, "     1.0 dev (8c1731b / 2014-02-15 / Bytecode version: 2) - added `load` function" ,2],
	[GDScriptDecomp_0b806ee, "     1.0 dev (0b806ee / 2014-02-09 / Bytecode version: 1) - initial version",1]
]
func _init(filePath:String):
	# Remember the .gdc path and decode it immediately.
	path = filePath
	parse()
	
func getNextObjLength(file:File):
	# Computes the encoded byte length of the serialized Variant that starts at
	# the current file position, following Godot's marshalls.cpp layout:
	# a 4-byte type header (low byte = TYPE_*, bit 16 = 64-bit / object-as-id
	# flag) followed by the type-specific payload.
	# The file position is restored before returning.
	var pos = file.get_position()
	var type_len = 4
	var type = decode_uint32(file.get_buffer(type_len))
	var r_length = type_len
	match type & ENCODE_MASK:
		TYPE_NIL:
			r_length += 0
		TYPE_BOOL:
			r_length += 4
		TYPE_INT:
			if type & ENCODE_FLAG_64:
				r_length += 8
			else:
				r_length += 4
		TYPE_REAL:
			if type & ENCODE_FLAG_64:
				r_length += 8
			else:
				r_length += 4
		TYPE_STRING:
			# Fixed: the length prefix is a raw little-endian uint32, not a full
			# serialized Variant, so decode_uint32 must be used here.
			var strlen = decode_uint32(file.get_buffer(4))
			r_length += 4
			r_length += strlen
			# Godot pads string payloads to a 4-byte boundary when encoding.
			if strlen % 4:
				r_length += 4 - (strlen % 4)
		TYPE_VECTOR2:
			r_length += 4 * 2
		TYPE_RECT2:
			r_length += 4 * 4
		TYPE_VECTOR3:
			r_length += 4 * 3
		TYPE_TRANSFORM2D:
			r_length += 4 * 6
		TYPE_PLANE:
			r_length += 4 * 4
		TYPE_QUAT:
			r_length += 4 * 4
		TYPE_AABB:
			r_length += 4 * 4
		TYPE_BASIS:
			r_length += 4 * 9
		TYPE_TRANSFORM:
			r_length += 4 * 12
		TYPE_COLOR:
			r_length += 4 * 4
		TYPE_NODE_PATH:
			# Fixed: all three header fields are raw uint32s (were read with
			# decode_variant). NOTE(review): this assumes the "new format"
			# NodePath layout (name count in the low 31 bits) -- confirm against
			# marshalls.cpp for the targeted bytecode versions.
			var strlen = decode_uint32(file.get_buffer(4))
			strlen &= 0x7FFFFFFF
			var namecount = strlen
			var subnamecount = decode_uint32(file.get_buffer(4))
			var flags = decode_uint32(file.get_buffer(4))
			var total = namecount + subnamecount
			r_length += 12
			for i in range(total):
				var seg_len = decode_uint32(file.get_buffer(4))
				# Segment strings are padded to 4 bytes like TYPE_STRING.
				if seg_len % 4:
					seg_len += 4 - (seg_len % 4)
				r_length += 4
				r_length += seg_len
				# Fixed: skip past the segment payload so the next 4-byte read
				# lands on the following length prefix instead of string data.
				file.seek(file.get_position() + seg_len)
		TYPE_RID:
			r_length += 0
		TYPE_OBJECT:
			if type & ENCODE_FLAG_OBJECT_AS_ID:
				r_length += 8
			else:
				var strlen = decode_uint32(file.get_buffer(4))
				r_length += 4
				r_length += strlen
				# Fixed: the class name is raw UTF-8 bytes, not a serialized Variant.
				var strval = file.get_buffer(strlen).get_string_from_utf8()
				print("TYPE_OBJECT:", strval)
				if strval != "":
					# Non-empty class name is followed by a property count.
					r_length += 4
	file.seek(pos)
	return r_length
	
func decode_uint32(buffer:PoolByteArray):
	# Decodes the first 4 bytes of `buffer` as a little-endian unsigned 32-bit
	# integer.
	var value = 0
	for i in [3, 2, 1, 0]:
		value = (value << 8) | buffer[i]
	return value
		
func decode_variant(buffer:PoolByteArray):
	# Rebuilds a Variant from its binary serialization by round-tripping the
	# raw bytes through base64, which Marshalls can decode directly.
	return Marshalls.base64_to_variant(Marshalls.raw_to_base64(buffer))
		
func decode_uint64(buffer:PoolByteArray):
	# Decodes the first 8 bytes of `buffer` as a little-endian unsigned 64-bit
	# integer (GDScript ints are 64-bit, so no overflow handling is needed).
	var value = 0
	for i in [7, 6, 5, 4, 3, 2, 1, 0]:
		value = (value << 8) | (buffer[i] & 0xff)
	return value
func decode_double(buffer:PoolByteArray):
	# Decodes the first 8 bytes of `buffer` as a little-endian IEEE 754 double.
	# Fixed: this was an unimplemented stub that silently returned null.
	# StreamPeerBuffer defaults to little-endian, matching Godot's encoding.
	var stream = StreamPeerBuffer.new()
	stream.data_array = buffer
	return stream.get_double()
		
func appendCode(code,p_code):
	# Appends `code` to `p_code`, inserting a single space separator when the
	# existing text does not already end with one.
	var separator = "" if p_code.ends_with(" ") else " "
	return p_code + separator + code

func get_constant_string(constId):
	# Renders the constant at index `constId` as GDScript source text
	# (e.g. quoted strings, Vector2(...) constructors).
	var text = constants[constId].get_construct_string()
	print(text)
	return text
func parse():
	# Decodes the compiled GDScript (.gdc) file at `self.path` into the member
	# arrays: identifiers, constants, lines and tokens.
	# Layout: "GDSC" magic, version, four section counts, then the identifier,
	# constant, line and token sections.
	# Returns true on success, false when the file cannot be opened or is not
	# a GDSC container.
	var file:File = File.new()
	var err = file.open(self.path, File.READ)
	if err != OK:
		print("ERROR: %s open failed,error code %d" % [self.path, err])
		file.close()
		return false
	var total_len = file.get_len()
	var magic:String = file.get_buffer(4).get_string_from_utf8()
	if magic != "GDSC":
		# Fixed: use %-formatting (a comma printed the raw array instead of the
		# path) and abort -- the code previously fell through and kept reading
		# from the closed file.
		print("ERROR %s is not gdc file" % [self.path])
		file.close()
		return false
	var version = decode_uint32(file.get_buffer(4))
	print("gdc version:", total_len, ":", "version:", version)
	var identifier_count = decode_uint32(file.get_buffer(4))
	var constant_count = decode_uint32(file.get_buffer(4))
	var line_count = decode_uint32(file.get_buffer(4))
	var token_count = decode_uint32(file.get_buffer(4))
	print(identifier_count)
	# Identifier section: length-prefixed strings, each byte XOR-obfuscated
	# with 0xb6.
	for i in range(identifier_count):
		var b_len = decode_uint32(file.get_buffer(4))
		var cs = []
		for j in range(b_len):
			var buff = file.get_buffer(1)
			cs.append(buff[0] ^ 0xb6)
		cs.append(0)
		var s = PoolByteArray(cs).get_string_from_utf8()
		identifiers.append(s)
	# Constant section: serialized Variants. The remaining bytes are wrapped in
	# a StreamPeerBuffer so get_var() can decode one Variant; re-encoding it
	# with var2bytes() tells us how many bytes it occupied so we can seek past.
	for i in range(constant_count):
		var position = file.get_position()
		var buffer = file.get_buffer(file.get_len() - file.get_position())
		var stream = StreamPeerBuffer.new()
		stream.put_32(buffer.size())
		stream.put_data(buffer)
		stream.seek(0)
		var obj = stream.get_var()
		var bytes = var2bytes(obj)
		file.seek(bytes.size() + position)
		constants.append(obj)
	print("line_count:", line_count, "constant_count:", constant_count, "pos:", file.get_position())
	# Line table: token index -> packed line/column value.
	for i in range(line_count):
		var token = decode_uint32(file.get_buffer(4))
		var linecol = decode_uint32(file.get_buffer(4))
		lines[token] = linecol
	# Token section: one byte per token, or four bytes (little endian) when the
	# high bit of the first byte is set.
	for i in range(token_count):
		var pos = file.get_position()
		var header = file.get_buffer(1)
		file.seek(pos)
		if header[0] & TOKEN.TOKEN_BYTE_MASK:
			tokens.append(decode_uint32(file.get_buffer(4)) & ~TOKEN.TOKEN_BYTE_MASK)
		else:
			tokens.append(file.get_buffer(1)[0])
		print("iii:", i, " pos:", pos, " d:", header[0], " head:", header[0] & TOKEN.TOKEN_BYTE_MASK, " mask:", TOKEN.TOKEN_BYTE_MASK, "v:", tokens[i])
	file.close()
	return true
	
func format(func_names,token):
	# Reconstructs GDScript source text from the decoded `tokens` array.
	# `func_names` maps built-in function ids to names; `token` supplies the
	# TK_* constants for the bytecode version being decompiled.
	# Returns the reconstructed script text.
	var line = ""
	var script_text = ""
	var indent = 0
	var prev_token = token.TK_NEWLINE
	for i in (tokens.size()):
		print("token: ", i, " : ", tokens[i] & TOKEN.TOKEN_MASK)
		match tokens[i] & TOKEN.TOKEN_MASK:
			token.TK_EMPTY:
				pass
			token.TK_CONSTANT:
				# Payload in the high bits is an index into `constants`.
				var constant = tokens[i] >> TOKEN.TOKEN_BITS
				line = line + get_constant_string(constant)
			token.TK_IDENTIFIER:
				var identifier = tokens[i] >> TOKEN.TOKEN_BITS
				line += str(identifiers[identifier])
			token.TK_SELF:
				line += "self"
			token.TK_BUILT_IN_TYPE:
				# NOTE(review): placeholder -- the actual type name should come
				# from the payload (tokens[i] >> TOKEN_BITS); confirm and map it.
				line += "type "
			token.TK_BUILT_IN_FUNC:
				line += func_names[tokens[i] >> TOKEN.TOKEN_BITS]
			token.TK_OP_IN:
				line = appendCode("in ",line)
			token.TK_OP_EQUAL:
				line = appendCode("== ",line)
			token.TK_OP_NOT_EQUAL:
				line = appendCode("!= ",line)
			token.TK_OP_LESS:
				line = appendCode("< ",line)
			token.TK_OP_LESS_EQUAL:
				line = appendCode("<= ",line)
			token.TK_OP_GREATER:
				line = appendCode("> ",line)
			token.TK_OP_GREATER_EQUAL:
				line = appendCode(">= ",line)
			token.TK_OP_AND:
				line = appendCode("and ",line)
			token.TK_OP_OR:
				line = appendCode("or ",line)
			token.TK_OP_NOT:
				line = appendCode("not ",line)
			token.TK_OP_ADD:
				line = appendCode("+ ",line)
			token.TK_OP_SUB:
				line = appendCode("- ",line)
			token.TK_OP_MUL:
				line = appendCode("* ",line)
			token.TK_OP_DIV:
				line = appendCode("/ ",line)
			token.TK_OP_MOD:
				line = appendCode("% ",line)
			token.TK_OP_SHIFT_LEFT:
				line = appendCode("<< ",line)
			token.TK_OP_SHIFT_RIGHT:
				line = appendCode(">> ",line)
			token.TK_OP_ASSIGN:
				line = appendCode("= ",line)
			token.TK_OP_ASSIGN_ADD:
				line = appendCode("+= ",line)
			token.TK_OP_ASSIGN_SUB:
				line = appendCode("-= ",line)
			token.TK_OP_ASSIGN_MUL:
				line = appendCode("*= ",line)
			token.TK_OP_ASSIGN_DIV:
				line = appendCode("/= ",line)
			token.TK_OP_ASSIGN_MOD:
				line = appendCode("%= ",line)
			token.TK_OP_ASSIGN_SHIFT_LEFT:
				line = appendCode("<<= ",line)
			token.TK_OP_ASSIGN_SHIFT_RIGHT:
				line = appendCode(">>= ",line)
			token.TK_OP_ASSIGN_BIT_AND:
				line = appendCode("&= ",line)
			token.TK_OP_ASSIGN_BIT_OR:
				line = appendCode("|= ",line)
			token.TK_OP_ASSIGN_BIT_XOR:
				line = appendCode("^= ",line)
			token.TK_OP_BIT_AND:
				line = appendCode("& ",line)
			token.TK_OP_BIT_OR:
				line = appendCode("| ",line)
			token.TK_OP_BIT_XOR:
				line = appendCode("^ ",line)
			token.TK_OP_BIT_INVERT:
				# Fixed: bit-invert is "~" in GDScript; this emitted "!".
				line = appendCode("~ ",line)
			token.TK_CF_IF:
				# Inline `x if cond else y` needs a separating space; a leading
				# `if` at the start of a line does not.
				if prev_token != token.TK_NEWLINE:
					line = appendCode("if ",line)
				else:
					line += "if "
			token.TK_CF_ELIF:
				line += "elif "
			token.TK_CF_ELSE:
				if prev_token != token.TK_NEWLINE:
					line = appendCode("else ",line)
				else:
					line += "else "
			token.TK_CF_FOR:
				line += "for "
			token.TK_CF_DO:
				line += "do "
			token.TK_CF_WHILE:
				line += "while "
			token.TK_CF_SWITCH:
				# Fixed: was the typo "swith ".
				line += "switch "
			token.TK_CF_CASE:
				line += "case "
			token.TK_CF_BREAK:
				line += "break"
			token.TK_CF_CONTINUE:
				line += "continue"
			token.TK_CF_PASS:
				line += "pass"
			token.TK_CF_RETURN:
				line += "return "
			token.TK_PR_FUNCTION:
				line += "func "
			token.TK_PR_CLASS:
				line += "class "
			token.TK_PR_EXTENDS:
				line += "extends "
			token.TK_PR_TOOL:
				line += "tool "
			token.TK_PR_STATIC:
				line += "static "
			token.TK_PR_EXPORT:
				line += "export "
			token.TK_PR_CONST:
				line += "const "
			token.TK_PR_VAR:
				line += "var "
			token.TK_PR_PRELOAD:
				line += "preload"
			token.TK_PR_ASSERT:
				line += "assert"
			token.TK_BRACKET_OPEN:
				line += "["
			token.TK_BRACKET_CLOSE:
				line += "]"
			token.TK_CURLY_BRACKET_OPEN:
				line += "{"
			token.TK_CURLY_BRACKET_CLOSE:
				line += "}"
			token.TK_PARENTHESIS_OPEN:
				line += "("
			token.TK_PARENTHESIS_CLOSE:
				line += ")"
			token.TK_COMMA:
				line += ", "
			token.TK_SEMICOLON:
				line += ";"
			token.TK_PERIOD:
				line += "."
			token.TK_QUESTION_MARK:
				line += "?"
			token.TK_COLON:
				line += ":"
			token.TK_NEWLINE:
				# Flush the accumulated line; the payload carries the indent
				# depth of the NEXT line.
				for j in range(indent):
					script_text += "\t"
				script_text += line + "\n"
				line = ""
				indent = tokens[i] >> TOKEN.TOKEN_BITS
		# Fixed: prev_token was never updated inside the loop, so the
		# TK_CF_IF / TK_CF_ELSE spacing checks above could never trigger.
		prev_token = tokens[i] & TOKEN.TOKEN_MASK

	if !line.empty():
		for j in range(indent):
			script_text += "\t"
		script_text += line + "\n"

	# Fixed: the reconstructed text was built and then discarded.
	return script_text
