import json
import new
import copy
from types import *

import __main__ as m

__doc__ = """TurboJSON module

TurboJSON will store any object (with restrictions for modules for now) and that object  
can be restored in almost any environment, since it is reconstructed from scratch. 

If the function which is going to be stored (be it alone or part of another object) requires 
functions/objects which are imported (but not modules), they will not be stored. If so, provide them with  
global_attributes optional parameter (should be a dict). The imported modules used will be reimported automatically. 

Protocol implementation:

1 - will use pure JSON 

The main restriction is that keys for dicts can only be strings or unicode strings

2 - will use pickle, when needed

The restriction for keys for dictionaries is gone (but the key must be a picklable object AND it should be 
accessible from the global scope)

"""


class CannotBePackedException(TypeError):
	"""Raised when an object cannot be serialized under the active protocol."""
	pass

class CannotBeUnpackedException(TypeError):
	"""Raised when a stored block cannot be reconstructed into an object."""
	pass

class CannotBePacked(TypeError):
	"""Duplicate of CannotBePackedException; kept for backwards compatibility."""
	pass

class TurboJsonWarning(RuntimeWarning):
	"""Warning category for non-fatal TurboJSON issues."""
	pass

class UnpackingError(RuntimeError):
	"""Raised on a bad header, protocol mismatch, or missing required globals."""
	pass


class TurboJSON(object):
	
	class Foo(object):
		def __init__(self, x):
			pass
		
	unpackable = [GetSetDescriptorType, DictProxyType]
	unpackable_attribs = (
						"wrapper_descriptor",
						"member_descriptor",
						)
	
	def __init__(self):
		self.is_module = False
		d = {}
		exec "pass" in d
		self._b_copy = d["__builtins__"] 
		
		self.req_kwargs = {}
		self.req_args = ()
		
	
	def _cleanup(self, final=False):
		self.module_lookup = {"__main__": m}
		self.module_stack = []
		self.glob_att = {}
		self.protocol = 1
		if final:
			self.req_kwargs = {}
			self.req_args = ()
			
	
	def required_keywords(self, kw):
		self.req_kwargs = kw
		
	
	def set_required(self, args):
		self.req_args = args
		
		
	def check_globals(self, required):
		for r in required:
			if r not in self.req_kwargs:
				raise UnpackingError("Cannot unpack object! %s required global missing!"%r)
		
		
	def dumps(self, obj, protocol, *args, **kwargs):
		self._cleanup()
		
		self.protocol = protocol
		if self.protocol > 1:
			self.pickle = __import__("pickle")
		
		build = []
		build.append("TJ")
		build.append(self.protocol)
		
		if self.protocol > 2:
			build.append(self.req_args)
		
		build.append(self.parse_item(obj, 0))
		
		data = json.dumps(build, *args, **kwargs)
		self._cleanup(True)
		return data
	
	
	def loads(self, obj, protocol, global_attributes, *args, **kwargs):
		self._cleanup()
		datablock = json.loads(obj, *args, **kwargs)
		self.protocol = protocol
		
		if datablock[0] != u"TJ":
			raise UnpackingError("Wrong Header")
		elif datablock[1] > self.protocol:
			raise UnpackingError("Protocol specified is lower than the string requires")
		
		if protocol > 1:
			self.pickle = __import__("pickle")
		
		self.glob_att = global_attributes

		if protocol > 2:
			self.check_globals(datablock[2])
			
		
		obj = self.parse_object(datablock[-1])
		
		self._cleanup(True)
		return obj
	
	
	def parse_object(self, block, is_str=False, cls=None):
		object = None
		
		if is_str:
			if self.protocol > 1:
				try:
					object = self.pickle.loads(str(block))
				except Exception, e:
					return block
				else:
					return object
			else:
				return block
				
		
		interface, content = block
		
		# base json
		if interface == "json":
			object = content
		# base pickle
		elif interface == "pickle":
			object = self.pickle.loads(content)
		# base pritimitives
		elif interface == "str":
			object = str(content).decode("base64")
		elif interface == "long":
			object = long(content)
		# containers
		else:
			if interface == "list":
				object = []
				for obj in content:
					object.append(self.parse_object(obj))
			elif interface == "tuple":
				object = []
				for obj in content:
					object.append(self.parse_object(obj))
				object = tuple(object)
			elif interface == "set":
				object = []
				for obj in content:
					object.append(self.parse_object(obj))
				object = set(object)
			elif interface == "frozenset":
				object = []
				for obj in content:
					object.append(self.parse_object(obj))
				object = frozenset(object)
			elif interface == "dict":
				object = {}
				for obj in content:
					idata = self.parse_object(obj[0], is_str=True)
					ddata = self.parse_object(obj[1], cls=cls)
					object[idata] = ddata
			# or some high level shit
			else:
				# code object
				if interface == "code":
					argcount = self.parse_object(content[0])
					nlocals = self.parse_object(content[1])
					stacksize = self.parse_object(content[2])
					flags = self.parse_object(content[3])
					code = self.parse_object(content[4])
					consts = []
					for const in content[5]:
						consts.append(self.parse_object(const))
					consts = tuple(consts)
					names = []
					for name in content[6]:
						names.append(self.parse_object(name))
					names = tuple(names)
					vnames = []
					for vname in content[7]:
						vnames.append(self.parse_object(vname))
					vnames = tuple(vnames)
					filename = self.parse_object(content[8])
					name = self.parse_object(content[9])
					flineno = self.parse_object(content[10])
					lnotab = self.parse_object(content[11])
					
					object = new.code(argcount, nlocals, stacksize, flags, code, consts, names, vnames,
									filename, name, flineno, lnotab)
				# function
				elif interface == "func":
					code = self.parse_object(content[0])
					module = self.parse_object(content[1])
					globals = {}
					if len(self.module_stack) > 0:
						globals = self.module_stack[-1].__dict__
					elif module in self.module_lookup:
						globals = self.module_lookup[module].__dict__
					else:
						try:
							mod = __import__(module)
						except ImportError:
							globals = self._b_copy
						else:
							globals = mod.__dict__
					global_imports = content[2]
					
					for mod in global_imports:
						mod = __import__(self.parse_object(mod))
						if mod not in globals.values():
							globals[mod.__name__] = mod
							
					for ga in self.glob_att:
						gat = self.glob_att[ga]
						if hasattr(gat, "__name__") and (gat.__name__ not in globals):
							globals[gat.__name__] = gat
					
					print self.protocol
					if self.protocol > 2:
						for attr in self.req_kwargs:
							globals[attr] = self.req_kwargs[attr]
					
						
					
					name = self.parse_object(content[3])
					defaults = self.parse_object(content[4])
					closure = self.parse_object(content[5])
					object = new.function(code, globals, name, defaults, closure)
					object.__dict__ = self.parse_object(content[6])
				# method
				elif interface == "meth":
					func = self.parse_object(content[0])
					klass = None
					if cls is not None:
						klass = cls
					else:
						module = self.parse_object(content[1])
						cls_name = self.parse_object(content[2])
						if module in self.module_lookup:
							klass = self.module_lookup[module].__dict__[cls_name]
						else:
							try:
								mod = __import__(module)
								klass = getattr(mod, cls_name)
							except ImportError, AttributeError:
								if cls_name in self._b_copy:
									klass = getattr(self._b_copy, cls_name)
								else:
									raise CannotBeUnpackedException("Cannot find %s.%s class for unbound/bound method"%(module, cls_name))
					object = new.instancemethod(func, None, klass)
				# old style class
				elif interface == "class":
					cls_name = self.parse_object(content[0])
					bases = tuple(self.parse_object(content[1]))
					static_dict = self.parse_object(content[2])
					klass = new.classobj(cls_name, bases, static_dict)
					methods = self.parse_object(content[3], cls=klass)
					for method in methods:
						setattr(klass, method, methods[method])
					object = klass
				# new style class
				elif interface == "new_class":
					cls_name = self.parse_object(content[0])
					bases = tuple(self.parse_object(content[1]))
					static_dict = self.parse_object(content[2])
					klass = type.__new__(type, cls_name, bases, static_dict)
					methods = self.parse_object(content[3], cls=klass)
					for method in methods:
						setattr(klass, method, methods[method])
					object = klass
				# instance, old style
				elif interface == "inst":
					klass = self.parse_object(content[0])
					dict = self.parse_object(content[1])
					
					object = new.instance(klass, dict)
				# instance, new style
				elif interface == "new_inst":
					klass = self.parse_object(content[0])
					dict = self.parse_object(content[1])
					
					object = klass.__new__(klass)
					for obj in dict:
						object.__dict__[obj] = dict[obj]
				# builtin object
				elif interface == "builtin":
					bname = self.parse_object(content)
					object = self._b_copy[bname]
				
				else:
					raise CannotBeUnpackedException("%s is not a valid TurboJSON identifier"%interface)
		return object
	
	
	def parse_item(self, obj, level, need_str=False, cls=None):
		build = None
		fix = ""
		
		if need_str:
			if self.protocol > 1:
				# pure json not needed
				try:
					string = self.pickle.dumps(obj)
				except:
					raise CannotBePackedException("%s cannot be converted to a valid string"%obj)
				else:
					return string
			else:
				# pure json
				# will fail on non strings
				if (not isinstance(obj, str)) and (not isinstance(obj, unicode)):
					raise CannotBePackedException("%s is not a valid json dict handler. Try raising protocol to use pickle instead."%obj)
				return obj
			
		
		# is object json?
		if obj is None:
			build = ["json", obj]
		elif obj is True:
			build = ["json", obj]
		elif obj is False:
			build = ["json", obj]
		elif isinstance(obj, str) and type(obj.__class__) == TypeType:
			build = ["str", obj.encode("base64")]
			fix = obj
		elif isinstance(obj, unicode) and type(obj.__class__) == TypeType:
			build = ["json", obj]
		elif isinstance(obj, int) and type(obj.__class__) == TypeType:
			build = ["json", obj]
		elif isinstance(obj, long) and type(obj.__class__) == TypeType:
			build = ["long", obj]
		elif isinstance(obj, float) and type(obj.__class__) == TypeType:
			build = ["json", obj]
		# or maybe its base container?
		else:
			if isinstance(obj, list) and type(obj.__class__) == TypeType:
				data = []
				for item in obj:
					data.append(self.parse_item(item, level+1))
				build = ["list", data]
			elif isinstance(obj, tuple) and type(obj.__class__) == TypeType:
				data = []
				for item in obj:
					data.append(self.parse_item(item, level+1))
				build = ["tuple", data]
			elif isinstance(obj, set) and type(obj.__class__) == TypeType:
				data = []
				for item in obj:
					data.append(self.parse_item(item, level+1))
				build = ["set", data]
			elif isinstance(obj, frozenset) and type(obj.__class__) == TypeType:
				data = []
				for item in obj:
					data.append(self.parse_item(item, level+1))
				build = ["frozenset", data]
			elif isinstance(obj, dict) and type(obj.__class__) == TypeType:
				if len(obj) > 0:
					new_dict = []
					for item in obj:
						idata = self.parse_item(item, level+1, True, cls=cls)
						if cls:
							try:
								data = self.parse_item(obj[item], level+1, cls=cls)
							except CannotBePackedException, PickleError:
								continue
						else:
							data = self.parse_item(obj[item], level+1, cls=cls)
						new_dict.append([idata, data])
					build = ["dict", new_dict]
				else:
					build = ["json", {}]
			# nah, its something weirder, so lets try all
			else:
				# code object
				if type(obj) == CodeType:
					code = []
					code.append(self.parse_item(obj.co_argcount, level))
					code.append(self.parse_item(obj.co_nlocals, level))
					code.append(self.parse_item(obj.co_stacksize, level))
					code.append(self.parse_item(obj.co_flags, level))
					code.append(self.parse_item(obj.co_code, level))
					consts = []
					for const in obj.co_consts:
						consts.append(self.parse_item(const, level+1))
					code.append(consts)
					names = []
					for name in obj.co_names:
						names.append(self.parse_item(name, level+1))
					code.append(names)
					vnames = []
					for vname in obj.co_varnames:
						vnames.append(self.parse_item(vname, level+1))
					code.append(vnames)
					code.append(self.parse_item(obj.co_filename, level))
					code.append(self.parse_item(obj.co_name, level))
					code.append(self.parse_item(obj.co_firstlineno, level))
					code.append(self.parse_item(obj.co_lnotab, level))
					build = ["code", code]
				# function
				elif type(obj) == FunctionType:
					func = []
					func.append(self.parse_item(obj.__code__, level))
					func.append(self.parse_item(obj.__module__, level))
					
					global_refs = []
					for globb in obj.func_globals:
						glob = obj.func_globals[globb]
						if type(glob) == ModuleType:
							global_refs.append(self.parse_item(glob.__name__, level+1))
					
					func.append(global_refs)
					
					func.append(self.parse_item(obj.func_name, level))
					func.append(self.parse_item(obj.func_defaults, level))
					func.append(self.parse_item(obj.func_closure, level))
					func.append(self.parse_item(obj.__dict__, level+1))
					build = ["func", func]
				# method
				elif type(obj) == MethodType:
					meth = []
					meth.append(self.parse_item(obj.im_func, level))
					if cls is None:
						meth.append(self.parse_item(obj.im_class.__module__, level))
						meth.append(self.parse_item(obj.im_class.__name__, level))
					build = ["meth", meth]
				# class object (old style/new style)
				elif (type(obj) == ClassType) or (type(obj) == TypeType):
					if obj in self._b_copy.itervalues():
						# it is a builtin object
						bname = ""
						for key in self._b_copy:
							if self._b_copy[key] is obj:
								bname = key
						build = ["builtin", self.parse_item(bname, level)]
					else:
						klass = []
						klass.append(self.parse_item(obj.__name__, level))
						
						klass.append(self.parse_item(obj.__bases__, level))
						
						static = {}
						methods = {}
						
						for item in obj.__dict__:
							if type(obj.__dict__[item]) != FunctionType:
								static[item] = obj.__dict__[item]
							else:
								methods[item] = obj.__dict__[item]
						
						klass.append(self.parse_item(static, level+1, cls=True))
						klass.append(self.parse_item(methods, level+1, cls=True))
						if type(obj) == ClassType:
							build = ["class", klass]
						else:
							build = ["new_class", klass]
				elif (type(obj) == InstanceType) or (type(obj) is obj.__class__):
					if (type(obj) in self.unpackable) or (hasattr(obj, "__name__") and obj.__name__ in self.unpackable_attribs):
						raise CannotBePackedException("internal")
					inst = []
					inst.append(self.parse_item(obj.__class__, level+1))
					inst.append(self.parse_item(obj.__dict__, level+1))
					if type(obj) == InstanceType:
						build = ["inst", inst]
					else:
						build = ["new_inst", inst]
				else:
					# undefined, whatever
					if self.protocol < 2:
						raise CannotBePackedException("%s cannot be packed in pure JSON. Try raising protocol"%obj)
					else:
						build = ["pickle", self.pickle.dumps(obj)]
		return build
			


def dumps_proxy(dumpsf):
	"""Decorator for dumps(): at protocol > 2 return a callable proxy that
	first records the required global names, then performs the dump.

	BUG FIX: the proxy previously called dumpsf(obj, protocol, args, kwargs),
	passing the captured tuple and dict as two positional arguments instead
	of unpacking them with * and **.
	"""
	def wrapper(obj, protocol=1, *args, **kwargs):
		if protocol > 2:
			class DumpsProxy(object):
				def __call__(self, *req_args):
					turbojson.set_required(req_args)
					return dumpsf(obj, protocol, *args, **kwargs)
			return DumpsProxy()
		else:
			return dumpsf(obj, protocol, *args, **kwargs)
	return wrapper
			

def dump_proxy(dumpf):
	"""Decorator for dump(): at protocol > 2 return a callable proxy that
	first records the required global names, then performs the dump.

	BUG FIXES: the proxy previously (1) dropped the *file* argument when
	calling through, (2) passed args/kwargs positionally instead of
	unpacking them, and (3) returned the DumpProxy class itself instead of
	an instance (so the result was not callable as intended).
	"""
	def wrapper(obj, file, protocol=1, *args, **kwargs):
		if protocol > 2:
			class DumpProxy(object):
				def __call__(self, *req_args):
					turbojson.set_required(req_args)
					return dumpf(obj, file, protocol, *args, **kwargs)
			return DumpProxy()
		else:
			return dumpf(obj, file, protocol, *args, **kwargs)
	return wrapper
			
	

def loads_proxy(loadsf):
	"""Decorator for loads(): at protocol > 2 return a callable proxy that
	first records the caller-supplied required globals, then loads.

	BUG FIX: the mutable default argument glob={} was shared across calls;
	a None sentinel is used instead (behaviour is otherwise unchanged).
	"""
	def wrapper(stream, protocol=1, glob=None, *args, **kwargs):
		if glob is None:
			glob = {}
		if protocol > 2:
			class LoadsProxy(object):
				def __call__(self, **req_kw):
					turbojson.required_keywords(req_kw)
					return loadsf(stream, protocol, glob, *args, **kwargs)
			return LoadsProxy()
		else:
			return loadsf(stream, protocol, glob, *args, **kwargs)
	return wrapper
			

def load_proxy(loadf):
	"""Decorator for load(): at protocol > 2 return a callable proxy that
	first records the caller-supplied required globals, then loads.

	BUG FIX: the mutable default argument glob={} was shared across calls;
	a None sentinel is used instead (behaviour is otherwise unchanged).
	"""
	def wrapper(file, protocol=1, glob=None, *args, **kwargs):
		if glob is None:
			glob = {}
		if protocol > 2:
			class LoadProxy(object):
				def __call__(self, **req_kw):
					turbojson.required_keywords(req_kw)
					return loadf(file, protocol, glob, *args, **kwargs)
			return LoadProxy()
		else:
			return loadf(file, protocol, glob, *args, **kwargs)
	return wrapper
			

			
	

@dumps_proxy
def dumps(obj, protocol=1, *args, **kwargs):
	"""Serialize *obj* into a TurboJSON string.

	obj is the object to store and protocol is the TurboJSON protocol
	number (int).  Any extra positional or keyword arguments are forwarded
	to json.dumps.
	"""
	serialized = turbojson.dumps(obj, protocol, *args, **kwargs)
	return serialized
	
@loads_proxy
def loads(stream, protocol=1, global_attributes={}, *args, **kwargs):
	"""Restore an object from a TurboJSON string.

	stream is the string produced by dumps(), protocol is the TurboJSON
	protocol number (int) and global_attributes is a dict of globals to
	assign to any functions restored as part of the object.  Any extra
	positional or keyword arguments are forwarded to json.loads.
	"""
	restored = turbojson.loads(stream, protocol, global_attributes, *args, **kwargs)
	return restored


@dump_proxy
def dump(obj, file, protocol=1, *args, **kwargs):
	"""Serialize *obj* and write it to the file-like object *file*.

	The file is closed after writing.  protocol is the TurboJSON protocol
	number (int); any extra positional or keyword arguments are forwarded
	to json.dumps.
	"""
	payload = turbojson.dumps(obj, protocol, *args, **kwargs)
	file.write(payload)
	file.close()

@load_proxy
def load(file, protocol=1, global_attributes={}, *args, **kwargs):
	"""Restore an object from the file-like object *file*.

	The file is read fully and closed.  protocol is the TurboJSON protocol
	number (int) and global_attributes is a dict of globals to assign to
	any functions restored as part of the object.  Any extra positional or
	keyword arguments are forwarded to json.loads.
	"""
	contents = file.read()
	file.close()
	return turbojson.loads(contents, protocol, global_attributes, *args, **kwargs)


turbojson = TurboJSON()

if __name__ == "__main__":
	# smoke test: round-trip a class whose method uses an imported module
	import os
	
	class B(object):
		def a(self):
			print os.path
			print "test"
			
	
	# serialize the class, then delete os to prove the restored copy
	# re-imports it on its own
	f = dumps(B)
	del os
	print f
	X = loads(f)
	
	a = X()
	
	# round-trip an instance of the restored class as well
	d = dumps(a)
	print d
	
	k = loads(d)
	k.a()
	