#!/usr/bin/python

# For debugging
import pdb

# General Imports
import string
import os
import hashlib
import types
import glob
import re
import platform
import sys
import argparse

# Concurrency Imports
import Queue
import multiprocessing
import threading

class NoDescriptorFound(Exception):
	"""Raised when the requested descriptor does not exist."""

class DescriptorSyntaxException(Exception):
	"""Raised when a descriptor does not meet specifications."""

class CircularChildDescriptorException(Exception):
	"""Raised when attempting to load the same descriptor twice."""

class AutoCleanComplete(Exception):
	"""Raised when auto clean has finished in SQuad.EvalSources."""

class NothingToDo(Exception):
	"""Raised when there is nothing to do."""

class SQuadStdLib():
	"""Convenience helpers made available to build rules.

	NOTE(review): several methods rely on self.Eval, self._eval, self.env
	and self._args, which are not defined here -- this class appears to be
	a mixin for the environment class; confirm before standalone use.
	"""

	def Glob(self, template, **kwargs):
		"""Expand template with kwargs and the environment, then glob it."""
		glob_str = self.Eval(template, **kwargs)
		glob_str = glob_str.format(**self._eval())
		return glob.glob(glob_str)

	def JoinStrList(self, list, delim = " "):
		"""Join a list of strings with delim.

		Note: the parameter name `list` shadows the builtin; it is kept
		for keyword-call compatibility with existing callers.
		"""
		# str.join handles the empty and single-element cases directly
		return delim.join(list)

	def JoinDicts(self, *dicts):
		"""Merge dictionaries left to right; later values win."""
		resulting_dict = {}
		for d in dicts:
			# dict.update replaces the Python 2 only items()-concatenation
			resulting_dict.update(d)
		return resulting_dict

	def Brief(self, prefix, product):
		"""Print a one-line brief for product when briefs are enabled."""
		if self._args.print_briefs:
			print("%s %s" % (prefix, os.path.basename(product)))

	@property
	def ThisFile(self):
		# Sentinel constant (counterpart of ExecFile)
		return 1

	@property
	def ExecFile(self):
		# Sentinel constant (counterpart of ThisFile)
		return 2

	@property
	def CSrcToObjTmpl(self):
		"""Default C source to object compile command template."""
		template = "{CC} {CPPFLAGS} {CCFLAGS} -c {SRC} -o {OBJ}"
		return self.Eval(template, **self.env)

	def StemReplace(self, sources, pattern, repl, signature = ".*"):
		"""Performs a stem replacement similar to Make.

		pattern and repl must each contain exactly one '%' stem.  Every
		glob-expanded source matching both signature and pattern yields a
		{product: source} entry in the returned dictionary.
		"""

		# Always evaluate the user string parameters
		if type(pattern) != str or type(repl) != str or type(signature) != str:
			# TODO: Be more helpful
			raise Exception

		# Fill in any formatting from the parameters
		pattern = pattern.format(**self._eval())

		# Sanitize sources
		norm_sources = []
		if type(sources) != list:
			sources = [sources]
		for src in sources:
			# Fixed: was `type(Src)` (undefined name) -- a NameError on
			# every string source
			if type(src) == str:
				# Fill in any formatting from the parameter
				src = src.format(**self._eval())
				norm_sources = norm_sources + glob.glob(src)
		sources = norm_sources

		# Start out with an empty results dictionary
		results = {}

		# Prepare the source of interest regular expression
		sig_pat = re.compile(signature)

		# Make sure pattern and repl each have a single '%' (stem)
		if pattern.count("%") != 1 or repl.count("%") != 1:
			raise Exception

		# Prepare the stem pattern regular expression.  re.escape stopped
		# escaping '%' in Python 3.7, so handle both escaped and bare forms.
		src_pat = re.compile(
			"^" + re.escape(pattern).replace("\\%", "(.*)").replace("%", "(.*)") + "$")

		# Iterate through all the sources
		for src in sources:
			# Are we a source of interest?
			if sig_pat.match(src):
				# Do we match the stem pattern?
				obj = src_pat.match(src)
				if obj:
					outfile = repl.replace("%", obj.group(1))
					results[outfile] = src

		return results

	def Cmd(self, template, **kwargs):
		"""Expand template into a shell command and execute it.

		Raises Exception when the command exits non-zero.
		"""
		cmd_str = self.Eval(template, **kwargs)
		cmd_str = cmd_str.format(**self._eval())
		if self._args.print_commands:
			print(cmd_str)

		# TODO: Implement some sort of expectation checker
		# TODO: Handle dry runs
		# TODO: Make this more elegant (import subprocess)
		if os.system(cmd_str) != 0:
			raise Exception("Command returned non-zero")

	def UnconditionalRemove(self, path):
		"""Remove path, silently ignoring missing-file errors."""
		if type(path) is str:
			try:
				os.unlink(os.path.abspath(path))
			except OSError:
				pass

	def RemoveProducts(self, sources):
		"""Remove every product (key) of sources.

		Keys may be a string or a list/tuple of strings (tuples appear
		when multi-product entries are normalized to hashable keys).
		"""
		for prod_entry in sources.keys():
			if type(prod_entry) is str:
				self.UnconditionalRemove(prod_entry)
			elif type(prod_entry) in (list, tuple):
				for prod_item in prod_entry:
					self.UnconditionalRemove(prod_item)

class SQuadEnvironment():
	"""SQuad Environment Namespace.

	Presents a stack of ContextDict layers (defaults, globals, descriptor,
	user, ...) as one dictionary-like namespace.  Writes go to the layer
	selected by env_idx; reads see the merged view with {ref} values
	lazily evaluated in load order.

	NOTE(review): several helpers call self._eval(), which is not defined
	in this class -- presumably provided elsewhere (mixin); confirm.
	"""

	def __init__(self, env_stack = None, env_idx = 0, args = None):
		"""env_stack: list of ContextDict layers, lowest priority first.
		env_idx: index of the writable layer, or None for read-only.
		args: processed command line arguments object.
		"""

		# Ensure that the environment stack parameter is a list
		if type(env_stack) is not list:
			print("ERROR: Environment stack parameter must be a list")
			pdb.set_trace()
			raise Exception

		# Ensure the environment index is an integer or None
		if env_idx is not None and type(env_idx) is not int:
			print("ERROR: Environment index must be an int")
			pdb.set_trace()
			raise Exception

		self._idx = env_idx

		# Fixed: the args parameter was previously dropped on the floor,
		# leaving self._args unset for check_source_dict()
		self._args = args

		# Empty env_stack means we add one empty layer.  Fixed to be a
		# ContextDict: _update_cache needs .loadorder, which a plain
		# dict does not have.
		if len(env_stack) == 0:
			self._stack = [ContextDict()]
		else:
			self._stack = env_stack

		self._order = []
		self._update_cache()

	def _update_cache(self):
		"""Rebuild the merged value cache and evaluation order."""
		# Merge our env stack into a single dictionary (later layers win)
		self._cache = {}
		for env in self._stack:
			self._cache.update(env.items())

		# Recompute the ordering for partial lazy evaluation.  Fixed:
		# rebuilding from scratch drops keys that were deleted from every
		# layer (previously stale keys lingered and broke _eval_cache).
		self._order = []
		for i in range(len(self._stack) - 1, -1, -1):
			for item in self._stack[i].loadorder:
				if item not in self._order:
					self._order.append(item)

	def _eval_cache(self):
		"""Evaluate the _cache dictionary and return the result as a
		ContextDict with cross references expanded in load order.
		"""
		try:
			partial = ContextDict()
			for item in self._order:
				partial[item] = self.Eval(self._cache[item], **partial)
			return partial
		except Exception:
			# NOTE(review): drops into the debugger and implicitly
			# returns None; callers do not handle None.  Kept as-is to
			# preserve the existing debugging workflow.
			pdb.set_trace()

	# TODO: This should only be available for User and Global (maybe Default)
	def new_context(self, name):
		"""Create a new empty context in the top layer."""
		self._stack[-1].new_context(name)

	def set_context(self, name, ctx):
		"""Set or replace an existing context in the top layer."""
		self._stack[-1].set_context(name, ctx)

	def name_default(self, name):
		"""Alias the top layer's default context under name."""
		self._stack[-1].name_default(name)

	def del_context(self, name):
		"""Delete an existing context from the top layer."""
		self._stack[-1].del_context(name)

	def get_context(self, name):
		"""Fetch the reference to an existing context in the top layer."""
		return self._stack[-1].get_context(name)

	def contexts_list(self):
		"""Return the context names known to the top layer."""
		return self._stack[-1].contexts_list()

	def switch_context(self, name):
		"""Switch the top layer's current context to the named context."""
		self._stack[-1].switch_context(name)
		self._update_cache()

	def current_context(self):
		"""Return reference to the top layer's current context."""
		return self._stack[-1].current_context()

	def __setitem__(self, key, value):
		"""Set key in the writable layer.

		Writes are silently ignored when the namespace is read-only
		(env_idx is None).
		"""
		if self._idx is not None:
			self._stack[self._idx][key] = value
		self._update_cache()

	def __delitem__(self, key):
		"""Remove key from the writable layer only."""
		del self._stack[self._idx][key]
		self._update_cache()

	def __getitem__(self, key):
		"""Item access returns the fully evaluated value for key."""
		self._update_cache()
		return self._eval_cache()[key]

	def Eval(self, template, **kwargs):
		"""Partially format template: known refs are substituted, unknown
		named refs are forwarded verbatim as "{ref}" placeholders.
		"""

		formatter = string.Formatter()
		dict_copy = kwargs.copy()

		# For each reference in the format string
		for literal, field, spec, conv in formatter.parse(template):
			# Literal-only segments carry no field reference
			if field is None:
				continue
			# Numeric references are positional; leave them alone
			if field.isdigit():
				continue
			# Forward unknown named references unchanged
			if field not in dict_copy and type(field) == str:
				dict_copy[field] = "{" + field + "}"

		return template.format(**dict_copy)

	def Process(self, sources):
		"""Process a sources dictionary into its out-of-date subset.

		Returns {} when sources is not a dict or nothing needs rebuilding.
		"""

		if type(sources) is not dict:
			print("WARNING: Sources must be a dict.")
			return {}

		# Sanitize sources dictionary
		sources = self.sanitize_target_sources(sources)

		# TODO: Start performing actions here

		# If user's selected auto clean, we ignore all uptodates
		#if self._args.auto_clean:
		#	self.RemoveProducts(sources)
		#	raise AutoCleanComplete

		if sources is not None:
			# Compare products to sources for out of date entries
			sources = self.check_source_dict(sources)
		else:
			sources = {}

		return sources

	def check_source_dict(self, src_dict):
		"""Perform update checks with the sources dictionary.

		Returns the subset of src_dict whose products are missing or
		older than their sources; product directories are created as a
		side effect.  Multi-product keys are returned as tuples, since
		lists are not hashable (the previous list keys always raised
		TypeError).
		"""

		def is_out_of_date(prod, src):
			"""True when src is newer than prod, or checks are disabled.

			Fixed: previously declared with a single parameter but called
			with two, raising TypeError on every invocation.
			"""
			if not self._args.ignore_uptodate:
				return int(os.path.getmtime(src)) > int(os.path.getmtime(prod))
			return True

		def ensure_prod_dir(prod):
			"""Ensure that the directory path exists for prod."""
			full_prod_path = os.path.dirname(os.path.abspath(prod))
			if not os.path.exists(full_prod_path):
				os.makedirs(full_prod_path)

		def check_one_to_one(results, prod, src):
			"""If src has changed or is newer than prod, add to results."""
			if not os.path.exists(prod) or is_out_of_date(prod, src):
				results[prod] = src
				ensure_prod_dir(prod)

		def check_one_to_many(results, prod, src_list):
			"""Add prod if any of its sources is newer than it."""
			out_of_date = False
			for src in src_list:
				if not os.path.exists(prod) or is_out_of_date(prod, src):
					out_of_date = True
					break

			if out_of_date:
				results[prod] = src_list
				ensure_prod_dir(prod)

		def check_many_to_one(results, prod_list, src):
			"""Add the product group if any product is older than src."""
			out_of_date = False
			for prod in prod_list:
				if not os.path.exists(prod) or is_out_of_date(prod, src):
					out_of_date = True
				ensure_prod_dir(prod)

			if out_of_date:
				# Tuple key keeps the entry hashable
				results[tuple(prod_list)] = src

		def check_many_to_many(results, prod_list, src_list):
			"""Add the product group if any product trails any source."""
			out_of_date = False
			for prod in prod_list:
				if not out_of_date:
					for src in src_list:
						if not os.path.exists(prod) or is_out_of_date(prod, src):
							out_of_date = True
							break
				# Fixed: directory creation moved inside the loop so every
				# product gets its path (previously only the last one did,
				# matching check_many_to_one's behavior now)
				ensure_prod_dir(prod)

			if out_of_date:
				results[tuple(prod_list)] = src_list

		results = {}

		for prod, src in src_dict.items():
			if type(prod) == str:
				if type(src) == str:
					check_one_to_one(results, prod, src)
				elif type(src) == list:
					check_one_to_many(results, prod, src)
			# Accept tuples as well: sanitize_target_sources emits tuple
			# keys for multi-product entries
			elif type(prod) in (list, tuple):
				if type(src) == str:
					check_many_to_one(results, prod, src)
				elif type(src) == list:
					check_many_to_many(results, prod, src)
			else:
				# Doing something not supported
				pdb.set_trace()
				raise Exception

		return results

	def sanitize_target_sources(self, sources):
		"""Normalize the user provided sources argument to a dictionary.

		String and list inputs are wrapped under an empty-string key.
		Dictionary keys and values get their {ref} formatting expanded;
		list-typed product keys become tuples so they stay hashable.
		"""

		# If we're already of type dict
		if type(sources) == dict:

			# Copy from original, in case we need to sanitize again later
			# with different sources existing in the file system
			sources = sources.copy()

			# Iterate a snapshot since we mutate `sources` in the loop
			for prod_entry, src_entry in list(sources.items()):

				# Remove our old key entry
				del sources[prod_entry]

				# Update the key formatting
				if type(prod_entry) == str:
					prod_entry = self.Eval(prod_entry, **self._eval())
				elif type(prod_entry) in (list, tuple):
					# Tuple keeps the key hashable (a list key raised
					# TypeError on insertion)
					prod_entry = tuple(self.Eval(prod, **self._eval())
							for prod in prod_entry)
				else:
					pdb.set_trace()
					raise Exception

				# Update the value formatting
				if type(src_entry) == str:
					src_entry = self.Eval(src_entry, **self._eval())
				elif type(src_entry) == list:
					src_entry = [self.Eval(src, **self._eval())
							for src in src_entry]
				else:
					pdb.set_trace()
					raise Exception

				sources[prod_entry] = src_entry

			return sources

		# Initialize a dictionary with an empty key
		results = {"":[]}

		# If the parameter passed to sources is a string or list
		# put sources in the empty key of a results dictionary
		if type(sources) == str:
			results[""].append(self.Eval(sources, **self._eval()))
		elif type(sources) == list:
			formatted_list = []
			for src in sources:
				formatted_list.append(self.Eval(src, **self._eval()))
			results[""].extend(formatted_list)
		else:
			# Trying to use something not supported
			pdb.set_trace()
			raise Exception

		return results

class Dependency():
	"""Graph edge pairing a Descriptor with one of its Targets.

	NOTE(review): build() temporarily chdirs into the descriptor's
	directory and switches the shared user cache context; the statement
	order matters, so the body is documented rather than restructured.
	"""

	def __init__(self, desc, obj, ctx = ''):
		"""obj is expected to be a Target"""
		# desc: the Descriptor that declared this target
		self.desc = desc
		# obj: the Target (rule) to execute
		self.obj = obj
		# ctx_id: name of the user context this dependency runs under
		self.ctx_id = ctx
		# Populated later when the dependency graph is assembled
		self.deps = []

	@property
	def id(self):
		# Unique id of the form "<descriptor>:<target>;<context>"
		return "%s:%s;%s" % (self.desc.id, self.obj.id, self.ctx_id)

	def build(self, builder):
		# Run the wrapped rule inside the descriptor's directory with a
		# freshly assembled environment stack.

		# Enter the descriptor's directory context
		old_path = os.getcwd()
		os.chdir(self.desc.dir)

		# If the Dependency contains a callable (function, method)
		if isinstance(self.obj, Target) and hasattr(self.obj.func, '__call__'):

			# TODO: Check concurrency issues with this:
			# Ensure that the user_cache context is correct
			self.desc.user_cache.switch_context(self.ctx_id)

			# Because we're using the descriptor's environment references,
			# a descriptor can perform hackish code flows within itself
			# all day, but should not taint the rest of the project.
			# Note: This behavior is not guaranteed
			
			env_stack = [
				self.desc.default_cache,
				self.desc.global_cache,
				self.desc.descriptor_env,
				ContextDict(), # We don't persist rule_env
				self.desc.user_cache,
			]
			# Rule writes land in the descriptor layer (env_idx 2)
			R = SQuadEnvironment(env_stack = env_stack,
								env_idx = 2,
								args = self.desc.args)

			# Give rule direct access to the decorator arguments provided
			R.args = self.obj.args
			R.kwargs = self.obj.kwargs

			try:
				# Run It!
				self.obj.func(R)
			except AutoCleanComplete:
				# Auto clean finished its work; not an error condition
				pass

		# Restore previous directory context
		os.chdir(old_path)

class Target():
	"""Wraps a rule function together with its decorator arguments."""

	def __init__(self, func, *args, **kwargs):
		"""func: the rule callable; args/kwargs: decorator arguments."""
		# __name__ replaces the Python 2 only func_name alias; it is
		# identical for plain functions and also works for builtins
		self.id = func.__name__
		self.func = func
		self.args = args
		self.kwargs = kwargs
		# Populated later when the dependency graph is assembled
		self.deps = []

class Descriptor():
	"""A single loaded build descriptor file.

	Holds the python namespaces the descriptor executes in, the targets
	it registers, and cached copies of the global environment layers.
	"""

	def __init__(self, id = None, env_dict = None, args = None):
		"""id: expanded path of the descriptor file (required).
		env_dict: global environment layers ('default_env', 'global_env',
		'user_env'); defaults to empty.
		args: processed command line arguments object, or None to use
		the built-in namespace defaults.
		"""

		# Note: Assumed id is expanded
		if id is None:
			pdb.set_trace()
			raise Exception("Must provide expanded descriptor id")

		# Fixed: avoid a shared mutable default argument
		if env_dict is None:
			env_dict = {}

		# Convenience attributes
		self.dir = os.path.dirname(id)
		self.file = os.path.basename(id)
		self.path = id
		self.id = id

		# Target is list to preserve load order
		self.target_list = []

		# Target dictionary to store Target objects
		self.target_dict = {}

		# Stores what imports this descriptor has requested
		self.imports = []

		# Python namespace dictionaries
		self.local_ns = {}
		self.global_ns = {}
		self.orig_global_ns = {}

		# Our reference to top level SQuad
		self.env_dict = env_dict
		self.descriptor_env = ContextDict()

		# Arguments reference; fall back to default namespace labels
		if args is None:
			self.args = Object()
			self.args.descriptor_namespace = "D"
			self.args.global_namespace = "G"
		else:
			self.args = args

	def load(self, edges = None):
		"""Execute the descriptor file, registering its rules into edges."""

		# Fixed: avoid a shared mutable default argument
		if edges is None:
			edges = {}

		# Setup the namespace labels for descriptor
		contexts = ['']
		if 'user_env' in self.env_dict:
			contexts = self.env_dict['user_env'].contexts_list()
		attrs = {'desc': self, 'edges': edges, 'contexts': contexts}
		self.local_ns['rule'] = SQuadBuilder.gen_target_decorator(attrs)

		# Descriptors are a little less trustworthy than, say, an
		# extension or user config, so give these guys cached copies of
		# the global environments.
		if 'default_env' in self.env_dict and \
				'global_env' in self.env_dict and \
				'user_env' in self.env_dict:
			# Build "readonly" caches
			self.default_cache = self.env_dict['default_env'].copy()
			self.global_cache = self.env_dict['global_env'].copy()
			self.user_cache = self.env_dict['user_env'].copy()
		else:
			# We execute this when coming from expand_unknown_dep_id on
			# the hunt for target names. In that case we don't care.
			self.default_cache = ContextDict()
			self.global_cache = ContextDict()
			self.user_cache = ContextDict()

		env_stack = [
			self.default_cache,
			self.global_cache,
			self.descriptor_env,
			self.user_cache,
		]
		D = SQuadEnvironment(env_stack = env_stack, env_idx = 2, args = self.args)

		env_stack = [
			self.default_cache,
			self.global_cache,
			self.user_cache,
		]
		G = SQuadEnvironment(env_stack = env_stack, env_idx = None, args = self.args)

		self.local_ns[self.args.descriptor_namespace] = D
		self.local_ns[self.args.global_namespace] = G

		# Execute descriptor (Python 2 builtin)
		execfile(self.path, self.global_ns, self.local_ns)

		# Copy a reference to the child descriptor names
		if 'squad_imports' in self.local_ns:
			self.imports = self.local_ns['squad_imports']

		# Backup old global values before overlaying locals
		self.orig_global_ns = self.global_ns.copy()

		# Overlay locals on top of globals
		self.global_ns.update(self.local_ns)

class Object():
	"""Minimal attribute bag used as an ad hoc namespace."""
	pass

class ContextDict():
	"""A dictionary-like object holding multiple named contexts.

	All mapping operations are delegated to the currently selected
	context (an OrderedDict); contexts can be created, replaced, aliased
	and switched by name.
	"""

	def __init__(self):
		# Initialize a default context under the empty name
		self.contexts = {'':OrderedDict()}
		# Set default context as current context
		self.current_id = ''
		self.current = self.contexts[self.current_id]

	def new_context(self, name):
		"""Create a new empty context"""
		self.contexts[name] = OrderedDict()

	def set_context(self, name, ctx):
		"""Set or replace an existing context"""
		self.contexts[name] = ctx

	def name_default(self, name):
		"""Alias the default context under an additional name"""
		self.contexts[name] = self.contexts['']

	def del_context(self, name):
		"""Delete an existing context"""
		del self.contexts[name]

	def get_context(self, name):
		"""Fetch the reference to an existing context"""
		return self.contexts[name]

	def contexts_list(self):
		"""Return the known context names"""
		return self.contexts.keys()

	def switch_context(self, name):
		"""Switch current context to named context, creating it if needed"""
		if name not in self.contexts:
			self.contexts[name] = OrderedDict()
		self.current = self.contexts[name]
		self.current_id = name

	def current_context(self):
		"""Return reference to the current context"""
		return self.current

	def current_context_id(self):
		"""Return the name of the current context"""
		return self.current_id

	def copy(self):
		"""Create a new ContextDict instance with copies of our contexts"""
		ctx_dict = ContextDict()
		for ctx in self.contexts.keys():
			ctx_dict.set_context(ctx, self.contexts[ctx].copy())
			if self.current is self.contexts[ctx]:
				ctx_dict.current_id = ctx
				ctx_dict.current = ctx_dict.contexts[ctx]
		return ctx_dict

	# --- Mapping protocol, delegated to the current context ---
	# Note: a duplicate __setitem__ definition was removed; only the
	# last definition was ever in effect.

	def __setitem__(self, key, value):
		self.current.__setitem__(key, value)

	def __getitem__(self, key):
		return self.current[key]

	def __delitem__(self, key):
		self.current.__delitem__(key)

	def __repr__(self):
		return self.current.__repr__()

	def __str__(self):
		return self.current.__str__()

	def __lt__(self, other):
		return self.current.__lt__(other)

	def __le__(self, other):
		return self.current.__le__(other)

	def __eq__(self, other):
		return self.current.__eq__(other)

	def __ne__(self, other):
		return self.current.__ne__(other)

	def __gt__(self, other):
		return self.current.__gt__(other)

	def __ge__(self, other):
		return self.current.__ge__(other)

	def __cmp__(self, other):
		# Python 2 only comparison hook
		return self.current.__cmp__(other)

	def __hash__(self):
		# Fixed signature: __hash__ takes no extra argument
		return self.current.__hash__()

	def __iter__(self):
		# Fixed: iterate the delegate; the old call passed self as an
		# argument to dict.__iter__ and raised TypeError
		return iter(self.current)

	def __contains__(self, item):
		return self.current.__contains__(item)

	def clear(self, *args, **kwargs):
		return self.current.clear(*args, **kwargs)

	def fromkeys(self, *args, **kwargs):
		return self.current.fromkeys(*args, **kwargs)

	def get(self, *args, **kwargs):
		return self.current.get(*args, **kwargs)

	def has_key(self, key):
		# Python 2 style accessor; implemented via `in` so it works even
		# where the underlying dict lacks has_key
		return key in self.current

	def items(self, *args, **kwargs):
		return self.current.items(*args, **kwargs)

	def iteritems(self, *args, **kwargs):
		return self.current.iteritems(*args, **kwargs)

	def iterkeys(self, *args, **kwargs):
		return self.current.iterkeys(*args, **kwargs)

	def itervalues(self, *args, **kwargs):
		return self.current.itervalues(*args, **kwargs)

	def keys(self, *args, **kwargs):
		return self.current.keys(*args, **kwargs)

	def pop(self, *args, **kwargs):
		return self.current.pop(*args, **kwargs)

	def popitem(self, *args, **kwargs):
		return self.current.popitem(*args, **kwargs)

	def setdefault(self, *args, **kwargs):
		return self.current.setdefault(*args, **kwargs)

	def update(self, *args, **kwargs):
		return self.current.update(*args, **kwargs)

	def values(self, *args, **kwargs):
		return self.current.values(*args, **kwargs)

	def viewitems(self, *args, **kwargs):
		return self.current.viewitems(*args, **kwargs)

	def viewkeys(self, *args, **kwargs):
		return self.current.viewkeys(*args, **kwargs)

	def viewvalues(self, *args, **kwargs):
		return self.current.viewvalues(*args, **kwargs)

	@property
	def loadorder(self):
		"""Key insertion order of the current context"""
		return self.current.loadorder

class OrderedDict(dict):
	"""dict subclass recording key insertion order in `loadorder`."""

	def __init__(self, *args, **kwargs):
		super(OrderedDict, self).__init__(*args, **kwargs)
		# Seed the order list from whatever keys arrived at construction
		self.loadorder = list(self.keys())

	def __setitem__(self, key, value):
		# A key's first appearance fixes its position in the load order
		if key not in self.loadorder:
			self.loadorder.append(key)
		super(OrderedDict, self).__setitem__(key, value)

	def copy(self):
		"""Return a duplicate whose loadorder mirrors ours exactly."""
		duplicate = OrderedDict(self.items())
		for key in self.loadorder:
			duplicate.loadorder.remove(key)
			duplicate.loadorder.append(key)
		return duplicate

class SQuadBuilder():
	def __init__(self):
		"""Set up a fresh builder: argument placeholders plus empty state."""

		# Raw argparse.Namespace handed back by parse_args()
		self.orig_args = None

		# Processed arguments object
		self.args = Object()

		# Reset all SQuadBuilder state (arguments are untouched by reset)
		self.reset()

	def reset(self):
		"""Drop all descriptor and graph state; arguments survive."""

		# Descriptor lookup by id, plus a list preserving load order
		self.desc_dict = {}
		self.desc_list = []

		# The descriptor:rule digraph built as descriptors load, and a
		# general index from "descriptor:rule" id to Dependency object
		self.outEdges = {}
		self.outEdgesIdx = {}

		# The authoritative dictionary of global environment layers
		self.env_dict = {}
		self.env_dict['default_env'] = ContextDict()
		self.env_dict['global_env'] = ContextDict()
		self.env_dict['user_env'] = ContextDict()

	def setup_environment(self, *extension_args):
		"""Populate the default/global/user environment layers.

		extension_args: extension objects whose setup_environment(G) is
		invoked so they can modify the globals layer; user config files
		listed in --config are then executed against the user layer.
		"""

		# Build gets to set defaults layer
		defaults = self.env_dict['default_env']
		defaults['CC'] = 'gcc'
		defaults['CCFLAGS'] = ''
		defaults['CPP'] = 'cpp'
		defaults['CPPFLAGS'] = ''
		defaults['LD'] = 'ld'
		defaults['LDFLAGS'] = ''
		defaults['AS'] = 'as'
		defaults['ASFLAGS'] = ''
		defaults['ECHO'] = 'echo'
		defaults['AR'] = 'ar'
		defaults['ARFLAGS'] = 'crs'
		defaults['COPY'] = 'cp'
		defaults['OBJCOPY'] = 'objcopy'
		# Fixed key typo: was 'PYHTON'
		defaults['PYTHON'] = 'python'

		# Extensions get to set in globals layer
		env_stack = [
			self.env_dict['default_env'],
			self.env_dict['global_env'],
			# Even though user_env isn't loaded, might as well set
			# things up so we can reuse this instance further down.
			self.env_dict['user_env'],
		]
		G = SQuadEnvironment(env_stack = env_stack, env_idx = 1, args = self.args)

		for arg in extension_args:
			arg.setup_environment(G)

		# User configurations are set in user layer
		for cfg in self.args.configs:
			if os.path.exists(cfg):

				# Create a squad user namespace
				env_stack = [
					self.env_dict['default_env'],
					self.env_dict['global_env'],
					self.env_dict['user_env'],
				]
				U = SQuadEnvironment(env_stack = env_stack, env_idx = 2, args = self.args)

				global_ns = {}
				local_ns = {}
				local_ns[self.args.global_namespace] = G
				local_ns[self.args.user_namespace] = U

				# Execute the user config (Python 2 builtin)
				execfile(cfg, global_ns, local_ns)

	def handle_arguments(self, **kwargs):
		"""Processes all of the command lione options into the args attribute"""

		description = 'Build package with SQuad'
		argparser = argparse.ArgumentParser(description = description)

		args = ['--nop']
		dest = "nop"
		help = 'Intended to break multivalued arguments just before target'
		argparser.add_argument(*args, help = help, nargs="?")

		args = ['-j', '--jobs']
		dest = "jobs"
		help = 'Run JOBS jobs simultaneously'
		argparser.add_argument(*args, dest = dest, help = help, type = int)

		args = ['-C', '--directory']
		dest = "dir"
		default = "."
		help = 'Change to directory DIR before starting'
		argparser.add_argument(*args, dest = dest, default = default,
			help = help, type = str)

		args = ['--default-descriptor-filename']
		dest = "descriptor_filename"
		default = "descriptor"
		help = 'Change the default descriptor filename'
		argparser.add_argument(*args, dest = dest, default = default,
			help = help, type = str)

		args = ['--default-descriptor-namespace']
		dest = "descriptor_namespace"
		default = "D"
		help = 'Change the default descriptor namespace label'
		argparser.add_argument(*args, dest = dest, default = default,
			help = help, type = str)

		args = ['--default-global-namespace']
		dest = "global_namespace"
		default = "G"
		help = 'Change the default global namespace label'
		argparser.add_argument(*args, dest = dest, default = default,
			help = help, type = str)

		args = ['--default-user-namespace']
		dest = "user_namespace"
		default = "U"
		help = 'Change the default user namespace label'
		argparser.add_argument(*args, dest = dest, default = default,
			help = help, type = str)

		args = ['--squad-script']
		dest = "squad_script"
		default = "squad_script"
		help = 'Change the default squad_script filename'
		argparser.add_argument(*args,
			dest = dest, help = help, default = default, type = str)

		args = ['--config']
		dest = "configs"
		default = []
		help = 'Add a configuration file'
		argparser.add_argument(*args, dest = dest, help = help,
			default = default, type = str, nargs = "*")

		args = ['--multiprocess']
		dest = "use_mp"
		help = 'For parallel builds, use multiple processes instead of threads'
		default = False
		argparser.add_argument(*args, dest = dest, help = help,
			default = default, action = 'store_true')

		args = ['--print-target-build-order']
		dest = "print_target_build_order"
		help = "Prints target build order after topological sort"
		default = False
		argparser.add_argument(*args,  dest = dest, help = help, default = default, action = 'store_true')

		args = ['--no-print-briefs']
		dest = "print_briefs"
		help = "Print target briefs"
		default = True
		argparser.add_argument(*args, dest = dest, help = help, default = default, action = 'store_false')

		args = ['--print-commands']
		dest = "print_commands"
		help = "Print target commands."
		default = False
		argparser.add_argument(*args,  dest = dest, help = help, default = default, action = 'store_true')

		args = ['--print-notices']
		dest = "print_notices"
		help = "Prints any notices."
		default = False
		argparser.add_argument(*args, dest = dest, help = help, default = default, action = 'store_true')

		args = ['--ignore-uptodate']
		dest = "ignore_uptodate"
		help = "Builds sources even if up to date"
		default = False
		argparser.add_argument(*args, dest = dest, help = help, default = default, action = 'store_true')

		args = ['--auto-clean']
		dest = "auto_clean"
		help = "Automatically cleans any product passed into EvalSources"
		default = False
		argparser.add_argument(*args, dest = dest, help = help, default = default, action = 'store_true')

		help = "The target to build"
		default = ["."]
		argparser.add_argument("target_rules", type = str, help = help, default = default, nargs = '*')

		if kwargs.has_key('extensions'):
			for arg in kwargs['extensions']:
				arg.add_arguments(argparser)

		args = None
		if kwargs.has_key('sys_args'):
			args = argparser.parse_args(kwargs['sys_args'])
		else:
			args = argparser.parse_args()
		self.orig_args = args

		if kwargs.has_key('extensions'):
			for arg in kwargs['extensions']:
				arg.process_arguments(self.args, args)

		os.chdir(args.dir)

		self.args.dir = args.dir

		if args.jobs is not None:
			self.args.jobs = args.jobs
		else:
			self.args.jobs = 1

		self.args.jobs_type = 'multiprocess'

		self.args.targets = args.target_rules

		self.args.print_target_build_order = args.print_target_build_order

		self.args.print_briefs = args.print_briefs

		self.args.print_commands = args.print_commands

		self.args.print_notices = args.print_notices

		self.args.ignore_uptodate = args.ignore_uptodate

		#self.args.print_loaded_descriptors = args.print_loaded_descriptors

		self.args.auto_clean = args.auto_clean

		self.args.descriptor_filename = args.descriptor_filename

		self.args.squad_script = args.squad_script

		self.args.descriptor_namespace = args.descriptor_namespace

		self.args.global_namespace = args.global_namespace

		self.args.user_namespace = args.user_namespace

		self.args.configs = args.configs
	
	def get_referenced_contexts(self):
		"""Collect every context name referenced by target dependencies."""
		found = []
		for edge in self.outEdges.keys():
			# Skip anything that is not a Target-backed edge
			if not isinstance(edge.obj, Target):
				print("WARNING: Found non-Target instance in outEdge graph")
				continue

			target = edge.obj

			# Targets without dependencies cannot reference contexts
			if 'deps' not in target.kwargs:
				if self.args.print_notices:
					print("NOTICE: Target has no dependencies: %s:%s" % \
						(edge.desc.id, target.id))
				continue

			for dep in target.kwargs['deps']:
				# TODO Not sure if we need to include defaults...?
				if dep.count(";") > 0:
					# A ';' marks a context switch (in theory)
					ctx_name = dep.split(";", 1)[1]
					if ctx_name not in found:
						found.append(ctx_name)

		return found
	
	def is_normal_dependency(self, edge):
		"""True when edge is a Dependency pairing a Descriptor with a Target."""
		if not isinstance(edge, Dependency):
			return False
		if not isinstance(edge.desc, Descriptor):
			return False
		return isinstance(edge.obj, Target)
	
	def normalize_outEdges(self):
		# Resolve every target's string dependency list into Dependency
		# object references inside self.outEdges.  Temporarily chdirs into
		# each descriptor's directory so relative dependency ids expand
		# correctly -- statement order matters here.

		# Generate an index from "descriptor:rule" id to Dependency
		for edge in self.outEdges.keys():
			if self.is_normal_dependency(edge):
				self.outEdgesIdx[edge.id] = edge

		# Get all context switches (Only needed if we decide to allow context 
		# definitions in descriptors.
		#contexts = self.get_referenced_contexts()
		
		# TODO:
		# 2) Develop an identical graph for each context
		#    (Excluding the context explicit dependencies)
		

		#self.env_dict['user_env'].contexts


		# Generate dependency digraph
		for edge in self.outEdges.keys():
			target = edge.obj
			desc = edge.desc
			
			# If not a Target, ignore it
			if not isinstance(target, Target):
				print "WARNING: Found non-Target instance in outEdge graph"
				continue

			# If target has no dependencies, skip it
			if not target.kwargs.has_key('deps'):
				if self.args.print_notices:
					print "NOTICE: Target has no dependencies: %s:%s" % \
						(desc.id, target.id)
				continue

			old_path = os.getcwd()
			# Enter this descriptor's directory context
			os.chdir(desc.dir)

			# For each listed dependency
			for dep in target.kwargs['deps']:
				if type(dep) == str:
					# Determine and verify full dependency id
					# (expand_id is defined outside this view)
					id = self.expand_id(dep, ctx = edge.ctx_id)
					if id is None:
						print "WARNING: No known id found for %s" % dep
						# null Dependency
						continue

					# Link the resolved Dependency into the digraph
					self.outEdges[edge].append(self.outEdgesIdx[id])

				else:
					# TODO: Warn? Support user provided dependency objects?
					pdb.set_trace()
					raise Exception

			# Restore the target rule directory context
			os.chdir(old_path)

	def start_build(self, dep_id):
		"""Topologically sort the graph under dep_id and build it.

		Prunes every node not needed for the requested target, orders the
		remainder, then builds serially or in parallel per --jobs.
		Returns [] early when a cyclic dependency is detected.
		"""
		# Expand and verify dep_id is a valid dependency
		dep_id = self.expand_id(dep_id)
		if dep_id is None:
			raise NothingToDo

		target = self.outEdgesIdx[dep_id]

		# TODO: Dynamically determine this?
		processors = self.args.jobs
		outEdges = self.outEdges.copy()
		inEdges = {}

		build_order = []

		# Build our dep->target index
		for obj in outEdges.keys():
			if obj not in inEdges:
				inEdges[obj] = []

		# Populate the dep->target index
		for obj in outEdges.keys():
			for dep in outEdges[obj]:
				if dep not in inEdges:
					inEdges[dep] = []
				if obj not in inEdges[dep]:
					inEdges[dep].append(obj)

		# Prune off what we're not building
		zero_in_degree = []

		# Snapshot the keys since we delete entries while scanning
		for item in list(inEdges.keys()):
			if len(inEdges[item]) == 0 and item is not target:
				zero_in_degree.append(item)
				del inEdges[item]
		while len(zero_in_degree) > 0:
			obj = zero_in_degree.pop(0)
			for item in outEdges[obj]:
				try:
					inEdges[item].remove(obj)
				except KeyError:
					# item was already pruned; nothing left to update.
					# Fixed: previously fell through and re-raised
					# KeyError on the len() check below.
					continue

				if len(inEdges[item]) == 0:
					zero_in_degree.append(item)
					del inEdges[item]

		# Topological Sort
		zero_in_degree = [target]
		# target may already have been dropped during pruning, so pop
		# with a default rather than del (which would raise KeyError)
		inEdges.pop(target, None)
		while len(zero_in_degree) > 0:
			obj = zero_in_degree.pop(0)
			build_order.insert(0, obj)

			for item in outEdges[obj]:
				try:
					inEdges[item].remove(obj)
				except KeyError:
					print("Cyclic dependency involving %s:%s" % \
						(obj.desc.id, obj.obj.id))
					return []

				if len(inEdges[item]) == 0:
					zero_in_degree.append(item)
					del inEdges[item]

		# Anything left over was never reached -- internal error
		if len(inEdges.keys()) > 0:
			pdb.set_trace()
			raise Exception

		if self.args.print_target_build_order:
			cnt = 1
			for i in build_order:
				print("%d) %s\n" % (cnt, i.id))
				cnt = cnt + 1

		if processors == 1:
			for dep in build_order:
				dep.build(self)
		else:
			if self.args.jobs_type == 'multiprocess':
				self.mp_parallel_build(processors, build_order, outEdges)
			else:
				pdb.set_trace()
				raise Exception

	@staticmethod
	def build_process(builder, todo_queue, done_queue, name):
		"""Entry point for a multiprocess build worker.

		Pulls dependency ids off todo_queue, builds each one via the
		builder's dependency index, and reports finished ids on
		done_queue.  Any non-string item acts as a shutdown sentinel.
		"""
		print("Creating %s\n" % name)
		while True:
			# TODO: Add some kill feature
			work_item = todo_queue.get(True)
			if type(work_item) != str:
				# Sentinel received -- terminate this worker process
				sys.exit()
			candidate = builder.outEdgesIdx[work_item]
			if type(candidate) == types.InstanceType and \
					isinstance(candidate, Dependency):
				candidate.build(builder)
				done_queue.put(work_item)

	def mp_parallel_build(self, processors, build_order, outEdges):
		"""Build command for building with multiple processes.

		Spins up `processors` worker processes, feeds them buildable
		dependency ids through todo_queue, and retires completed ids from
		done_queue until everything in build_order has been dispatched.
		"""
		# Initialize the queues
		todo_queue = multiprocessing.Queue()
		done_queue = multiprocessing.Queue()

		# Spin up some workers
		workers = []
		worker_id = 0
		for t in range(0, processors):
			name = "Worker-%d" % worker_id
			worker_id = worker_id + 1
			args = (self, todo_queue, done_queue, name)
			target = SQuadBuilder.build_process
			new_worker = multiprocessing.Process(\
				target = target, name = name, args = args)
			workers.append(new_worker)
			new_worker.start()

		# While we have things left to build
		# NOTE(review): this loop busy-waits when nothing is buildable and
		# done_queue is empty -- consider a blocking get with timeout.
		while len(build_order) > 0:
			# Iterate over a snapshot: removing from build_order while
			# iterating it directly skips the element after each removal.
			for dep in list(build_order):
				# If this item has no outstanding dependencies
				if len(outEdges[dep]) == 0:
					# Add it to the buildable queue
					todo_queue.put(dep.id, True)
					# Remove key reference from outEdges
					del outEdges[dep]
					build_order.remove(dep)
			# While we have completed tasks
			while not done_queue.empty():
				dep_id = done_queue.get(True)
				dep = self.outEdgesIdx[dep_id]
				# Clear out *all* references to this from outEdges
				for outEdge in list(outEdges.keys()):
					cnt = outEdges[outEdge].count(dep)
					if cnt > 0:
						for i in range(0, cnt):
							outEdges[outEdge].remove(dep)

		# Shut the workers down (a non-str sentinel makes each one exit)
		for worker in workers:
			todo_queue.put(None)

	def expand_unknown_desc_id(self, path):
		"""Resolve a partial descriptor path to a concrete descriptor file.

		A directory resolves to the descriptor file it contains (if one
		exists); a regular file resolves to itself; anything else yields
		None.
		"""
		# Note: We should be in correct directory context
		abs_path = os.path.abspath(path)
		if not os.path.exists(abs_path):
			return None
		if os.path.isdir(abs_path):
			candidate = os.path.join(abs_path, self.args.descriptor_filename)
			if os.path.exists(candidate):
				return candidate
		if os.path.isfile(abs_path):
			return abs_path
		return None

	def expand_unknown_dep_id(self, dep_str, ctx = ''):
		"""Resolve a partial dependency ID to a fully-qualified one.

		Resolution order:
		1) strip an explicit ";context" suffix, if present
		2) "descriptor:target" pairs -- expand the descriptor portion
		3) a bare name matching a target of the current directory's descriptor
		4) a bare name naming a descriptor -- default to its first target

		Returns "descriptor:target;context" or None when nothing matches.
		"""
		# Split off an explicit context specifier, if any
		if ";" in dep_str:
			(dep_str, ctx) = dep_str.split(";", 1)

		# Note: We should be in correct directory context
		# Explicit "descriptor:target" form
		if ":" in dep_str:
			(desc_part, targ_part) = dep_str.split(":", 1)
			resolved = self.expand_unknown_desc_id(desc_part)
			if resolved is None:
				return None
			return resolved + ":" + targ_part + ";" + ctx

		# Bare name: is it a target of the descriptor in the current directory?
		resolved = self.expand_unknown_desc_id(os.getcwd())
		if resolved is not None:
			descriptor = Descriptor(id = resolved)
			descriptor.load(edges = {})
			if dep_str in descriptor.target_list:
				return resolved + ":" + dep_str + ";" + ctx

		# Bare name: is it itself a descriptor with at least one target?
		resolved = self.expand_unknown_desc_id(dep_str)
		if resolved is not None:
			descriptor = Descriptor(id = resolved)
			descriptor.load(edges = {})
			if len(descriptor.target_list) > 0:
				return resolved + ":" + descriptor.target_list[0] + ";" + ctx

		return None

	def expand_id(self, dep_str, ctx = ''):
		"""Expand a dependency id against the loaded descriptor database.

		Accepts "desc:target", a bare in-scope target name, or a bare
		descriptor path, optionally suffixed with ";context".  Returns the
		canonical "descriptor:target;context" id, or None when the id does
		not resolve against self.desc_list / self.desc_dict.
		"""
		# 1) Do we have a context specifier
		if dep_str.find(";") != -1:
			(dep_str, ctx) = dep_str.split(";", 1)

		# 2) Do we have a rel descriptor id with target_id?
		if dep_str.find(":") != -1:
			(desc_id, targ_id) = dep_str.split(":", 1)
			desc_id = os.path.abspath(desc_id)
			if os.path.isdir(desc_id):
				desc_id = os.path.join(desc_id, self.args.descriptor_filename)
			if desc_id in self.desc_list and \
					targ_id in self.desc_dict[desc_id].target_list:
				return desc_id + ":" + targ_id + ";" + ctx
			# Explicit form that doesn't resolve: nothing else to try
			# (fix: removed a leftover pdb.set_trace() debugger breakpoint)
			return None

		# 3) Are we an in-scope target?
		desc_id = os.path.join(os.getcwd(), self.args.descriptor_filename)
		if desc_id in self.desc_list and \
				dep_str in self.desc_dict[desc_id].target_list:
			return desc_id + ":" + dep_str + ";" + ctx

		# 4) Are we a descriptor with a target? Default to its first target.
		desc_id = os.path.abspath(dep_str)
		if os.path.isdir(desc_id):
			desc_id = os.path.join(desc_id, self.args.descriptor_filename)
		if desc_id in self.desc_list and \
				len(self.desc_dict[desc_id].target_list) > 0:
			return desc_id + ":" + self.desc_dict[desc_id].target_list[0] + ";" + ctx

		# 5) We're useless
		return None

	def recursively_load_descriptors(self, desc_id):
		"""Load desc_id and, transitively, every descriptor it imports.

		Already-loaded descriptors are skipped, which also terminates
		import cycles.  Populates self.desc_list (preserving load order)
		and self.desc_dict.  Raises DescriptorSyntaxException on a
		non-string import entry.
		"""
		if desc_id in self.desc_list:
			return

		#if self.args.print_loaded_descriptors:
		print("Loading descriptor %s" % desc_id)

		# Create a new Descriptor object
		desc = Descriptor(id = desc_id, env_dict = self.env_dict, args = self.args)

		old_path = os.getcwd()
		# Enter the descriptors directory context
		os.chdir(desc.dir)

		try:
			# Load and execute the file
			desc.load(self.outEdges)

			# Normalize the import list
			norm_imports = []
			for imp in desc.imports:
				if type(imp) != str:
					raise DescriptorSyntaxException
				imp = self.expand_unknown_desc_id(imp)
				if imp is None:
					# Fix: previously a None (unresolvable import) was
					# appended and later recursed on, crashing the loader.
					print("WARNING: Could not resolve an import of %s" % desc_id)
					continue
				if imp not in norm_imports:
					norm_imports.append(imp)
			desc.imports = norm_imports

			# Add descriptor path to list to preserve load order
			self.desc_list.append(desc_id)
			self.desc_dict[desc_id] = desc

			# If imports are found, keep going!
			for imp in desc.imports:
				self.recursively_load_descriptors(imp)
		finally:
			# Restore previous path even if loading blew up
			os.chdir(old_path)

	def load_descriptors(self, desc_id):
		"""Resolve desc_id and load its descriptor graph into the database.

		Raises NothingToDo when the id cannot be resolved to a dependency.
		"""
		# Fix: resolve through self, not the module-level `squad` global,
		# so this method also works outside the __main__ script path.
		dep_id = self.expand_unknown_dep_id(desc_id)
		if dep_id is None:
			raise NothingToDo

		desc_id, target_id = dep_id.split(":", 1)

		# Initialize and load the descriptor database
		self.recursively_load_descriptors(desc_id)

		# Analyze and normalize the loaded results
		self.normalize_outEdges()

	@staticmethod
	def gen_target_decorator(attrs):
		"""Build the `rule` decorator used inside descriptor files.

		attrs supplies the descriptor being loaded ('desc'), the shared
		dependency-edge map ('edges'), and the active contexts
		('contexts').  The returned decorator registers the decorated
		function as a named target and seeds an empty edge list for it in
		every context.
		"""
		def rule(*targs, **tkwargs):
			def wrapper(func):
				desc = attrs['desc']
				edges = attrs['edges']
				contexts = attrs['contexts']

				name = func.func_name

				# Register the target under the function's own name
				desc.target_list.append(name)
				desc.target_dict[name] = Target(func, *targs, **tkwargs)

				# Seed an (empty) dependency edge list per context
				for ctx in contexts:
					edges[Dependency(desc, desc.target_dict[name], ctx)] = []

				def inner(*args, **kwargs):
					func(*args, **kwargs)
				return inner
			return wrapper
		return rule

if __name__ == "__main__":

	# Initialize SQuad environment
	squad = SQuadBuilder()

	# Process arguments into squad's DB (drop the program name)
	sys_args = [] + sys.argv
	sys_args.pop(0)

	# Process arguments
	squad.handle_arguments(sys_args = sys_args)

	# Setup initial default, global, and user environments
	squad.setup_environment()

	if os.path.exists(squad.args.squad_script):
		# Fix: was squad.args.squad_Script (typo, capital S), which raised
		# AttributeError whenever a squad script actually existed.
		squad_script = squad.args.squad_script
		del squad

		execfile(squad_script)

	else:
		try:
			for target in squad.args.targets:
				#squad.reset()
				squad.load_descriptors(target)
				squad.start_build(target)
		except NothingToDo:
			print("Nothing To Do")