#!/usr/bin/python

"""
.. module:: SQuad
   :platform: Unix, Windows
   :synopsis: An advanced build automation tool.

.. moduleauthor:: Crazy Chenz <crazychenz@gmail.com>


"""

import os
import sys
import pdb
import pickle
import copy
import time
import threading
import multiprocessing
import multiprocessing.managers
import Queue

class ServiceStatus():
    """A class for storing various service state attributes.

    Hashable wrapper around a plain dict so instances can be used where
    a hashable, dict-like object is required.
    """
    def __init__(self):
        """Initialize the internal attribute dictionary."""
        self.attrs = {}
        
    def __getitem__(self, key):
        """Pass through getitem to the internal dictionary.
        
        :param key: Dictionary key to get information from
        :type key: str or unicode
        
        """
        return self.attrs.__getitem__(key)
    
    def __setitem__(self, key, value):
        """Pass through setitem to the internal dictionary.
        
        (Fixed: was misspelled ``__setitem___`` with three trailing
        underscores, so item assignment raised TypeError.)
        
        :param key: Dictionary key to set information for
        :type key: str or unicode
        :param value: Object to set in the dictionary.
        :type value: str or unicode or int
        
        """
        return self.attrs.__setitem__(key, value)
    
class ObjectManager(multiprocessing.managers.BaseManager):
    """A thin class to add multiprocessing.managers registrations
    statically without polluting the BaseManager namespace.
    
    """
    # NOTE(review): this depends on multiprocessing.managers being
    # loaded; `import multiprocessing` alone does not reliably expose
    # the managers submodule -- confirm an explicit
    # `import multiprocessing.managers` exists at the top of the file.
    pass

class Svc():
    """An interface class for implementing a cross platform service with
    the StartService class.
    
    """
    def __init__(self, *args, **kwargs):
        """Keep hold of the construction arguments so concrete service
        implementations can consult them later."""
        self.args = args
        self.kwargs = kwargs
        
    def start(self):
        """Launch this implementation as a service on the running
        platform.
        
        :returns: The process id of the spawned service process.
        
        """
        return StartService(self).pid
    
class Killer():
    """A class intended for sending through a communication queue to
    indicate the target needs to terminate without exception.
    
    Instances carry no state; receivers detect a shutdown request with
    ``isinstance(msg[Channel.Obj], Killer)``.
    """
    pass

class Channel():
    """Manages outgoing and incoming queues for communicating with other
    processes of the service. """
    
    # Indices into a message triple: [from, to, obj]
    From = 0
    To = 1
    Obj = 2
    
    def __init__(self, who_we_are, to_us, to_them):
        """Record the channel endpoints and the name of the local user.
        
        :param who_we_are: A name for the user of the channel
        :type who_we_are: str
        :param to_us: A queue object for receiving objects
        :type to_us: Queue
        :param to_them: A queue object for sending out objects
        :type to_them: Queue
        
        """
        self.who_we_are = who_we_are
        self.to_us = to_us
        self.to_them = to_them
        
    def recv(self):
        """Block until an object arrives on the inbound queue, then
        return it."""
        return self.to_us.get(True)
    
    def send_to(self, to, obj):
        """Send an object on the outbound queue, using our own name as
        the from address.
        
        :param to: An indicator of the destination
        :type to: str
        :param obj: The object to send
        :type obj: Something serializable
        
        """
        message = [self.who_we_are, to, obj]
        return self.to_them.put(message)
    
    def send(self, sender, reciever, obj):
        """Send an object on the outbound queue with an explicit from
        address.
        
        :param sender: An indicator of the source
        :type sender: str
        :param reciever: An indicator of the destination
        :type reciever: str
        :param obj: The object to send
        :type obj: Something serializable
        
        """
        self.to_them.put([sender, reciever, obj])
        
    def send_msg(self, msg):
        """Send a pre-built message triple on the outbound queue.
        
        :param msg: The message to be sent to outbound queue
        :type msg: Three item array:: [from, to, obj]
        
        """
        self.to_them.put(msg)

class FSMonitorSvc(Svc):
    """Cross platform interface for managing file system monitoring."""
    
    def run(self):
        """Service entry point invoked when the service is started with
        the StartService class.
        
        Connects to the manager process, obtains the "monitor" channel,
        then loops: a Killer payload ends the loop, anything else
        triggers a heartbeat log message.
        
        """
        ObjectManager.register('get_channel')
        mgr = ObjectManager(
            address = ('127.0.0.1', 50000),
            authkey='gofish')
        mgr.connect()
        self.channel = mgr.get_channel("monitor")
        
        while True:
            msg = self.channel.recv()
            if isinstance(msg[Channel.Obj], Killer):
                print("(PID: %d) Monitor Dead" % os.getpid())
                break
            self.channel.send_to(
                "log", "FSMonitor last seen: %d" % int(time.time()))
        
class BuilderSvc(Svc):
    """Cross platform interface for managing building of products."""
    
    def run(self):
        """Service entry point invoked when the service is started with
        the StartService class.
        
        Connects to the manager process, obtains the "builder" channel,
        then loops: a Killer payload ends the loop, anything else
        triggers a heartbeat log message.
        
        """
        ObjectManager.register('get_channel')
        mgr = ObjectManager(
            address = ('127.0.0.1', 50000),
            authkey = 'gofish')
        mgr.connect()
        self.channel = mgr.get_channel('builder')
        
        while True:
            msg = self.channel.recv()
            if isinstance(msg[Channel.Obj], Killer):
                print("(PID: %d) Builder Dead" % os.getpid())
                break
            self.channel.send_to(
                "log", "Builder last seen: %d" % int(time.time()))
            
class ServiceManagerSvc(Svc):
    """Head managing class for starting services, logging, and providing
    communication between all processes involved in the build system.
    
    """
    
    # Shared inbound queue: every channel handed out by get_channel()
    # sends into this one queue; run() routes each message back out.
    queue = Queue.Queue()
    # Maps channel name (cookie) -> service-side Channel used for routing.
    _channels = {}
    
    @staticmethod
    def logger(log, fname):
        """A logger thread target for dumping things to a file.
        
        :param log: Channel this thread receives log messages on
        :type log: Channel
        :param fname: File name to append log entries to
        :type fname: str
        
        """
        
        #print "Logging to: %s" % fname
        log_file = open(fname, "ab")
        
        # Send the start marker through the service router so it arrives
        # on our own inbound queue like any other log message.
        entry = "--- Log Start %d ---" % int(time.time())
        log.send_to("log", entry)
        
        while True:
            msg = log.recv()
            log_file.write("[%d] %s\n" % (time.time(), repr(msg)))
            # A Killer payload terminates the logger (after being logged).
            if isinstance(msg[Channel.Obj], Killer):
                log_file.close()
                print "(PID: %d) Logger Dead" % os.getpid()
                return
            log_file.flush()
            
    @staticmethod
    def get_channel(cookie = None):
        """Method provided via a multiprocessing.managers.BaseManager
        instance. Creates an explicitly named communication channel or
        auto generates a channel for (command line) clients.
        
        :param cookie: The name of the channel
        :type cookie: str
        
        """
        
        # TODO: Ensure no cookie collisions
        if cookie is None:
            cookie = 'client%d' % time.time()
            
        client = Queue.Queue()
        service = ServiceManagerSvc.queue
        # Keep the service-side view (recv from the shared service queue,
        # send to the client) for routing; return the mirrored client-side
        # view (recv from the client queue, send to the service queue).
        ServiceManagerSvc._channels[cookie] = \
            Channel("service", service, client)
        return Channel(cookie, client, service)
    
    @staticmethod
    def comms():
        """Thread target that uses multiprocessing.managers.BaseManager to
        provide an interface to other daemon and user processes for
        communication with the build system.
        
        """
        
        ObjectManager.register(
            'get_channel',
            callable = ServiceManagerSvc.get_channel)
        
        # Start up the manager
        mgr = ObjectManager( \
            address = ('127.0.0.1', 50000),
            authkey = 'gofish')
        
        server = mgr.get_server()
        print "Starting the Manager Service Thread!"
        server.serve_forever()
        
    def run(self):
        """Method invoked by the StartService class to initialize this
        class as a separate process.
        
        """
        # Launch communication thread
        target = ServiceManagerSvc.comms
        comms_thread = threading.Thread(target = target)
        comms_thread.start()
        
        # TODO: Block until we know comms are up
        time.sleep(0.1)
        
        # Launch logger thread
        log = ServiceManagerSvc.get_channel("log")
        args = [log, "manager.log"]
        target = ServiceManagerSvc.logger
        log_thread = threading.Thread(args = args, target = target)
        log_thread.start()
        
        # TODO: Block until we know logs are up
        time.sleep(0.1)
        
        # Start up the builder service
        BuilderSvc().start()
        
        # Start up the monitor service
        FSMonitorSvc().start()
        
        # Now just route messages forever
        while True:
            request = ServiceManagerSvc.queue.get(True)
            
            # A Killer request fans out to every registered channel, then
            # gives the recipients a moment before hard-exiting.
            if isinstance(request[Channel.Obj], Killer):
                channels = ServiceManagerSvc._channels
                for channel in channels:
                    channels[channel].send_to(channel, request[Channel.Obj])
                time.sleep(1)
                print "(PID %d) Service Dead" % os.getpid()
                os._exit(0)
                
            to = request[Channel.To]
            if to != "service":
                # Just passing through: forward to the destination channel
                # (messages to unknown destinations are silently dropped).
                if ServiceManagerSvc._channels.has_key(to):
                    ServiceManagerSvc._channels[to].send_msg(request)
            else:
                # It must be for us
                log_msg = "Service last seen: %d" % int(time.time())
                ServiceManagerSvc._channels['log'].send_to("log", log_msg)
                
class StartService():
    """This class starts up services for the detected platform or raises an 
    exception that the platform is not supported.
    
    .. note:: 
    
        Multiprocessing doesn't cut it for us because multiprocessing processes
        can not launch children and die with the parent process of the
        multiprocessing execution tree.
    
    """
    
    def unix_service(self, svc):
        """Uses fork() to start an independent service process.
        
        :param svc: A subclass of Svc implementing a run() method
        :type svc: Svc
        :returns: The child pid (parent process only; the child never
            returns from this call).
        
        """
        pid = os.fork()
        if pid != 0:
            # Parent: hand the child's pid back to the caller.
            return pid
        else:
            # Child: run the service and terminate without invoking
            # cleanup handlers that belong to the parent.
            svc.run()
            os._exit(0)
            
    def windows_service(self, svc):
        """Uses pywin32 to start an independent service process.
        
        :param svc: A subclass of Svc implementing a run() method
        :type svc: Svc
        :raises Exception: always -- not implemented yet.
        
        """
        raise Exception("Windows services not yet supported.")
    
    def __init__(self, svc):
        """Determines, with sys.platform, the method to use to start an
        independent service process.
        
        :param svc: A subclass of Svc implementing a run() method
        :type svc: Svc
        :raises Exception: for unsupported or unknown platforms.
        
        """
        if sys.platform == 'win32':
            self.pid = self.windows_service(svc)
        elif sys.platform.startswith('linux') or \
                sys.platform in ['darwin', 'freebsd7']:
            # startswith covers both 'linux2' (Python 2) and 'linux'.
            self.pid = self.unix_service(svc)
        elif sys.platform in ['os2', 'os2emx', 'riscos', 'atheos', 'cygwin']:
            exstr = "Python for %s not supported yet." % sys.platform
            raise Exception(exstr)
        else:
            exstr = "Unknown platform (%s)" % sys.platform
            raise Exception(exstr)
        
class Client():
    """Client-side helpers for talking to a running service manager."""
    def __init__(self):
        """Nothing to initialize; connections are made on demand."""
        pass
    
    def check_services(self):
        """Planned health check of the running services (not yet
        implemented)."""
        # Ping IP:PORT
        # Try to connect to manager
        # Check last seen for each service
        pass
    
    def start_services(self):
        """Start the head service manager as a platform service."""
        ServiceManagerSvc().start()
        
    def connect(self):
        """Connect to the manager process and return an auto-named
        channel for this client.
        
        :returns: Channel proxy obtained from the service manager.
        
        """
        ObjectManager.register('get_channel')
        mgr = ObjectManager(
            address = ('127.0.0.1', 50000),
            authkey = 'gofish')
        mgr.connect()
        return mgr.get_channel()
            
class Edge():
    """A class representation of an edge (rule) in the dependency graph.
    
    Attributes mirror the constructor arguments.
    """
    def __init__(self, name, func, args, kwargs):
        """Capture the rule's identity and invocation details.
        
        :param name: Name of the rule.
        :type name: str
        :param func: A callable object to execute the rule.
        :type func: callable
        :param args: Sequence of arguments.
        :type args: list
        :param kwargs: Keyword arguments
        :type kwargs: dict
        
        """
        self.name, self.func = name, func
        self.args, self.kwargs = args, kwargs
        
class Node():
    """A class representation of a node (target) in the dependency graph.
    
    Attributes mirror the constructor arguments.
    """
    def __init__(self, name, func, args, kwargs):
        """Capture the target's identity and invocation details.
        
        :param name: Name of the rule.
        :type name: str
        :param func: A callable object to execute the rule.
        :type func: callable
        :param args: Sequence of arguments.
        :type args: list
        :param kwargs: Keyword arguments
        :type kwargs: dict
        
        """
        self.name, self.func = name, func
        self.args, self.kwargs = args, kwargs

class Environment():
    """This class is designed to be highly modifiable, so make no assumptions
       about the availability of methods until runtime.
       
       The class has several different features that may be enabled:
         - Product Mode - For establishing rule-product-source relationships.
         - Variable Mode - For managing the variable substitution environment.
    """
    def __init__(self, products = False):
        """Create an environment, optionally enabling Product Mode.
        
        :param products: When True, track rule-product-source relations.
        :type products: bool
        
        """
        if products:
            # A list of rule-products-sources relationship dicts
            self.products = []
        # NOTE: the original code called __delattr__('Product') here,
        # which always raised AttributeError (no such instance attribute
        # ever exists).  With Product Mode off we simply leave
        # self.products undefined so Produce() fails loudly if misused.
        
    def Produce(self, rule = None, products = None, sources = None,
                deps = None, orderonly = None):
        """Record a rule-products-sources relationship.
        
        :param rule: Rule name used to build the products.
        :type rule: str
        :param products: Product file names.
        :type products: list
        :param sources: Source file names.
        :type sources: list
        :param deps: Extra dependency specifiers.
        :type deps: list
        :param orderonly: Order-only prerequisites (accepted but
            currently ignored -- TODO: Support order only).
        :type orderonly: list
        
        """
        # Build fresh lists per call: the original mutable default
        # arguments ([]) were shared across every invocation, silently
        # aliasing earlier entries.
        rel = {
            'rule': rule,
            'products': products if products is not None else [],
            'sources': sources if sources is not None else [],
            'deps': deps if deps is not None else []
        }
        self.products.append(rel)
        
class Descriptor():
    """A class for representing a parsed and loaded descriptor file."""
    def __init__(self, id = None, env_dict = None, load_now = True):
        """Initialize a descriptor object to load or not load the 
        corresponding descriptor file.
        
        :param id: The relative path from the cache directory.
        :type id: str
        :param env_dict: The environmental dictionary to use when loading.
            (Accepted for interface compatibility; currently unused.)
        :type env_dict: Any class acting with dict interfaces
        :param load_now: Indicates that we won't wait to load the descriptor
        :type load_now: bool
        :raises Exception: when *id* is not provided.
        
        """
        # Note: Assumed id is expanded

        if id is None:
            # (Debug pdb.set_trace() trap removed; just raise.)
            raise Exception("Must provide expanded descriptor id")

        # Convenience attributes
        self.dir = os.path.dirname(id)
        self.file = os.path.basename(id)
        self.path = id
        self.id = id

        # Lists of rules and builds defined in descriptor
        self.edges = []
        self.nodes = []

        # Stores what imports this descriptor has requested
        self.imports = []

        # Python namespace dictionaries used when executing the descriptor
        self.local_ns = {}
        self.global_ns = {}
        
        self.env = Environment(products = True)
        
        if load_now is True:
            self.load()

    def __eq__(self, other):
        """When checking for a descriptor in a list, allow the check to
           occur via the desc_id OR a descriptor with the same id.
           
           :param other: The object we're comparing our id to.
           :type other: str or Descriptor
           
        """
        if type(other) is str and other == self.id:
            return True
        if isinstance(other, Descriptor) and other.id == self.id:
            return True
        return False

    def load(self):
        """Load a descriptor file into its own namespace within the python
        interpreter. This acts as an in-memory cache of the file.
        
        """
        
        # Grab some cacheable code attributes; use a with-block so the
        # file handle is closed rather than leaked.
        with open(self.path, "rb") as desc_file:
            self.source = desc_file.read()
        self.code = compile(self.source, self.path, "exec")
        
        # Tell relevant decorators we're in *this* Descriptor
        attrs = {'desc': self}
        self.local_ns['rule'] = Descriptor.edge_decorator(attrs)
        self.local_ns['build'] = Descriptor.node_decorator(attrs)
        self.local_ns['D'] = self.env

        # Execute descriptor (descriptor files are trusted build code)
        exec(self.code, self.global_ns, self.local_ns)

        # Copy a reference to the child descriptor names
        if "imports" in self.local_ns:
            self.imports = self.local_ns['imports']

        # Overwrite locals on top of globals
        for symbol in self.local_ns.keys():
            self.global_ns[symbol] = self.local_ns[symbol]
            
    @staticmethod
    def edge_decorator(attrs):
        """Allows descriptors to indicate a function is a rule or edge.
        
        :param attrs: Shared state; attrs['desc'] is the owning Descriptor.
        :type attrs: dict
        
        """
        def edge(*edge_args, **edge_kwargs):
            def wrapper(func):
                # Grab decorator args and target name.  func.__name__ is
                # equivalent to the Python-2-only func.func_name.
                desc = attrs['desc']
                edge = Edge(func.__name__, func, edge_args, edge_kwargs)
                desc.edges.append(edge)
                
                # The closure thing -- note the wrapped call discards
                # func's return value, matching the original behavior.
                def inner(*args, **kwargs):
                    func(*args, **kwargs)
                return inner
            return wrapper
        return edge
    
    @staticmethod
    def node_decorator(attrs):
        """Allows descriptors to indicate a function is a build or target.
        
        :param attrs: Shared state; attrs['desc'] is the owning Descriptor.
        :type attrs: dict
        
        """
        def node(*node_args, **node_kwargs):
            def wrapper(func):
                # Grab decorator args and target name
                desc = attrs['desc']
                node = Node(func.__name__, func, node_args, node_kwargs)
                desc.nodes.append(node)
                
                # The closure thing (func's return value is discarded)
                def inner(*args, **kwargs):
                    func(*args, **kwargs)
                return inner
            return wrapper
        return node

# 1) Run all descriptors
# 2) 
# 3)

# Workaround: Python dict instances are unhashable, but class instances
# hash by identity, so Dependency below wraps a dict in a class to get
# a hashable, dict-like object.

class Dependency():
    """Hashable object that wraps a dictionary. This allows us to use a
    dictionary like object as a key into another dictionary.
    
    Hashing/equality are inherited (identity based); only item access
    is delegated to the wrapped dict.
    """
    
    def __init__(self):
        """Initialize internal dictionary"""
        self.obj = {}

    def __getitem__(self, key):
        """Pass on to the internal dictionary's __getitem__()"""
        return self.obj[key]

    def __setitem__(self, key, value):
        """Pass on to the internal dictionary's __setitem__()"""
        self.obj[key] = value

    def has_key(self, key):
        """Return True when *key* is present.  Implemented with ``in``
        because dict.has_key() is deprecated (removed in Python 3)."""
        return key in self.obj

    def values(self):
        """Pass on to the internal dictionary's values()"""
        return self.obj.values()

    def keys(self):
        """Pass on to the internal dictionary's keys()"""
        return self.obj.keys()

class DependencyGraph():
    """Performs the dependency graph construction as well as a number of 
    index contructions.
    
    """
    
    # Needs work
    def process_node(self, ctx, desc, node):
        """Register *node* (a build target) of descriptor *desc* for
        context *ctx*, run it to collect its product relationships, and
        hand those to process_node_products().

        :param ctx: Build context name
        :type ctx: str
        :param desc: Owning descriptor
        :type desc: Descriptor
        :param node: Node (build) to register
        :type node: Node

        NOTE(review): marked "Needs work" and currently disabled at the
        call site in __init__ (see the commented-out node loop there).
        """
        # Add this node to desc->node dictionary
        if not self.desc_nodes.has_key(desc.id):
            self.desc_nodes[desc.id] = []
        self.desc_nodes[desc.id].append(node.name)
        
        # Target ids take the form "desc_id:node_name;ctx".
        id = "%s:%s;%s" % (desc.id, node.name, ctx)
        build = Dependency()
        self.out_edges[build] = []
        self.in_edges[build] = []
        self.targets[id] = build
        
        # Setup standard node attributes
        
        build['id'] = id
        build['ctx'] = ctx
        build['desc_obj'] = desc
        build['node_obj'] = node
        # Array of dict objects from self.targets
        build['deps'] = []
        build['sources'] = []
        build['parents'] = []
        build['tmp_deps'] = []
        build['group'] = True
        
        # Node dependency strings are actually dependencies of
        # the inner products, not the node itself.
        if node.kwargs.has_key('deps'):
            for dep in node.kwargs['deps']:
                if dep not in build['tmp_deps']:
                    build['tmp_deps'].append(dep)
        
        # Run the node to get the product-source-rule relationships
        env = Environment(products = True)
        node.func(env)
        
        self.process_node_products(ctx, desc, node, env)
        
    # Needs work
    def process_node_products(self, ctx, desc, node, env):
        """Register the product-source-rule relationships that running
        *node* collected into *env* as product targets, and link each
        product as a dependency of the node's own build target.

        :param ctx: Build context name
        :type ctx: str
        :param desc: Owning descriptor
        :type desc: Descriptor
        :param node: Node whose products are being registered
        :type node: Node
        :param env: Environment populated by running node.func
        :type env: Environment

        NOTE(review): marked "Needs work"; only reachable through the
        currently-disabled process_node() path.
        """
        # Recover the node's build entry registered by process_node(),
        # using the same id construction.  (The original code referenced
        # an undefined local `build` here, raising NameError as soon as
        # env held any products.)
        build = self.targets["%s:%s;%s" % (desc.id, node.name, ctx)]

        # Grab all the products from the generated env
        for entry in env.products:

            entry_id = "|".join([build['id'], "|".join(entry['products'])])
            product = Dependency()
            self.targets[entry_id] = product
            self.out_edges[product] = []
            self.in_edges[product] = []
            
            # Setup standard node-product attributes
            product['id'] = entry_id
            product['ctx'] = ctx
            product['desc_obj'] = desc
            product['node_obj'] = node
            # Array of dict objects from self.targets
            product['deps'] = []
            product['sources'] = []
            product['parents'] = []
            product['group'] = False
            
            # Store attributes for post processing
            # TODO: Find this bug
            product['tmp_deps'] = [] #entry['deps']
            product['tmp_sources'] = entry['sources']
            product['tmp_rule'] = entry['rule']
            
            # Node dependency strings are actually dependencies of
            # the inner products, not the node itself.
            if 'deps' in node.kwargs:
                for dep in node.kwargs['deps']:
                    if dep not in product['tmp_deps']:
                        product['tmp_deps'].append(dep)
                
            # This product is a dependency of the node
            if product not in self.out_edges[build]:
                self.out_edges[build].append(product)
                
    def process_desc_products(self, ctx, desc):
        """Register every product relationship declared at descriptor
        scope (via ``D.Produce``) as a target for context *ctx*, and
        index the product files by absolute path in self.file_targets.

        :param ctx: Build context name
        :type ctx: str
        :param desc: Descriptor whose env.products are processed
        :type desc: Descriptor
        """
        # Grab all the products from the generated env
        for entry in desc.env.products:
            # Descriptor-scope products use an empty node name in the id.
            node_id = "%s:;%s" % (desc.id, ctx)
            entry_id = "|".join([node_id, "|".join(entry['products'])])
            product = Dependency()
            self.targets[entry_id] = product
            self.out_edges[product] = []
            self.in_edges[product] = []
            
            # Setup standard desc-product attributes
            product['id'] = entry_id
            product['ctx'] = ctx
            product['desc_obj'] = desc
            product['node_obj'] = None
            # Array of dict objects from self.targets
            product['deps'] = []
            product['sources'] = []
            product['parents'] = []
            product['group'] = False
            product['source'] = False
            product['product'] = True
            
            # Store attributes for post processing
            product['products'] = copy.copy(entry['products'])
            
            # Index files: map each product's absolute path back to the
            # product targets that produce it.
            #old_path = os.getcwd()
            #os.chdir(desc.dir)
            for path in product['products']:
                # Relative paths are resolved against the descriptor's dir.
                if path[0] != "/":
                    path = os.path.join(desc.dir, path)
                path = os.path.abspath(path)    
                if path not in self.file_targets:
                    self.file_targets[path] = []
                if product not in self.file_targets[path]:
                    self.file_targets[path].append(product)
            #os.chdir(old_path)
            
            product['tmp_deps'] = copy.copy(entry['deps'])
            product['tmp_sources'] = copy.copy(entry['sources'])
            product['tmp_rule'] = entry['rule']
            
    def process_product_source(self, src_id, target):
        """Build and return a Dependency describing a plain source file.

        :param src_id: Fully expanded source id
        :type src_id: str
        :param target: The target consuming the source; supplies the
            context and descriptor the source inherits.
        :type target: Dependency
        :returns: A new source Dependency (not yet registered anywhere).
        """
        source = Dependency()

        # Inherit context/descriptor from the consuming target; a plain
        # source has no node, edges, or children of its own.
        attributes = {
            'id': src_id,
            'ctx': target['ctx'],
            'desc_obj': target['desc_obj'],
            'node_obj': None,
            'deps': [],
            'sources': [],
            'parents': [],
            'group': False,
            'source': True,
            'product': False,
        }
        for key, value in attributes.items():
            source[key] = value

        return source
        
    def process_rules(self, descriptors):    
        
        # Rules do not have files, dependencies, or contexts
        for desc in descriptors:
            for edge in desc.edges:
                
                # Add this edge to desc->edge dictionary
                if not self.desc_edges.has_key(desc.id):
                    self.desc_edges[desc.id] = []
                self.desc_edges[desc.id].append(edge.name)
                
                rule = {}
                rule['desc_obj'] = desc
                rule['edge_obj'] = edge
                rule['desc_id'] = desc.id
                rule['edge_name'] = edge.name
                rule['id'] = "%s:%s" % (desc.id, edge.name)
                self.rules[rule['id']] = rule
                
    def __init__(self, mgr, descriptors, contexts = [""]):
        """Build the dependency graph and its indexes from loaded
        descriptors.

        :param mgr: Manager object (not referenced in this constructor
            body -- presumably kept for future use; TODO confirm)
        :param descriptors: Loaded Descriptor objects
        :type descriptors: list
        :param contexts: Build contexts to expand every descriptor in
        :type contexts: list

        NOTE(review): the mutable default ``contexts = [""]`` is shared
        across calls; safe only while callers never mutate it.
        """
        # Maps absolute product path -> list of product targets
        self.file_targets = {}
        
        # Dictionary of all launchable targets. (Main dict)
        self.targets = {}
        self.out_edges = {}
        self.in_edges = {}
        
        # Dictionary of rules
        self.rules = {}
        
        # Dictionaries of all descriptor nodes and edges
        self.desc_edges = {}
        self.desc_nodes = {}
        
        # Index rules before graph for verifiability
        self.process_rules(descriptors)
                
        # Generate all products from loaded descriptors
        for ctx in contexts:
            for desc in descriptors:    
                
                # Process static descriptor products
                self.process_desc_products(ctx, desc)
                    
                # TODO: Iron out the relationship between nodes and products
                #for node in desc.nodes:
                #    self.process_node(ctx, desc, node)

        # Perform the post processing: resolve tmp_* specifiers stored
        # by the process_* passes into real graph edges.
        for targ in self.targets.keys():
            target = self.targets[targ]
            desc = target['desc_obj']

            # Process dependencies
            for dep in target['tmp_deps']:
                dep_id = self.expand_dep_id(dep, target)
                if not self.targets.has_key(dep_id):
                    pdb.set_trace()
                    raise Exception("No such dependency found.")
                if self.targets[dep_id] not in self.out_edges[target]:
                    self.out_edges[target].append(self.targets[dep_id])
            
            # Process source specifiers
            if target.has_key('tmp_sources'):
                for src in target['tmp_sources']:
                    # Add source as target (registered on first sighting)
                    src_id = self.expand_source_id(src, target)
                    if src_id not in self.targets:
                        source = self.process_product_source(src_id, target)
                        self.targets[src_id] = source
                        self.out_edges[source] = []
                        self.in_edges[source] = []
                        
                    # Add source as target input
                    target['sources'].append(src_id)
                        
                    # Add new source to target dependency list
                    if self.targets[src_id] not in self.out_edges[target]:
                        self.out_edges[target].append(self.targets[src_id])
                        
            # Process the rule specifier (unknown rules are left unset)
            if target.has_key('tmp_rule'):
                rule_id = self.expand_rule_id(target['tmp_rule'], target)
                if self.rules.has_key(rule_id):
                    target['rule'] = self.rules[rule_id]
                
        # Generate dependent graph: invert out_edges into in_edges
        for targ in self.targets:
            target = self.targets[targ]
            for dep in self.out_edges[target]:
                if target not in self.in_edges[dep]:
                    self.in_edges[dep].append(target)

    def expand_rule_id(self, rule, target):
        
        other = rule
        ctx = ""
        desc_id = target['desc_obj'].id
            
        # Do we have a descriptor
        if other.find(":") != -1:
            (desc_id, rule) = other.split(":", 1)
        else:
            rule = other
        
        # Return constructed rule
        return "%s:%s" % (desc_id, rule)
    
    def expand_source_id(self, src, target):
        """Expand a source specifier into a fully qualified source id of
        the form ``desc_id:node;ctx|file``.

        :param src: Source specifier, optionally "desc:node;ctx|file"
        :type src: str
        :param target: The target consuming the source; supplies default
            descriptor, node, and context.
        :type target: Dependency
        :raises Exception: for malformed specifiers or unresolvable
            descriptor/node references.

        NOTE(review): when a file list is present but the remaining
        specifier is empty (``len(src) == 0``), the function falls off
        the end and implicitly returns None -- confirm this is intended.
        """
        files = None
        ctx = None
        desc_id = None
        node = None
       
        # Do we have file list?
        if src.find("|") != -1:
            (src, files) = src.split("|", 1)
            
        # Do we have ctx
        if src.find(";") != -1:
            (src, ctx) = src.split(";", 1)
            
        # Do we have descriptor AND node
        if src.find(":") != -1:
            (desc_id, node) = src.split(":", 1)
        
        if files is None:
            if ctx is not None or desc_id is not None:
                raise Exception("A source must have a filename")
            else:
                # We're probably a source filename: qualify it with the
                # consuming target's descriptor, node, and context.
                if target['node_obj'] is None:
                    node_name = ""
                else:
                    node_name = target['node_obj'].name
                    
                src = "%s:%s;%s|%s" % \
                    (target['desc_obj'].id, \
                     node_name, \
                     target['ctx'], \
                     src)
                return src
            
        if files.count("|") > 0:
            raise Exception("There should only be one source file")
        
        if len(src) > 0:
            # Are we a target (node) within the consuming descriptor?
            if src in self.desc_nodes[target['desc_obj'].id]:
                node = src
                desc_id = target['desc_obj'].id
                
            # Are we a descriptor?
            else:
                tmp_desc = self.expand_desc_id(src, target['desc_obj'].dir)
                # TODO: Need a descriptor_ids array
                if tmp_desc in self.desc_nodes.keys():
                    desc_id = tmp_desc
                    node = ""
                else:
                    raise Exception("Cannot identify descriptor or node in source id")
                
            # Default the context from the consuming target.
            if ctx is None:
                ctx = target['ctx']
                
            return "%s:%s;%s|%s" % (desc_id, node, ctx, files)
        
    def expand_dep_id(self, dep_str, target):
        """Expand an abbreviated dependency ID into its fully-qualified form.

        A dependency string may carry an optional file list (after ``|``),
        an optional context (after ``;``), and an optional descriptor/node
        pair (separated by ``:``).  Any piece that is omitted is filled in
        from the referencing *target*.

        :param dep_str: Abbreviated dependency identifier.
        :param target: Target dictionary providing defaults ('ctx',
            'desc_obj') for the omitted pieces.
        :returns: Fully expanded ID, "desc_id:node;ctx", optionally
            followed by "|files".
        :raises Exception: When the descriptor cannot be resolved, or the
            dependency names a descriptor dependency (unsupported).
        """
        dep = dep_str
        files = None
        ctx = None
        desc_id = None
        node = None

        # Split off an optional trailing file list.
        if dep.find("|") != -1:
            (dep, files) = dep.split("|", 1)

        # Split off an optional context; default to the target's context.
        if dep.find(";") != -1:
            (dep, ctx) = dep.split(";", 1)
        else:
            ctx = target['ctx']

        # Explicit "descriptor:node" reference?
        if dep.find(":") != -1:
            (desc_id, node) = dep.split(":", 1)
            desc_id = self.expand_desc_id(desc_id, target['desc_obj'].dir)
            if desc_id is None:
                raise Exception("Attempting to reference non existing descriptor")
        else:
            # Bare name: try it as a node of the target's own descriptor
            # (an in-scope target).  Was self.desc_node (typo) with the
            # deprecated has_key(); fixed to match the desc_nodes map used
            # everywhere else.
            desc_id = target['desc_obj'].id
            have_desc = desc_id in self.desc_nodes
            if have_desc and (dep in self.desc_nodes[desc_id]):
                node = dep
            else:
                # TODO: Support descriptor dependencies (may translate
                # into multiple descriptor default targets.)
                raise Exception("ERROR: Descriptor dependencies not yet supported.")

        # Build and return the expanded dependency ID
        dep_id = "%s:%s;%s" % (desc_id, node, ctx)
        if files is not None:
            dep_id = "|".join([dep_id, files])

        return dep_id
               
    def expand_desc_id(self, desc_id, parent_dir = None):
        """Resolve an abbreviated descriptor ID to a descriptor path.

        :param desc_id: Descriptor path; '#'-prefixed IDs are relative to
            the cache directory, other relative IDs to parent_dir.
        :param parent_dir: Directory used to anchor relative IDs.
        :returns: The resolved descriptor file path, or None when no
            matching file exists on disk.
        """
        if desc_id[0:1] == "#":
            # '#' marks a path relative to the cache directory.
            cache_dir = os.path.dirname(SQuadManager.CachePath)
            desc_id = os.path.join(cache_dir, desc_id[1:])
        elif desc_id[0:1] not in ["/", "."] and parent_dir is not None:
            # Anchor a bare relative ID at the referencing descriptor's
            # directory.  (Previously joined even with parent_dir=None,
            # raising TypeError; now it falls through and is tested
            # relative to the cwd.)
            desc_id = os.path.join(parent_dir, desc_id)

        # Before checking if dir, does it exist?
        if os.path.exists(desc_id):
            # Directory IDs resolve to the descriptor file they contain.
            if os.path.isdir(desc_id):
                extra = os.path.join(desc_id, SQuadManager.DescriptorFilename)
                # Do we contain a descriptor?
                if os.path.exists(extra):
                    return extra
            # We're not a directory, assumed full descriptor path
            return desc_id

        return None
                        
    def to_dot(self, out_edges = None):
        """Render an edge map as a Graphviz digraph document.

        :param out_edges: Mapping of node -> list of dependent nodes;
            defaults to this graph's own out_edges.
        :returns: A complete 'digraph topdown { ... }' DOT string.
        """
        if out_edges is None:
            out_edges = self.out_edges

        # Accumulate lines and join once rather than concatenating.
        lines = ["digraph topdown {\n"]
        for parent in out_edges:
            for child in out_edges[parent]:
                lines.append("\t\"%s\" -> \"%s\";\n" % (parent['id'], child['id']))
        lines.append("}\n\n")

        return "".join(lines)
    
    #def report_updates(self, updated):
    #    # TODO: This needs to use files!
    #    for obj in updated:
    #        for dependent in self.dependents[obj]:
    
    def prune_graph(self, targets, targetdb):
        """Remove from *targetdb* every item unreachable from *targets*.

        Parentless items that are not requested targets are dropped,
        then the removal cascades down through their dependencies.

        :param targets: IDs of the requested (protected) targets.
        :param targetdb: id -> item mapping; pruned in place.
        """
        zero_parents = []

        # Seed with parentless, un-requested items.  Iterate a snapshot
        # of the keys since entries are deleted while scanning.
        for item_id in list(targetdb.keys()):
            print("%s has %d parents" % (item_id, len(targetdb[item_id]['parents'])))
            if len(targetdb[item_id]['parents']) == 0 and item_id not in targets:
                zero_parents.append(targetdb[item_id])
                print("Removing %s" % item_id)
                del targetdb[item_id]

        # Cascade: detaching a pruned item may orphan its dependencies.
        while len(zero_parents) > 0:
            obj = zero_parents.pop(0)

            for item in obj['deps']:
                try:
                    print("REMOVING %s from %s's parents" % (obj['id'], item['id']))
                    item['parents'].remove(obj)
                except ValueError:
                    # Edge already detached; nothing to do.  (Was a bare
                    # except that swallowed every error.)
                    pass

                if len(item['parents']) == 0 and item['id'] not in targets:
                    zero_parents.append(item)
                    del targetdb[item['id']]
                  
    def edge_array_copy(self, orig):
        """Return a two-level shallow copy of an edge mapping.

        Every key receives a freshly-allocated list, so the copy's edge
        lists can be mutated without disturbing *orig*.
        """
        return dict((key, list(edges)) for (key, edges) in orig.items())
                    
    def build_sort(self, deps = None, parents = None):
        """Topologically sort the graph into a build order.

        Kahn's algorithm run from the parentless nodes; each processed
        node is inserted at the front, so the result lists dependencies
        before their dependents.

        :param deps: node -> dependency-list map (defaults to out_edges).
        :param parents: node -> parent-list map (defaults to in_edges).
        :returns: Nodes in build order, or [] when an inconsistent edge
            is hit while unlinking (treated as a cycle).
        :raises Exception: When unprocessed nodes remain (cyclic graph).
        """
        build_order = []
        zero_parents = []

        if deps is None:
            deps = self.out_edges
        if parents is None:
            parents = self.in_edges

        # Work on copies; the sort consumes the edge lists.
        in_edges = self.edge_array_copy(parents)
        # TODO: We don't really need to copy out_edges...
        out_edges = self.edge_array_copy(deps)

        for dent in in_edges:
            if len(in_edges[dent]) == 0:
                zero_parents.append(dent)
        for dent in zero_parents:
            del in_edges[dent]

        while len(zero_parents) > 0:
            dent = zero_parents.pop(0)
            build_order.insert(0, dent)
            # Nodes missing from out_edges simply have no dependencies.
            # (Was a bare except + pdb.set_trace around the whole loop.)
            for dep in out_edges.get(dent, []):
                try:
                    in_edges[dep].remove(dent)
                except KeyError:
                    print("Cyclic dependency")
                    return []

                if len(in_edges[dep]) == 0:
                    zero_parents.append(dep)
                    del in_edges[dep]

        if len(in_edges) > 0:
            # Nodes still holding parents were never released: the graph
            # has at least one cycle.  (Debug breakpoint removed.)
            raise Exception("Cyclic dependency detected in graph")

        return build_order
    
    def build(self, order):
        """Execute the build rule for every product node in *order*.

        :param order: Nodes in dependency-safe build order (see
            build_sort); non-product entries are skipped.
        """
        for prod in order:
            if prod['product']:
                rule = prod['rule']['edge_obj'].func
                # TODO: env should come from configuration, not be
                # hard-coded here.
                env = {'CC': "gcc"}
                # sources[0] is "id|file1|file2|..."; drop the leading id.
                sources = prod['sources'][0].split("|", 1)[1].split("|")
                products = prod['products']

                # Rules run from the descriptor's directory; always
                # restore the previous cwd, even when the rule raises.
                old_path = os.getcwd()
                os.chdir(prod['desc_obj'].dir)
                try:
                    rule(products, sources, env)
                finally:
                    os.chdir(old_path)
         
    def get_delta(self, changed):
        """Build the subgraph affected by a set of changed files.

        Starting from the products mapped to each changed path, walk up
        the dependency graph collecting every ancestor, producing edge
        maps suitable for build_sort.

        :param changed: Iterable of changed file paths (trailing
            newlines are tolerated).
        :returns: (out_edges, in_edges) for the affected subgraph.
        """
        # Local import: deque gives O(1) popleft for the work queue,
        # where list.pop(0) is O(n).
        from collections import deque

        out_edges = {}
        in_edges = {}

        # Convert file changes to graph nodes
        to_trace = deque()
        for change in changed:
            pathkey = os.path.abspath(change.rstrip())
            for product in self.file_targets[pathkey]:
                print("CHANGED: %s" % product['id'])
                if product not in to_trace:
                    # TODO: Add product's parents, not product
                    to_trace.append(product)

        # Run up the DependencyGraph until we reach the top, all the while
        # generating a subgraph to perform a toposort and build with.
        while len(to_trace) > 0:
            item = to_trace.popleft()

            if item not in in_edges:
                in_edges[item] = []
            if item not in out_edges:
                out_edges[item] = []

            for parent in self.in_edges[item]:
                if parent not in out_edges:
                    out_edges[parent] = []

                out_edges[parent].append(item)
                in_edges[item].append(parent)

                # Queue the parent so its ancestors get traced too.
                if parent not in to_trace:
                    to_trace.append(parent)

        return (out_edges, in_edges)
         
class SQuadManager():
    """Top-level build manager: locates or creates the cache folder,
    loads the descriptor tree, and drives dependency-graph generation
    and incremental builds.
    """
    
    # Filename of a descriptor file inside a descriptor directory.
    DescriptorFilename = "descriptor"
    # Name of the cache folder searched for / created in the project tree.
    CacheFolder = ".squad"
    # Path of the cache folder once located or created (class-wide).
    CachePath = ""
    
    def get_top_dir(self):
        """Walk up from the cwd until the cache folder is found.
        
        Changes the process cwd while searching ("..", one level per
        iteration).  Returns the cache folder's path, or raises once the
        filesystem root is reached.
        """
        exists = os.path.exists
        join = os.path.join
        basename = os.path.basename
        cache = SQuadManager.CacheFolder
        
        while (1):
            # Can we see the SQuadCacheFolder here?
            cache_path = join(os.getcwd(), cache)
            if exists(cache_path):
                return cache_path
            
            # basename of the root directory is "" — we ran out of parents.
            if basename(os.getcwd()) == "":
                pdb.set_trace()
                raise Exception("No Cache Folder Found")
            
            os.chdir("..")
        
    def __init__(self):
        """No-op constructor; call init_cache() to do the real setup."""
        pass
        
    def init_cache(self):
        """Create the cache folder if needed, load all descriptors,
        generate the dependency graph, and run an incremental build of
        whatever the ".squad/changed" file records.
        
        DEVONLY: also dumps full.dot/partial.dot and renders PNGs with
        Graphviz via os.system.
        """
        exists = os.path.exists
        join = os.path.join
        basename = os.path.basename
        cache = SQuadManager.CacheFolder
        
        # Prevent from overwriting a cache
        SQuadManager.CachePath = join('.', cache)
        if not exists(SQuadManager.CachePath):
            print "Generating a new cache folder."
            os.mkdir(cache, 0755)
        
        # TODO: Add an interface for the user to add/remove descriptor 
        # roots to the cache.
        
        # DEVONLY: Look for the root descriptor here.
        desc_id = join('.', SQuadManager.DescriptorFilename)
        if not exists(desc_id):
            print "Could not find a root descriptor."
            return

        # Load all the descriptors
        self.follow_descriptors(desc_id)
        
        # Generate the graph
        self.graph = DependencyGraph(self, self.descriptors)
        
        # Rebuild only the subgraph reachable from the recorded changes.
        # NOTE(review): these file handles are never explicitly closed.
        changed = open(".squad/changed", "r").readlines()
        (out_edges, in_edges) = self.graph.get_delta(changed)
        
        order = self.graph.build_sort(out_edges, in_edges)
        self.graph.build(order)
          
        open("full.dot", "wb").write(self.graph.to_dot())      
        open("partial.dot", "wb").write(self.graph.to_dot(out_edges))
        os.system("dot -Tpng full.dot > full.png")
        os.system("dot -Tpng partial.dot > partial.png")
    
    def expand_dep_id(self, abbr, parent_dir = None):
        """Expand "desc:func" by resolving the descriptor half.
        
        :param abbr: Abbreviated "descriptor:function" dependency ID.
        :param parent_dir: Directory anchoring a relative descriptor ID.
        :returns: "expanded_desc_id:func".
        """
        (desc_id, func) = abbr.split(":", 1)
        desc_id = self.expand_desc_id(desc_id, parent_dir)
        return "%s:%s" % (desc_id, func)
    
    def expand_desc_id(self, abbr, parent_dir = None):
        """Resolve an abbreviated descriptor ID to a descriptor path.
        
        '#'-prefixed IDs are relative to the cache directory; all other
        IDs are joined onto parent_dir.  A directory ID resolves to the
        descriptor file assumed to live inside it.
        
        :raises Exception: If the ID cannot be anchored or the resolved
            path does not exist.
        """
        exists = os.path.exists
        isdir = os.path.isdir
        join = os.path.join
        dirname = os.path.dirname

        if abbr[0:1] == '#':
            # Relative to cache_path
            abbr = join(dirname(SQuadManager.CachePath), abbr[1:])
        elif parent_dir is not None:
            abbr = join(parent_dir, abbr)
        else:
            raise Exception("Could not expand descriptor id.")
            
        if exists(abbr):
            if isdir(abbr):
                return join(abbr, SQuadManager.DescriptorFilename)
            return abbr
        # NOTE(review): leftover debugging breakpoint before the raise.
        pdb.set_trace()
        raise Exception("Attempted to load non-existent descriptor.")
        
    def follow_descriptors(self, desc_id):
        """Load desc_id and, transitively, every descriptor it imports.
        
        Populates self.descriptors with Descriptor objects in load order
        (breadth-first over the import lists).
        """
        self.descriptors = []

        print "NOTICE: Loading descriptor %s" % desc_id

        load_list = [desc_id]

        while len(load_list) > 0:
            # Create a new Descriptor object
            desc = Descriptor(load_list[0])
            load_list.pop(0)
            self.descriptors.append(desc)
            
            # Normalize the import list
            for imp in desc.imports:
                if type(imp) is str:
                    imp = self.expand_desc_id(imp, desc.dir)
                    
                    # Do not load any descriptor more than once.
                    # TODO: Should descriptors be signatured (or digested?)
                    #       The directory name should be the only difference.
                    # NOTE(review): self.descriptors holds Descriptor
                    # objects while imp is a path string, so this
                    # membership test looks like it can never match —
                    # confirm against Descriptor's equality semantics.
                    if imp in self.descriptors:
                        print "NOTICE: Duplicate descriptor import ignored."
                        continue
                    if imp in load_list:
                        continue
                    
                    load_list.append(imp)
                    
                else:
                    pdb.set_trace()
                    raise Exception("Non string given as descriptor import")

    

def main():
    """Script entry point: construct the manager and set up its cache."""
    manager = SQuadManager()

    # DEVONLY
    manager.init_cache()

if __name__ == "__main__":
    main()