import sys
import os
import types
import traceback
import itertools
import functools
import platform
import inspect
import shutil
import stat
import threading
import logging

import profilehooks

import made.runtime
import made.util.misc
from made.util import pidfile
from made.util import filemonitor
from made.util import ipc
from made.util import command
from made.util import trackimports
from made.util import daemon
from made.util import enum
from made.util import misc

# Immediate:
# - Move the directory creation from madeconfig to compilersettings and
#   change sesi_sample to use the same approach.
# - We need to relink binaries if their static input libraries change.  We
#   don't need any extra dependencies for dynamic static libs.  Verify that
#   this is the case.

# Features required for SESI:
# - Fix this Windows bug: cd sample && made -f clean && made -f && rm -rf out
#   build_temp && made -f .  It only runs the commands to create directories.
#   Run made again and it builds the rest.  (Verify that this bug still
#   exists.)
# - Make a create_empty_file task and put .o files in the directories of
#   $SHL.  (After running "mir" in $H_UT, running "made" still builds things.)
# - Check that there are no duplicates in CPPFILES, CFILES, and EXPORTS.
# - Add makedepend support.
# - Use cl.exe to output include files.
# - Build more of $SHL.
# - Add support for big libraries.
# - Add the equivalent of phony targets to handle when headers are removed.
#   Perhaps add the concept of a file that isn't an input and isn't required
#   to exist but that, when changed or deleted, makes a task runnable.
# - Add support for debug builds.

# Bugs that cannot always be reproduced:
# - After running mir, running made sometimes recompiles everything!
# - made -j 8 sometimes only runs one compile at a time.

# Task ordering:
# - It is very important that we preserve task ordering -- tasks defined in
#   a madefile must stay in the same order when a task's state changes, and
#   reloading a single madefile must insert the tasks in the proper place in
#   the ordering.
#   - Use Madefile.all_subtasks.  Possibly change it to be a generator?
# - Finding the next task to run needs to be efficient.  Consider using
#   heapq to find the (index of the?) next task to run.

# Code cleanup:
# - Rename sample to sample3 and create a simple, single-directory sample1.
# - Create a sample2 that shows how to customize some flags, etc.
# - Somehow clean up the libdeps InputLibrary class.
# - Using a proper unit testing framework (e.g. nose).
# - Rename (task) groups to "meta tasks".
# - Rename all occurrences of library to lib and libraries to libs.
#   Also rename all occurrences of directory to dir and directories to dirs.
# - Use properties for Task.state, etc.
# - Change file formats from dos to unix.
# - Create a reporting hook in client.py so you can have different clients,
#   instead of just printing the output to stdout/stderr.  Pass in the
#   type (command starting, command output, error message, command finished),
#   message string (e.g. command line, command output, error message),
#   optional description (e.g. command short description), and thread/process
#   index.

# Bugs:
# - To interrupt a -j 8 client on Windows you seem to have to press Ctrl+C
#   multiple times.
# - Enable checking that tasks actually created and deleted the files they
#   said they would.
# - When a build fails, display which directory the failure occurred in.
# - Cleanly handle the case where the client is interrupted (currently we
#   write a stack trace to the madeserver log file).  On Windows we can't
#   interrupt a made client that is compiling.
# - Add a "dirs_that_must_be_empty" parameter, to allow a clean task that can
#   delete a directory.  Also add a "files_that_cannot_exist" to allow ordering
#   of clean tasks yet still force a directory to be deleted.
# - Running "made -v x.png" in $UT takes at least 12 hours to run (I'm not
#   sure if it ever finishes).  It seems to be stuck in graphviz's layout
#   algorithm.
# - Madefiles don't seem to reload properly when they're modified.
# - Tasks with no input files but where the output file (directory) doesn't
#   exist don't seem to run.

# Task features:
# - Allow environment variables as inputs (similar to files) so that the
#   task becomes out of date if the environment variable changes.
# - Create one clean task for a library, binary, etc. command and group the
#   files together (rm -f).  (made clean is too slow.)  (Same for copying
#   headers and compiling files.)
#     - Probably, let tasks say which combinable they're in, and, when looking
#       at the runnable tasks, look for others with the same combinable and
#       use the combinable to choose which ones to actually run.
#     - Somehow, allow the command to be a callable that takes the input
#       files.  Be very careful to use the proper compiler framework state,
#       environment variable state, etc.  The commands need to somehow take
#       a snapshot of this state.
#           command_str = ...%s...
#           return lambda input_files: command_str % " ".join(input_files)
# - Somehow add support for debug builds.
#     - Probably, allow task commands to be dictionaries mapping configuration
#       names to actual command objects
# - We don't want the file notifier to prematurely trigger tasks when files are
#   partially written.  We want to wait for the task generating those files to
#   be done.
# - Add support for tasks that always run?  (Or tasks that always run when a
#   file is created?)

# Other features:
# - Add a -s client option to print out a short description of the task instead
#   of the full command.  Perhaps print the full command if it fails, though.
# - Somehow add a way to ignore the copy_headers tasks when viewing the graph
#   of dependencies.
# - Add back support for the estimated time left.  This may involve building a
#   larger set of potentially runnable tasks that accounts for tasks that
#   become runnable once the current runnables complete.  This is tricky when
#   there are tasks that create other tasks as part of their execution.

# Logging/debugging:
# - Add timestamps to the log file.
# - Add a way to see all the files being monitored.
# - Add a way to see which files a madefile depends on.
# - A way to see which tasks belong to which madefiles.
# - A way to see when a monitored file changes and tasks become runnable.

# Client
# - made [group] [-c command_type]
# - "made" -- optimized build
# - "made -c debug" -- debug build ("debug" is a configuration name)
# - "made clean" -- perform clean ("clean" is a task group name)
# - "made install" -- some custom build task set
# - implications
#     - we need to look at the set of final tasks and work back from there
#       to find all the runnable tasks
#     - so, we may not be able to create tasks from post callbacks (that's ok)
#     - so, commands may need to be created from callbacks (could be used
#       to implement debug builds, too)
#         - not as generic as dynamically forming tasks, but more feasible
#         - may make it easier to estimate time till completion

# Oddities that are probably ok:
# - When running with -j 8, headers get copied before compiling some files
#   because the first job generates UT_Version.h and the compile jobs are
#   blocked until the first job finishes.

# When True, _process_request is wrapped with profilehooks.profile (see the
# misc.decorate_if decorator below).
_do_profiling = False

# TODO: We need the global lock because pyinotify runs in a separate thread.
#       However, we don't need it to synchronize accesses to the server.
#       Think about the proper way to do the locking.
_global_lock = threading.RLock()
# Decorator that runs the wrapped function while holding _global_lock.
_globally_locked = misc.globally_locked(_global_lock)

def start_server_as_daemon():
    """Configure logging to ~/.madeserver/madeserver.log and run the server.

    This is the daemonized entry point: all output is redirected to the log
    file before the server starts.
    """
    logging.basicConfig(
        # os.environ["HOME"] raises KeyError on Windows, where HOME is
        # usually not set; expanduser("~") works on every platform.
        filename=os.path.join(
            os.path.expanduser("~"), ".madeserver", "madeserver.log"),
        format="[%(process)d] %(filename)s(%(lineno)d) %(levelname)s:"
            " %(message)s",
        level=logging.DEBUG)

    # On Windows, it's important that we don't print anything to the console
    # since the process will hang if we've been detached from the parent
    # process.  So, we redirect sys.stdout and sys.stderr to the log file.
    daemon.redirect_output_to_logger()

    start_server()

def start_server():
    """Run the made server in the current process until told to quit.

    Writes a pid file, starts the file monitor, and serves IPC requests by
    dispatching them to _process_request.  Does not return; always exits the
    process.
    """
    if platform.system() != "Windows":
        # Refuse to run as root.  (os.geteuid does not exist on Windows,
        # hence the platform guard.)
        if os.geteuid() == 0:
            sys.exit("[made] The made server cannot run as the root user")

    pidfile.write_pid_file()
    logging.info(
        "----- made server version %s starting ----" % made.version_string)

    # NOTE(review): chdir("/") looks Unix-specific -- confirm what this does
    # on Windows.  Presumably it releases the starting directory so it can
    # be deleted while the server runs.
    filemonitor.start()
    os.chdir("/")
    listening_connection = ipc.start_daemon(_process_request)

    # Note that if we are told to shut down, it's possible that other threads
    # are still running (and thus a client process is still running commands).
    # In that case we'll still exit, since we marked our threads as daemon
    # threads.  This means that the client can try to notify the server of
    # completed commands when the server is no longer running or it has no
    # record of the client and command.

    sys.exit(0)

@misc.decorate_if(
    _do_profiling,
    profilehooks.profile(
        immediate=False,
        skip=1,
        entries=100))
@_globally_locked
def _process_request(request):
    """Dispatch one client request dict to its handler.

    Returns the handler's response dict, or an error dict when the request
    type is unknown.
    """
    request_type = request["type"]

    def load():
        return _load_madefile(request["dir"], request["environ"])

    def shut_down():
        return ipc.shut_down_daemon() or {}

    def dump_graph():
        return _dump_task_graph(
            request["dir"], request["group_name"],
            request["image_file_path"], request["environ"])

    def get_command():
        return _return_command_to_run(
            request["dir"], request["group_name"], request["pid"],
            request["environ"])

    def record_results():
        return _record_command_results(
            request["return_code"], request["return_value"],
            request["pid"])

    handlers = {
        "load_madefile": load,
        "quit": shut_down,
        "dump_task_graph": dump_graph,
        "get_command": get_command,
        "command_results": record_results,
    }

    handler = handlers.get(request_type)
    if handler is None:
        return {"error": "[made] Invalid request type: %s" % request_type}
    return handler()

def _load_madefile(dir, client_environ):
    """Load the madefile in *dir*; return {} on success or an error dict."""
    _unused_madefile, error = _load_madefile_and_get_errors(
        dir, client_environ)
    if error is not None:
        return error
    return {}

def _load_madefile_and_get_errors(dir, client_environ):
    """Return a (madefile, error_response) tuple.

    If no errors occurred madefile is not None and error_response is None.
    Otherwise, madefile is None and error_response is a dictionary to send
    back to the client.
    """
    _record_client_environ(client_environ)

    madefile = get_madefile(dir)
    if madefile is None:
        return None, {"error": "[made] No madefile.py found in %s" % dir}
    if _madefiles_with_errors:
        return None, _madefile_error_result()
    return madefile, None

def _return_command_to_run(dir, group_name, pid, client_environ):
    """Return the next command for the client with *pid* to run.

    The response dict is one of:
    - {"commands": [...], "dir": ...}: a task's commands to execute,
    - {}: nothing is left to run,
    - {"active_client_pid": ...}: another client is still running a task, so
      this client should keep polling,
    - {"error": ...}: a madefile error or missing input files.
    """
    madefile, error_response = _load_madefile_and_get_errors(
        dir, client_environ)
    if error_response is not None:
        return error_response

    assert(madefile is not None)
    task = _get_task_to_run(madefile, group_name, pid)
    if task is None:
        # There's nothing to run.

        # TODO: Record the set of all group names as tasks are added and ensure
        #       the group name given here is valid.

        # See if there are files that should exist but don't that are
        # preventing tasks from running.
        missing_input_files = _get_missing_input_files(madefile)
        if len(missing_input_files) != 0:
            # (The stray ")" after the stack trace in this message was a
            # typo; it had no matching "(".)
            error = "[made] " + "".join(
                    "The following input file is missing:\n%s\n"
                    "needed by: %s\ncreated at:\n %s\n\n" % (
                        file_path, task.description,
                        task.creation_stack_trace_str)
                for file_path, task in missing_input_files)
            return {"error": error}

        # See if there are other clients running tasks under this madefile.
        # In that case, we want the client to keep checking for tasks that
        # become runnable, instead of exiting right away.
        running_task = _find_first_task_for_madefile(
            madefile, Task.states.running, group_name)
        return ({} if running_task is None
            else {"active_client_pid":
                _invert_dict(_pid_to_running_task)[running_task]})

    return {
        "commands": [command.to_dict() for command in task.commands],
        "dir": task.madefile.dir}

def _invert_dict(dictionary):
    return dict((value, key) for key, value in dictionary.iteritems())

def _madefile_error_result():
    """Build the error response describing every madefile that failed to
    load.  Must only be called when at least one madefile has errors.
    """
    assert(len(_madefiles_with_errors) > 0)
    messages = [
        "[made] Error in %s:\n%s\n" % (
            bad_madefile.file_path(), bad_madefile.error_message)
        for bad_madefile in _madefiles_with_errors]
    return {"error": "".join(messages)}

def _record_command_results(return_code, return_value, pid):
    """Record that the client with *pid* finished running its command.

    return_code is the command's exit code; return_value is an arbitrary
    value reported by the client (stored on the task on success).  Returns
    {} on success or an error dict.
    """
    # If we were restarted after the child task received its command,
    # there will be no record of this child running.
    task = _pid_to_running_task.get(pid)
    if task is None:
        return {"error": "[made] Unknown client (pid %d) reported"
            " command completion (perhaps the\nserver was restarted?)" % pid}

    # Note that if this task only deletes files and does not write any, it's
    # possible for the file monitor to have notified us that the files were
    # deleted and for the task to be marked as completed instead of still
    # running.
    del _pid_to_running_task[pid]

    # Marking this task as completed will force the file monitor to check for
    # changes in the files it was supposed to output, causing other tasks to
    # potentially be marked as runnable.
    task.set_state(Task.states.completed)

    # If the task had an error, don't bother to output errors about the files
    # it was supposed to create/delete.
    # TODO: What should we do with return_value when return_code is non-zero?
    if return_code != 0:
        # Recompute the state so the failed task isn't left as "completed".
        task.determine_state()
        return {}

    error_message = _get_task_completion_errors(task)
    if error_message != "":
        return {"error": error_message}

    task.return_value = return_value
    return {}

def _get_task_completion_errors(task):
    # See if this task actually created/deleted the files it was supposed to.
    if (not task.output_files.contains_missing_files() and
            not task.files_to_delete.any_files_exist()):
        return ""

    # The task didn't do what it was supposed to do.  Recompute its state so
    # it's not marked as completed.
    task.determine_state()

    missing_output_files = task.output_files.missing_file_paths
    undeleted_files = task.files_to_delete.existing_file_paths

    message = "[made] "
    if len(missing_output_files) != 0:
        message += ("The following output files were not created: %s" %
            " ".join(missing_output_files))
    if len(undeleted_files) != 0:
        message += ("The following files were not deleted: %s" %
            " ".join(undeleted_files))
    return message

def _get_task_to_run(madefile, group_name, pid):
    """Pick the first runnable task below *madefile* in *group_name*, mark
    it as being run by client *pid*, and return it (None if nothing is
    runnable).
    """
    runnable = _find_first_task_for_madefile(
        madefile, Task.states.runnable, group_name)
    if runnable is None:
        return None

    runnable.set_state(Task.states.running)
    _pid_to_running_task[pid] = runnable
    return runnable

def _find_first_task_for_madefile(madefile, state, group_name):
    """Return the first task (in definition order) below *madefile* that is
    in *state* and belongs to *group_name*, or None.

    (The old unused local found_task_with_group_name has been removed.)
    """
    # TODO: Keep track of just the runnable tasks, but do so in a way that
    #       preserves the order of the tasks.
    for task in _all_tasks:
        if (task.state == state and
                task.group_name == group_name and
                task.is_below_madefile(madefile)):
            return task

    return None

def _get_missing_input_files(madefile):
    """Return the (file, task) pairs that should exist but are preventing tasks
    from running.
    """
    tasks_to_look_at = list(madefile.all_subtasks())
    known_tasks = set(tasks_to_look_at)
    missing_input_files_and_tasks = []

    while len(tasks_to_look_at) > 0:
        task = tasks_to_look_at.pop(0)

        for missing_input_file in task.all_missing_input_files():
            # See if a task generates this file as an output.
            generating_task = _output_file_to_task.get(missing_input_file)
            if generating_task is not None:
                # Look at the input files required by the task generating
                # this file.
                if generating_task not in known_tasks:
                    tasks_to_look_at.append(generating_task)
                    known_tasks.add(generating_task)
            else:
                # There is no task generating this file.
                if missing_input_file not in missing_input_files_and_tasks:
                    missing_input_files_and_tasks.append(
                        (missing_input_file, task))

    return missing_input_files_and_tasks

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

_client_environ = None

def _record_client_environ(client_environ):
    """Remember the client's environment, and reload any madefiles that read
    environment variables whose values changed since the previous client.
    """
    global _client_environ
    previous_environ = _client_environ
    _client_environ = _EnvironDictRecordingAccesses(client_environ)

    if previous_environ is None:
        # First client; there is nothing to compare against.
        return

    # See if any madefiles accessed environment variables that changed, and
    # reload those madefiles.
    madefiles_to_reload = set()
    for var_name, madefiles in previous_environ.var_name_to_madefiles.items():
        if previous_environ.get(var_name) != _client_environ.get(var_name):
            madefiles_to_reload.update(madefiles)

    _reload_madefiles(madefiles_to_reload)

class _EnvironDictRecordingAccesses(dict):
    """This dict subclass records which variables are read from it."""
    def __init__(self,  *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.var_name_to_madefiles = {}
        self.record_accesses = False

    def __getitem__(self, name):
        # Record the access before looking up the value, in case the lookup
        # raises an exception.
        self._record_variable_access(name)
        return dict.__getitem__(self, name)

    def __contains__(self, name):
        self._record_variable_access(name)
        return dict.__contains__(self, name)

    def get(self, name, value=None):
        self._record_variable_access(name)
        return dict.get(self, name, value)

    def _record_variable_access(self, name):
        if self.record_accesses:
            self.var_name_to_madefiles.setdefault(name, set()).add(
                _loading_madefiles[-1])

def _reload_madefiles(madefiles):
    madefiles = list(madefiles)
    madefiles.sort(lambda m0, m1: cmp(m0.dir, m1.dir))

    # Don't bother to reload a madefile if we're also going to reload its
    # parent.
    previous_madefile = None
    for madefile in madefiles:
        if (previous_madefile is not None and
                madefile.dir.startswith(previous_madefile.dir)):
            continue

        madefile.load()
        previous_madefile = madefile

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

def _dump_task_graph(dir, group_name, image_file_path, client_environ):
    """Render the task dependency graph for *group_name* to image_file_path.

    Returns {} on success or an error dict when a madefile failed to load.
    """
    from made.util import viewdependencies

    _record_client_environ(client_environ)

    # Force madefiles to load and tasks to be created.
    get_madefile(dir)

    if _madefiles_with_errors:
        return _madefile_error_result()

    viewdependencies.generate_image(
        image_file_path, _madefiles.values(), group_name)
    return {}

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

class _FileSet(object):
    def __init__(self, on_add=None, on_remove=None):
        self.existing_file_paths = []
        self.existing_file_mod_times = []
        self.existing_dir_paths = []
        self.missing_file_paths = set()
        self.on_add = on_add
        self.on_remove = on_remove

    def __repr__(self):
        return "<FileSet existing_files=%s, existing_dirs=%s, missing=%s>" % (
            self.existing_file_paths, self.existing_dir_paths,
            self.missing_file_paths)

    # TODO: This should probably be __iter__.
    @property
    def contents(self):
        return (self.existing_file_paths +
            self.existing_dir_paths +
            list(self.missing_file_paths))

#    @property
#    def contents_as_set(self):
#        return (set(self.existing_file_paths) +
#            set(self.existing_dir_paths) +
#            self.missing_file_paths)

    # TODO: This should probably be __len__.
    def is_empty(self):
        return (len(self.existing_file_paths) == 0 and
            len(self.existing_dir_paths) == 0 and
            len(self.missing_file_paths) == 0)

    # TODO: This should probably be __contains__.
    def contains(self, file_path):
        return (file_path in self.existing_file_paths or
            file_path in self.missing_file_paths or
            file_path in self.existing_dir_paths)

    def contains_missing_files(self):
        return len(self.missing_file_paths) != 0

    def any_files_exist(self):
        return (len(self.existing_file_paths) > 0 or
            len(self.existing_dir_paths) > 0)

    def add(self, file_path, update_min_max_mod_times=True):
        assert(not self.contains(file_path))

        mod_time, is_dir = _file_cache.mod_time_and_is_dir(file_path)
        if mod_time is None:
            self.missing_file_paths.add(file_path)
        elif is_dir:
            self.existing_dir_paths.append(file_path)
        else:
            self.existing_file_paths.append(file_path)
            self.existing_file_mod_times.append(mod_time)

            # Note that the min and max mod times are of the existing files,
            # and may not be None even if the set contains missing files.
            if update_min_max_mod_times:
                self._update_min_mod_time(mod_time)
                self._update_max_mod_time(mod_time)

        if self.on_add is not None:
            self.on_add(file_path)

    def remove(self, file_path):
        assert(self.contains(file_path))

        if file_path in self.missing_file_paths:
            self.missing_file_paths.remove(file_path)
        else:
            try:
                self.existing_dir_paths.remove(file_path)
            except ValueError:
                file_index = self.existing_file_paths.index(file_path)
                del self.existing_file_paths[file_index]
                del self.existing_file_mod_times[file_index]

        if self.on_remove is not None:
            self.on_remove(file_path)

    def set_to(self, file_paths):
        # The user may have duplicate items in the input sequence, so remove
        # any duplicates.
        file_paths = misc.unique(file_paths)

        if self.is_empty():
            for file_path in file_paths:
                self.add(file_path, update_min_max_mod_times=False)

            self._compute_min_mod_time()
            self._compute_max_mod_time()
        else:
            added_file_paths, removed_file_paths = (
                self._diff_file_sets(file_paths, self.contents))

            for file_path in removed_file_paths:
                self.remove(file_path)

            for file_path in added_file_paths:
                self.add(file_path)

    def _diff_file_sets(self, new_file_paths, old_file_paths):
        """Return (added_file_paths, removed_file_paths)."""
        new_set = set(new_file_paths)
        old_set = set(old_file_paths)
        return new_set - old_set, old_set - new_set

    def update_mod_time(self, file_path, mod_time_and_is_dir=None):
        """Respond to a change in a file or directory's modification time."""
        mod_time, is_dir = (
            mod_time_and_is_dir if mod_time_and_is_dir is not None
            else _file_cache.mod_time_and_is_dir(file_path))

        if mod_time is None:
            # The file/directory doesn't exist.  If it was already missing
            # then do nothing.
            if file_path in self.missing_file_paths:
                return

            self.missing_file_paths.add(file_path)

            # See if it's a directory that's been deleted.
            try:
                self.existing_dir_paths.remove(file_path)
            except ValueError:
                # A file was deleted.
                file_index = self.existing_file_paths.index(file_path)
                old_mod_time = self.existing_file_mod_times[file_index]

                del self.existing_file_paths[file_index]
                del self.existing_file_mod_times[file_index]

                assert(self.min_mod_time is not None and
                    self.max_mod_time is not None)

                if self.min_mod_time == old_mod_time:
                    self._compute_min_mod_time()

                if self.max_mod_time == old_mod_time:
                    self._compute_max_mod_time()
            return

        # The file/directory exists.  See if it was created.
        if file_path in self.missing_file_paths:
            self.missing_file_paths.remove(file_path)

            if is_dir:
                self.existing_dir_paths.append(file_path)
            else:
                self.existing_file_paths.append(file_path)
                self.existing_file_mod_times.append(mod_time)
                self._update_min_mod_time(mod_time)
                self._update_max_mod_time(mod_time)
            return

        # The file existed before and still exists.  If it's a directory,
        # though, we don't care if its contents changed.
        assert(file_path in
            (self.existing_dir_paths if is_dir else self.existing_file_paths))
        if not is_dir:
            file_index = self.existing_file_paths.index(file_path)
            old_mod_time = self.existing_file_mod_times[file_index]
            self.existing_file_mod_times[file_index] = mod_time

            if self.min_mod_time == old_mod_time and mod_time > old_mod_time:
                self._compute_min_mod_time()
            else:
                self._update_min_mod_time(mod_time)

            if self.max_mod_time == old_mod_time and mod_time < old_mod_time:
                self._compute_max_mod_time()
            else:
                self._update_max_mod_time(mod_time)

    def _compute_min_mod_time(self):
        # Note that the min and max mod times are of the existing files, and
        # may not be None even if the set contains missing files.
        self.min_mod_time = (min(self.existing_file_mod_times)
            if len(self.existing_file_paths) != 0 else None)

    def _compute_max_mod_time(self):
        # Note that the min and max mod times are of the existing files, and
        # may not be None even if the set contains missing files.
        self.max_mod_time = (max(self.existing_file_mod_times)
            if len(self.existing_file_paths) != 0 else None)

    def _update_min_mod_time(self, mod_time):
        if self.min_mod_time is None or self.min_mod_time > mod_time:
            self.min_mod_time = mod_time

    def _update_max_mod_time(self, mod_time):
        if self.max_mod_time is None or self.max_mod_time < mod_time:
            self.max_mod_time = mod_time

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# Reverse indexes from a task's declared inputs/outputs to tasks.
# NOTE(review): the code that populates these is elsewhere in the file;
# presumably they map a file path (or task) to the task(s) that declared it
# -- _output_file_to_task is read in _get_missing_input_files to find the
# task that generates a missing file.
_input_task_to_tasks = {}
_input_file_to_tasks = {}
_file_that_must_exist_to_tasks = {}
_file_to_delete_to_task = {}
_output_file_to_task = {}

# All tasks in definition order; ordering matters (see the "Task ordering"
# notes at the top of this file).
_all_tasks = []
# Maps a client process id to the task that client is currently running.
_pid_to_running_task = {}

class Task(object):
    """One unit of build work owned by a madefile.

    A task becomes runnable when an output file is missing or older than an
    input file, when a file it is meant to delete exists, or when all of its
    input tasks have completed.  Its state is recomputed whenever the file
    cache reports a change to one of its files.  The module-level reverse
    mappings above are kept in sync through the _FileSet callbacks created
    in the _create_*() methods below.
    """
    states = enum.Enumeration(
        "completed", "running", "runnable", "missing_input_files",
        "waiting_input_tasks")

    class _UserData(object):
        """Users of task objects can store extra information with a task by
        writing to instances of this class.
        """
        # TODO: This method shouldn't exist.  Factor out the _FileCache class
        #       into a different module and let it register the necessary hooks
        #       to respond to file changes.  Then code can just import
        #       filecache to get modification times.
        def file_cache(self):
            return _file_cache

    def __init__(self, madefile, group_name="default", commands=(),
            description=None, input_files=(), files_that_must_exist=(),
            output_files=(), files_to_delete=(), input_tasks=(),
            post_callback=None):
        """Create the task, register it with its madefile and the module's
        reverse mappings, and compute its initial state.
        """
        # Limit the stack trace to just the entries on the top, since this call
        # is somewhat costly.
        # TODO: Not storing any stack trace information is faster.  Perhaps
        #       this should be a configuration option somehow?
        self.creation_stack_trace_list = traceback.extract_stack(limit=5)

        # Note that if a file in input_files is newer than any of the ones
        # in output_files, the task will run.  If a file in
        # files_that_must_exist is modified, though, it won't trigger the task
        # to run.  If a file in either of those sequences is missing, though,
        # it will prevent the task from running.
        self.madefile = madefile
        self.group_name = group_name
        self._description = description
        self.commands = commands[:]
        self.post_callback = post_callback
        self.return_value = None
        self.input_files = self._create_input_files()
        self.files_that_must_exist = self._create_files_that_must_exist()
        self.output_files = self._create_output_files()
        self.files_to_delete = self._create_files_to_delete()
        self.input_tasks = ()
        self.user_data = Task._UserData()
        self.state = None

        self.set_input_files(
            input_files, files_that_must_exist, recompute_state=False)
        self._set_input_tasks(input_tasks)
        self._set_output_files(output_files)
        self._set_files_to_delete(files_to_delete)

        self.madefile.add_task(self)

        self.determine_state()
        _all_tasks.append(self)

    def _create_input_files(self):
        # The callbacks keep the module-level mapping from input file path
        # to interested tasks up to date as files are added and removed.
        return _FileSet(
            on_add=(
                lambda file_path: _input_file_to_tasks.setdefault(
                    file_path, []).append(self)),
            on_remove=(
                lambda file_path: _input_file_to_tasks[
                    file_path].remove(self)))

    def _create_files_that_must_exist(self):
        # Same bookkeeping as _create_input_files, but for files whose
        # existence (not modification time) gates this task.
        return _FileSet(
            on_add=(
                lambda file_path: _file_that_must_exist_to_tasks.setdefault(
                    file_path, []).append(self)),
            on_remove=(
                lambda file_path: _file_that_must_exist_to_tasks[
                    file_path].remove(self)))

    def _create_output_files(self):
        # Only one task may generate a given output file; a second claimant
        # is reported as an error on this task's madefile (with creation
        # stack traces so the user can find both definitions).
        def on_add_output_file(file_path):
            other_task = _output_file_to_task.get(file_path)
            if other_task not in (None, self):
                self.madefile.add_error(
                    "Multiple madefiles generate %s:\n"
                    "%s\nvia %s\ncreated at %s\n\n"
                    "and\n%s\nvia %s\ncreated at %s" % (
                    file_path,
                    other_task.madefile.file_path(),
                    other_task.description,
                    other_task.creation_stack_trace_str,
                    self.madefile.file_path(),
                    self.description,
                    self.creation_stack_trace_str))
            _output_file_to_task[file_path] = self

        def on_remove_output_file(file_path):
            # If the user made a configuration error and had two tasks
            # generating the same file, this task may not be in the output file
            # to task mapping.
            if file_path in _output_file_to_task:
                del _output_file_to_task[file_path]

        return _FileSet(
            on_add=on_add_output_file, on_remove=on_remove_output_file)

    def _create_files_to_delete(self):
        # Mirror of _create_output_files for deletions: only one task may
        # delete a given file, and the error message distinguishes between
        # a clash within one madefile and a clash across madefiles.
        def on_add_file_to_delete(file_path):
            other_task = _file_to_delete_to_task.get(file_path)
            if other_task not in (None, self):
                if other_task.madefile is self.madefile:
                    error = ("Multiple tasks delete %s in the madefile %s:\n"
                        "%s\nCreated at:\n%s\n\nand\n%s\ncreated at:\n%s" % (
                            file_path,
                            self.madefile.dir,
                            other_task,
                            other_task.creation_stack_trace_str,
                            self,
                            self.creation_stack_trace_str))
                else:
                    error = "Multiple madefiles delete %s (%s and %s)" % (
                        file_path,
                        other_task.madefile.dir,
                        self.madefile.dir)

                self.madefile.add_error(error)

            _file_to_delete_to_task[file_path] = self

        def on_remove_file_to_delete(file_path):
            if file_path in _file_to_delete_to_task:
                del _file_to_delete_to_task[file_path]

        return _FileSet(
            on_add=on_add_file_to_delete, on_remove=on_remove_file_to_delete)

    @property
    def creation_stack_trace_str(self):
        # Human-readable form of the (truncated) stack captured at creation.
        return "".join(traceback.format_list(self.creation_stack_trace_list))

#    @property
#    def all_input_files(self):
#        return self.input_files.contents + self.files_that_must_exist.contents

#    @property
#    def files_output_or_deleted(self):
#        return self.output_files.contents + self.files_to_delete.contents

    def set_input_files(
            self, input_files, files_that_must_exist=(), recompute_state=True):
        """Replace this task's input files (and files that must exist) and,
        unless told otherwise, recompute the task's state.
        """
        # Give more helpful diagnostics if the user passes in the wrong values.
        if isinstance(input_files, str):
            raise TypeError(
                "input_files must be a sequence of strings instead of a string")

# TODO: We hopefully don't need this check any more, since set_to will compute
#       the set difference.
#        # The post callback will get called every time this task gets marked
#        # as completed, so avoid doing any extra work if nothing changed.
#        if (self.input_files.contents_as_set == set(full_path_input_files) and
#                self.files_that_must_exist == files_that_must_exist):
#            return

        self.input_files.set_to(
            self.madefile.full_paths_in_sequence(input_files))
        self.files_that_must_exist.set_to(
            self.madefile.full_paths_in_sequence(files_that_must_exist))

        if recompute_state:
            self.determine_state()

    def _set_input_tasks(self, input_tasks):
        # Unregister from the old input tasks before recording the new ones
        # in the module-level reverse mapping.
        for input_task in self.input_tasks:
            _input_task_to_tasks[input_task].remove(self)

        self.input_tasks = tuple(input_tasks)
        self._add_to_list_in_dict(_input_task_to_tasks, self.input_tasks, self)

    def _set_output_files(self, output_files):
        self.output_files.set_to(
            self.madefile.full_paths_in_sequence(output_files))

    def _set_files_to_delete(self, files_to_delete):
        self.files_to_delete.set_to(
            self.madefile.full_paths_in_sequence(files_to_delete))

    def _add_to_list_in_dict(self, dictionary, keys, value):
        # Append value to the list stored under each key, creating lists as
        # needed.
        for key in keys:
            dictionary.setdefault(key, []).append(value)

    @property
    def description(self):
        # Fall back to a shell-like rendering of the commands when no
        # explicit description was supplied.
        if self._description is not None:
            return self._description
        return " && ".join(command.description for command in self.commands)

    def __repr__(self):
        return "<Task state=%s [%s] -> [%s] via '%s'>" % (
            self.state,
            ", ".join(self.input_files.contents),
            ", ".join(self.output_files.contents),
            self.description)

    def remove(self):
        """Unregister this task from the global task list and from all
        reverse mappings (via the _FileSet callbacks).
        """
        _all_tasks.remove(self)

        # TODO: If this task is running and it's removed because it changed
        #       a madefile that we reloaded, what should we do?  Note that
        #       if the made client is killed we may think the task is still
        #       running even if it's not.  We should check to see if the pid
        #       is still running.
        assert(self not in _pid_to_running_task.values())

        # Remove this task from the reverse mapping dictionaries.
        self.set_input_files((), recompute_state=False)
        self._set_input_tasks(())
        self._set_output_files(())
        self._set_files_to_delete(())

    # TODO: Generalize and factor these methods somehow.
    def determine_state_on_input_file_change(
            self, file_path, mod_time, is_dir):
        self.input_files.update_mod_time(file_path, (mod_time, is_dir))
        self.determine_state()

    def determine_state_on_file_that_must_exist_change(
            self, file_path, mod_time, is_dir):
        self.files_that_must_exist.update_mod_time(
            file_path, (mod_time, is_dir))
        self.determine_state()

    def determine_state_on_deletable_file_change(
            self, file_path, mod_time, is_dir):
        self.files_to_delete.update_mod_time(file_path, (mod_time, is_dir))
        self.determine_state()

    def determine_state_on_output_file_change(
            self, file_path, mod_time, is_dir):
        self.output_files.update_mod_time(file_path, (mod_time, is_dir))
        self.determine_state()

    def determine_state(self):
        """Recompute this task's state from its files and input tasks.

        The checks are ordered by precedence: missing inputs, then
        incomplete input tasks, then out-of-date outputs, then leftover
        files to delete; otherwise the task is completed.
        """
        # Check if any input files/directories don't exist or if a task is
        # currently running that is writing to them.
        if (self.input_files.contains_missing_files() or
                self.files_that_must_exist.contains_missing_files()):
            self.set_state(self.states.missing_input_files)
            return

        # See if we have any input tasks that haven't completed.
        for input_task in self.input_tasks:
            if input_task.state != self.states.completed:
                self.set_state(self.states.waiting_input_tasks)
                return

        # See if we have any missing output files, or if any output files are
        # older than the newest input file.  We don't need to worry about
        # input directories that were modified after our outputs were generated,
        # though.
        if self._has_out_of_date_output():
            self.set_state(self.states.runnable)
            return

        # See if any of the files that we will delete exist.
        if self.files_to_delete.any_files_exist():
            self.set_state(self.states.runnable)
            return

        self.set_state(self.states.completed)

        # Note that running the post callback may have added input files
        # that are more recent than our outputs, making us out of date.
        # However, we don't need to compute our state again because it will
        # have done so in calling set_input_files.

    def _has_out_of_date_output(self):
        """Return whether any output file is missing or older than the
        newest input file.  Only valid when no input files are missing.
        """
        if self.output_files.contains_missing_files():
            return True

        if self.output_files.is_empty() or self.input_files.is_empty():
            return False
        assert(self.output_files.min_mod_time is not None)

        # It is only valid to call this method after you have checked that
        # no input files are missing.
        assert(not self.input_files.contains_missing_files())
        if len(self.input_files.existing_file_paths) == 0:
            return False

        assert(self.input_files.max_mod_time is not None)
        return self.input_files.max_mod_time >= self.output_files.min_mod_time

    def set_state(self, state):
        """Transition to the given state, updating the file cache's record
        of files being written, refreshing cached mod times when work has
        (or may have) finished, and propagating the change to dependents.
        """
        if self.state == state:
            return

        old_state = self.state
        self.state = state

        # While running, our output files must not be stat'ed; the file
        # cache enforces that via this mark.
        if self.state == self.states.running:
            _file_cache.mark_files_as_being_written(
                self.output_files.contents)
        elif old_state == self.states.running:
            _file_cache.unmark_files_as_being_written(
                self.output_files.contents)

        # Note that we don't always transition to completed from running.  For
        # example, we can go from runnable to completed.
        if (old_state == self.states.running or
                self.state == self.states.completed):
            # Update the file cache for files we deleted.
            for file_path in self.files_to_delete.contents:
                _file_cache.on_file_created_or_changed_or_deleted(file_path)

            # Update our record of our output files and files to delete.
            for file_path in self.output_files.contents:
                self.output_files.update_mod_time(file_path)
            for file_path in self.files_to_delete.contents:
                self.files_to_delete.update_mod_time(file_path)

        dependent_tasks = _input_task_to_tasks.get(self, ())
        if self.state == self.states.completed:
            # TODO: Catch post_callback exceptions so they can't cause the
            #       server to exit.  What should we do with the tracebacks,
            #       though?  They shouldn't be added to the madefile errors.
            #       Ideally, we could send them back to the client.  For now,
            #       we can at least log them.
            self._run_post_callback()
        else:
            # We are no longer completed, so demote dependents that thought
            # their inputs were satisfied.
            for task in dependent_tasks:
                if task.state in (self.states.completed, self.states.runnable):
                    task.set_state(self.states.waiting_input_tasks)

            # Notify our dependent tasks that we're done so they can mark
            # themselves as runnable.  Note that we do not notify tasks waiting
            # on our output files -- the file cache is responsible for
            # notifying them.
            # NOTE(review): this loop sits in the not-completed branch, so
            # _on_input_task_completed always returns early here (this task is
            # not completed).  The comment above suggests it was meant to run
            # in the completed branch instead -- confirm before changing.
            for task in dependent_tasks:
                task._on_input_task_completed(self)

    def _run_post_callback(self):
        # Run the user's callback (if any) with the cwd set to the owning
        # madefile's directory, restoring the old cwd afterwards.
        if self.post_callback is None:
            return

        old_directory = os.getcwd()
        try:
            os.chdir(self.madefile.dir)
            self.post_callback(self)
        finally:
            os.chdir(old_directory)

    def _on_input_task_completed(self, task):
        # Become runnable once every input task has completed; a no-op
        # unless we're currently waiting on input tasks.
        if self.state != self.states.waiting_input_tasks:
            return

        for input_task in self.input_tasks:
            if input_task.state != self.states.completed:
                return
        self.set_state(self.states.runnable)

    def is_below_madefile(self, madefile):
        """Return whether this task's madefile is the given madefile or one
        of its descendants.
        """
        current_madefile = self.madefile
        while current_madefile is not None:
            if current_madefile == madefile:
                return True
            current_madefile = current_madefile.parent()

        return False

    def all_missing_input_files(self):
        """Return the missing input files for this task and, recursively,
        for its input tasks.  Empty when the task can already run.
        """
        if self.state in (self.states.completed, self.states.running,
                self.states.runnable):
            return []

        missing_input_files = list(self.input_files.missing_file_paths.union(
            self.files_that_must_exist.missing_file_paths))
        for task in self.input_tasks:
            missing_input_files += task.all_missing_input_files()

        return missing_input_files

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# Map from directory path to its loaded Madefile object.
_madefiles = {}

def get_madefile(dir):
    """Return the (cached or freshly loaded) Madefile for the given
    directory, or None when the directory contains no madefile.py.
    Parent madefiles are always loaded before their children.
    """
    cached = _madefiles.get(dir)
    if cached is not None:
        return cached

    # Make sure a madefile actually exists in this directory.  Going through
    # the file cache also subscribes us to notifications about when the
    # madefile changes.
    if _file_cache.mod_time(dir + "/madefile.py") is None:
        return None

    # This madefile hasn't been loaded yet.  If the parent directory has a
    # madefile that isn't loaded either, load it first.
    parent_dir = os.path.dirname(dir)
    if (parent_dir != dir and parent_dir not in _madefiles and
            os.path.exists(parent_dir + "/madefile.py")):
        get_madefile(parent_dir)

    # Loading the parent madefile may have caused this one to load.
    cached = _madefiles.get(dir)
    if cached is not None:
        return cached

    # A madefile can call subdirs(), which calls get_madefile on a sub
    # madefile, which may in turn look up the top madefile in _madefiles.
    # So the madefile must be registered in the dictionary before load()
    # is called.
    madefile = Madefile(dir)
    _madefiles[dir] = madefile
    madefile.load()

    return madefile

# Madefiles whose most recent load produced an error.
_madefiles_with_errors = []
# Stack of madefiles currently being loaded; the one executing right now is
# at the top (index -1).
_loading_madefiles = []
# Map from a monitored file path to the list of madefiles that must be
# reloaded when that file changes.
_monitored_files_reloading_madefiles = {}

class Madefile(object):
    """A loaded madefile.py: the tasks it created, its sub madefiles, and
    the imported modules/files whose changes should trigger a reload.
    """
    class UserConfig(object):
        """Per-madefile configuration namespace, copied from the parent
        madefile on construction and extended via add_user_config().
        """
        pass

    def __init__(self, dir):
        assert(dir not in _madefiles)
        self.dir = dir
        self.tasks = []
        self.sub_madefiles = []
        # Map from imported module name to module object, for modules inside
        # the project directory (see _record_module_dependencies).
        self.monitored_imported_modules = {}
        # Files this madefile read while loading; a change to any of them
        # forces a reload.
        self.monitored_files_requiring_reload = []

        self.has_error = False
        self.error_message = ""

        # Inherit the user config from our parent madefile if we have one.
        self.user_config = self.UserConfig()
        parent_madefile = self.parent()
        if parent_madefile is not None:
            self.user_config.__dict__.update(
                parent_madefile.user_config.__dict__)

    def __repr__(self):
        return "<Madefile object from %s>" % self.dir

    def parent(self):
        """Return the madefile in the parent directory, or None at the
        filesystem root or when the parent has no madefile.
        """
        parent_dir = os.path.dirname(self.dir)
        if parent_dir == self.dir:
            return None

        # We'll have already loaded our parent if we have one, so we can use
        # _madefiles instead of calling get_madefile.
        return _madefiles.get(parent_dir)

    def toplevel_madefile(self):
        # Walk up the parent chain to the root madefile.
        parent = self.parent()
        return (self if parent is None else parent.toplevel_madefile())

    def file_path(self):
        # Full path of this madefile's madefile.py.
        return self.dir + "/madefile.py"

    def full_path_to_file(self, file_name):
        """Return file_name as an absolute path, interpreting relative names
        relative to this madefile's directory.
        """
        if os.path.isabs(file_name):
            return file_name

        joined_path = (
            self.dir + ("/" if self.dir != "/" else "") + file_name)

        # Avoid calling os.path.normpath if it's not required, since it's
        # a somewhat expensive call.
        if "./" in file_name:
            joined_path = os.path.normpath(joined_path)

        return joined_path

    # TODO: Rename this to full_paths_to_files.
    def full_paths_in_sequence(self, file_names):
        return tuple(self.full_path_to_file(file_name)
            for file_name in file_names)

    def add_error(self, message):
        """Record an error against this madefile and register it in the
        global list of madefiles with errors.
        """
        self.has_error = True
        self.error_message += "[made] " + message + "\n"
        if self not in _madefiles_with_errors:
            _madefiles_with_errors.append(self)

    def load(self):
        """(Re)load this madefile: discard its old tasks, execute its
        madefile.py, and remove sub madefiles that disappeared.
        """
        _loading_madefiles.append(self)

        # A reload clears any error from the previous load.
        if self.has_error:
            self.has_error = False
            _madefiles_with_errors.remove(self)

        for task in self.tasks:
            task.remove()
        self.tasks = []
        old_sub_madefiles = self.sub_madefiles[:]
        self.sub_madefiles = []

        try:
            self._exec_madefile()
        # Intentionally broad: any error raised while executing the user's
        # madefile is recorded as a madefile error instead of propagating.
        except:
            self.add_error(traceback.format_exc())

        # Check for sub madefiles that were removed.
        # TODO: Won't the sub madefile objects be different?  Shouldn't we
        #       be removing them before loading this madefile?
        # TODO: Call remove() to do the above, as well as clear the set of
        #       tracked imports.
        for sub_madefile in old_sub_madefiles:
            if sub_madefile not in self.sub_madefiles:
                sub_madefile.remove()

        _loading_madefiles.pop()

    def _exec_madefile(self):
        """Execute madefile.py as a script (cwd set to this directory) and
        record the modules it imported as reload dependencies.
        """
        old_directory = os.getcwd()
        os.chdir(self.dir)

        if "." not in sys.path:
            sys.path.append(".")

        # We need to reload all modules that import the changed module,
        # since they'll otherwise hold references to the old module object.

        self._initialize_runtime()
        # Track which environment variables the madefile reads while it runs.
        _client_environ.record_accesses = True
        try:
            module_names = trackimports.run_script_and_get_imported_modules(
                self.file_path(), dict(
                    __builtins__=__builtins__,
                    __name__="__main__",
                    __file__=self.file_path(),
                    __doc__=None,
                    __package__=None))
        finally:
            os.chdir(old_directory)
            _client_environ.record_accesses = False

        self._record_module_dependencies(module_names)

    def _record_module_dependencies(self, module_names):
        """Remember the imported modules that live in the project directory
        and arrange for this madefile to reload when their files change.
        """
        # Only monitor files that are in the project directory.
        toplevel_dir = self.toplevel_madefile().dir
        self.monitored_imported_modules = {}
        for module_name in module_names:
            module = sys.modules[module_name]
            # Built-in/extension modules have no file to monitor.
            if not hasattr(module, "__file__"):
                continue

            module_path = self._module_path(module)
            if not module_path.startswith(toplevel_dir + "/"):
                continue

            # Add this module to the list of modules we depend on and ensure
            # the file is being monitored.
            self.monitored_imported_modules[module_name] = module
            _file_cache.mod_time(module_path)

            self.require_reload_on_file_change(module_path)

    def require_reload_on_file_change(self, file_path):
        """Mark this madefile as reading the given file in order to create
        tasks, so the madefile can reload if the file changes.
        """
        self.monitored_files_requiring_reload.append(file_path)

        # Add this madefile to the list of madefiles to reload if the given
        # file changes.
        madefiles = _monitored_files_reloading_madefiles.setdefault(
            file_path, [])
        madefiles.append(self)

    def _module_path(self, module):
        """Return the normalized, forward-slash path of the module's .py
        source file (not the compiled .pyc).
        """
        assert(hasattr(module, "__file__"))
        module_path = module.__file__.replace("\\", "/")
        if not os.path.isabs(module_path):
            module_path = os.path.normpath("%s/%s" % (self.dir, module_path))

        # We want to monitor the .py files for changes, not .pyc's, since they
        # won't change if the user edits the module.
        if module_path.endswith(".pyc"):
            module_path = module_path[:-1]
        return module_path

    def _reload_imported_modules(self):
        # If any of the imported modules changes, we need to reload them all.
        # Otherwise, since we don't know which module imports which, reloading
        # just the changed module won't work, since the modules that import it
        # will still reference the old module.

        # We first need to clear all of the modules to reload out of
        # sys.modules before reloading them.  Otherwise, suppose module B was
        # the one that changed, module A imports module B, and we reload A
        # before reloading B.  In this case, A will still refer to the old B.
        module_names = self.monitored_imported_modules.keys()
        for module_name in module_names:
            del sys.modules[module_name]

        # We can't use reload when the modules are not in sys.modules
        # any longer, so we just reimport them.  Note that if importing A
        # imports B, adding B to sys.modules, importing B later is harmless.
        # Note that __import__ doesn't return the module that was imported
        # when importing modules with dots, so we look it up in sys.modules to
        # update our dictionary of monitored modules.
        for module_name in module_names:
            __import__(module_name)
            self.monitored_imported_modules[module_name] = sys.modules[
                module_name]

    def _initialize_runtime(self):
        """Install the made.runtime API (create_task, subdirs, user_config,
        etc.) used by madefile.py scripts while they execute.
        """
        def current_madefile():
            # Note that a madefile can call a function declared in a different
            # madefile via user_config.  So, we need to be careful to use
            # _loading_madefiles[-1] instead of self, or we risk adding tasks
            # to the madefile containing the function that builds the task,
            # instead of the madefile actually calling the function.
            return _loading_madefiles[-1]

        def toplevel_madefile():
            return current_madefile().toplevel_madefile()

        def subdirs(*subdirs):
            for subdir_name in subdirs:
                subdir_path = current_madefile().dir + "/" + subdir_name
                sub_madefile = get_madefile(subdir_path)
                if sub_madefile is None:
                    current_madefile().add_error(
                        "%s does not contain a madefile" % subdir_path)
                # NOTE(review): when the lookup failed, None is still appended
                # to sub_madefiles here; remove() and all_subtasks() would
                # then crash on it -- confirm whether this should skip instead.
                current_madefile().sub_madefiles.append(sub_madefile)

        def add_user_config(config, name=None):
            if name is None:
                name = config.__name__
            current_madefile().user_config.__dict__[name] = config
            # TODO: If a parent madefile is modified and calls add_user_config,
            #       we need to discard and reload all the madefiles underneath
            #       it, since tasks they created using the old user config
            #       may not match the new versions of those function.  If a
            #       child madefile that calls add_user_config changes, though,
            #       we don't need to reload the parent madefiles.
            # TODO: Ideally, we only want to reload madefiles that actually
            #       accessed this user_config item.

        def create_task(**kwargs):
            # If the caller wants to delete files, they need to call
            # create_remove_file_task instead of passing in the files_to_delete
            # parameter.
            if "files_to_delete" in kwargs:
                raise TypeError("create_task() got an unexpected keyword"
                    " argument 'files_to_delete'")
            return Task(current_madefile(), **kwargs)

        def create_remove_file_task(
                group_name="clean", files_to_delete=(), description=None):
            # If some of the files we're to delete don't exist then it's
            # equivalent to "rm -f" instead of just "rm".
            return Task(
                current_madefile(),
                group_name=group_name,
                commands=[command.FunctionCall(
                    "rm " + ("-f " if len(files_to_delete) > 1 else "") +
                        " ".join(files_to_delete),
                    _delete_files_or_directories, files_to_delete)],
                files_to_delete=files_to_delete,
                description=description)

        def find_task_for_output_file(output_file):
            return _output_file_to_task.get(output_file)

        made.runtime.__dict__.update(dict(
            current_madefile=current_madefile,
            toplevel_madefile=toplevel_madefile,
            create_task=create_task,
            create_remove_file_task=create_remove_file_task,
            find_task_for_output_file=find_task_for_output_file,
            user_config=current_madefile().user_config,
            add_user_config=add_user_config,
            subdirs=subdirs,
            client_environ=_client_environ,
        ))

    def add_task(self, task):
        self.tasks.append(task)

    def remove(self):
        """Unregister this madefile: stop watching its files and remove its
        tasks and sub madefiles.
        """
        for file_path in self.monitored_files_requiring_reload:
            _monitored_files_reloading_madefiles[file_path].remove(self)

        self.monitored_files_requiring_reload = []
        # NOTE(review): monitored_imported_modules is a dict everywhere else
        # in this class; resetting it to a list here looks like an oversight
        # -- confirm (an empty dict would be the consistent value).
        self.monitored_imported_modules = []

        for task in self.tasks:
            task.remove()
        for sub_madefile in self.sub_madefiles:
            sub_madefile.remove()

    def all_subtasks(self):
        """Return this madefile's tasks plus, recursively, those of all of
        its sub madefiles.
        """
        result = self.tasks[:]
        for sub_madefile in self.sub_madefiles:
            result.extend(sub_madefile.all_subtasks())
        return result

def _delete_files_or_directories(file_paths):
    # Because this function is marshalled to and run inside a different process,
    # we need to import the modules used by the function from inside the
    # function.
    import os
    import stat
    import shutil

    # If we're to delete multiple files, it's possible that some of them may
    # not exist.
    for file_path in file_paths:
        # Check if the path exists.
        try:
            stat_value = os.stat(file_path)
        except OSError:
            continue

        # See if it's a directory.
        if stat.S_ISDIR(stat_value[stat.ST_MODE]):
            shutil.rmtree(file_path)
        else:
            os.unlink(file_path)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# TODO: Move this class into FileCache?
class _FileInfo(object):
    """Instaces of this class are stored in the file cache."""
    def __init__(self, mod_time, is_dir):
        self.mod_time = mod_time
        self.is_dir = is_dir

class FileCache(object):
    """Cache of stat() information -- modification time and directory-ness --
    keyed by file path.

    Each path is stat'ed at most once; after that the entry is registered
    with the filemonitor module and kept up to date from its change
    notifications instead of being re-stat'ed.  Changes are propagated to
    interested tasks through the module-level lookup tables
    (_input_file_to_tasks, _file_that_must_exist_to_tasks,
    _file_to_delete_to_task, _output_file_to_task) and may also trigger
    madefile reloads.
    """
    def __init__(self):
        # Maps file path -> _FileInfo for every path stat'ed so far.
        self._file_infos = {}
        # Paths currently being written by running tasks.  Change
        # notifications for these paths are deferred until
        # unmark_files_as_being_written is called.
        self.files_being_written = set()

    @classmethod
    def _stat_mod_time_and_is_dir(cls, file_name):
        """Stat file_name and return (mod_time, is_dir).

        Returns (None, False) when the path cannot be stat'ed (typically
        because it does not exist).
        """
        try:
            stat_value = os.stat(file_name)
        except OSError:
            return None, False

        return stat_value[stat.ST_MTIME], stat.S_ISDIR(stat_value[stat.ST_MODE])

    def mod_time(self, file_path):
        """Return the cached modification time of file_path (None if the
        file does not exist), stat'ing and caching the path first if it has
        not been seen before."""
        # TODO: See if this is faster:
#        file_info = self._file_infos.get(file_path)
#        if file_info is None:
#            file_info = self._cache_file_info(file_path)
#        return file_info.mod_time

        if file_path not in self._file_infos:
            self._cache_file_info(file_path)
        return self._file_infos[file_path].mod_time

    def is_dir(self, file_path):
        """Return whether file_path is a directory (False if it does not
        exist), stat'ing and caching the path first if necessary."""
        # TODO: See if this is faster:
#        file_info = self._file_infos.get(file_path)
#        if file_info is None:
#            file_info = self._cache_file_info(file_path)
#        return file_info.is_dir

        if file_path not in self._file_infos:
            self._cache_file_info(file_path)
        return self._file_infos[file_path].is_dir

    def mod_time_and_is_dir(self, file_path):
        """Return (mod_time, is_dir) for file_path, caching the path on
        first use."""
        file_info = self._file_infos.get(file_path)
        if file_info is None:
            file_info = self._cache_file_info(file_path)
        return file_info.mod_time, file_info.is_dir

    def _cache_file_info(self, file_path):
        """Stat file_path, store the result in the cache, and register the
        path with filemonitor so future changes reach
        on_filemonitor_notification.  Returns the new _FileInfo."""
        assert(file_path not in self._file_infos)

        # We should never be stat'ing files being written.
        assert(file_path not in self.files_being_written)

        file_info = _FileInfo(*self._stat_mod_time_and_is_dir(file_path))
        self._file_infos[file_path] = file_info

        filemonitor.add_file(
            file_path, file_info.mod_time,
            self.on_filemonitor_notification)

        return file_info

    def mod_times_for_files(self, file_paths):
        """Return the modification time for each path in file_paths."""
        return map(self.mod_time, file_paths)

    def filter_existing_files(self, file_paths):
        """Return the subset of file_paths that currently exist."""
        return [file_path for file_path in file_paths
            if self.mod_time(file_path) is not None]

    def filter_missing_files(self, file_paths):
        """Return the subset of file_paths that do not currently exist."""
        return [file_path for file_path in file_paths
            if self.mod_time(file_path) is None]

    def mark_files_as_being_written(self, file_paths):
        """Record that a task is about to write file_paths.

        While marked, stat'ing and change notifications for these files are
        suppressed, and any task needing one of them as an input (or as a
        file that must exist) is forced into the missing_input_files state.

        NOTE(review): this indexes self._file_infos directly, so it assumes
        every path in file_paths has already been cached -- confirm that
        callers guarantee this.
        """
        self.files_being_written.update(file_paths)

        # Any tasks that need these files as input can no longer be marked
        # as runnable, regardless of whether or not the file exists.
        affected_tasks = set()
        for file_path in file_paths:
            # Mark the file as missing.
            self._file_infos[file_path].mod_time = None

            for task in _input_file_to_tasks.get(file_path, ()):
                affected_tasks.add(task)

            for task in _file_that_must_exist_to_tasks.get(file_path, ()):
                affected_tasks.add(task)

        for task in affected_tasks:
            task.set_state(Task.states.missing_input_files)

    def unmark_files_as_being_written(self, file_paths):
        """Record that a task has finished writing file_paths and propagate
        the resulting file changes to interested tasks."""
        assert(self.files_being_written.issuperset(file_paths))

        # Any tasks that were set to having missing input files when we
        # marked files as being written may now be runnable.  Process
        # changes for those files to make the appropriate notifications.
        # If the files did not change (i.e. a task did not modify an output
        # file that already existed) then be sure to explicitly mark tasks
        # that need those files as inputs as runnable.
        self.files_being_written.difference_update(file_paths)

        # Update the mod times for the files and let tasks know that they've
        # been modified.
        for file_path in file_paths:
            mod_time, is_dir = self._stat_mod_time_and_is_dir(file_path)

            # If the change notification reports no actual change, still
            # notify the dependent tasks directly using the fresh stat
            # values gathered above.
            if not self.on_file_created_or_changed_or_deleted(file_path):
                for task in _input_file_to_tasks.get(file_path, ()):
                    task.determine_state_on_input_file_change(
                        file_path, mod_time, is_dir)

                for task in _file_that_must_exist_to_tasks.get(file_path, ()):
                    task.determine_state_on_file_that_must_exist_change(
                        file_path, mod_time, is_dir)

    @_globally_locked
    def on_filemonitor_notification(self, file_path):
        """Entry point for filemonitor change notifications.

        This callback is invoked from a separate thread by the filemonitor
        module, so we are careful to wait until the main thread is not
        processing any requests from the client (the _globally_locked
        decorator -- defined elsewhere in this module -- presumably
        provides that serialization).
        """
        return self.on_file_created_or_changed_or_deleted(file_path)

    def on_file_created_or_changed_or_deleted(self, file_path):
        """Check if a file/directory changed from its last recorded state and
        perform any appropriate notifications.

        It is ok to call this method when the file didn't actually change,
        or when there is no last recorded state.

        Return whether or not the file/directory actually changed from its
        last recorded state.
        """
        # If a task is writing out this file then ignore any changes to it.
        # We'll process those changes when the task is done writing the files.
        if file_path in self.files_being_written:
            return False

        file_info = self._file_infos.get(file_path)
        if file_info is None:
            file_info = self._cache_file_info(file_path)
            mod_time = file_info.mod_time
            is_dir = file_info.is_dir
        else:
            # Check if we've already handled this notification by checking if
            # our cached mod_time matches the new one.  Note that mod_time will
            # be None if the file was deleted.
            mod_time, is_dir = self._stat_mod_time_and_is_dir(file_path)
            if file_info.mod_time == mod_time and file_info.is_dir == is_dir:
                return False

            file_info.mod_time = mod_time
            file_info.is_dir = is_dir

        # If a madefile changed, reload it.
        # TODO: Handle madefiles being deleted.  If a madefile above it
        #       still references it via subdirs it will automatically generate
        #       an error.
        dir_path = os.path.dirname(file_path)
        if (mod_time is not None
                and os.path.basename(file_path) == "madefile.py"
                and dir_path in _madefiles):
            _madefiles[dir_path].load()

        # Check if a monitored file (e.g. a module) requires one or more
        # madefiles to be reloaded.  If such a file is deleted we'll ignore
        # the deletion, since it might be recreated again.
        if (mod_time is not None and file_path in
                _monitored_files_reloading_madefiles):
            self._reload_madefiles_affected_by_file_change(file_path)

        self._notify_tasks_affected_by_file_change(file_path, mod_time, is_dir)
        return True

    def _reload_madefiles_affected_by_file_change(self, file_path):
        """Reload every madefile that depends on file_path."""
        # Sort the madefiles so that the toplevel ones are first.
        _reload_madefiles(_monitored_files_reloading_madefiles[file_path])

    def _notify_tasks_affected_by_file_change(
            self, file_path, mod_time, is_dir):
        """Tell every task that uses file_path (as an input, a file that
        must exist, a deletable file, or an output) about its new state."""
        for task in _input_file_to_tasks.get(file_path, ()):
            task.determine_state_on_input_file_change(
                file_path, mod_time, is_dir)

        for task in _file_that_must_exist_to_tasks.get(file_path, ()):
            task.determine_state_on_file_that_must_exist_change(
                file_path, mod_time, is_dir)

        if file_path in _file_to_delete_to_task:
            _file_to_delete_to_task[
                file_path].determine_state_on_deletable_file_change(
                    file_path, mod_time, is_dir)

        # TODO: Ensure we handle the case where output files are backdated (to
        #       a time earlier than the latest input file time).
        # Output-file owners are only notified on deletion (mod_time is None).
        if mod_time is None and file_path in _output_file_to_task:
            _output_file_to_task[
                    file_path].determine_state_on_output_file_change(
                file_path, mod_time, is_dir)

# The single module-wide FileCache instance shared by the rest of this module.
_file_cache = FileCache()

#-----------------------------------------------------------------------------

if __name__ == "__main__":
    # When this module is executed directly, start the made server as a
    # daemon (start_server_as_daemon is presumably defined earlier in this
    # file, outside the visible region).
    start_server_as_daemon()

