import sys
import inspect
import os.path
from time import time
from threading import currentThread
from pickle import Unpickler, Pickler
from traceback import format_tb
from new import classobj

# Module-wide switch: when True, ASYNCTRACE() prints timing/thread trace lines.
_DO_ASYNCTRACES = False

# Path separator normalised for this platform (os.path.normcase maps '/' to
# '\\' on Windows); kept module-level for reuse by path-handling callers.
os_path_separator = os.path.normcase('/')

def create_wrapped_exception(exception, exc_info):
    """Build and return a new exception instance whose class derives from
    both the original exception's class and WrappedExceptionMixin, so callers
    can still catch the original type while also inspecting the serialized
    exc_info triple.

    exception -- the original exception instance
    exc_info  -- a (type, value, traceback) triple as from sys.exc_info()
    """
    # TODO: should keep track of all the Wrapped* that we make, and re-use them
    base_klass = exception.__class__
    def wrapped_init(self, exception, exc_info):
        # NOTE: this *shares* (does not copy) the original exception's
        # attribute dict, so the mixin's state also becomes visible on the
        # original exception object.
        self.__dict__ = exception.__dict__
        WrappedExceptionMixin.__init__(self, exc_info)
    # Use the builtin `type` rather than `new.classobj`: the `new` module is
    # deprecated (and removed in Python 3), and for new-style bases like
    # these, classobj delegated to the bases' metaclass (`type`) anyway.
    wrapped_klass = type("Wrapped%s" % base_klass.__name__, (base_klass, WrappedExceptionMixin), {"__init__": wrapped_init})
    return wrapped_klass(exception, exc_info)


class WrappedExceptionMixin(object):
    """Mixin that stores a (type, value, traceback) triple for an exception
    originally raised elsewhere, and exposes it via read-only properties."""

    def __init__(self, exc_info):
        # FIXME: the Python docs warn that keeping a traceback reference in a
        # frame that handled the exception creates a circular reference;
        # since Python 2.2 the cycle collector reclaims these, but it is
        # cheaper to avoid creating them (e.g. store only type and value, or
        # delete the traceback after use).
        self.__exc_info = exc_info

    def _get_backtrace(self):
        # Render the stored traceback as text; the third slot of the triple
        # may legitimately be None (tracebacks do not survive serialization).
        stored_traceback = self.__exc_info[2]
        if not stored_traceback:
            return "<traceback omitted>"
        # TODO: use indenting
        return "\n".join( format_tb(stored_traceback) )

    # First element of the stored triple (the type slot, per sys.exc_info()).
    original_exception = property(lambda self: self.__exc_info[0])
    # The full stored (type, value, traceback) triple.
    original_exc_info = property(lambda self: self.__exc_info)
    # Formatted traceback text, or a placeholder when none was stored.
    original_backtrace = property(_get_backtrace)

def ASYNCTRACE(msg):
    if _DO_ASYNCTRACES:
        global _SCRIPT_START_TIME
        try:
            if _SCRIPT_START_TIME: pass
        except:
            _SCRIPT_START_TIME = time()
        thr = currentThread()
        print "[%s] %s: %s" % (int((time() - _SCRIPT_START_TIME)*100), thr.getName(), msg)
        sys.stdout.flush()


class PickledExecution(object):
    """Wraps execution of a Python generator function with
    extra support code that pickles all yielded values
    into stdout, and pickles all raised exceptions to stderr"""
    
    def _get_final_code(self):
        # Assemble a self-contained script:
        #  * a prologue swaps the real stdout/stderr for StringIO buffers so
        #    stray prints from user code cannot corrupt the pickle streams,
        #    and builds picklers over the *original* streams;
        #  * the user's generator source is inlined via %(generator_code)s;
        #  * a __main__ driver pumps yielded items through the stdout pickler
        #    and ships any exception through the stderr pickler.  The
        #    traceback slot is deliberately None -- traceback objects are not
        #    picklable.
        # glue the code together with support code
        final_code = '''
import sys
from StringIO import StringIO

original_stdout = sys.stdout
original_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()

from pickle import Pickler, Unpickler

stdout_pickler = Pickler(original_stdout)
stderr_pickler = Pickler(original_stderr)
stdin_unpickler = Unpickler(sys.stdin)

%(generator_code)s

if __name__ == '__main__':
    
    try:
        if %(generator_name)s.func_code.co_argcount == 0:
            the_generator = %(generator_name)s()
        elif %(generator_name)s.func_code.co_argcount == 1:
            the_generator = %(generator_name)s(stdin_unpickler.load)
        else:
            raise TypeError("function does not have 0 or 1 arguments")

        for item in the_generator:
            stdout_pickler.dump(item)
        
    except Exception, e:
        exc_info = sys.exc_info()
        exc_type = exc_info[0]
        exc_value = exc_info[1]
        exception_metadata = (e, (exc_type, exc_value, None))
        stderr_pickler.dump( exception_metadata )
''' % dict(generator_name = self.__generator_name, generator_code = self.__generator_code)
    
        # run a syntax check on the final code
        # (raises SyntaxError here, at build time, instead of in the child)
        compile(final_code, "<string>", "exec")
    
        # the code is clean, return it
        return final_code
    
    code = property(_get_final_code)
    """the actual code that can be executed"""
    
    def __init__(self, generator):
        """given a Python generator function, creates a chunk of Python
        code (as a string), which uses stdout to communicate pickled versions
        of each yielded value, and stderr to communicate a pickled version
        of any exception that gets raised.  This code can be accessed
        via the `code` property of the PickledExecution object"""
    
        # get the raw python code
        raw_code = inspect.getsource(generator)
        # docs = inspect.getdoc(generator) or ""
        # comments = inspect.getcomments(generator) or ""
        # just_code = raw_code.replace(docs, "").replace(comments, "")
    
        # remove bad indentation in the raw code
        # (getsource() of a nested or method-level function keeps its original
        # leading indentation, which would be a SyntaxError at top level)
        raw_code_lines = raw_code.split("\n")
        # measure the whitespace prefix of the first line (the `def` line)
        raw_code_indent_prefix = ""
        for character in raw_code_lines[0]:
            if character.isspace():
                raw_code_indent_prefix += character
            else:
                break
        # strip that prefix from every line.
        # NOTE(review): replace(prefix, "", 1) removes the *first occurrence
        # anywhere* in the line, not only a leading one -- assumes the prefix
        # never appears mid-line before the indent; confirm for sources with
        # unusual whitespace.
        unindented_lines = []
        for line in raw_code_lines:
            unindented_line = line.replace(raw_code_indent_prefix, "", 1)
            unindented_lines.append( unindented_line )
        unindented_raw_code = "\n".join(unindented_lines)
    
        # run a syntax check on the code
        compile(unindented_raw_code, "<string>", "exec")
        
        # store the information we need about this code
        self.__generator_code = unindented_raw_code
        self.__generator_name = generator.__name__
    
    def write_object_to_stdin(self, obj, stdin):
        """given an object and a stdin file, write a pickled object to the stream"""
        stdin_pickler = Pickler(stdin)
        stdin_pickler.dump(obj)
    
    def get_response_iterator(self, stdout, stderr):
        """given stdout/stderr associated with pickled execution, iterates
        over all unpickled objects from stdout, and raise any exceptions
        from unpickling stderr"""
        
        # create some unpicklers to read the remote pickle output
        stdout_unpickler = Unpickler(stdout)
        stderr_unpickler = Unpickler(stderr)
    
        # for each item we got, yield it
        try:
            while True:
                item = stdout_unpickler.load()
                yield item
        except EOFError:
            # EOFError is the unpickler's normal end-of-stream signal
            pass
        except Exception, e:
            # anything else means the stream is not valid pickle data;
            # surface whatever raw text remains on both streams for debugging
            raise RuntimeError( "stdout: %s, stderr: %s" % (stdout.read(), stderr.read()) )
    
        # if we had an exception, yield that
        exc = None
        exc_info = None
        try:
            # stderr carries the (exception, (type, value, None)) pair dumped
            # by the generated driver script; an empty stream means success
            exc, exc_info = stderr_unpickler.load()
        except EOFError:
            exc = None
        except Exception, e:
            raise RuntimeError( "stdout: %s, stderr: %s" % (stdout.read(), stderr.read()) )
    
        # re-raise serialized exceptions, wrapped so callers can still catch
        # the original exception type (see create_wrapped_exception)
        if exc:
            raise create_wrapped_exception( exc, exc_info )


class Singleton(object):
    """Base class enforcing at most one instance per (sub)class.

    The shared instance is cached on the class attribute `instance`;
    constructing a second instance raises RuntimeError."""

    @classmethod
    def get_instance(klass):
        """Return the single shared instance, creating and caching it on
        first use."""
        try:
            return klass.instance
        except AttributeError:
            # First call for this class: construct and cache the instance.
            # (Narrowed from a bare `except:` so real errors propagate.)
            klass.instance = klass()
        return klass.instance

    def __init__(self):
        # Register self as the canonical instance if none exists yet.
        try:
            self.__class__.instance
        except AttributeError:
            self.__class__.instance = self
        # Identity (not equality) check: an overridden __eq__ must not be
        # able to defeat the singleton guarantee.
        if self.__class__.instance is not self:
            raise RuntimeError("tried to instantiate multiple instances of singleton class '%s'" % self.__class__.__name__)

def absolute_directory_of_caller():
    """Return the absolute directory of the source file two frames up the
    stack -- i.e. the file containing the *caller of the caller* of this
    function (frame index 2; index 0 is this frame, 1 the direct caller)."""
    outer_frames = inspect.getouterframes(inspect.currentframe())
    caller_source_file = outer_frames[2][1]
    return os.path.abspath(os.path.dirname(caller_source_file))

def absolute_directory_of_object(the_object):
    """Return the absolute path of the directory containing the source file
    in which `the_object` was defined."""
    return os.path.dirname(os.path.abspath(inspect.getsourcefile(the_object)))

def relative_path_between(from_path, to_path):
    """
    Return a relative path leading from the directory `from_path` to
    `to_path`, or "" when both resolve to the same location.

    Raises OSError if `to_path` does not exist, if `from_path` is not an
    existing directory, or (on Windows-like platforms) if the two paths are
    on different drives.

    author: richard barran
    source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302594
    """
    
    target = to_path
    base = from_path

    if not os.path.exists(target):
        raise OSError('Target does not exist: '+target)

    if not os.path.isdir(base):
        raise OSError('Base is not a directory or does not exist: '+base)

    base_list = (os.path.abspath(base)).split(os.sep)
    target_list = (os.path.abspath(target)).split(os.sep)

    # On the windows platform the target may be on a completely different drive from the base.
    # (`!=` replaces the `<>` operator, which was removed in Python 3.)
    if os.name in ['nt','dos','os2'] and base_list[0] != target_list[0]:
        raise OSError('Target is on a different drive to base. Target: '+target_list[0].upper()+', base: '+base_list[0].upper())

    # Starting from the filepath root, work out how much of the filepath is
    # shared by base and target.
    for i in range(min(len(base_list), len(target_list))):
        if base_list[i] != target_list[i]:
            # Broke out: i points at the first differing path element.
            break
    else:
        # No break: every compared element matched (one path is a prefix of
        # the other), so bump i past the last identical element so it points
        # at the first element present in only one of the lists.
        i+=1

    # Climb out of the unshared tail of `base`, then descend into `target`.
    rel_list = [os.pardir] * (len(base_list)-i) + target_list[i:]
    if len(rel_list) > 0:
        return os.path.join(*rel_list)
    else:
        return ""
