from Queue import Queue
from collections import deque
from itertools import chain, ifilterfalse, izip
from itertools import tee, groupby, ifilter, islice, imap
from threading import Lock, Thread


""" Provides commonly needed functionality for dealing with
iterables.  Extends itertools. """


def listiter(l, start, end=None, by=None):
    """ Yield elements of ``l`` at the indices produced by range semantics.

    With only ``start`` given, ``start`` acts as the stop value (indices
    0..start-1).  ``by`` is honored only when ``end`` is also given.
    """
    if end is None:
        indices = range(start)
    elif by is None:
        indices = range(start, end)
    else:
        indices = range(start, end, by)

    for idx in indices:
        yield l[idx]


def rotate(lst, no):
    """ Return a copy of ``lst`` rotated right by ``no`` positions.

    Uses pure slice semantics, so a rotation larger than ``len(lst)``
    behaves like the underlying slices do (it does not wrap modulo).
    """
    split = -no
    head = lst[split:]
    tail = lst[:split]
    return head + tail


def closing(matcher, s, opensym, closesym, re_flags=0):
    """ Search ``s`` for ``matcher`` and return the matched text plus the
    following text up to and including the close symbol that balances it.

    Nested opensym/closesym pairs after the match increase a depth counter,
    so only a close symbol at depth zero terminates the scan.

    :param matcher: regular-expression pattern passed to ``re.search``.
    :param s: string to scan.
    :param opensym: opening symbol (may be multi-character).
    :param closesym: closing symbol; must have the same length as opensym.
    :param re_flags: flags forwarded to ``re.search``.
    :returns: matched text plus balanced tail (close symbol included), or
        None when the pattern does not match or no balancing close symbol
        is found.
    :raises ValueError: when opensym and closesym differ in length.
    """
    if len(opensym) != len(closesym):
        raise ValueError('symbols need common length')

    import re
    match = re.search(matcher, s, re_flags)
    if match is None:
        return

    # Text of the match itself, and everything after it.
    prefix = s[match.start():match.end()]
    suffix = s[match.end():]

    # Start index (within suffix) of the balancing close symbol, if found.
    ind = None

    count = 0  # current nesting depth
    # Slide a window of len(opensym) characters across the suffix so that
    # multi-character symbols are recognized.
    # NOTE(review): sliding_window raises ValueError when suffix is shorter
    # than opensym, and overlapping windows may miscount symbols that
    # overlap themselves — confirm callers' inputs before relying on this.
    for i, c in enumerate(''.join(x) for x in sliding_window(suffix, len(opensym))):
        if c == opensym:
            count += 1
        elif c == closesym:
            if count == 0:
                ind = i
                break
            count -= 1
    if ind is not None:
        # Include the close symbol itself in the returned text.
        return prefix + suffix[:ind + len(opensym)]


class pipe(object):
    """ Endless iterator that produces each value by calling a supplied
    zero-argument callable. """

    def __init__(self, getter):
        # Callable invoked once per iteration step.
        self._produce = getter

    def __iter__(self):
        return self

    def next(self):
        # Py2 iterator protocol: delegate to the wrapped callable.
        return self._produce()


def no_list_if_one(values):
    """ Return the sole element when ``values`` holds exactly one item,
    otherwise return ``values`` unchanged. """
    return values[0] if len(values) == 1 else values


def position_index(values):
    """ Map each value to its position in ``values`` (later duplicates win). """
    return {value: position for position, value in enumerate(values)}


def arange(value):
    """ Return an xrange over ``value`` itself when it is an int, otherwise
    over ``len(value)``. """
    count = value if isinstance(value, int) else len(value)
    return xrange(count)


def dictify(f):
    """ Decorator: collect a generator of (key, value) pairs into a dict.

    :param f: function returning an iterable of 2-tuples.
    :returns: wrapped function that returns a dict.
    """
    def inner(*args, **kwargs):
        # dict() consumes the iterable directly; the original's intermediate
        # list() materialization was redundant work.
        return dict(f(*args, **kwargs))
    return inner


def uniqueify(f):
    """ Decorator: de-duplicate a generator's output into a list,
    preserving first-seen order. """
    def wrapper(*args, **kwargs):
        return [item for item in unique(f(*args, **kwargs))]
    return wrapper


def setify(f):
    """ Decorator to convert generator into set. """
    decorator = collect(set, set.add)
    return decorator(f)


def listify(f):
    """ Decorator to convert generator into list. """
    decorator = collect(list, list.append)
    return decorator(f)


def collect(collector_type, adder):
    """ Build a decorator that accumulates a generator's output.

    :param collector_type: zero-argument factory for the container.
    :param adder: callable(container, item) that inserts one item.
    :returns: decorator turning a generator function into one that returns
        a filled container.
    """
    def decorate(f):
        def inner(*args, **kwargs):
            container = collector_type()
            for item in f(*args, **kwargs):
                adder(container, item)
            return container
        return inner

    return decorate


def ordinalize(values):
    """ Replace each row's column values with their rank among all rows.

    For every column, the rows are sorted on that column (ties broken by
    comparing the whole decorated tuple, per sort_on_index) and each row
    records its position in that ordering.

    ``ordinalize([(10, 5), (20, 1)]) --> [[0, 1], [1, 0]]``

    :param values: sequence of equal-length rows.
    :returns: one list of per-column ranks for each input row.
    """
    if len(values) == 0:
        return []

    # One result bucket per input row.
    x = [[] for i in xrange(len(values))]

    # Prepend each row's original position so ranks can be written back
    # to the right bucket after sorting.
    v = [(i,) + tuple(v) for i, v in enumerate(values)]
    # Column indexes are offset by 1: index 0 now holds the row position.
    for i in xrange(1, len(values[0]) + 1):
        for j, item in enumerate(sort_on_index(v, i)):
            x[item[0]].append(j)

    return x


def sort_on_index(values, index):
    """ Return ``values`` sorted by ``value[index]``.

    Uses decorate-sort-undecorate with the full value as the secondary
    key, so ties on the index are broken by comparing the values
    themselves (not by input order).
    """
    decorated = sorted((value[index], value) for value in values)
    return [pair[1] for pair in decorated]


def column_unzip(iterable, *indexes):
    """ Extract several columns, one list per requested index.

    Note: each index triggers a fresh pass over ``iterable``.
    """
    columns = []
    for idx in indexes:
        columns.append(column(iterable, idx))
    return columns


def column(iterable, *indexes):
    """ Like icolumn, but materialized into a list. """
    values = icolumn(iterable, *indexes)
    return list(values)


def icolumn(iterable, *indexes):
    """ Yield one column (single index) or a tuple of columns (several
    indexes) from each row of ``iterable``. """
    if len(indexes) > 1:
        for row in iterable:
            yield tuple(row[i] for i in indexes)
    else:
        only = indexes[0]
        for row in iterable:
            yield row[only]


class pushback_itr(object):
    """ Iterable wrapper that lets consumers push values back; pushed-back
    values are produced only after the wrapped iterator is exhausted. """

    def __init__(self, itr):
        self.itr = iter(itr)
        self.added = deque()

    def push_back(self, value):
        # Queue the value for replay once the wrapped iterator runs dry.
        self.added.append(value)

    def __iter__(self):
        # Drain the wrapped iterator first.
        while True:
            try:
                yield next(self.itr)
            except StopIteration:
                break

        # Then replay anything pushed back, oldest first.
        while self.added:
            yield self.added.popleft()

def overlap(itra, itrb):
    """ True when any element of ``itra`` is contained in ``itrb``. """
    return any(item in itrb for item in itra)


def chunked(itr, by):
    """ Acquire items from iterator by chunks.  Adapted from
    http://stackoverflow.com/questions/1330475/how-do-i-efficiently-do-a-bulk-insert-or-update-with-sqlalchemy

    Each yielded chunk is a lazy ``chain`` over the shared iterator, so a
    chunk must be consumed before advancing to the next one.

    :param itr: source iterable.
    :param by: chunk size (>= 1).
    """

    itr = iter(itr)
    while True:
        # Fix: stop explicitly when the source is exhausted instead of
        # letting StopIteration escape the generator (a RuntimeError under
        # PEP 479 on modern Python; the original relied on py2 leakage).
        try:
            head = next(itr)
        except StopIteration:
            return
        yield chain([head], islice(itr, by - 1))

class iterbase(object):
    """ Mixin giving iterables '+' concatenation semantics. """

    def __add__(self, other):
        # Our own items first, then the other iterable's.
        combined = chain(iter(self), other)
        return combined


class iterobj(iterbase):
    """ Iterator wrapper supporting '+' concatenation (from iterbase),
    slicing, and single-index access. """

    def __init__(self, itr):
        self._source = iter(itr)

    def __add__(self, other):
        combined = super(iterobj, self).__add__(other)
        return iterobj(combined)

    def __iter__(self):
        return self

    def next(self):
        # Py2 iterator protocol; delegate to the wrapped iterator.
        return next(self._source)

    def __getitem__(self, slc):
        if not isinstance(slc, slice):
            # XXX destructive for generators: items before the requested
            # index are consumed, so later indexes are off.
            try:
                return list(islice(self, slc, slc + 1))[0]
            except IndexError:
                raise IndexError('index out of range')
        return iterobj(islice(self, slc.start, slc.stop, slc.step))


class reiterable(iterobj):
    """ iterobj that records every value it yields; once the source is
    exhausted, iterating again replays the recorded values. """

    def __init__(self, itr):
        super(reiterable, self).__init__(itr)
        self._kept = deque()
        self.exhausteditr = False

    def __iter__(self):
        # After exhaustion, hand out the cache instead of the dead iterator.
        if self.exhausteditr:
            return iter(self._kept)
        return self

    def next(self):
        try:
            value = super(reiterable, self).next()
        except StopIteration:
            self.exhausteditr = True
            raise
        self._kept.append(value)
        return value


class _maxmin(object):
    @classmethod
    def by_fun(cls, itr, op):
        kept_item = None
        prev = None
        pivot = None

        for item in itr:
            value = op(item)
            new_value = cls._comparator(pivot, value)
            if new_value != pivot:
                pivot = new_value
                kept_item = item

        return kept_item

    @classmethod
    def by_attribute(cls, itr, attr):
        return cls.by_fun(itr, lambda item: item.__dict__[attr])

    @classmethod
    def by_index(cls, itr, index):
        return cls.by_fun(itr, lambda item: item[index])


class min2(_maxmin):
    """ _maxmin specialization that keeps the item with the smallest value. """

    @classmethod
    def _comparator(cls, a, b):
        """ Return the smaller of a and b, treating None as "no value yet".

        Fix: the original only returned a value when ``min(a, b)`` was None
        and fell off the end (implicitly returning None) for every ordinary
        comparison, which made by_fun reset its pivot and track the wrong
        item.
        """
        if a is None:
            return b
        if b is None:
            return a
        return min(a, b)

class max2(_maxmin):
    """ _maxmin specialization that keeps the item with the largest value. """

    @classmethod
    def _comparator(cls, a, b):
        """ Return the larger of a and b, treating None as "no value yet".

        Fix: the original relied on Python 2's arbitrary ordering where
        ``max(None, x) == x``; the None case is now explicit (and
        consistent with min2).
        """
        if a is None:
            return b
        if b is None:
            return a
        return max(a, b)


def sliding_window(itr, n, accum=False):
    """ Yield sliding windows (as lists) of length ``n`` over ``itr``.

    With ``accum=True``, windows of every length from 1 up to ``n`` are
    yielded (shortest first).

    :param itr: any iterable; materialized once up front.
    :param n: window length; must satisfy 1 <= n <= len(itr).
    :param accum: when true, also yield all shorter windows.
    :raises ValueError: when n is out of range for the input.
    """
    # Materialize directly with list(); the original detoured through the
    # project-local collections2.lazy_itr just to build the same list.
    l = list(itr)
    if not (1 <= n <= len(l)):
        raise ValueError('0 < window size needs to be <= len(itr)')

    if accum:
        start = 1
    else:
        start = n

    for i in range(start, n + 1):
        for j in range(len(l) - (i - 1)):
            yield l[j:j + i]


def pull_random(i_as_list):
    """ Pull random elements from a list until it is empty.

    :param i_as_list: list to pull (and remove) elements from; it is
        mutated as the generator is consumed.
    :returns: generator producing the list's items in random order.

    """
    import random

    while i_as_list:
        pick = random.randint(0, len(i_as_list) - 1)
        yield i_as_list.pop(pick)


def filtersplit(func, iterable):
    """ Behaves like ifilter, except it returns two generators: one for the
    values that passed the filter, the other for values that did not pass the
    filter.

    ``filtersplit(lambda x: x < 0, [-2, -1, 0, 1, 2]) --> [-2, -1], [0, 1, 2]``

    :param func: the filter function
    :param iterable: the iterable.

    """
    # deques[0] buffers values awaiting the "passed" generator,
    # deques[1] values awaiting the "failed" one.
    deques = [deque(), deque()]
    itr = iter(iterable)

    def _get(index):
        while True:
            if deques[index]:
                yield deques[index].popleft()
                continue
            # Fix: stop explicitly when the source is exhausted instead of
            # letting next()'s StopIteration escape the generator (a
            # RuntimeError under PEP 479 on modern Python).
            try:
                val = next(itr)
            except StopIteration:
                return
            if (not bool(index)) == bool(func(val)):
                yield val
            else:
                # Belongs to the other generator; buffer it there.
                deques[1 - index].append(val)

    return (_get(0), _get(1))


def iterby(iterable, step):
    """ Iterate over an iterable in steps.

    Step size is determined by step (a positive int).

    ``iterby([1, 2, 3, 4, 5, 6], 2) --> [1, 2], [3, 4], [5, 6]``

    :param iterable: the iterable
    :param step: the step size

    """
    # Fix: the original used py2-only `imap` and `/`, whose truncating
    # behavior the grouping depends on; a generator with floor division
    # keeps the same output and removes both fragilities.
    counter = [-1]

    def bucket(_ignored):
        # Consecutive runs of `step` items share one bucket number.
        counter[0] += 1
        return counter[0] // step

    for _key, group in groupby(iterable, bucket):
        yield list(group)


def iteriter(iterable, deep=False):
    """ Iterate over an iterable containing nested iterables.

    ``iteriter([[1, 2], [3, 4, 5], [6]]) --> [1, 2, 3, 4, 5, 6]``

     :param iterable: the iterable
     :param deep: if true, will iterate to arbitrary depth of iteration, like a
     tree iterator.  Otherwise, defaults to one level of nesting.

     """
    for item in iterable:
        if not deep:
            # Shallow mode: every item must itself be iterable.
            for element in item:
                yield element
        elif hasattr(item, '__iter__'):
            # Deep mode: recurse into anything iterable.
            for element in iteriter(item, deep):
                yield element
        else:
            yield item


def slicepoints(value, slicer):
    """ Return ``slicer`` cut indices spaced evenly across ``value``.

    :raises ValueError: when slicer exceeds value.
    """
    if slicer > value:
        raise ValueError("Trying to cut list more than its length")
    points = []
    for inc in range(slicer):
        # Keep the exact original expression so float rounding is unchanged.
        points.append(int(round(float(inc * value) / slicer)))
    return points


def pfilterfalse(func, iterable, thread_count=None):
    """ Behaves like itertools.ifilterfalse but executes in parallel.

    Fix: the original passed thread_count as a third positional argument to
    itertools.ifilterfalse (which takes none), raising TypeError, and ran
    nothing in parallel.  This mirrors pfilter, inverting the predicate.
    """
    iterables = tee(iterable)
    for x, y in izip(pmap(func, iterables[0], thread_count), iterables[1]):
        if not x:
            yield y


def pfilter(func, iterable, thread_count=None):
    """ Behaves like itertools.ifilter but executes in parallel. """
    to_map, to_echo = tee(iterable)
    for passed, value in izip(pmap(func, to_map, thread_count), to_echo):
        if passed:
            yield value


def procmap(func, iterable):
    """ Runs pmap using the number of cores on the machine.

    Warning: python multithreading is subject to GIL and this will not speed up
    CPU-bound processes. """

    import multiprocessing

    core_count = multiprocessing.cpu_count()
    return pmap(func, iterable, thread_count=core_count)


class _Puller(object):
    """ Drains an iterable into a Queue on a worker thread and replays the
    queued values through the (py2) iterator protocol.

    Used by pchain; see that function's "buggy do not use" warning.
    """

    def __init__(self, iterable, q):
        self.lock = Lock()  # guards `active`
        self.active = False
        self.iterable = iterable
        self.q = q
        # Exception raised while draining, re-raised to the consumer.
        self._exception = None
        self.active = True

    def __call__(self):
        # Producer body, run on its own Thread: enqueue every item and
        # record any failure instead of letting the thread die silently.
        try:
            for i in self.iterable:
                self.q.put(i)
        except Exception as e:
            self._exception = e

        with self.lock:
            self.active = False

        # Wait until the consumer has task_done()'d everything we queued.
        self.q.join()

    def __iter__(self):
        return self

    def next(self):
        if self._exception:
            raise self._exception

        with self.lock:
            if not self.active:
                if self._exception:
                    raise self._exception
                if self.q.empty():
                    raise StopIteration()

        # NOTE(review): if the producer finishes between the check above and
        # this blocking get() while the queue is empty, the consumer blocks
        # forever — likely the bug behind pchain's warning; confirm before
        # reusing this class.
        v = self.q.get(True)
        self.q.task_done()
        return v


def pchain(*iterables):
    """ Behaves like itertools.chain except iterables are drawn from in
    separate threads.  XXX Warning: buggy do not use yet """

    # One producer thread per iterable, each feeding its own queue.
    pulls = [_Puller(i, Queue()) for i in iterables]
    ts = []
    for p in pulls:
        t = Thread(target=p)
        t.start()
        ts.append(t)

    # Consume the pullers in argument order so output order matches chain().
    for p in pulls:
        for i in p:
            yield i

    # Wait for all producers to finish before the generator completes.
    for t in ts:
        t.join()


def threadmap(func, iterable):
    """ A list of the values of threadimap. """
    results = threadimap(func, iterable)
    return list(results)


def threadimap(func, iterable):
    """ Behaves like imap, except each value in iterable has the func mapped on
    its own thread (delegates to pmap with its default thread count). """
    mapper = pmap
    return mapper(func, iterable)


def pop(iterable):
    """ Return the first element of ``iterable`` (consumes one item from
    an iterator). """
    iterator = iter(iterable)
    return next(iterator)


def first(func, iterable):
    """ Returns the first item of an iterable that satisfies func.

    ``first(lambda x: x > 0, [-1, -2, 3]) --> 3``

    :param func: predicate applied to each item.
    :param iterable: the iterable to scan.
    :raises ValueError: when no item satisfies func.
    """
    # Fix: the original's bare `except:` swallowed every exception
    # (including KeyboardInterrupt and errors raised by func itself) and
    # reported them all as "no item found".  A plain scan raises only the
    # intended ValueError on exhaustion.
    for item in iterable:
        if func(item):
            return item
    raise ValueError('No item in iterable satisfying %s' % func)


def unique(iterable, key=None):
    """ Draw unique elements from an iterable (from
    http://www.python.org/doc/3.0/library/itertools.html)

      ``unique_everseen('AAAABBBCCDAABBB') --> A B C D``

      ``unique_everseen('ABBCcAD', str.lower) --> A B C D``

    """
    seen = set()
    for element in iterable:
        # With no key, the element itself is its own dedup marker.
        marker = element if key is None else key(element)
        if marker not in seen:
            seen.add(marker)
            yield element