from queue import Full, Queue
import threading
class Prefetcher(object):
    """
    This helper class enables sample enqueuing and batch dequeuing, to speed up
    batch fetching. It abstracts away the enqueuing and dequeuing logic.
    """
    def __init__(self, get_sample, dataset_size, batch_size, final_batch=True,
                 num_threads=1, prefetch_size=200):
        """
        Args:
        :param get_sample: a function that takes a pointer (index) and returns a sample
        :param dataset_size: total number of samples in the dataset
        :param batch_size: number of samples per batch
        :param final_batch: True or False, whether to keep or drop the final incomplete batch
        :param num_threads: num of parallel threads, >=1
        :param prefetch_size: the maximum size of the queue. Set to some positive
            integer to save memory, otherwise set to 0
        """
        # NOTE(review): attribute name contains a typo ('datast'); kept as-is
        # for backward compatibility with any code that reads it.
        self.full_datast_size = dataset_size
        self.final_batch = final_batch
        final_sz = self.full_datast_size % batch_size
        if not final_batch:
            # Drop the trailing incomplete batch from the effective epoch size.
            dataset_size = self.full_datast_size - final_sz
        self.dataset_size = dataset_size
        self.batch_size = batch_size
        self.enqueuer = Enqueuer(get_element=get_sample, num_elements=dataset_size,
                                 num_threads=num_threads, queue_size=prefetch_size)
        # How many samples of the current epoch have been fetched from the queue.
        self.ptr = 0
        # Whether the current epoch is done; True initially so the first
        # next_batch() call kicks off prefetching.
        self.ep_done = True

    def next_batch(self):
        """
        Return a batch of samples, meanwhile indicate whether the epoch is done.
        The purpose of this func is mainly to abstract away the loop and the
        boundary-checking logic.
        :return:
        samples: a list of samples
        done: bool, whether the epoch is done
        """
        # Start enqueuing and other preparation at the beginning of an epoch.
        if self.ep_done:
            self.start_ep_prefetching()
        samples = []
        for _ in range(self.batch_size):
            if self.ptr >= self.dataset_size:
                break
            self.ptr += 1
            samples.append(self.enqueuer.queue.get())
        # A single post-loop check covers both the early-break (short final
        # batch) and the exactly-full final batch cases.
        self.ep_done = self.ptr >= self.dataset_size
        return samples, self.ep_done

    def start_ep_prefetching(self):
        """Has to be called at the start of every epoch."""
        self.enqueuer.start_ep()
        self.ptr = 0



class Counter(object):
    """A thread-safe bounded counter.

    increment() grows the value by one until it reaches max_value; reset()
    puts it back to zero. All state changes happen under a single lock.
    """

    def __init__(self, val=0, max_val=0):
        self._value = val
        self.max_value = max_val
        self._lock = threading.Lock()

    def reset(self):
        """Set the counter back to zero."""
        with self._lock:
            self._value = 0

    def increment(self):
        """Try to bump the counter by one.

        :return: (incremented, value) — whether the bump happened (False once
            the counter has reached max_value), and the value after the attempt.
        """
        with self._lock:
            below_cap = self._value < self.max_value
            if below_cap:
                self._value += 1
            return below_cap, self._value


class Enqueuer(object):
    """Fills a queue with elements produced by get_element, using one or more
    daemon worker threads that are woken up once per epoch via start_ep()."""

    def __init__(self, get_element, num_elements, num_threads=1, queue_size=20):
        """
        Args:
        :param get_element: a function that takes a pointer (index) and returns an element
        :param num_elements: total number of elements to put into the queue
        :param num_threads: num of parallel threads, >=1
        :param queue_size: the maximum size of the queue. Set to some positive
            integer to save memory, otherwise, set to 0
        """
        self.get_element = get_element
        assert num_threads > 0
        self.num_threads = num_threads
        self.queue_size = queue_size
        self.queue = Queue(maxsize=queue_size)
        # The pointer shared by threads.
        self.ptr = Counter(max_val=num_elements)
        # The event to wake up threads; it is set at the beginning of an epoch
        # and cleared after an epoch has been fully enqueued.
        self.event = threading.Event()
        # To reset states.
        self.reset_event = threading.Event()
        # The event to terminate the threads.
        self.stop_event = threading.Event()
        self.threads = []
        for _ in range(num_threads):
            thread = threading.Thread(target=self.enqueue)
            # Daemon mode, so that the main program ends normally without
            # joining these workers.
            thread.daemon = True
            thread.start()
            self.threads.append(thread)

    def enqueue(self):
        # Worker loop; runs until stop_event is set.
        while not self.stop_event.is_set():
            # If the enqueuing event is not set, the thread just waits.
            if not self.event.wait(0.5):
                continue
            # Increment the counter to claim that this element has been
            # enqueued by this thread.
            incremented, ptr = self.ptr.increment()
            if incremented:
                element = self.get_element(ptr - 1)
                # When enqueuing, keep an eye on the stop and reset signals.
                while not self.stop_event.is_set() and not self.reset_event.is_set():
                    try:
                        # The operation will wait at most 'timeout' for a free
                        # slot in the queue to be available.
                        self.queue.put(element, timeout=0.5)
                        break
                    except Full:
                        # Queue still full; loop back and re-check the signals.
                        pass
            else:
                self.end_ep()
        print('Exiting thread {}!'.format(threading.current_thread().name))

    def start_ep(self):
        # Start enqueuing an epoch. The pointer is reset here, *before* setting
        # the event, rather than in end_ep(): resetting while the event could
        # still be observed as set allowed a straggler thread to claim index 0
        # again and enqueue a duplicate element into the next epoch.
        self.ptr.reset()
        self.event.set()

    def end_ep(self):
        # When all elements are enqueued, let threads sleep to save resources.
        # NOTE(review): a straggler thread finishing a failed increment very
        # late could clear an event just set by start_ep(); the window is far
        # narrower than the original duplicate-element race, but a fully
        # airtight fix would need epoch generation counters — confirm if needed.
        self.event.clear()