import sys
# sys.path.insert(0, "../../python")
sys.path.insert(0, '/home/xiaomin/wxm/mxnet/python')

import mxnet as mx
import logging
import multiprocessing as mp
import numpy as np
import random
import Queue
import atexit
from scipy.io import loadmat
from scikits import audiolab
from matplotlib.pyplot import specgram
from scipy.signal.spectral import spectrogram


def get_single_mat(item, args):
    """Load one sample matrix from a .mat file and one-hot encode its label.

    ``item`` is an (index, _, label) triple from the data list; ``args``
    supplies 'data_dir' and 'folder'.  Returns (x, y, index) where x carries
    a leading channel axis and y is a length-2 one-hot vector when the label
    is binary (0/1), otherwise the plain int label.
    """
    index, _, label = item[0], item[1], item[2]
    # A few known-bad samples are remapped to one fixed positive example.
    if index in ('6040', '6041', '55'):
        index, label = 6039, 1
    mat_path = '{0}/{1}/train_{2}.mat'.format(args['data_dir'], args['folder'], index)
    x = np.expand_dims(loadmat(mat_path)['x'], 0)
    y = int(label)
    if y == 1:
        y = np.array([0, 1])
    elif y == 0:
        y = np.array([1, 0])
    return x, y, index


def get_single_wav(item, args):
    """Load one wav file, compute its normalized magnitude spectrogram, and
    one-hot encode its label.

    ``item`` is an (index, wav_path, label) triple from the data list;
    ``args`` supplies 'data_dir' and 'folder'.  Returns (x, y, index) where
    x is the (1, freq, time) normalized spectrogram and y is a length-2
    one-hot vector when the label is binary (0/1).
    """
    root_dir = args['data_dir']
    folder = args['folder']

    index, wav_path, label = item[0], item[1], item[2]
    wav_path = root_dir + '/' + folder + '/' + wav_path
    # audiolab.wavread returns (samples, fs, enc); wav[0] is the sample array.
    wav = audiolab.wavread(wav_path)
    # 'hann' is the canonical window name; the 'hanning' alias used by the
    # original code was deprecated and removed in newer scipy releases.
    # The window itself is identical, so results do not change.
    freqs, t, Pxx = spectrogram(wav[0], nperseg=512, fs=400, noverlap=154,
                                window='hann', mode='magnitude')
    Pxx_max = Pxx.max()
    # Guard against all-zero (silent) input: dividing by 0 would yield NaNs
    # that silently poison the whole batch downstream.
    if Pxx_max > 0:
        Pxx /= Pxx_max
    x = np.expand_dims(Pxx, 0)
    y = int(label)
    if y == 1:
        y = np.array([0, 1])
    elif y == 0:
        y = np.array([1, 0])
    return x, y, index


def read_list(data_list_path):
    """Parse a tab-separated sample list into a list of field lists.

    Each line of the file at ``data_list_path`` becomes one entry: the line
    with surrounding newline characters stripped, split on tab characters.
    """
    with open(data_list_path, 'r') as f:
        return [line.strip('\n').split('\t') for line in f]


class DataLoader(mx.io.DataIter):
    """Shuffling batch iterator over a tab-separated list of samples.

    Each sample is loaded by ``DataLoader._get_single``.  Batches are built
    either in-process, or — when ``multi_thread`` is set — by a pool of
    worker processes that push results onto a bounded ``mp.Queue``.
    """
    def __init__(self, data_list, input_args):
        super(DataLoader, self).__init__()
        self.input_args = input_args

        self.data_list = read_list(data_list)
        random.shuffle(self.data_list)
        self.data_path = input_args.get('data_path')
        # data_shape[0] is the per-sample data shape, data_shape[1] the label shape.
        self.data_shape = input_args.get('data_shape')
        self.multi_thread = input_args.get('multi_thread', False)
        self.n_thread = input_args.get('n_thread', 5)
        self.stop_word = input_args.get('stop_word', '==STOP--')
        self.batch_size = input_args.get('batch_size', 10)
        self.stride = input_args.get('stride')

        # Image pre-process options
        self.im_size = input_args.get('im_size')

        self.current_batch = None
        self.data_num = None
        self.current = None
        self.worker_proc = None

        if self.multi_thread:
            # Shared signed-char flag, visible to the worker processes.
            self.stop_flag = mp.Value('b', False)
            # Bounded so workers block instead of buffering the whole epoch.
            self.result_queue = mp.Queue(maxsize=self.batch_size * 5)
            self.data_queue = mp.Queue()

    def _insert_queue(self):
        """Fill the work queue, then one stop word per worker so each exits."""
        for item in self.data_list:
            self.data_queue.put(item)
        for _ in range(self.n_thread):
            self.data_queue.put(self.stop_word)

    def _thread_start(self):
        """Spawn the worker processes and register cleanup at interpreter exit."""
        # BUG FIX: the original rebound self.stop_flag to a plain bool here
        # (and again in shutdown()), so the workers — which hold a reference
        # to the original mp.Value — never saw the stop signal.  Mutate the
        # shared value in place instead of rebinding the attribute.
        self.stop_flag.value = False
        self.worker_proc = [mp.Process(target=DataLoader._worker,
                                       args=(self.data_queue,
                                             self.result_queue,
                                             self.stop_word,
                                             self.stop_flag,
                                             self.input_args))
                            for _ in range(self.n_thread)]
        for proc in self.worker_proc:
            proc.start()

        def cleanup():
            self.shutdown()
        atexit.register(cleanup)

    @staticmethod
    def _worker(data_queue, result_queue, stop_word, stop_flag, input_args):
        """Worker loop: load samples until the stop word or the shared flag."""
        for item in iter(data_queue.get, stop_word):
            if stop_flag.value:  # set by shutdown() in the parent process
                break
            image, cls_label, index = DataLoader._get_single(item, input_args)
            result_queue.put((image, cls_label, index))

    @property
    def provide_label(self):
        """Label description: name plus (batch_size, *label_shape)."""
        return [('label', tuple([self.batch_size] + list(self.data_shape[1])))]

    @property
    def provide_data(self):
        """Data description: name plus (batch_size, *data_shape)."""
        return [('data', tuple([self.batch_size] + list(self.data_shape[0])))]

    def reset(self):
        """Restart an epoch: reshuffle and (re)start the worker pool."""
        self.data_num = len(self.data_list)
        self.current = 0
        self.shuffle()
        if self.multi_thread:
            self.shutdown()            # Stop any previous epoch's workers
            self._insert_queue()       # Refill data_queue
            self._thread_start()       # Start the worker processes

    def shutdown(self):
        """Stop workers and drain both queues (no-op when single-process)."""
        if self.multi_thread:
            # Signal the workers first so they stop producing while we drain;
            # the shared mp.Value must be mutated, not rebound (see
            # _thread_start).
            self.stop_flag.value = True
            # Drain both queues so any worker blocked on a full
            # result_queue.put() can proceed and observe the flag.
            while True:
                try:
                    self.result_queue.get(timeout=1)
                except Queue.Empty:
                    break
            while True:
                try:
                    self.data_queue.get(timeout=1)
                except Queue.Empty:
                    break
            if self.worker_proc:
                for i, worker in enumerate(self.worker_proc):
                    worker.join(timeout=1)
                    if worker.is_alive():
                        logging.error('worker {} fails to join'.format(i))
                        worker.terminate()

    def shuffle(self):
        """Shuffle the sample list in place."""
        random.shuffle(self.data_list)

    def next(self):
        """Return the next batch, or raise StopIteration at epoch end."""
        if self._get_next():
            return self.current_batch
        else:
            raise StopIteration

    def _get_next(self):
        """Assemble one batch into self.current_batch; False when exhausted."""
        batch_size = self.batch_size
        # Drop the final partial batch rather than padding it.
        if self.current + batch_size > self.data_num:
            return False

        x = np.zeros(tuple([self.batch_size] + list(self.data_shape[0])))
        y = np.zeros(tuple([self.batch_size] + list(self.data_shape[1])))
        indexs = []
        items = self.data_list
        cnt = 0
        for i in range(self.current, self.current + batch_size):
            if self.multi_thread:
                # Workers may deliver samples in any order; that is fine for
                # training since the list is shuffled anyway.
                image, cls_label, index = self.result_queue.get()
            else:
                image, cls_label, index = DataLoader._get_single(items[i], self.input_args)
            x[cnt, :, :, :] = image
            y[cnt, :] = cls_label
            indexs.append(index)
            cnt += 1
        x = mx.ndarray.array(x)
        y = mx.ndarray.array(y)
        self.current_batch = mx.io.DataBatch(data=[x], label=[y], pad=0, index=None)
        self.current += batch_size
        return True

    @staticmethod
    def _get_single(item, input_args):
        """Load a single (x, y, index) sample; switch point for mat vs. wav."""
        return get_single_wav(item, input_args)