from mxnet.io import DataIter
import mxnet as mx
import pickle
import random
import numpy as np
import mxnet.ndarray as nd


# Fixed seed so the shuffle order produced by SimpleIter._shuffle() is
# reproducible across runs.
random.seed(123)


def parse_data(d):
    """Deserialize one pickled record payload back into a Python object.

    NOTE(review): ``pickle.loads`` on untrusted bytes can execute arbitrary
    code; the records here are assumed to come from the project's own
    preprocessing pipeline — confirm before pointing this at external data.
    """
    return pickle.loads(d)


class SimpleIter(DataIter):
    """Batch iterator over pickled QA examples stored in an MXNet indexed
    RecordIO file.

    Each record is expected to be a pickled dict with the keys read in
    ``next()``: ``context_char_idxs``, ``ques_char_idxs``, ``context_idxs``,
    ``ques_idxs``, ``y1``, ``y2``, ``id``, ``question_len``, ``context_len``.
    The final batch may contain fewer than ``batch_size`` examples.
    """

    def __init__(self, idx_file, data_file, shuffle=True, batch_size=10, ctx=mx.cpu()):
        """Open the record file and prepare the first epoch.

        Parameters
        ----------
        idx_file : str
            Path to the RecordIO ``.idx`` index file.
        data_file : str
            Path to the RecordIO ``.rec`` data file.
        shuffle : bool
            Shuffle the record order each epoch (including the first).
        batch_size : int
            Number of examples per batch.
        ctx : mx.Context
            Device on which the batch NDArrays are allocated.
        """
        super(SimpleIter, self).__init__(batch_size)
        self.batch_size = batch_size
        self.cur_batch = 0
        self.data_file = data_file
        self.idx_file = idx_file
        self.shuffle = shuffle
        self.ctx = ctx
        self.record = self._read_data()
        # Fix: copy the key list so shuffling does not mutate the record
        # reader's internal ``keys`` attribute in place.
        self.order = list(self.record.keys)
        self.data_len = len(self.order)
        # Ceiling division: a trailing partial batch still counts.
        self.num_batches = self.data_len // batch_size
        self.num_batches = self.num_batches if self.data_len % batch_size == 0 else self.num_batches + 1
        # Fix: honour ``shuffle`` for the first epoch too (the original only
        # shuffled on reset(), so epoch 0 was always in file order).
        if self.shuffle:
            self._shuffle()
        self.data_gen = self._gen_data()

    def _read_data(self):
        """Open the indexed RecordIO pair for reading."""
        return mx.recordio.MXIndexedRecordIO(self.idx_file, self.data_file, 'r')

    def close(self):
        """Release the underlying RecordIO file handle."""
        self.record.close()

    def _shuffle(self):
        """Shuffle the epoch's record order in place."""
        random.shuffle(self.order)

    def _gen_data(self):
        """Lazily yield raw record bytes in the current ``self.order``."""
        for key in self.order:
            yield self.record.read_idx(key)

    def __iter__(self):
        return self

    def reset(self):
        """Start a new epoch: rewind the batch counter and reshuffle."""
        self.cur_batch = 0
        # Shuffle before creating the generator so intent is obvious (the
        # generator is lazy either way, since it reads ``self.order`` on
        # first iteration).
        if self.shuffle:
            self._shuffle()
        self.data_gen = self._gen_data()

    def __next__(self):
        return self.next()

    def next(self):
        """Return the next batch.

        Returns
        -------
        tuple
            ``(p_chars, q_chars, p_words, q_words, y1, y2, p_lens, q_lens,
            ids)`` where the first six elements are NDArrays stacked along a
            new batch axis and the last three are plain Python lists.

        Raises
        ------
        StopIteration
            When all batches of the epoch have been consumed.
        """
        if self.cur_batch >= self.num_batches:
            raise StopIteration
        self.cur_batch += 1
        p_chars = []
        q_chars = []
        p_words = []
        q_words = []
        y1 = []
        y2 = []
        ids = []
        p_lens = []
        q_lens = []
        # zip with range() caps the batch at batch_size while letting the
        # generator supply fewer records for the final partial batch.
        for _, raw in zip(range(self.batch_size), self.data_gen):
            data = parse_data(raw)
            p_chars.append(data['context_char_idxs'])
            q_chars.append(data['ques_char_idxs'])
            p_words.append(data['context_idxs'])
            q_words.append(data['ques_idxs'])
            y1.append(data['y1'])
            y2.append(data['y2'])
            ids.append(data['id'])
            q_lens.append(data['question_len'])
            p_lens.append(data['context_len'])

        p_chars = nd.array(np.stack(p_chars), ctx=self.ctx)
        q_chars = nd.array(np.stack(q_chars), ctx=self.ctx)
        p_words = nd.array(np.stack(p_words), ctx=self.ctx)
        q_words = nd.array(np.stack(q_words), ctx=self.ctx)

        y1 = nd.array(np.stack(y1), ctx=self.ctx)
        y2 = nd.array(np.stack(y2), ctx=self.ctx)

        return p_chars, q_chars, p_words, q_words, y1, y2, p_lens, q_lens, ids


def test_data_iter():
    """Smoke test: read and print one batch from the training record files."""
    data_file = '/data/qanet/data/train.rec'
    idx_file = '/data/qanet/data/train.rec.idx'
    data_iter = SimpleIter(idx_file, data_file)
    try:
        for d in data_iter:
            print(d)
            break
    finally:
        # Fix: the original never closed the iterator, leaking the RecordIO
        # file handle.
        data_iter.close()
