import tensorflow as tf


class DataSet:
    """Batched input pipeline over TFRecord files (TensorFlow 1.x API).

    Each record is expected to hold a raw-float32 feature matrix ('data'),
    its row count ('len'), and an integer class label ('label').
    Variable-length matrices are zero-padded per batch by `padded_batch`.
    """

    def __init__(self, tfrecords, batch_size, is_repeate=False):
        """Build the dataset graph and a one-shot iterator.

        Args:
            tfrecords: Path (or list of paths) to TFRecord file(s).
            batch_size: Number of examples per padded batch.
            is_repeate: If True, repeat the dataset indefinitely.
                (Name misspelled but kept for caller compatibility.)
        """
        tf_dataset = tf.data.TFRecordDataset(tfrecords)
        tf_dataset = tf_dataset.map(self._parse_single_examples)
        if is_repeate:
            tf_dataset = tf_dataset.repeat()
        # NOTE(review): shuffling after repeat() mixes examples across epoch
        # boundaries; move shuffle before repeat if per-epoch shuffling is
        # intended. Buffer of 1000 examples.
        tf_dataset = tf_dataset.shuffle(1000)
        # BUG FIX: scalar features must use shape [] here, not {} — an empty
        # dict is not a valid padded shape and makes padded_batch raise.
        # 'data' pads both its (variable) row and column dimensions.
        tf_dataset = tf_dataset.padded_batch(batch_size,
                                             padded_shapes={'data': [None, None],
                                                            'len': [],
                                                            'label': []})
        self.tf_dataset = tf_dataset
        self.iterator = self.tf_dataset.make_one_shot_iterator()

    @staticmethod
    def _parse_single_examples(example_proto):
        """Parse one serialized Example into {'data', 'len', 'label'} tensors.

        'data' is stored as raw float32 bytes and reshaped to
        [len, feature_dim]; 'len' and 'label' are cast down to int32.
        """
        features = {'data': tf.FixedLenFeature([], tf.string),
                    'len': tf.FixedLenFeature([], tf.int64),
                    'label': tf.FixedLenFeature([], tf.int64)}
        parsed = tf.parse_single_example(example_proto, features=features)
        length = tf.cast(parsed['len'], tf.int32)
        label = tf.cast(parsed['label'], tf.int32)
        # -1 lets TF infer the feature dimension from the decoded byte count.
        data = tf.reshape(tf.decode_raw(parsed['data'], tf.float32),
                          [length, -1])
        return {'data': data, 'len': length, 'label': label}

    def get_next(self):
        """Return the iterator's next-batch op (a dict of tensors)."""
        return self.iterator.get_next()
