#!/usr/bin/env python
# -*- coding: utf-8 -*-

import tensorflow as tf



class SegBatcher(object):
    """Queue-based (TF1) batch reader for segmentation TFRecord files.

    Builds a dequeue op that yields dynamically padded batches of
    (inputs, targets, lengths) int64 tensors. Run ``next_batch_op`` in a
    session (with queue runners started) to fetch the next batch.
    """

    def __init__(self, record_file_name, batch_size, num_epochs=None):
        """Construct the pipeline.

        Args:
            record_file_name: path to a single TFRecord file of SequenceExamples.
            batch_size: number of examples per dequeued batch.
            num_epochs: passes over the file; None means loop indefinitely.
        """
        self._batch_size = batch_size
        self._epoch = 0
        self._step = 1.
        self.num_epochs = num_epochs
        # Graph op built once; evaluating it dequeues the next batch.
        self.next_batch_op = self.input_pipeline(record_file_name, self._batch_size, self.num_epochs)

    def example_parser(self, filename_queue):
        """Parse one serialized SequenceExample from the filename queue.

        Returns:
            (char_list, labels, sent_len): the 'inputs', 'targets' and
            'lengths' sequence tensors, in that order.

        Fix: the original bound example['inputs'] to a variable named
        ``labels`` and example['targets'] to ``char_list`` (names swapped).
        The names are corrected here; the returned tensor order — inputs,
        targets, lengths — is unchanged, so callers are unaffected.
        """
        reader = tf.TFRecordReader()
        _key, record_string = reader.read(filename_queue)

        features = {
            'inputs': tf.FixedLenSequenceFeature([], tf.int64),
            'targets': tf.FixedLenSequenceFeature([], tf.int64),
            # NOTE(review): 'lengths' is declared as a *sequence* feature;
            # scalar lengths are usually stored as a context feature —
            # confirm against the TFRecord writer.
            'lengths': tf.FixedLenSequenceFeature([], tf.int64),
        }

        _, example = tf.parse_single_sequence_example(serialized=record_string, sequence_features=features)
        char_list = example['inputs']    # input character-id sequence
        labels = example['targets']      # per-character target labels
        sent_len = example['lengths']    # sequence length
        return char_list, labels, sent_len

    def input_pipeline(self, filenames, batch_size, num_epochs=None):
        """Build filename queue -> parser -> dynamically padded batch op.

        Args:
            filenames: path to the TFRecord file (single path, wrapped in a list).
            batch_size: examples per batch.
            num_epochs: epochs for the filename queue; None = unlimited.

        Returns:
            A dequeue op producing [inputs, targets, lengths] batches,
            dynamically padded, with a possibly smaller final batch.
        """
        filename_queue = tf.train.string_input_producer([filenames], num_epochs=num_epochs, shuffle=True)
        char_list, labels, sent_len = self.example_parser(filename_queue)

        # min_after_dequeue only affects shuffle_batch; it is kept here
        # solely to size the queue capacity as in the original code.
        min_after_dequeue = 10000
        capacity = min_after_dequeue + 12 * batch_size
        next_batch = tf.train.batch([char_list, labels, sent_len], batch_size=batch_size, capacity=capacity,
                                    dynamic_pad=True, allow_smaller_final_batch=True)
        return next_batch


