# -*- coding: utf-8 -*-


import tensorflow as tf
import six
import os
import numpy as np
from random import shuffle



def _int64_feature(value):
  """Wrapper for inserting int64 features into Example proto."""
  values = value if isinstance(value, list) else [value]
  return tf.train.Feature(int64_list=tf.train.Int64List(value=values))


def _float_feature(value):
  """Wrapper for inserting float features into Example proto."""
  values = value if isinstance(value, list) else [value]
  return tf.train.Feature(float_list=tf.train.FloatList(value=values))


def _bytes_feature(value):
  """Wrapper for inserting bytes features into Example proto."""
  if isinstance(value, six.string_types):
    # BytesList only accepts byte strings, so encode text first.
    data = six.binary_type(value, encoding='utf-8')
  else:
    data = value
  return tf.train.Feature(bytes_list=tf.train.BytesList(value=[data]))


def _convert_to_example(image_buffer, label):
    """Build an Example proto holding one encoded image and its integer label."""
    feature_map = {
        'image/label': _int64_feature(label),
        'image/encoded': _bytes_feature(image_buffer),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature_map))


def _process_image_files(output_directory, name, filenames, labels, num_shards):
    """Serialize (image, label) pairs into `num_shards` TFRecord shard files.

    Args:
      output_directory: directory the shard files are written into.
      name: shard filename prefix, e.g. 'train'.
      filenames: sequence of image file paths.
      labels: sequence of integer labels aligned with `filenames`.
      num_shards: number of TFRecord shard files to produce.
    """
    num_images = len(filenames)
    # Shard boundaries, e.g. [0, 100, 200, ...].
    # NOTE: np.int was removed in NumPy 1.24; cast with the builtin int instead.
    num_batch = np.linspace(0, num_images, num_shards + 1).astype(int)

    for counter in range(num_shards):
        output_filename = '%s-%.5d-of-%.5d' % (name, counter, num_shards)
        output_file = os.path.join(output_directory, output_filename)
        start, end = num_batch[counter], num_batch[counter + 1]
        # Context manager guarantees the writer is closed even if a write raises
        # (the original leaked the file handle on error).
        with tf.python_io.TFRecordWriter(output_file) as writer:
            for j in range(start, end):
                filename, label = filenames[j], labels[j]
                try:
                    with tf.gfile.FastGFile(filename, 'rb') as f:
                        image_buffer = f.read()
                except Exception as e:
                    # Best-effort: skip unreadable images rather than aborting the shard.
                    print(e)
                    continue
                example = _convert_to_example(image_buffer, label)
                writer.write(example.SerializeToString())
                print('writing {} picture, filename is {}, label is {}, shard is {}'.format(j,filename,label,counter))
  
def _find_image_files(data_dir, labels_file):
    """Return shuffled, aligned tuples of image paths and binary labels.

    Label 0 means 'cat' appears in the base file name, 1 otherwise.
    `labels_file` is accepted for interface compatibility but unused here.
    """
    pattern = '%s/*.jpg' % (data_dir)
    matching_files = tf.gfile.Glob(pattern)
    labels = []
    for path in matching_files:
        labels.append(0 if 'cat' in os.path.basename(path) else 1)
    # Shuffle paths and labels together so the pairing stays intact.
    paired = list(zip(matching_files, labels))
    shuffle(paired)
    filenames, labels = zip(*paired)
    return filenames, labels
     
def _process_dataset(output_directory,name, directory, labels_file, num_shards):
    """Scan `directory` for jpg images and serialize them into TFRecord shards."""
    filenames, labels = _find_image_files(directory, labels_file)
    _process_image_files(output_directory, name, filenames, labels, num_shards)
        
def main(unused_argv):
    """Entry point for tf.app.run(): convert ../../cat_dog/train into 8 TFRecord shards under ./tfdata."""
    _process_dataset('tfdata','train', '../../cat_dog/train', '', 8)   

def input_function(filenames):
    """Build a one-shot input pipeline over the given TFRecord files.

    Each record is parsed into a (image, label) pair: the image is decoded,
    resized to 224x224, and batched in groups of 32; the label is an int32
    scalar. Returns the iterator's `get_next()` tensors.
    """
    def _parse_function(example_proto):
        feature_spec = {
                'image/label': tf.FixedLenFeature((), tf.int64, default_value=0),
                'image/encoded': tf.FixedLenFeature((), tf.string, default_value="")
            }
        parsed = tf.parse_single_example(example_proto, feature_spec)

        label = tf.cast(parsed['image/label'], tf.int32)
        image = tf.image.decode_image(parsed['image/encoded'])
        # decode_image leaves the static shape unknown; pin the rank so
        # resize_images accepts the tensor.
        image.set_shape([None, None, None])
        # All images must share one size, otherwise they cannot be batched.
        image = tf.image.resize_images(image, (224, 224))
        return image, label

    dataset = (tf.data.TFRecordDataset(filenames)
               .map(_parse_function)
               .repeat()
               .batch(32))
    iterator = dataset.make_one_shot_iterator()
    return iterator.get_next()

if __name__ == '__main__':
    # tf.app.run parses command-line flags and then invokes main(argv).
    tf.app.run()
