import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.framework import dtypes
import random

def encode_label(label):
    """Convert a raw label token (e.g. "7" or "7\n") into an integer class id."""
    # int() tolerates surrounding whitespace, so the newline left by the
    # file reader does not need to be stripped first.
    value = int(label)
    return value

def read_label_file(file):
    """Parse a CSV-style label file into parallel lists.

    Each non-blank line must look like "<filepath>,<label>".

    Args:
        file: path to the label file.

    Returns:
        (filepaths, labels): parallel lists; filepaths are str, labels int.

    Fixes over the original:
      * blank lines (e.g. a trailing newline at end-of-file) are skipped
        instead of raising ValueError on tuple unpacking;
      * the line is split only on the FIRST comma, so a filepath containing
        a comma no longer raises.
    """
    filepaths = []
    labels = []
    with open(file, 'r') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # tolerate blank/trailing lines
            filepath, label = line.split(",", 1)
            filepaths.append(filepath)
            labels.append(encode_label(label))
    return filepaths, labels

def inputdata(dataset_path = "/data/ztl/uploadData/ILSVRC2012/",train_labels_file = "train.csv",IMAGE_HEIGHT = 224,IMAGE_WIDTH = 224,NUM_CHANNELS = 3,BATCH_SIZE = 1281):
    # reading labels and file path
    train_filepaths, train_labels = read_label_file(train_labels_file)

    b = random.sample(train_filepaths, 167)
    for i in b:
        train_filepaths.remove(i)

    b = random.sample(train_labels, 167)
    for i in b:
        train_labels.remove(i)


    img_nums=len(train_labels)

    print img_nums

    epochSize=img_nums//BATCH_SIZE
    # transform relative path into full path
    train_filepaths = [dataset_path + fp for fp in train_filepaths]
    # convert string into tensors
    train_images = ops.convert_to_tensor(train_filepaths, dtype=dtypes.string)
    train_labels = ops.convert_to_tensor(train_labels, dtype=dtypes.int32)
    # create input queues
    train_input_queue = tf.train.slice_input_producer(
        [train_images, train_labels],
        shuffle=False)
    # process path and string tensor into an image and a label
    file_content = tf.read_file(train_input_queue[0])
    train_image = tf.image.decode_jpeg(file_content, channels=NUM_CHANNELS)
    train_label = train_input_queue[1]

    train_image = tf.image.resize_images(train_image, [IMAGE_HEIGHT, IMAGE_WIDTH])
    train_image.set_shape([IMAGE_HEIGHT, IMAGE_WIDTH, NUM_CHANNELS])
    
    #_mean_image_subtraction(image, [_R_MEAN, _G_MEAN, _B_MEAN])

    # collect batches of images before processing
    train_image_batch, train_label_batch = tf.train.batch([train_image, train_label],batch_size=BATCH_SIZE)
    
    axis=list(range(len(train_image_batch.get_shape())-1))
    mean,variance=tf.nn.moments(train_image_batch,axis)
    #_mean_image_subtraction(train_image_batch, means)
    print "axis: ",axis
    print "input pipeline ready"

    with tf.Session() as sess:
        # initialize the variables
        sess.run(tf.global_variables_initializer())

        # initialize the queue threads to start to shovel data
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)

        mean_result=[0,0,0]

        print "from the train set:"
        for i in range(epochSize):
            # print sess.run(train_image_batch).shape
            # print sess.run(train_label_batch).shape
            # print tf.one_hot(sess.run(train_label_batch),1000)
            mean_next=sess.run(mean)
            # print mean_next
            mean_result=sess.run(tf.add(mean_result,mean_next))
            # print mean_result

        mean_final=mean_result/epochSize
        print mean_final
        with open('mean_result.txt','w') as f:
            f.write(str(mean_final))


        # stop our queue threads and properly close the session
        coord.request_stop()
        coord.join(threads)
        sess.close()

# Script entry point: compute the dataset mean with the default arguments.
if __name__ == '__main__':
    inputdata()
