import tensorflow as tf

# -*- coding:utf-8 -*-

import tensorflow as tf
import os, sys
import numpy as np
import matplotlib.pyplot as plt
from scipy import misc
from tf_pts_loader import pts_fts_loader , visualization


class pts_fts_generator():
    """Packs point-cloud CSV triples into per-sample TFRecord files.

    Expects ``data_set_dir`` to contain three parallel sub-directories --
    "pts", "intensity" and "category" -- whose .csv files correspond to each
    other by file name.  One ``.tfrecord`` file (holding a single
    ``tf.train.Example``) is written into ``out_put_dir`` per category file.
    Generation runs immediately from the constructor.
    """

    def __init__(self, data_set_dir, out_put_dir):
        self.pts_file_list = []
        self.inte_file_list = []
        self.label_file_list = []
        self.out_file_list = []
        self.data_set_dir = data_set_dir
        self.out_dir = out_put_dir

        # BUG FIX: os.listdir() returns entries in arbitrary order, so the
        # three lists could misalign and pair points with the wrong labels.
        # Sorting each listing keeps them aligned sample-by-sample (assuming
        # the three directories use matching file names -- TODO confirm).
        for pts_file in sorted(os.listdir(os.path.join(self.data_set_dir, "pts"))):
            if os.path.splitext(pts_file)[1] == ".csv":
                self.pts_file_list.append(os.path.join(data_set_dir, "pts", pts_file))
        for inte_file in sorted(os.listdir(os.path.join(self.data_set_dir, "intensity"))):
            if os.path.splitext(inte_file)[1] == ".csv":
                self.inte_file_list.append(os.path.join(data_set_dir, "intensity", inte_file))
        for label_file in sorted(os.listdir(os.path.join(self.data_set_dir, "category"))):
            if os.path.splitext(label_file)[1] == ".csv":
                self.label_file_list.append(os.path.join(data_set_dir, "category", label_file))
                # Output name mirrors the category file's base name.
                self.out_file_list.append(os.path.join(self.out_dir, os.path.splitext(label_file)[0] + ".tfrecord"))
        if not os.path.exists(self.out_dir):
            os.makedirs(self.out_dir)
        self.generate()

    def generate(self):
        """Write one single-Example TFRecord per pts/intensity/label triple."""
        # zip() walks the four parallel lists in lockstep, replacing the
        # original manual `idx` bookkeeping.
        for out_file_name, pts_file, inte_file, label_file in zip(
                self.out_file_list, self.pts_file_list,
                self.inte_file_list, self.label_file_list):
            loader = pts_fts_loader(pts_file, inte_file, label_file)
            data_train, data_num_train, label_train = loader.get_pts_fts_label()

            example = tf.train.Example(features=tf.train.Features(feature={
                'pts_fts': tf.train.Feature(float_list=tf.train.FloatList(value=data_train.flatten())),
                'data_num_val': tf.train.Feature(int64_list=tf.train.Int64List(value=[data_num_train])),
                'label': tf.train.Feature(int64_list=tf.train.Int64List(value=label_train))}
            ))
            # Context manager guarantees the writer is closed even if
            # serialization raises part-way through.
            with tf.python_io.TFRecordWriter(out_file_name) as writer:
                writer.write(example.SerializeToString())

class pts_fts_reader():
    """Lists .tfrecord files in a directory and builds TF1 input batches.

    ``__init__``/``get_tf_files`` only scan the filesystem; ``read`` builds
    the (deprecated-style) TF1 queue-based input pipeline.
    """

    def __init__(self, data_set_dir):
        self.tf_record_list = []
        # Sort the listing: os.listdir() order is arbitrary, and a
        # deterministic file order makes runs reproducible.
        for tf_file in sorted(os.listdir(os.path.join(data_set_dir))):
            if os.path.splitext(tf_file)[1] == ".tfrecord":
                self.tf_record_list.append(os.path.join(data_set_dir, tf_file))

    def get_tf_files(self):
        """Return the absolute-ish paths of the discovered .tfrecord files."""
        return self.tf_record_list

    @staticmethod
    def read(filename_queue, shuffle_batch=False, batch_size=1):
        """Parse Examples from *filename_queue* into dense batched tensors.

        Returns a (pts_fts, data_num_val, label) tuple of batched tensors.
        Counterpart of the feature dict written by pts_fts_generator.
        """
        reader = tf.TFRecordReader()
        _, serialized_example = reader.read(filename_queue)
        features = tf.parse_single_example(
            serialized_example, features={
                'pts_fts': tf.VarLenFeature(tf.float32),
                'data_num_val': tf.FixedLenFeature([], tf.int64),
                'label': tf.VarLenFeature(tf.int64)
            }
        )

        # Densify the variable-length features (padding value 0).
        pts_fts_ = tf.sparse_tensor_to_dense(features['pts_fts'], default_value=0)
        data_num_val_ = tf.cast(features['data_num_val'], tf.int32)
        label_ = tf.sparse_tensor_to_dense(features['label'], default_value=0)

        # NOTE(review): every sample is assumed to hold exactly 60000 points
        # with 4 values each (presumably x, y, z, intensity -- confirm against
        # pts_fts_loader); samples of any other size will fail this reshape.
        pts_fts_ = tf.reshape(pts_fts_, [60000, 4])
        label_ = tf.reshape(label_, [60000, ])

        if shuffle_batch:
            pts_fts_b, data_num_val_b, label_b = tf.train.shuffle_batch(
                [pts_fts_, data_num_val_, label_],
                batch_size=batch_size,
                num_threads=2,
                capacity=100,
                min_after_dequeue=2)
        else:
            pts_fts_b, data_num_val_b, label_b = tf.train.batch(
                [pts_fts_, data_num_val_, label_],
                batch_size=batch_size,
                num_threads=2,
                capacity=100)
        return pts_fts_b, data_num_val_b, label_b


def case1():
    """Generate the training TFRecords (construction triggers the write)."""
    pts_fts_generator("./data_3d_pts_lit/training", "./data_3d_pts_lit/training/out")


def case2(data_set_dir):
    """Stream six batches from the TFRecords in *data_set_dir* and visualize them."""
    record_reader = pts_fts_reader(data_set_dir)

    # TF1 queue-based input pipeline: filename queue -> parsed batches.
    queue = tf.train.string_input_producer(record_reader.get_tf_files(),
                                           num_epochs=10)
    pts_batch, num_batch, label_batch = pts_fts_reader.read(
        queue, shuffle_batch=False)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        # string_input_producer's epoch counter lives in local variables.
        tf.local_variables_initializer().run()

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)

        for _ in range(6):
            pts, _num, labels = sess.run([pts_batch, num_batch, label_batch])
            visualization(pts, labels)

        coord.request_stop()
        coord.join(threads)


if __name__ == '__main__':
    # case1() writes the TFRecords; run it once before case2() can read them.
    # case1()
    case2("./data_3d_pts_lit/training/out/")
