import tensorflow as tf

# -*- coding:utf-8 -*-

import tensorflow as tf
import os, sys
import numpy as np
import matplotlib.pyplot as plt
from scipy import misc

sys.path.append("./..")
from tf_pts_loader import pts_fts_loader, visualization
from multiprocessing import cpu_count
import threadpool


def thread_pool(enter_func, param_list):
    """Run *enter_func* over every item of *param_list* on a thread pool.

    The pool is sized to the machine's CPU count and this call blocks
    until all submitted requests have completed.
    """
    pool = threadpool.ThreadPool(cpu_count())
    requests = threadpool.makeRequests(enter_func, param_list)
    # Side effects only -- a plain loop, not a throwaway list comprehension.
    for req in requests:
        pool.putRequest(req)
    pool.wait()


def generate_one(task):
    """Convert one (pts, intensity) CSV pair into a single .tfrecord file.

    *task* is a record carrying: pts_file, inte_file, out_file_name,
    id (1-based index, for progress display) and len (total file count).
    Renamed from ``tuple`` -- the original parameter shadowed the builtin;
    all callers pass it positionally.
    """
    sys.stdout.write(
        '\r[ converting:{} of {} ] '.format(task.id, task.len))
    sys.stdout.flush()
    # No label file exists for this data set, hence the trailing None.
    loader = pts_fts_loader(task.pts_file, task.inte_file, None)
    data_train, data_num_train, _ = loader.get_pts_fts_label()

    # Context manager ensures the record file is flushed/closed even if
    # example construction raises.
    with tf.python_io.TFRecordWriter(task.out_file_name) as writer:
        example = tf.train.Example(features=tf.train.Features(feature={
            'pts_fts': tf.train.Feature(
                float_list=tf.train.FloatList(value=data_train.flatten())),
            'data_num_val': tf.train.Feature(
                int64_list=tf.train.Int64List(value=[data_num_train]))
            }
        ))
        writer.write(example.SerializeToString())

class pts_fts_generator_nolabel():
    """Convert a directory of paired pts/intensity CSVs into tfrecord files.

    Expects ``<data_set_dir>/pts/*.csv`` and ``<data_set_dir>/intensity/*.csv``
    with matching (sorted) file names; one .tfrecord per pair is written to
    *out_put_dir*. Conversion runs immediately in the constructor.
    """

    def __init__(self, data_set_dir, out_put_dir):
        self.pts_file_list = []
        self.inte_file_list = []
        self.out_file_list = []
        self.data_set_dir = data_set_dir
        self.out_dir = out_put_dir

        # os.listdir order is platform-dependent; sort both directories so a
        # pts file and its intensity file end up at the same list index.
        for pts_file in sorted(os.listdir(os.path.join(self.data_set_dir, "pts"))):
            if os.path.splitext(pts_file)[1] == ".csv":
                self.pts_file_list.append(os.path.join(data_set_dir, "pts", pts_file))
        for inte_file in sorted(os.listdir(os.path.join(self.data_set_dir, "intensity"))):
            if os.path.splitext(inte_file)[1] == ".csv":
                self.inte_file_list.append(os.path.join(data_set_dir, "intensity", inte_file))
                self.out_file_list.append(
                    os.path.join(self.out_dir, os.path.splitext(inte_file)[0] + ".tfrecord"))

        if not os.path.exists(self.out_dir):
            os.makedirs(self.out_dir)

        # Sequential conversion; thread_pool(generate_one, self.param_list())
        # is the parallel alternative.
        for param in self.param_list():
            generate_one(param)

    def param_list(self):
        """Build one task record per output file (paths + 1-based progress info)."""
        from collections import namedtuple  # local: keeps module-level imports untouched
        # One immutable record type, defined once -- the original re-declared a
        # class on every loop iteration with ``id``/``len`` shadowing builtins.
        Param = namedtuple('Param', ['pts_file', 'inte_file', 'out_file_name', 'id', 'len'])
        total = len(self.out_file_list)
        return [
            Param(self.pts_file_list[i], self.inte_file_list[i], out_name, i + 1, total)
            for i, out_name in enumerate(self.out_file_list)
        ]


class pts_fts_reader_nolabel():
    """Locate and decode the .tfrecord files written by pts_fts_generator_nolabel."""

    def __init__(self, data_set_dir):
        # Collect every *.tfrecord directly under data_set_dir.
        self.tf_record_list = []
        for tf_file in os.listdir(os.path.join(data_set_dir)):
            if os.path.splitext(tf_file)[1] == ".tfrecord":
                self.tf_record_list.append(os.path.join(data_set_dir, tf_file))

    def get_tf_files(self):
        """Return the list of discovered tfrecord paths."""
        return self.tf_record_list

    @staticmethod
    def read(filename_queue, shuffle_batch=False, batch_size=1, with_key=False,
             num_points=60000):
        """Build graph ops that dequeue batches of examples from *filename_queue*.

        Returns ``(pts_fts, data_num_val)`` batch tensors, plus the record key
        tensor when *with_key* is set. *num_points* is the fixed point count
        each record was padded to (default matches the generator's
        ``num_points x 4`` layout).
        """
        reader = tf.TFRecordReader()
        key, serialized_example = reader.read(filename_queue)
        features = tf.parse_single_example(
            serialized_example, features={
                'pts_fts': tf.VarLenFeature(tf.float32),
                'data_num_val': tf.FixedLenFeature([], tf.int64)
            }
        )

        pts_fts_ = tf.sparse_tensor_to_dense(features['pts_fts'], default_value=0)
        data_num_val_ = tf.cast(features['data_num_val'], tf.int32)

        # Records store a flat float list; restore the (num_points, 4) shape.
        pts_fts_ = tf.reshape(pts_fts_, [num_points, 4])

        tensors = [pts_fts_, data_num_val_]
        if with_key:
            tensors.append(key)

        # One batching path instead of four duplicated call sites.
        if shuffle_batch:
            batched = tf.train.shuffle_batch(
                tensors,
                batch_size=batch_size,
                num_threads=2,
                capacity=100,
                min_after_dequeue=2)
        else:
            batched = tf.train.batch(
                tensors,
                batch_size=batch_size,
                num_threads=2,
                capacity=100)
        return tuple(batched)

import time
def case1():
    """Convert the hard-coded test data set to tfrecord files, timing the run."""
    print("start converting")
    t0 = time.time()
    # Conversion runs inside the constructor; roughly 2 seconds per file.
    pts_fts_generator_nolabel("/home/leo/Downloads/training_data_set/autodri/TestSet/",
                              "/home/leo/Downloads/training_data_set/autodri/TestSet/out")
    print("cost:{}".format(time.time() - t0))


def case2(data_set_dir):
    """Read every tfrecord in *data_set_dir* back and visualize each cloud.

    Prints the record key of each file as it is loaded.
    """
    reader = pts_fts_reader_nolabel(data_set_dir)

    filename_queue = tf.train.string_input_producer(reader.get_tf_files(),
                                                    num_epochs=1)
    # Bug fix: read(..., with_key=True) yields exactly three tensors; the
    # original unpacked four (a non-existent label) and raised ValueError.
    pts_fts_b, data_num_val_b, key_b = pts_fts_reader_nolabel.read(
        filename_queue, shuffle_batch=False, with_key=True)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        tf.local_variables_initializer().run()

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)

        for _ in range(len(reader.get_tf_files())):
            pts_fts_, data_num_val_, key = sess.run([pts_fts_b, data_num_val_b, key_b])
            print("loading :{}".format(key))
            # NOTE(review): this data set has no labels; passing None for the
            # label argument -- confirm against visualization's signature.
            visualization(pts_fts_[0], None)
        coord.request_stop()
        coord.join(threads)


def case3(data_set_dir):
    """Read tfrecords back (without keys) and visualize each cloud."""
    # Bug fix: the original constructed pts_fts_generator_nolabel here, which
    # takes two arguments and would re-convert data; reading needs the reader.
    reader = pts_fts_reader_nolabel(data_set_dir)

    filename_queue = tf.train.string_input_producer(reader.get_tf_files(),
                                                    num_epochs=1)
    # Bug fix: read() without with_key returns two tensors; the original
    # unpacked three and raised ValueError.
    pts_fts_b, data_num_val_b = pts_fts_reader_nolabel.read(
        filename_queue, shuffle_batch=False)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        tf.local_variables_initializer().run()

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)

        for _ in range(len(reader.get_tf_files())):
            pts_fts_, data_num_val_ = sess.run([pts_fts_b, data_num_val_b])
            # NOTE(review): no labels in this set; confirm visualization
            # accepts None as its second argument.
            visualization(pts_fts_[0], None)
        coord.request_stop()
        coord.join(threads)


if __name__ == '__main__':
    # case1 converts the raw CSV data set to tfrecord files; case2/case3 are
    # read-back/visualization checks that need an existing tfrecord directory.
    case1()
    # case2("./data_3d_pts_lit/training/out/")
    # case3("./data_3d_pts1_lit/training/out/")

