#!/usr/bin/python3
"""Testing On ShapeNet Parts Segmentation Task."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys
import math
import argparse
import random
import importlib
import numpy as np
import tensorflow as tf
from datetime import datetime
sys.path.append("./../")
import data_utils
import pointfly as pf
from tf_pts_generator_nolabel import pts_fts_reader_nolabel


def main():
    """Run part-segmentation inference over a filelist of point clouds.

    Restores a trained checkpoint, then for each input cloud repeatedly
    draws random point subsets and runs the network until every point has
    a prediction (or the repeat budget ``setting.eval_rpt_num`` is spent),
    keeping the highest-confidence label per point. One ``.csv`` file with
    one label per line is written per input cloud into ``--save_ply``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--filelist', '-f', help='Path to input .h5 filelist (.txt)', required=True)
    parser.add_argument('--category', '-c', help='Path to category list file (.txt)', required=False)
    parser.add_argument('--data_folder', '-d', help='Path to *.pts directory', required=False)
    parser.add_argument('--load_ckpt', '-l', help='Path to a check point file for load', required=True)
    parser.add_argument('--repeat_num', '-r', help='Repeat number', type=int, default=1)
    parser.add_argument('--sample_num', help='Point sample num', type=int, default=1024)
    parser.add_argument('--model', '-m', help='Model to use', required=True)
    parser.add_argument('--setting', '-x', help='Setting to use', required=True)
    parser.add_argument('--save_ply', '-s', help='Save results as ply', required=True)
    args = parser.parse_args()
    print(args)

    save_path = args.save_ply
    # exist_ok avoids the check-then-create race of an exists()/makedirs pair.
    os.makedirs(save_path, exist_ok=True)

    # The model module (e.g. pointcnn_seg) and its setting module are loaded
    # dynamically; the setting lives next to the model package one level up.
    model = importlib.import_module(args.model)
    setting_path = os.path.join(os.path.dirname(__file__) + "/../", args.model)
    sys.path.append(setting_path)
    setting = importlib.import_module(os.path.basename(args.setting))

    sample_num = setting.sample_num
    rotation_range = setting.rotation_range  # [0,0,0 ,u]
    rotation_range_val = setting.rotation_range_val
    scaling_range = setting.scaling_range

    # Prepare inputs
    print('{}-Preparing datasets...'.format(datetime.now()))

    evalset_reader = pts_fts_reader_nolabel(args.filelist)
    file_list = evalset_reader.get_tf_files()
    print("eval files:{}".format(len(file_list)))
    print(file_list)
    # Exactly one ordered epoch over the eval files (no shuffling).
    filename_queue_eval_op = tf.train.string_input_producer(file_list,
                                                            num_epochs=1)
    pts_fts_eval_op, data_num_eval_op, key_op = pts_fts_reader_nolabel.read(
        filename_queue_eval_op, batch_size=1, shuffle_batch=False, with_key=True)

    ######################################################################
    # Placeholders
    indices = tf.placeholder(tf.int32, shape=(1, None, 2), name="indices")
    is_training = tf.placeholder(tf.bool, name='is_training')
    pts_fts = tf.placeholder(tf.float32, shape=(None, setting.point_num, setting.data_dim), name='pts_fts')
    ######################################################################

    ######################################################################
    # Gather the randomly chosen point subset; split xyz from any extra
    # per-point features when the data carries more than 3 dims.
    pts_fts_sampled = tf.gather_nd(pts_fts, indices=indices, name='pts_fts_sampled')
    if setting.data_dim > 3:
        points_sampled, features_sampled = tf.split(pts_fts_sampled,
                                                    [3, setting.data_dim - 3],
                                                    axis=-1,
                                                    name='split_points_features')
        if not setting.use_extra_features:
            features_sampled = None
    else:
        points_sampled = pts_fts_sampled
        features_sampled = None

    net = model.Net(points_sampled, features_sampled, is_training, setting)
    logits = net.logits
    probs_op = tf.nn.softmax(logits, name='probs')

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    saver = tf.train.Saver()

    parameter_num = np.sum([np.prod(v.shape.as_list()) for v in tf.trainable_variables()])
    print('{}-Parameter number: {:d}.'.format(datetime.now(), parameter_num))

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        tf.local_variables_initializer().run()

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        # Load the model
        saver.restore(sess, args.load_ckpt)
        print('{}-Checkpoint loaded from {}!'.format(datetime.now(), args.load_ckpt))
        for batch_idx in range(len(file_list)):
            pts_fts_eval, data_num_eval, key = sess.run(
                [pts_fts_eval_op, data_num_eval_op, key_op])
            print("loading :{} batch idx:{} all_data:{}".format(key, batch_idx, len(file_list)))
            # In Python 3 the key tensor evaluates to bytes; str(bytes)
            # would embed "b'...'" into the output filename, so decode first.
            key_str = key.decode('utf-8') if isinstance(key, bytes) else str(key)
            out_file_name = os.path.splitext(key_str)[0] + ".csv"
            # Best (label, confidence) seen so far for each point; (-1, 0.0)
            # marks a point that has not been covered by any sample yet.
            predictions = [(-1, 0.0)] * data_num_eval[0]
            cnt_0 = sample_num  # number of still-unpredicted points
            loop_cnt = 0
            # Keep resampling until the repeat budget is used AND every
            # point has at least one prediction.
            while loop_cnt < setting.eval_rpt_num or cnt_0 > 0:
                # Jitter the sample size with a clipped Gaussian offset.
                offset = int(random.gauss(0, sample_num * setting.sample_num_variance))
                offset = max(offset, -sample_num * setting.sample_num_clip)
                offset = min(offset, sample_num * setting.sample_num_clip)
                sample_num_eval = sample_num + offset
                # NOTE(review): the xforms are generated but never fed to the
                # graph — test-time augmentation appears disabled. The call is
                # kept so the NumPy RNG sequence (shared with get_indices)
                # is unchanged; confirm before removing.
                xforms_np, rotations_np = pf.get_xforms(1,
                                                        rotation_range=rotation_range,
                                                        scaling_range=scaling_range,
                                                        order=setting.rotation_order)
                sam_index = pf.get_indices(1, sample_num_eval, data_num_eval)
                _, probs = sess.run([update_ops, probs_op],
                                    feed_dict={
                                        pts_fts: pts_fts_eval,
                                        indices: sam_index,
                                        is_training: False,
                                    })
                # Use the actual sampled count: sample_num_eval can differ
                # from sample_num, and reshaping with the nominal count
                # would fail or scramble per-point rows.
                probs_2d = np.reshape(probs, (sample_num_eval * 1, -1))
                for idx in range(sample_num_eval):
                    point_idx = sam_index[0][idx][1]
                    point_probs = probs_2d[idx]
                    confidence = np.amax(point_probs)
                    label = np.argmax(point_probs)
                    # Keep the most confident label across all repeats.
                    if confidence > predictions[point_idx][1]:
                        predictions[point_idx] = (label, confidence)
                cnt_0 = sum(1 for pre in predictions if pre == (-1, 0.0))
                sys.stdout.write(
                    '\r[ filled data per sample: {} rpt: {} ] '.format(sample_num - cnt_0, loop_cnt))
                sys.stdout.flush()
                loop_cnt += 1

            out_path = os.path.join(save_path, os.path.basename(out_file_name))
            with open(out_path, 'w') as file_seg:
                for label, _ in predictions:
                    file_seg.write('%d\n' % (label))
            print("write: {} done.".format(out_path))

        coord.request_stop()
        coord.join(threads)
        print('{}-Done!'.format(datetime.now()))


if __name__ == '__main__':
    # Example invocation (paths are repo-relative):
    # -f ./data_test_pack/out/ -m pointcnn_seg -x shapenet_x8_2048_fps_autodri -s ./model_out/ -l ../model_save/seg2048_404000/ckpts/iter-404000
    main()
