#! /usr/bin/env python
import tensorflow as tf
from ABSA.utils import load_w2v, load_inputs2, batch_iter, score_BIO
from sklearn.metrics import f1_score

# Command-line flags. NOTE(review): the original mixed `tf.app.flags` and
# `tf.flags` (aliases for the same flags module in TF1); unified on
# `tf.app.flags` for consistency. Commented alternates switch the script
# between the laptop and restaurant datasets.
tf.app.flags.DEFINE_string('test_file_path_2', 'data/multi/lap_5_test.txt', 'testing file_2')
# tf.app.flags.DEFINE_string('test_file_path_2', 'data/multi/res_5_test.txt', 'testing file_2')

tf.app.flags.DEFINE_string('embedding_file_path', 'data/laptop/laptop_2014_840b_300.txt', 'embedding file')
# tf.app.flags.DEFINE_string('embedding_file_path', 'data/restaurant/restaurant_2014_840b_300.txt', 'embedding file')

tf.app.flags.DEFINE_integer('embedding_dim', 300, 'dimension of word embedding')

tf.app.flags.DEFINE_string("checkpoint_dir", "data/laptop/22checkpoint2", "Checkpoint directory")
# tf.app.flags.DEFINE_string("checkpoint_dir", "data/restaurant/2checkpoint4", "Checkpoint directory")

tf.app.flags.DEFINE_integer('max_sentence_len', 80, 'max number of tokens per sentence')
tf.app.flags.DEFINE_integer('max_target_len', 10, 'max target length')
tf.app.flags.DEFINE_integer('n_class', 3, 'number of distinct class')

# Optional output files for predicted / gold labels (writing is commented
# out at the bottom of the script).
tf.app.flags.DEFINE_string('prob_file', 'prob_14lap_label.txt', 'prob')
# Fixed copy-pasted help text: this flag names the true-label file, not 'prob'.
tf.app.flags.DEFINE_string('true_file', 'true_14lap_label.txt', 'true label file')

tf.app.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.app.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")

FLAGS = tf.app.flags.FLAGS

# Build the word -> id mapping from the pretrained embedding file.
# The embedding matrix itself is discarded here — presumably the restored
# graph already contains the trained embedding weights (verify in ABSA.utils).
word_id_mapping, _ = load_w2v(FLAGS.embedding_file_path, FLAGS.embedding_dim)

# Load and pad the test set: token ids, sentence lengths, target-phrase
# token ids, target lengths, labels, and position features. The final
# return value is unused here. NOTE(review): assumes load_inputs2 pads to
# max_sentence_len / max_target_len — confirm against ABSA.utils.
te_x, te_sen_len, te_target_word, te_tar_len, te_y, te_position, _ = load_inputs2(
    FLAGS.test_file_path_2,
    word_id_mapping,
    FLAGS.max_sentence_len,
    FLAGS.max_target_len)


print("\nTest...\n")
# Locate the most recent checkpoint saved under checkpoint_dir.
checkpoint_file = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
graph = tf.Graph()
with graph.as_default():
    session_conf = tf.ConfigProto(
        allow_soft_placement=FLAGS.allow_soft_placement,
        log_device_placement=FLAGS.log_device_placement)
    sess = tf.Session(config=session_conf)
    with sess.as_default():
        # Rebuild the graph structure from the checkpoint's .meta file,
        # then restore the trained weights into this session.
        saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
        saver.restore(sess, checkpoint_file)

        # Look up input placeholders by the names given at training time
        # (all under the "inputs/" name scope).
        x = graph.get_operation_by_name("inputs/input_x").outputs[0]
        y_ = graph.get_operation_by_name("inputs/input_y_2").outputs[0]
        sen_len = graph.get_operation_by_name("inputs/input_sen_len").outputs[0]
        target_words = graph.get_operation_by_name("inputs/input_target").outputs[0]
        tar_len = graph.get_operation_by_name("inputs/input_tar_len").outputs[0]

        # Output tensors: gold labels, predicted labels, and the number of
        # correct predictions in the batch (per the "acc_number" op name).
        true_y = graph.get_operation_by_name("true_y_2").outputs[0]
        pred_y = graph.get_operation_by_name("pred_y_2").outputs[0]
        acc_num = graph.get_operation_by_name("acc_number").outputs[0]

        position = graph.get_operation_by_name("inputs/position").outputs[0]
        keep_prob1 = graph.get_operation_by_name("input_keep_prob1").outputs[0]

        def dev_step(te_x_f, te_sen_len_f, te_target, te_tl, te_yi, te_x_poisition):
            """Run inference on one evaluation batch.

            Feeds the batch into the restored graph with dropout disabled
            (keep_prob1 = 1.0) and returns:
                pre_label  -- list of predicted labels for the batch
                true_label -- list of gold labels for the batch
                acc        -- number of correct predictions in the batch
                num        -- number of samples in the batch
            """
            feed_dict = {
                x: te_x_f,
                y_: te_yi,
                sen_len: te_sen_len_f,
                target_words: te_target,
                tar_len: te_tl,
                position: te_x_poisition,
                keep_prob1: 1.0
            }

            tf_true, tf_pred, acc1 = sess.run([true_y, pred_y, acc_num], feed_dict)
            # The original zero-initialized accumulators and immediately
            # added a single batch's values to them; that was dead code for
            # a per-batch helper — return the values directly instead.
            return list(tf_pred), list(tf_true), acc1, len(te_x_f)


        batches_test = batch_iter(
            list(zip(te_x, te_sen_len, te_target_word, te_tar_len, te_y, te_position)), 500, 1, False)

        label_pp = []
        label_tt = []
        accuracy, number = 0, 0
        for batch_ in batches_test:
            te_x_batch, te_sen_len_batch, te_target_batch, te_tar_len_batch, te_y_batch, te_position_batch = zip(
                *batch_)
            labell_p, labell_t, _acc, num = dev_step(te_x_batch, te_sen_len_batch, te_target_batch, te_tar_len_batch, te_y_batch,
                                          te_position_batch)
            label_pp += labell_p
            label_tt += labell_t
            accuracy += _acc
            number += num

        acc = accuracy / number
        print('all samples={}, correct prediction={}'.format(number, acc))
        F1 = f1_score(label_tt, label_pp, average=None)
        print('F1:', F1, 'avg=', sum(F1) / FLAGS.n_class)
        print('Max acc={}\n'.format(acc))

        # fp = open(FLAGS.prob_file, 'w')
        # for ws in label_pp:
        #     fp.write(' '.join([str(w) for w in ws]) + '\n')
        #
        # fp = open(FLAGS.true_file, 'w')
        # for ws in label_tt:
        #     fp.write(' '.join([str(w) for w in ws]) + '\n')
