import numpy as np
import pymysql
import os
import pickle as pkl
import tensorflow as tf
from tensorflow.contrib import learn
import data_loader

# Command-line flags. Fix: the params_dir help text was a copy-paste of the
# model_dir help ("Restore the model from this run"), which described the
# wrong directory.
tf.flags.DEFINE_string('model_dir', './model/1601623821',
                       '''Directory of the exported SavedModel to restore''')
tf.flags.DEFINE_string('params_dir', './params',
                       '''Directory holding params.pkl and the saved vocabulary''')
tf.flags.DEFINE_integer('batch_size', 16, 'Test batch size')
FLAGS = tf.flags.FLAGS


def main(_):
    """Run inference over rows loaded from MySQL and write predictions back.

    Restores the SavedModel graph from ``FLAGS.model_dir`` plus the pickled
    preprocessing params and vocabulary from ``FLAGS.params_dir``, classifies
    the rows returned by ``data_loader.load_data_db``, and inserts one
    (label, time) row per prediction into the ``flow`` table of database
    ``ep2``.

    Args:
        _: Unused; required by ``tf.app.run`` calling convention.
    """
    print("====================推断程序====================\n")
    print("=====> 推断程序开始 <=====")
    db = pymysql.connect(
        host='127.0.0.1',
        user='root',
        passwd='password',
        db='ep2',
        charset='utf8',
        port=3306,
        autocommit=True
    )
    # Restore the training-time preprocessing parameters.
    with open(os.path.join(FLAGS.params_dir, 'params.pkl'), 'rb') as f:
        params = pkl.load(f, encoding='bytes')
    # Restore the vocabulary processor so inference text is tokenized
    # exactly as during training.
    vocab_processor = learn.preprocessing.VocabularyProcessor.restore(
        os.path.join(FLAGS.params_dir, 'vocab'))

    # Load test data. shuffle=False is essential: predictions must stay
    # index-aligned with `times` for the INSERT below.
    data, times, lengths, _ = data_loader.load_data_db(db,
                                                       sw_path=params['stop_word_file'],
                                                       min_frequency=params['min_frequency'],
                                                       max_length=params['max_length'],
                                                       language=params['language'],
                                                       vocab_processor=vocab_processor,
                                                       shuffle=False)

    # One session bound to one fresh graph. (The previous version created
    # three graph/session objects and leaked two of them.)
    graph = tf.Graph()
    with tf.Session(graph=graph) as sess:
        tf.saved_model.loader.load(sess, ['serve'], FLAGS.model_dir)

        # Look up the tensors once, by the names exported at training time.
        input_x = graph.get_tensor_by_name('input_x:0')
        input_y = graph.get_tensor_by_name('input_y:0')
        keep_prob = graph.get_tensor_by_name('keep_prob:0')
        predictions = graph.get_tensor_by_name('softmax/predictions:0')
        accuracy = graph.get_tensor_by_name('accuracy/accuracy:0')

        is_cnn = params['clf'] == 'cnn'
        if not is_cnn:
            # RNN-style graphs additionally need explicit batch size and
            # per-example sequence lengths; hoist the lookups out of the loop.
            batch_size_t = graph.get_tensor_by_name('batch_size:0')
            sequence_length_t = graph.get_tensor_by_name('sequence_length:0')

        # Single pass (1 epoch) over the test data.
        batches = data_loader.batch_iter_db(data, lengths, FLAGS.batch_size, 1)

        all_predictions = []
        for x_test, dummy, x_lengths in batches:
            # `dummy` labels are fed only because the graph requires input_y;
            # they do not influence `predictions`.
            feed_dict = {input_x: x_test, input_y: dummy, keep_prob: 1.0}
            if not is_cnn:
                # NOTE(review): feeds FLAGS.batch_size even for a short final
                # batch — assumes batch_iter_db pads or drops it; verify.
                feed_dict[batch_size_t] = FLAGS.batch_size
                feed_dict[sequence_length_t] = x_lengths
            batch_predictions, _ = sess.run([predictions, accuracy], feed_dict)
            all_predictions = np.concatenate([all_predictions, batch_predictions])

    print("=====> 成功完成推断 <=====")
    # Parameterized bulk insert instead of string-built SQL: avoids SQL
    # injection / quoting bugs in the `time` values and is a single round trip.
    rows = [(str(int(pred)), time)
            for pred, time in zip(all_predictions, times)]
    cursor = db.cursor()
    cursor.execute('use ep2')
    if rows:
        cursor.executemany("insert into flow(label,time) values (%s,%s)", rows)
    cursor.close()
    db.close()
    print("=====> 成功将推断结果写入数据库 <=====")
    print("\n===============================================")


def run():
    """Programmatic entry point: invoke :func:`main` with a dummy argv."""
    main('')


if __name__ == "__main__":
    tf.app.run()
