# -- encoding:utf-8 --

import os
import tensorflow as tf

from bert import modeling
from run_classifier import create_model, FLAGS, IMDBProcessor, SentimentCorpProcessor, WeiBoProcessor

if __name__ == '__main__':
    # Export tool: restore the latest trained checkpoint from FLAGS.output_dir
    # and freeze the inference graph (variables folded into constants) to
    # `<output_dir>/model.pb` for deployment.
    with tf.Graph().as_default():
        # 1. Locate the trained checkpoint; fail fast if none exists.
        output_dir = FLAGS.output_dir
        ckpt = tf.train.get_checkpoint_state(output_dir)
        if not (ckpt and ckpt.model_checkpoint_path):
            raise Exception("没有训练好的模型文件!!!")

        # 2. Resolve the task-specific data processor to recover the label set
        #    (num_labels is part of the model architecture).
        processors = {
            "imdb": IMDBProcessor,
            "corp": SentimentCorpProcessor,
            "weibo": WeiBoProcessor
        }
        task_name = FLAGS.task_name.lower()
        if task_name not in processors:
            raise ValueError("Task not found: %s" % (task_name))
        processor = processors[task_name]()
        label_list = processor.get_labels()
        num_labels = len(label_list)

        # 3. Rebuild the inference graph with named input placeholders so the
        #    frozen graph can be fed by tensor name at serving time.
        bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
        input_ids = tf.placeholder(dtype=tf.int32, shape=[None, FLAGS.max_seq_length], name='input_id')
        input_mask = tf.placeholder(dtype=tf.int32, shape=[None, FLAGS.max_seq_length], name='input_mask')
        labels = tf.placeholder(dtype=tf.int32, shape=[None], name='label')
        # is_training=False disables dropout; only the probability/prediction
        # outputs are needed for serving.
        _, _, _, probabilities, predictions = create_model(
            bert_config, is_training=False,
            input_ids=input_ids,
            input_mask=input_mask,
            segment_ids=None,
            labels=labels,
            num_labels=num_labels,
            use_one_hot_embeddings=False,
            num_filters=[128, 128, 128],
            region_sizes=[2, 3, 4]
        )

        # Use a context manager so the session is always closed, even if
        # restore/freeze raises (the original leaked the session).
        with tf.Session() as sess:
            # 4. Restore trained weights into the rebuilt graph.
            tf.train.Saver().restore(sess, ckpt.model_checkpoint_path)

            # 5. Freeze: keep only the subgraph reachable from these nodes,
            #    converting variables to constants. Node names must match the
            #    placeholder names above and the scoped output ops created by
            #    create_model.
            output_node_names = [
                "input_id",
                "input_mask",
                "TEXT_CNN/project/probabilities",
                "TEXT_CNN/project/predictions"
            ]
            convert_graph_def = tf.graph_util.convert_variables_to_constants(
                sess=sess,
                input_graph_def=sess.graph.as_graph_def(),
                output_node_names=output_node_names)

            # 6. Serialize the frozen GraphDef to a binary .pb file.
            pb_path = os.path.join(output_dir, "model.pb")
            with tf.gfile.GFile(pb_path, 'wb') as writer:
                writer.write(convert_graph_def.SerializeToString())

    print('pb文件保存成功！！!')