#!/usr/bin/env python
# -*- coding: utf-8 -*-


import tensorflow as tf
import pickle
import numpy as np
from tensorflow.contrib.crf import viterbi_decode

def decode(logits, trans, sequence_lengths, tag_num):
    """Viterbi-decode a batch of CRF emission scores.

    Args:
        logits: iterable of per-sequence emission arrays; each is assumed to
            be shaped (max_len, tag_num) — confirm against the model graph.
        trans: CRF transition matrix, shaped (tag_num + 1, tag_num + 1) to
            account for the artificial start tag added below.
        sequence_lengths: true (unpadded) length of each sequence.
        tag_num: number of real tags (one extra "start" column is appended).

    Returns:
        list: one Viterbi tag-id path per sequence. Each path starts with
        the artificial start state; callers strip it (see Predictor.predict).
    """
    viterbi_sequences = []
    small = -1000.0
    # Start row forces the decoder to begin in the artificial start tag:
    # every real tag gets a large negative score, the start column gets 0.
    start = np.asarray([[small] * tag_num + [0]])
    for logit, length in zip(logits, sequence_lengths):
        score = logit[:length]
        # Extra column for the start tag, heavily penalized at real steps.
        pad = small * np.ones([length, 1])
        # NOTE: the original rebound the parameter name `logits` here,
        # shadowing the batch being iterated; use a fresh local instead.
        padded = np.concatenate([score, pad], axis=1)
        padded = np.concatenate([start, padded], axis=0)
        viterbi_seq, _ = viterbi_decode(padded, trans)
        viterbi_sequences.append(viterbi_seq)
    return viterbi_sequences


class Predictor(object):
    """Restores a trained TF1 CRF sequence tagger and tags raw text.

    Loads the char/tag vocabularies from a pickled map file, restores the
    latest checkpoint into a private graph/session, and caches handles to
    the input placeholders, logits, and the CRF transition matrix.
    """

    def __init__(self, map_file, checkpoint_dir):
        """Restore the model.

        Args:
            map_file: path to a pickle containing
                (char_to_id, id_to_char, tag_to_id, id_to_tag).
            checkpoint_dir: directory holding the TF checkpoint files.
        """
        # NOTE(review): pickle.load on an arbitrary path executes code on
        # load — only use map files from a trusted source.
        with open(map_file, "rb") as f:
            char_to_id, id_to_char, tag_to_id, id_to_tag = pickle.load(f)

        # BUG FIX: the original built this config (with allow_growth) but
        # created the session from a *separate* ConfigProto, so the GPU
        # memory-growth setting was silently ignored. Use one config for
        # everything.
        tf_config = tf.ConfigProto(
            allow_soft_placement=True,
            log_device_placement=False)
        tf_config.gpu_options.allow_growth = True

        self.tag_num = len(tag_to_id)
        self.tf_config = tf_config
        self.char_to_id = char_to_id
        self.id_to_tag = {v: k for k, v in tag_to_id.items()}
        self.id_to_char = id_to_char
        self.checkpoint_dir = checkpoint_dir

        self.graph = tf.Graph()
        self.checkpoint_file = tf.train.latest_checkpoint(self.checkpoint_dir)
        print(self.checkpoint_file)
        with self.graph.as_default():
            self.sess = tf.Session(config=tf_config)
            with self.sess.as_default():
                saver = tf.train.import_meta_graph(
                    "{}.meta".format(self.checkpoint_file))
                saver.restore(self.sess, self.checkpoint_file)
                # Node names must match the training graph byte-for-byte
                # ("chatInputs" is the saved placeholder's actual name,
                # apparent typo included — do not "fix" it here).
                self.char_inputs = self.graph.get_operation_by_name(
                    "chatInputs").outputs[0]
                self.dropout = self.graph.get_operation_by_name(
                    "dropout").outputs[0]
                # Materialize the CRF transition matrix once; decode() runs
                # Viterbi in numpy, outside the TF graph.
                self.trans = self.graph.get_operation_by_name(
                    "crf_loss/transitions").outputs[0].eval()
                self.logits = self.graph.get_operation_by_name(
                    "project/logits").outputs[0]
                self.pred = self.graph.get_operation_by_name(
                    "project/output/pred").outputs[0]

    def predict(self, text):
        """Tag a single string.

        Args:
            text: input text; each character is looked up in the char map,
                falling back to the "<OOV>" id for unknown characters.

        Returns:
            list: one tag string per character of `text`.
        """
        char_id_list = [
            self.char_to_id[w] if w in self.char_to_id
            else self.char_to_id["<OOV>"]
            for w in text
        ]
        print("seg inputs:", char_id_list)
        input_x = np.array(char_id_list).reshape(1, len(char_id_list))

        feed_dict = {
            self.char_inputs: input_x,
            self.dropout: 1.0,  # inference: disable dropout
        }

        # Only the logits are needed; the original also fetched self.pred
        # and discarded it.
        logits = self.sess.run(self.logits, feed_dict=feed_dict)

        path = decode(logits, self.trans, [input_x.shape[1]], self.tag_num)
        # Drop the artificial start state prepended by decode().
        path = path[0][1:]
        print("seg path", path)
        tags = [self.id_to_tag[p] for p in path]
        return tags


if __name__ == '__main__':
    # Smoke test: restore the segmentation model and tag one sample string.
    seg_checkpoint_dir = "./results/seg/ckpt"
    seg_map_file = "./results/seg/maps.pkl"
    seg_predictor = Predictor(
        map_file=seg_map_file,
        checkpoint_dir=seg_checkpoint_dir,
    )
    tag_sequence = seg_predictor.predict("大数据 算法工程师 abc")
    print(tag_sequence)