from model import Model
import argparse
import tensorflow as tf
import pickle as pkl
import embeddings.predict_fake_data as fake
# parser = argparse.ArgumentParser()
# parser.add_argument('--word_dim', type=int,default=111, help='dimension of word vector', required=False)
# parser.add_argument('--sentence_length', type=int, default=30, help='max sentence length', required=False)
# parser.add_argument('--class_size', type=int,default=5, help='number of classes', required=False)
# parser.add_argument('--rnn_size', type=int, default=256, help='hidden dimension of rnn')
# parser.add_argument('--num_layers', type=int, default=1, help='number of layers in rnn')
# parser.add_argument('--input_embed', type=str,default='D:/workspace/python/pythonWorkSpace/ner-lstm/embeddings/test_a_embed.pkl', help='location of input pickle embedding', required=False)
# parser.add_argument('--restore', type=str, default='D:/workspace/python/pythonWorkSpace/ner-lstm/data/model/model_max.ckpt',help="path of saved model", required=False)
# args = parser.parse_args()
# model = Model(args)
# inp = pkl.load(open(args.input_embed, 'rb'))
# sess = tf.Session()
# saver = tf.train.Saver()
# saver.restore(sess, args.restore)
# pred = sess.run(model.prediction, {model.input_data: inp})
# pkl.dump(pred, open('D:/workspace/python/pythonWorkSpace/ner-lstm/data/eval/predictions.npy', 'wb'))
# print(pred)


# Command-line interface: model hyper-parameters plus the locations of the
# input embedding pickle and the saved checkpoint. All options carry defaults
# so the script can run with no arguments.
parser = argparse.ArgumentParser()
_cli_options = (
    ('--word_dim', int, 111, 'dimension of word vector'),
    ('--sentence_length', int, 30, 'max sentence length'),
    ('--class_size', int, 5, 'number of classes'),
    ('--rnn_size', int, 256, 'hidden dimension of rnn'),
    ('--num_layers', int, 1, 'number of layers in rnn'),
    ('--input_embed', str,
     'D:/workspace/python/pythonWorkSpace/ner-lstm/embeddings/test_a_embed.pkl',
     'location of input pickle embedding'),
    ('--restore', str,
     'D:/workspace/python/pythonWorkSpace/ner-lstm/data/model/model_max.ckpt',
     'path of saved model'),
)
for _flag, _type, _default, _help in _cli_options:
    # required=False is the argparse default for optional flags, so omitting
    # it here matches the original behaviour exactly.
    parser.add_argument(_flag, type=_type, default=_default, help=_help)
args = parser.parse_args()
# with tf.Graph().as_default():
#     model = Model(args)
#     inp = pkl.load(open(args.input_embed, 'rb'))
#     sess = tf.Session()
#     saver = tf.train.Saver()
#     saver.restore(sess, 'D:/workspace/python/pythonWorkSpace/ner-lstm/data/model/model_max.ckpt')
#     pred = sess.run(model.prediction, {model.input_data: inp})
#     pkl.dump(pred, open('D:/workspace/python/pythonWorkSpace/ner-lstm/data/eval/predictions.npy', 'wb'))
#     print(pred)

# Run the trained NER model on the hand-crafted sample input (fake.abc)
# and persist the predicted tag sequence.
with tf.Graph().as_default():
    model = Model(args)
    saver = tf.train.Saver()
    with tf.Session() as sess:
        # Restore from args.restore (its default is the same checkpoint path
        # that was previously hard-coded here), so --restore actually works.
        saver.restore(sess, args.restore)
        # Feed the fabricated data, not args.input_embed; the original also
        # loaded the input-embed pickle but never used it, so that dead file
        # read has been dropped.
        pred = sess.run(model.prediction, {model.input_data: fake.abc})
    # NOTE(review): the file carries a .npy extension but the payload is a
    # pickle (as in the original) — presumably read back with pickle.load;
    # verify against the consumer before renaming.
    with open('D:/workspace/python/pythonWorkSpace/ner-lstm/data/eval/predictions.npy', 'wb') as out_f:
        pkl.dump(pred, out_f)
    print(pred)
