import Net
import tensorflow as tf
from self_factory import DictionaryMap
from self_factory import Data2Index
import numpy as np
from self_factory import log_func

class evaluate():
    """Restore a trained seq2seq (chatbot) model and decode test sentences.

    NOTE(review): the lower-case class name is kept as-is so existing callers
    (see the commented-out snippet at the bottom of the file) keep working.
    Relies on TensorFlow 1.x graph/session APIs and the project-local
    ``Net`` / ``self_factory`` modules.
    """

    def __init__(self):
        # Hyper-parameters must match the values used at training time,
        # otherwise the checkpoint under 'ckpt/' cannot be restored.
        self.embedding_size = 100
        self.hidden_size = 200
        self.batch_size = 1

        # word->index / index->word maps saved during training.
        self.test_w2i, self.test_i2w = DictionaryMap.dictionary_from_pkl_file('maps.pkl')
        self.vocab_size = len(self.test_w2i)
        print('vocab_size:', self.vocab_size)
        # Build the graph; the first argument True presumably switches
        # Net.net into inference/test mode -- TODO confirm against Net.
        self.test_Net = Net.net(True, self.batch_size, self.vocab_size,
                             self.embedding_size, self.hidden_size)
        self.test_Net.seq2seq()

    @staticmethod
    def _encode_batch(sentences, w2i):
        """Tokenize, index and zero-pad a batch of sentences.

        Shared pipeline for get_data / input_test_data (was duplicated).

        Returns:
            (padded_ids, lengths): int32 array of shape (batch, max_len)
            and int32 array of the original, un-padded lengths.
        """
        tokenized = [Data2Index.str2list(s) for s in sentences]
        lengths = [len(tokens) for tokens in tokenized]
        indexed = Data2Index.str2index(tokenized, w2i)
        max_len = max(lengths)
        # Right-pad every row with 0 (assumed to be the PAD id -- TODO
        # confirm against the training-time dictionary) to a common length.
        padded = [np.pad(ids, (0, max_len - len(ids)), 'constant', constant_values=0)
                  for ids in indexed]
        return np.asarray(padded, np.int32), np.asarray(lengths, np.int32)

    def get_data(self):
        """Encode the built-in test sentences; returns (padded_ids, lengths)."""
        #word = input('input:')
        #words = Data2Index.str2list(word)
        sentences = ['早上好', '明天去哪玩','明天去哪玩' ,'今天天气怎么样？','今天好开心',
                    '上课要好好学习','明天不用上班', '你是什么星座？','你一点也不漂亮','没钱吃饭了']
        batch, lengths = self._encode_batch(sentences, self.test_w2i)
        print(batch)  # debug: now prints the indexed batch rather than raw tokens
        # BUG FIX: lengths used to be returned wrapped in an extra list
        # ([np.asarray(...)]), i.e. shape (1, batch) instead of the (batch,)
        # shape fed to input_length/target_length in test_line and returned
        # by input_test_data.
        return batch, lengths

    def test_line(self, data):
        """Restore the checkpoint and print decoded predictions for *data*.

        Args:
            data: (padded_ids, lengths) as produced by get_data.
        """
        saver = tf.train.Saver()
        with tf.Session() as s:
            saver.restore(s, 'ckpt/m')
            # The input batch is also fed as the target (autoencoder-style
            # feed) so that target/target_length placeholders are defined.
            feed = {self.test_Net.input: data[0], self.test_Net.input_length: data[1],
                    self.test_Net.target: data[0], self.test_Net.target_length: data[1]
                    }
            results = s.run(self.test_Net.prediction_logits, feed_dict=feed)

            # Map predicted ids back to words for display.
            print([[self.test_i2w[i] for i in result] for result in results])

    @staticmethod
    def input_test_data(test_w2i):
        """Encode the built-in test sentences.

        Returns:
            (padded_ids, lengths, sentences) -- same encoding as get_data,
            plus the raw sentence strings for display.
        """
        sentences = ['早上好', '明天去哪玩', '周末回家一趟', '今天天气怎么样？', '今天好开心',
                     '上课要好好学习', '明天不用上班', '你是什么星座？', '你一点也不漂亮', '没钱吃饭了']
        batch, lengths = evaluate._encode_batch(sentences, test_w2i)
        return batch, lengths, sentences

    @staticmethod
    def evaluate_test_data(test_Net, s, test_i2w, data):
        """Run the prediction op in session *s* and decode ids to words.

        Args:
            test_Net: graph object exposing input/target placeholders and
                prediction_logits.
            s: an open tf.Session with the checkpoint already restored.
            test_i2w: index -> word map.
            data: (padded_ids, lengths) batch.

        Returns:
            A list (one per batch row) of decoded word lists.
        """
        feed = {test_Net.input: data[0], test_Net.input_length: data[1],
                test_Net.target: data[0], test_Net.target_length: data[1]
                }
        results = s.run(test_Net.prediction_logits, feed_dict=feed)

        return [[test_i2w[i] for i in result] for result in results]



# test_data = evaluate.input_test_data(evaluate().test_w2i)
# test_data_ids = test_data[0:2]
# test_data_w = test_data[2]
