import data_input
import Net
import os
import numpy as np
import tensorflow as tf
from self_factory import log_func
from self_factory import DictionaryMap
from self_factory import Data2Index
from config import *
#from bottle import *
from urllib.parse import *


class evaluate():
    """Static helpers for inference: turn raw sentences into padded index
    batches and decode the model's predicted indices back into tokens."""

    @staticmethod
    def input_test_data(test_w2i, test_data):
        """Index and zero-pad a batch of raw sentences.

        Args:
            test_w2i: word -> index dictionary.
            test_data: list of raw sentence strings.

        Returns:
            Tuple of (padded index matrix, int32 [batch, max_len];
            true sequence lengths, int32 [batch]; the original sentences).
        """
        sentences = test_data
        # Keep at most beam_width sentences so the batch size matches what
        # the beam-search decoder expects.
        data = [Data2Index.str2list(w, is_cut=configer.is_cut)
                for w in sentences[0:configer.beam_width]]
        data_len = [len(w) for w in data]
        batch_data = Data2Index.str2index(data, test_w2i)
        max_batch_length = max(data_len)
        # Right-pad each sequence with index 0 up to the batch maximum.
        batch_data_padded = [np.pad(d, (0, max_batch_length - len(d)),
                                    'constant', constant_values=(0, 0))
                             for d in batch_data]
        return (np.asarray(batch_data_padded, np.int32),
                np.asarray(data_len, np.int32),
                sentences)

    @staticmethod
    def evaluate_test_data(test_Net, s, test_i2w, data):
        """Run greedy decoding and map predicted indices back to tokens.

        `data` is the (padded_batch, lengths, ...) tuple from
        input_test_data.  The inputs are also fed to the target
        placeholders because the graph requires them even at inference.
        """
        feed = {test_Net.input: data[0], test_Net.input_length: data[1],
                test_Net.target: data[0], test_Net.target_length: data[1]}
        results = s.run(test_Net.prediction, feed_dict=feed)
        return [[test_i2w[i] for i in result] for result in results]

    @staticmethod
    def evaluate_beamsearch_data(test_Net, s, test_i2w, data):
        """Run beam-search decoding and map every beam's indices to tokens.

        Fixes the original's duplicated `feed = feed = {...}` assignment.
        """
        feed = {test_Net.input: data[0], test_Net.input_length: data[1],
                test_Net.target: data[0], test_Net.target_length: data[1]}
        results = s.run(test_Net.prediction, feed_dict=feed)
        # The prediction layout is [batch_size, beam_size, seq_len];
        # transpose it into one token list per beam.
        return [[[test_i2w[i[n]] for i in result]
                 for n in range(configer.beam_width)]
                for result in results]


def build_pipeline_graph():
    """Assemble the batched input pipeline and the seq2seq network.

    Returns:
        (w2i, i2w, next_batch, net): the word/index dictionaries, the
        tensor yielding the next training batch, and the built network.
    """
    # Input pipeline: one epoch of batches from the pickled dictionary data.
    batch_data = data_input.getBatch(os.path.join('data', configer.map_name))
    next_batch = batch_data.get_batch(batch_size=FLAGS.batch_size, epoch=1)

    vocab_size = len(batch_data.w2i)
    print('vocab_size:', vocab_size)

    # Network: hyper-parameters come straight from the command-line flags.
    net = Net.net(FLAGS.is_inference, FLAGS.batch_size, vocab_size,
                  FLAGS.embedding_size, FLAGS.hidden_size)
    net.build_seq2seq()

    return batch_data.w2i, batch_data.i2w, next_batch, net


def train_projection():
    """Train the seq2seq model, writing summaries every step and a
    checkpoint plus a log line every 100 iterations."""
    # prepare parameter
    logger = log_func.my_log('ChatBot_train')
    w2i, i2w, next_batch, net = build_pipeline_graph()
    # projection
    init = tf.global_variables_initializer()
    saver = tf.train.Saver(max_to_keep=10, keep_checkpoint_every_n_hours=1)

    with tf.Session() as s:
        # Resume from the checkpoint or initialize fresh variables.
        if configer.is_restore:
            saver.restore(s, 'ckpt/m')
        else:
            s.run(init)
        writer = tf.summary.FileWriter('log/', s.graph)
        logger.info(tf.trainable_variables())
        logger.info(FLAGS.flag_values_dict())
        FLAGS.append_flags_into_file('config')

        for i in range(1000000):
            data = s.run(next_batch)
            feed = {
                    net.input: data[0], net.input_length: data[1],
                    net.target: data[2], net.target_length: data[3]
                    }
            # FIX: fetch loss and summaries in the same run as train_op.
            # The original issued three session runs per iteration, so the
            # summary run re-executed the whole forward pass on the same feed.
            _, loss, summary_merge = s.run(
                [net.train_op, net.loss, net.summary_merge], feed_dict=feed)
            # Read global_step after the training run so we get the
            # post-increment value, matching the original behaviour.
            step = s.run(net.global_step)
            writer.add_summary(summary_merge, step)
            if i % 100 == 0:
                saver.save(s, 'ckpt/m')
                logger.info('result: step: %s  loss: %s\n' % (step, loss))


# def test_projetion():
#     logger = log_func.my_log('ChatBot_test')
#     logger.info(FLAGS.flag_values_dict())
#     #FLAGS.read_flags_from_files('config')
#     w2i, i2w = DictionaryMap.dictionary_from_pkl_file(configer.map_name)
#     vocab_size = len(w2i)
#     net = Net.net(configer.is_inference, configer.batch_size,
#                   vocab_size, FLAGS.embedding_size,
#                   FLAGS.hidden_size)
#     net.build_seq2seq()
#
#     saver = tf.train.Saver()
#     with tf.Session() as sess:
#         saver.restore(sess, 'ckpt/m')
#
#         @route('/<data_in>')
#         def run_http(data_in):
#             strin = unquote(data_in)
#             print('------------------------------------------'+strin)
#             if strin == 'favicon.ico':
#                 pass
#             else:
#                 str = [strin]
#                 batch_data = evaluate.input_test_data(w2i, str)
#                 if configer.is_BeamSearch:
#                     results = evaluate.evaluate_beamsearch_data(net, sess, i2w, batch_data)
#                 else:
#                     results = evaluate.evaluate_test_data(net, sess, i2w, batch_data)
#                 res = [''.join(result).replace('PAD','') for result in results[0]]
#                 logger.info('result:%s\n'%res)
#                 return template('{{strr}}', strr = res)
#         run(host='localhost')


if __name__ == '__main__':

    #FLAGS.read_flags_from_files('config')
    # Optionally regenerate the word<->index dictionaries before running.
    if configer.is_rewrite_map:
        dict_path = os.path.join('data', configer.map_name)
        vocab_maps = data_input.get_dict([configer.sentence_name, configer.label_name])
        data_input.dict_2_pkl_file(vocab_maps, dict_path)

    # Dispatch to inference or training according to the configuration.
    if configer.is_inference:
        print('test')
        # NOTE(review): test_projetion is defined only inside the
        # commented-out block above, so this call raises NameError at
        # runtime — restore that function before using inference mode.
        test_projetion()
    else:
        print('train')
        train_projection()
