import models.text_layer as text_layer
import os,sys
import numpy as np
import keras.backend as K
import datetime
from models.utils import *
from keras.optimizers import Adam
from keras.utils import to_categorical
from models.callbacks import HyperbolicTangentLR
from keras.callbacks import ModelCheckpoint
from keras.callbacks import TensorBoard
from keras.callbacks import EarlyStopping
from keras.utils import plot_model

def getTimeNow(format="%Y-%m-%d_%H-%M-%S"):
    """Return the current local time as a string rendered with *format*.

    *format* is a ``strftime`` pattern; the default yields e.g.
    ``2023-01-31_14-05-59``. (The name shadows the builtin but is kept
    because callers pass it by keyword.)
    """
    return datetime.datetime.now().strftime(format)
class TextCapsnet(object):
    """Training driver for a capsule-network text classifier.

    Stores the hyper-parameters taken from ``args``, the train/test data and
    optional pretrained embedding matrices. ``train`` builds the Keras model
    via ``models.text_layer`` (single- or dual-channel embeddings), fits it
    with a generator, then evaluates the best checkpoint on the test set.
    """

    def __init__(self, args,
                 seq_len=800,
                 num_classes=10,
                 vocab_size=30000,
                 x_train=None,
                 y_train=None,
                 x_test=None,
                 y_test=None,
                 pretrain_vec=None,
                 pretrain_cw=None):
        self.args = args
        self.model_name = args.saveName
        self.init_lr = args.init_lr
        self.batch_size = args.batch_size
        self.epochs = args.epochs
        self.l2 = args.l2
        self.routing = args.routing
        self.embedding_size = args.embedding_size
        self.dropout_ratio = args.dropout_ratio
        self.num_filter = args.num_filter
        self.filter_size = args.filter_size

        # capsule-layer hyper-parameters
        self.num_capsule = args.num_cap
        self.len_ui = args.len_ui
        self.len_vj = args.len_vj

        self.num_classes = num_classes
        self.sequence_length = seq_len
        self.vocab_size = vocab_size

        self.x_train = x_train
        self.y_train = y_train
        self.x_test = x_test
        self.y_test = y_test
        # timestamp tagging this run's log/plot/history artifacts
        self.start_time = getTimeNow(format="%m-%d_%H-%M")
        self.pretrain_vec = pretrain_vec
        self.pretrain_cw = pretrain_cw

    def data_generator(self, data, targets, batch_size):
        """Yield ``(X, Y)`` batches forever, in order, for ``fit_generator``.

        The last batch of each pass over the data may be smaller than
        ``batch_size`` (Python slicing past the end is safe).
        """
        batches = (len(data) + batch_size - 1) // batch_size
        while True:
            for i in range(batches):
                X = data[i * batch_size:(i + 1) * batch_size]
                Y = targets[i * batch_size:(i + 1) * batch_size]
                yield (X, Y)

    def train(self):
        """Build, fit and evaluate the model.

        Trains with early stopping / LR schedule / TensorBoard callbacks,
        checkpoints the best weights on ``val_acc``, pickles the training
        history, then reloads the best checkpoint and reports test metrics.
        """
        if self.args.dual_channel == 0:
            print("\n")
            print("使用单通道词向量")
            model = text_layer.get_model(self, summary=True)
        else:
            print("\n")
            print("使用双通道词向量")
            model = text_layer.get_GRU_model(self, summary=True)

        # callbacks: keep only the best val_acc weights, stop after 10 stale epochs
        filepath = os.path.join(self.args.save_ckpt, "{}_checkpoint.h5".format(self.args.dataset))
        early_stop = EarlyStopping(monitor='val_acc', patience=10, verbose=3, mode='max')
        lr_scheduler = HyperbolicTangentLR(init_lr=self.init_lr, max_epoch=self.epochs, L=-6, U=3)
        ckpt_callback = ModelCheckpoint(filepath,
                                        monitor='val_acc',
                                        save_best_only=True, save_weights_only=True, verbose=1, mode='max')
        # NOTE(review): log/plot/history locations are hard-coded absolute
        # paths; consider lifting them into args.
        log_dir = "/home/stu/LRR/text-capsule-network/log/{}_{}/{}".format(self.args.dataset, self.model_name, self.start_time)
        png_file = "/home/stu/LRR/text-capsule-network/log/{}_{}_{}.png".format(self.args.dataset, self.model_name, self.start_time)
        plot_model(model, to_file=png_file, show_shapes=True)
        tb_callbacks = TensorBoard(
            log_dir=log_dir,
            histogram_freq=0,             # 0 disables activation histograms
            batch_size=self.batch_size,
            write_graph=True,             # store the network graph
            write_grads=True,             # visualize gradient histograms
            write_images=True,            # visualize weights as images
            embeddings_freq=0,            # how often (in epochs) embeddings are saved
            embeddings_layer_names=None,  # None/empty -> watch every embedding layer
            embeddings_metadata=None      # mapping: layer name -> metadata file
        )

        history = model.fit_generator(
            generator=self.data_generator(self.x_train, self.y_train, self.batch_size),
            steps_per_epoch=(len(self.x_train) + self.batch_size - 1) // self.batch_size,
            epochs=self.epochs,
            verbose=2,
            # fix: validation_data must be a tuple, not a list
            validation_data=(self.x_test, self.y_test),
            callbacks=[lr_scheduler, ckpt_callback, tb_callbacks, early_stop]
        )
        history_dir = "/home/stu/LRR/text-capsule-network/history/"
        name = "{}.{}.pkl".format(self.model_name, self.start_time)
        ensure_dir(history_dir)
        # fix: pickle the plain metrics dict; the History object itself holds
        # a reference to the model and is not reliably picklable
        pkl_save(data=history.history, pkl_path=history_dir + name)
        # restore the best checkpoint before the final evaluation
        model.load_weights(filepath)
        # fix: evaluate returns [loss, acc] for a model compiled with an
        # accuracy metric (val_acc is monitored above) — unpack and label both
        test_loss, test_acc = model.evaluate(self.x_test, self.y_test)
        print("TEST LOSS : {:.4f}".format(test_loss))
        print("TEST ACC : {:.4f}".format(test_acc))
        
        
