from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import math
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers,regularizers
from tensorflow.keras.callbacks import ModelCheckpoint,EarlyStopping,ReduceLROnPlateau


def Model_Dense(boardSize):
    """Build a fully-connected policy network.

    The input vector holds boardSize**2 stone values followed by one
    colour value (side to move) — e.g. 361 stones + 1 colour for 19x19.
    The output is a softmax distribution over the boardSize**2 points.
    """
    inp = keras.layers.Input(shape=(boardSize**2+1,))  # boardSize**2 stones + 1 colour flag
    # Feature-extraction trunk operating on the stones only (colour stripped off).
    feature = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(inp[:, :-1])
    feature = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
    feature = keras.layers.Dense(64*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
    feature = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
    # Re-attach the colour scalar so the logic head knows whose turn it is.
    lnk = keras.layers.concatenate([feature, inp[:, boardSize**2:boardSize**2+1]], axis=-1)
    # BUG FIX: the original applied three Dense layers to `lnk` in parallel and
    # discarded all but the last, leaving dead (never-trained, never-used)
    # layers in the graph.  Chain them sequentially as evidently intended.
    logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
    logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(64*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='sigmoid')(logic)
    output = keras.layers.Dense(boardSize**2, activation='softmax')(logic)
    return keras.models.Model(inputs=inp, outputs=output)

def Model_CNN(boardSize):
    """Build a convolutional policy network.

    Same I/O contract as Model_Dense: input is boardSize**2 stone values
    plus one trailing colour value; output is a softmax over board points.
    """
    inp = keras.layers.Input(shape=(boardSize**2+1,))
    # Reshape the flat stone vector into a single-channel board image.
    reshape = keras.layers.Reshape((boardSize, boardSize, 1))(inp[:, :-1])
    feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(reshape)
    feature = keras.layers.Conv2D(3**2, 2, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    # A boardSize-wide kernel collapses the spatial dimensions to 1x1.
    feature = keras.layers.Conv2D(3**2*boardSize**2, boardSize, activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Flatten()(feature)
    # Append the colour scalar before the dense logic head.
    lnk = keras.layers.concatenate([feature, inp[:, boardSize**2:boardSize**2+1]], axis=-1)
    logic = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
    logic = keras.layers.Dense(1024*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(1024*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', kernel_regularizer=regularizers.l2(0.001), activation='sigmoid')(logic)
    logic = keras.layers.Dense(512, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    logic = keras.layers.Dense(256, kernel_initializer='random_uniform', bias_initializer='zeros', activation='sigmoid')(logic)
    output = keras.layers.Dense(boardSize**2, activation='softmax')(logic)
    return keras.models.Model(inputs=inp, outputs=output)

def Model_PD(boardSize, type='C'):
    """Build a policy network for policy-gradient training.

    type='D' builds a fully-connected body; type='C' (default) builds a
    convolutional body.

    Raises:
        ValueError: for any other `type` value.  (The original fell
        through with `logic` undefined and crashed later with a
        confusing NameError.)
    """
    inp = keras.layers.Input(shape=(boardSize**2+1,))
    if type == 'D':
        # Fully-connected feature trunk over the stones (colour stripped off).
        feature = keras.layers.Dense(64*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(inp[:, :-1])
        feature = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
        feature = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
        feature = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
        feature = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
        # Re-attach the colour scalar for the logic head.
        lnk = keras.layers.concatenate([feature, inp[:, boardSize**2:boardSize**2+1]], axis=-1)
        logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
        logic = keras.layers.Dense(64*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
        logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
        logic = keras.layers.Dense(32*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
        logic = keras.layers.Dense(16*boardSize**2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='sigmoid')(logic)
    elif type == 'C':
        # Convolutional trunk over the board reshaped to a 2D image.
        reshape = keras.layers.Reshape((boardSize, boardSize, 1))(inp[:, :-1])
        feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(reshape)
        feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='valid', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
        feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='valid', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
        feature = keras.layers.Conv2D(3**4*boardSize**2, boardSize-3, activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
        feature = keras.layers.Flatten()(feature)
        lnk = keras.layers.concatenate([feature, inp[:, boardSize**2:boardSize**2+1]], axis=-1)
        logic = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
        logic = keras.layers.Dense(512*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(logic)
    else:
        # BUG FIX: the original had a bare `None` here, leaving `logic`
        # undefined; fail fast with a clear message instead.
        raise ValueError("type must be 'D' (dense) or 'C' (convolutional), got %r" % (type,))
    output = keras.layers.Dense(boardSize**2, activation='softmax')(logic)
    return keras.models.Model(inputs=inp, outputs=output)

def Model_AC(boardSize):
    """Build an actor-critic network: shared convolutional trunk, a policy
    (actor) head with softmax over board points, and a scalar value
    (critic) head with tanh activation."""
    inp = keras.layers.Input(shape=(boardSize**2+1,))
    reshape = keras.layers.Reshape((boardSize, boardSize, 1))(inp[:, :-1])
    feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(reshape)
    feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='valid', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Conv2D(3**4, 2, strides=1, padding='valid', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Flatten()(feature)
    # Append the colour scalar to the shared features.
    lnk = keras.layers.concatenate([feature, inp[:, boardSize**2:boardSize**2+1]], axis=-1)
    actor = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
    actor = keras.layers.Dense(1024*1, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(actor)
    actor_output = keras.layers.Dense(boardSize**2, activation='softmax')(actor)
    critic = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(lnk)
    critic = keras.layers.Dense(1024*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(critic)
    # BUG FIX: the value head was wired to `actor`, leaving the critic
    # branch above dead; it must consume `critic`.
    critic_output = keras.layers.Dense(1, activation='tanh')(critic)
    return keras.models.Model(inputs=inp, outputs=[actor_output, critic_output])

def alphago_simple(boardSize, n):
    """Small AlphaGo-style policy network: two conv layers and one dense
    layer, ending in a softmax over the boardSize**2 board points.
    `n` is the number of input feature planes."""
    planes = keras.layers.Input(shape=(boardSize, boardSize, n))
    x = keras.layers.Conv2D(3**4, 5, strides=1, padding='same', activation='tanh',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(planes)
    x = keras.layers.Conv2D(1, 2, strides=1, padding='valid', activation='relu',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(x)
    flat = keras.layers.Flatten()(x)
    hidden = keras.layers.Dense(1024, kernel_initializer='random_uniform',
                                bias_initializer='zeros', activation='relu')(flat)
    probs = keras.layers.Dense(boardSize**2, activation='softmax')(hidden)
    return keras.models.Model(inputs=planes, outputs=probs)

def alphago_complicate(boardSize, n):
    """Deeper AlphaGo-style policy network: three conv layers feeding a
    flattened softmax head over the boardSize**2 board points.
    `n` is the number of input feature planes."""
    planes = keras.layers.Input(shape=(boardSize, boardSize, n))
    x = keras.layers.Conv2D(n**2, 5, strides=1, padding='same', activation='tanh',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(planes)
    x = keras.layers.Conv2D(boardSize**2, 2, strides=1, padding='same', activation='relu',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(x)
    x = keras.layers.Conv2D(1, 2, strides=1, padding='valid', activation='relu',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(x)
    flat = keras.layers.Flatten()(x)
    probs = keras.layers.Dense(boardSize**2, activation='softmax')(flat)
    return keras.models.Model(inputs=planes, outputs=probs)

def alphago_value(boardSize, n):
    """Value network: a conv trunk over the board feature planes, whose
    flattened output is concatenated with a flat boardSize*boardSize
    vector (second input) and reduced to a single tanh scalar."""
    planes = keras.layers.Input(shape=(boardSize, boardSize, n), name='input1')
    move_vec = keras.layers.Input(shape=(boardSize*boardSize,), name='input2')
    x = keras.layers.Conv2D(n**2, 5, strides=1, padding='same', activation='tanh',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(planes)
    x = keras.layers.Conv2D(boardSize**2, 2, strides=1, padding='same', activation='relu',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(x)
    x = keras.layers.Conv2D(1, 2, strides=1, padding='valid', activation='relu',
                            kernel_initializer='random_uniform', bias_initializer='zeros')(x)
    conv_flat = keras.layers.Flatten()(x)
    merged = keras.layers.concatenate([conv_flat, move_vec])
    hidden = keras.layers.Dense(1024*2, kernel_initializer='random_uniform',
                                bias_initializer='zeros', activation='relu')(merged)
    value = keras.layers.Dense(1, activation='tanh')(hidden)
    return keras.models.Model(inputs={'input1': planes, 'input2': move_vec}, outputs=value)

def alphago_zero(boardSize):
    """Build an AlphaGo-Zero-style dual-head network.

    Input: (boardSize, boardSize, 2) planes — one board-position plane
    plus one side-to-move plane.  Outputs: a policy over
    boardSize**2 + 1 moves (the last slot is PASS) and a scalar value
    in [-1, 1].
    """
    inp = keras.layers.Input(shape=(boardSize, boardSize, 2))  # one stone plane + one side-to-move plane
    feature = keras.layers.Conv2D(50, 4, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(inp)
    feature = keras.layers.Conv2D(50, 3, strides=1, padding='same', activation='tanh', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Conv2D(50, 2, strides=1, padding='same', activation='relu', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Conv2D(50, 2, strides=1, padding='valid', activation='relu', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Conv2D(10, 2, strides=1, padding='valid', activation='relu', kernel_initializer='random_uniform', bias_initializer='zeros')(feature)
    feature = keras.layers.Flatten()(feature)
    actor = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
    actor = keras.layers.Dense(1024*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='relu')(actor)
    actor_output = keras.layers.Dense(boardSize**2+1, activation='softmax')(actor)  # last slot = PASS
    critic = keras.layers.Dense(1024*4, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(feature)
    critic = keras.layers.Dense(1024*2, kernel_initializer='random_uniform', bias_initializer='zeros', activation='tanh')(critic)
    # BUG FIX: the value head previously consumed `actor`, leaving the
    # critic branch above dead; it must consume `critic`.
    critic_output = keras.layers.Dense(1, activation='tanh')(critic)
    return keras.models.Model(inputs=inp, outputs=[actor_output, critic_output])



class DenseModel():
    """Convenience wrapper around the model builders above.

    Selects one of the network architectures by name, optionally wraps a
    Python generator in a tf.data.Dataset, and exposes compile / fit /
    predict / save-load helpers on the wrapped keras model.
    """
    def __init__(self,dataGenerator=None,boardSize=19,dataSize=1024,model='dense',isAlphaGo=None,dataG_s=None,dataG_c=None,dataG_v=None):
        """Build the selected network and, if a generator was supplied,
        the matching tf.data pipeline.

        dataGenerator -- sample generator for the non-AlphaGo models
        dataSize      -- number of samples the generator yields per epoch
        model         -- architecture name; unknown names fall back to Model_Dense
        isAlphaGo     -- truthy selects the AlphaGo-style generators below
        dataG_s/dataG_c/dataG_v -- generators for the alpha_simple /
            alpha_complicate / alpha_value models respectively
        """
        if model=='dense':
            self.model=Model_Dense(boardSize)
        elif model=="cnn":
            self.model=Model_CNN(boardSize)
        elif model=='pd_dense':
            self.model=Model_PD(boardSize)
        elif model=='ac':
            self.model=Model_AC(boardSize)
        elif model=='alpha_simple':
            self.model=alphago_simple(boardSize,46)  # 46 input feature planes
        elif model=='alpha_complicate':
            self.model=alphago_complicate(boardSize,46)
        elif model=='alpha_value':
            self.model=alphago_value(boardSize,46)
        elif model=='AGZ':
            self.model=alphago_zero(boardSize)
        else:
            # Unknown model name: default to the dense architecture.
            self.model=Model_Dense(boardSize)
        if dataGenerator and isAlphaGo is None:
            # Plain (board-vector, move-label) pipeline for the non-AlphaGo models.
            self.dset=tf.data.Dataset.from_generator(dataGenerator, args=[dataSize,boardSize],output_types=(tf.int32,tf.int32),output_shapes = (tf.TensorShape([boardSize**2+1]),tf.TensorShape([boardSize**2])))
        elif isAlphaGo:
            # AlphaGo pipelines: feature planes in, policy or value target out.
            if dataG_s:
                self.dset=tf.data.Dataset.from_generator(dataG_s, args=[dataSize,boardSize,'policy'],output_types=(tf.int32,tf.float32),output_shapes = ((boardSize,boardSize,46),(boardSize**2)))
            elif dataG_c:
                self.dset=tf.data.Dataset.from_generator(dataG_c, args=[dataSize,boardSize,'policy'],output_types=(tf.int32,tf.float32),output_shapes = ((boardSize,boardSize,46),(boardSize**2)))
            elif dataG_v:
                # Value model takes a dict of two named inputs (see alphago_value).
                self.dset=tf.data.Dataset.from_generator(dataG_v, args=[dataSize,boardSize,'value'],output_types=({"input1":tf.int32,"input2":tf.int32},tf.float32),output_shapes=({"input1":(boardSize,boardSize,46),"input2":(boardSize**2)},(1,)))
            else:
                self.dset=None
        else:
            self.dset=None
    def plot_model(self,fileName='modelplot.png'):
        """Write an architecture diagram of the model to *fileName*."""
        keras.utils.plot_model(self.model, fileName)

    def model_predict_ac(self,npInput):
        """Run the actor-critic model; returns (action probabilities, value)."""
        actions,value=self.model.predict(npInput)
        return actions,value

    def compile_ac(self):
        """Compile the actor-critic model: cross-entropy for the policy head,
        MSE (half weight) for the value head."""
        self.model.compile(optimizer=keras.optimizers.SGD(learning_rate=0.001),
            loss=['categorical_crossentropy', 'mse'],
            loss_weights=[1,.5],
            metrics=['accuracy'])

    def compile_alpha_go_value(self):
        """Compile the value network with SGD + mean-squared-error loss."""
        self.model.compile(optimizer=keras.optimizers.SGD(learning_rate=0.001),
        loss=keras.losses.MeanSquaredError(),
        metrics=['accuracy'])

    def fit_ac(self,x,y1,y2,batch_size=512,epochs=1):
        """Fit the actor-critic model on policy targets y1 and value targets y2."""
        self.model.fit(x,[y1,y2],batch_size=batch_size,epochs=epochs)

    def p_d_compile(self):  # Use plain SGD here: fancier optimizers are not designed for reinforcement learning.
        self.model.compile(optimizer=keras.optimizers.SGD(learning_rate=0.0001),
            loss=keras.losses.CategoricalCrossentropy(from_logits=False),
            metrics=['accuracy'])

    def fit_all_data(self,x,y,batch_size=512,epochs=1,earlystop=0,checkpoint=False):
        """Fit on in-memory arrays with optional checkpointing / early stop.

        earlystop  -- patience in epochs; 0 disables early stopping
        checkpoint -- when True, save the best weights to a fixed file
        """
        fileName='./weights/dense_weights_dl.hdf5'
        if earlystop and checkpoint:
            callbacks=[
                ModelCheckpoint(filepath=fileName, monitor='loss',verbose=1, save_best_only=True), # verbose shows a progress indicator
                EarlyStopping(monitor='loss',patience=earlystop, verbose=1, restore_best_weights=True),  # val_loss/val_acc/acc are alternatives; we monitor loss to judge whether the network has saturated
            ]
        elif checkpoint:
            callbacks=[
                ModelCheckpoint(filepath=fileName, monitor='loss',verbose=1, save_best_only=True), # verbose shows a progress indicator
            ]
        elif earlystop:
            callbacks=[
                EarlyStopping(monitor='loss',patience=earlystop, verbose=1, restore_best_weights=True),
            ]
        else:
            callbacks=[
            ]
        self.model.fit(x,y, batch_size=batch_size,epochs=epochs,callbacks=callbacks)

    def model_compile(self):
        """Compile with Adam + categorical cross-entropy (supervised training)."""
        self.model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, amsgrad=False),
            loss=keras.losses.CategoricalCrossentropy(from_logits=False),
            metrics=['accuracy'])

    def model_fit(self,batch_size=128,epochs=1,earlystop=0,checkpoint=False):
        """Fit from the tf.data pipeline built in __init__.

        A fresh 1/16-sized batch from the same dataset serves as the
        validation set, so batch_size must be divisible by 16.
        """
        dst = self.dset.batch(batch_size,drop_remainder=True).prefetch(tf.data.experimental.AUTOTUNE)
        assert (batch_size/16-int(batch_size/16))==0
        vali_data=self.dset.batch(int(batch_size/16)).take(1)
        #fileName='weights.{epoch:d}-{loss:.4f}.hdf5'
        fileName='dense_weights.hdf5'
        if earlystop and checkpoint:
            callbacks=[
                ModelCheckpoint(filepath='./weights/'+fileName, monitor='loss',verbose=1, save_best_only=True), # verbose shows a progress indicator
                EarlyStopping(monitor='loss',patience=earlystop, verbose=1, restore_best_weights=True),  # val_loss/val_acc/acc are alternatives; we monitor loss to judge whether the network has saturated
                ReduceLROnPlateau(monitor='val_loss',verbose=1,factor=0.1,patience=earlystop,min_lr=0.001)
            ]
        elif checkpoint:
            callbacks=[
                ModelCheckpoint(filepath='./weights/'+fileName, monitor='loss',verbose=1, save_best_only=True), # verbose shows a progress indicator
                ReduceLROnPlateau(monitor='val_loss', verbose=1,factor=0.1,patience=earlystop,min_lr=0.001)
            ]
        elif earlystop:
            callbacks=[
                EarlyStopping(monitor='loss',patience=earlystop, verbose=1, restore_best_weights=True),
                ReduceLROnPlateau(monitor='val_loss',verbose=1, factor=0.1,patience=earlystop,min_lr=0.001)
            ]
        else:
            callbacks=[
                ReduceLROnPlateau(monitor='val_loss', verbose=1,factor=0.1,patience=earlystop,min_lr=0.001)
            ]
        self.model.fit(dst, epochs=epochs,validation_data=vali_data,callbacks=callbacks)


    def show_summary(self):
        """Print the keras layer-by-layer summary."""
        self.model.summary()

    def model_save(self,fileName):
        """Save the full model (architecture + weights) to *fileName*."""
        self.model.reset_metrics()
        self.model.save(fileName)

    def model_load(self,fileName):
        """Replace the current model with one loaded from *fileName*."""
        self.model=keras.models.load_model(fileName)

    def model_save_weights(self,fileName):
        """Save only the weights to *fileName*."""
        self.model.save_weights(fileName)

    def model_load_weights(self,fileName):
        """Load weights from *fileName* into the current architecture."""
        self.model.load_weights(fileName)

    def model_predict(self,npInput):
        """Run inference on *npInput* and return the raw prediction."""
        return self.model.predict(npInput)

    def model_eval(self):
        """Evaluate on one 128-sample batch from the dataset and print the result."""
        eval_data=self.dset.batch(128).take(1)
        test_loss, test_acc = self.model.evaluate(eval_data, verbose=1)
        print('\nTest accuracy:', test_acc,'\tTest loss:', test_loss)