# -*- coding: utf-8 -*-
"""
Created on Fri Apr  2 23:25:14 2021

@author: DELL
"""

from keras.models import Model
from keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, AveragePooling2D,\
BatchNormalization, Flatten, Conv2D, MaxPooling2D
from keras.initializers import glorot_uniform
def identity_block (X,f,filters,stage,block):
    """ResNet identity block: a 3-conv bottleneck main path added to an
    unmodified shortcut, followed by a final ReLU.

    Args:
        X: input tensor (channels-last; BatchNorm uses axis=3).
        f: kernel size of the middle conv.
        filters: (F1, F2, F3) filter counts for the three convs.
        stage: integer used in layer names.
        block: string used in layer names.

    Returns:
        Output tensor with the same shape as X.
    """
    # Layer-name prefixes, e.g. 'res2a_branch' / 'bn2a_branch'.
    conv_prefix = 'res' + str(stage) + block + '_branch'
    bn_prefix = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters

    # Keep the untouched input for the shortcut connection.
    shortcut = X

    # Main path: (name suffix, filters, kernel, padding, apply ReLU).
    # ReLU is applied after the first two stages only; the last ReLU
    # comes after the Add with the shortcut.
    stages = [
        ('2a', F1, (1, 1), 'valid', True),
        ('2b', F2, (f, f), 'same', True),
        ('2c', F3, (1, 1), 'valid', False),
    ]
    for suffix, n_filters, ksize, pad, relu in stages:
        X = Conv2D(filters=n_filters, kernel_size=ksize, strides=(1, 1),
                   padding=pad, name=conv_prefix + suffix,
                   kernel_initializer=glorot_uniform(seed=0))(X)
        X = BatchNormalization(axis=3, name=bn_prefix + suffix)(X)
        if relu:
            X = Activation('relu')(X)

    # Merge the shortcut back in, then the final activation.
    X = Add()([X, shortcut])
    return Activation('relu')(X)

#%%
def convolutional_block (X,f,filters,stage,block,s=2):
    """ResNet convolutional block: a 3-conv bottleneck main path whose
    first conv downsamples with stride s, added to a projection shortcut
    (1x1 conv, stride s) so the two branches match in shape.

    Args:
        X: input tensor (channels-last; BatchNorm uses axis=3).
        f: kernel size of the middle conv.
        filters: (F1, F2, F3) filter counts for the three convs.
        stage: integer used in layer names.
        block: string used in layer names.
        s: stride of the downsampling convs (default 2).

    Returns:
        Output tensor after the residual add and final ReLU.
    """
    # Layer-name prefixes, e.g. 'res3a_branch' / 'bn3a_branch'.
    conv_prefix = 'res' + str(stage) + block + '_branch'
    bn_prefix = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters

    # Keep the original input; the shortcut branch is projected below.
    shortcut = X

    # Main path: (name suffix, filters, kernel, strides, padding, ReLU).
    # Only the first conv carries the downsampling stride; the final
    # ReLU comes after the Add with the shortcut.
    stages = [
        ('2a', F1, (1, 1), (s, s), 'valid', True),
        ('2b', F2, (f, f), (1, 1), 'same', True),
        ('2c', F3, (1, 1), (1, 1), 'valid', False),
    ]
    for suffix, n_filters, ksize, strides, pad, relu in stages:
        X = Conv2D(filters=n_filters, kernel_size=ksize, strides=strides,
                   padding=pad, name=conv_prefix + suffix,
                   kernel_initializer=glorot_uniform(seed=0))(X)
        X = BatchNormalization(axis=3, name=bn_prefix + suffix)(X)
        if relu:
            X = Activation('relu')(X)

    # Projection shortcut: 1x1 conv (stride s) + BatchNorm so the
    # shortcut matches the main path's channels and spatial size.
    shortcut = Conv2D(F3, kernel_size=(1, 1), strides=(s, s),
                      name=conv_prefix + '1',
                      kernel_initializer=glorot_uniform(seed=0))(shortcut)
    shortcut = BatchNormalization(axis=3, name=bn_prefix + '1')(shortcut)

    # Merge the two branches, then the final activation.
    X = Add()([X, shortcut])
    return Activation('relu')(X)
#%%
# Training / model hyper-parameters.
EPOCHS=5 #10
BATCH_SIZE = 64
# Number of output classes for the softmax head.
CLASS_NUM = 5
# NOTE(review): norm_size is unused in this file — presumably an image
# normalization size used by a training script elsewhere; verify.
norm_size = 28
# Optimizer name and loss passed to model.compile() in resNet().
optimizer = 'adam'
objective = 'categorical_crossentropy'
# Build the full ResNet model.
def resNet():
    """Build and compile a ResNet-50-style classifier.

    Architecture: zero-padding, a 7x7 stem conv + max-pool (stage 1),
    then four residual stages (one convolutional block followed by
    identity blocks each), average pooling, flatten, and a softmax
    head with CLASS_NUM outputs. Compiled with the module-level
    `optimizer` and `objective` settings.

    Returns:
        A compiled keras Model.
    """
    # Input shape (13, 14, 100) — presumably height x width x channels
    # of a pre-processed feature map; confirm against the data pipeline.
    X_input = Input([13,14,100])
    # Pad borders so the 7x7 stem conv has enough spatial extent.
    X=ZeroPadding2D((3,3))(X_input)

    # Stage 1: stem conv + BN + ReLU + max-pool.
    X = Conv2D(64,(7,7),strides=(2,2),name='conv1')(X)
    X = BatchNormalization(axis=3,name='bn_conv1')(X)
    X = Activation('relu')(X)
    X = MaxPooling2D((3,3),strides=(2,2))(X)

    # Stage 2: one projection block (s=1, no downsampling) + 2 identity blocks.
    X = convolutional_block(X,f=3,filters=[64,64,256],stage=2,block='a',s=1)
    X = identity_block(X,3,[64,64,256],stage=2,block='b')
    X = identity_block(X,3,[64,64,256],stage=2,block='c')

    # Stage 3: downsampling block + 3 identity blocks.
    X = convolutional_block(X,f=3,filters=[128,128,512],stage=3,block='a',s=2)
    X = identity_block(X,3,[128,128,512],stage=3,block='b')
    X = identity_block(X,3,[128,128,512],stage=3,block='c')
    X = identity_block(X,3,[128,128,512],stage=3,block='d')

    # Stage 4: downsampling block + 5 identity blocks.
    X = convolutional_block(X,f=3,filters=[256,256,1024],stage=4,block='a',s=2)
    X = identity_block(X,3,[256,256,1024],stage=4,block='b')
    X = identity_block(X,3,[256,256,1024],stage=4,block='c')
    X = identity_block(X,3,[256,256,1024],stage=4,block='d')
    X = identity_block(X,3,[256,256,1024],stage=4,block='e')
    X = identity_block(X,3,[256,256,1024],stage=4,block='f')

    # Stage 5: downsampling block + 2 identity blocks.
    X = convolutional_block(X,f=3,filters=[512,512,2048],stage=5,block='a',s=2)
    X = identity_block(X,3,[512,512,2048],stage=5,block='b')
    X = identity_block(X,3,[512,512,2048],stage=5,block='c')

    # Average pooling ('same' padding keeps 1x1 maps valid), then flatten.
    X = AveragePooling2D((2,2),name='avg_pool',padding='same')(X)
    X = Flatten()(X)
    # Softmax classification head; use CLASS_NUM instead of the
    # previously hard-coded 5 so the constant is the single source of truth.
    X = Dense(CLASS_NUM,activation='softmax',name='fc')(X)
    model = Model(inputs=X_input, outputs=X)
    model.compile(loss=objective,optimizer=optimizer,metrics=['accuracy'])
    return model
if __name__ == "__main__":
    # Build the network and print its layer summary.
    # (Removed a stray no-op `Model()` instantiation, and dropped the
    # `print(...)` wrapper: model.summary() prints itself and returns None.)
    model = resNet()
    model.summary()