# coding:utf-8
import os

from keras import Input, Model
from keras.layers import Conv2D, BatchNormalization, add, ZeroPadding2D, MaxPooling2D, AvgPool2D, Flatten, Dense, \
    AveragePooling2D
from keras.metrics import top_k_categorical_accuracy
from keras.models import load_model
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import plot_model

NB_CLASS=20 # number of output classes (20 VOC object categories)
IM_WIDTH=224 # input image width in pixels
IM_HEIGHT=224 # input image height in pixels
batch_size=32 # mini-batch size

# Dataset split roots; each must follow the one-subdirectory-per-class
# layout expected by ImageDataGenerator.flow_from_directory.
train_root = "/home/hadoop/execu/net/VOCdevkit/VOC2012/train_root/"
validation_root = "/home/hadoop/execu/net/VOCdevkit/VOC2012/valid_root/"
test_root = "/home/hadoop/execu/net/VOCdevkit/VOC2012/test_root/"

EPOCH=60 # number of training epochs

# Training pipeline: [0, 1] pixel rescaling plus light random augmentation
# (shifts, shear, zoom, horizontal flip) to reduce overfitting.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    width_shift_range=0.1,
    height_shift_range=0.1,
    shear_range=0.1,
    zoom_range=0.1,
    horizontal_flip=True,
)

# Stream batches of (IM_WIDTH, IM_HEIGHT) images from the training directory,
# shuffled each epoch.
train_generator = train_datagen.flow_from_directory(
    train_root,
    target_size=(IM_WIDTH, IM_HEIGHT),
    batch_size=batch_size,
    shuffle=True,
)

# Validation pipeline: rescaling only — validation metrics should be measured
# on un-augmented images.
# Bug fix: the generator previously flowed from `train_datagen`, so
# `valid_datagen` was dead code and validation batches were randomly augmented.
valid_datagen = ImageDataGenerator(
    rescale=1./255
)
valid_generator = valid_datagen.flow_from_directory(
    validation_root,
    target_size=(IM_WIDTH, IM_HEIGHT),
    batch_size=batch_size,
)

# Test pipeline: rescaling only, no augmentation.
# Bug fix: the generator previously flowed from `train_datagen`, so
# `test_datagen` was dead code and test batches were randomly augmented.
test_datagen = ImageDataGenerator(
    rescale=1./255
)
test_generator = test_datagen.flow_from_directory(
    test_root,
    target_size=(IM_WIDTH, IM_HEIGHT),
    batch_size=batch_size,
)
# Convolution + batch normalization helper
def Conv2d_BN(x, nb_filter, kernel_size, strides=(1, 1), padding="same", name=None):
    """Apply a ReLU-activated Conv2D followed by BatchNormalization.

    Args:
        x: input 4-D tensor (batch, height, width, channels).
        nb_filter: number of convolution filters.
        kernel_size: convolution kernel size (int or tuple).
        strides: convolution strides.
        padding: convolution padding mode ("same" or "valid").
        name: optional base name; layers are named "<name>_conv"/"<name>_bn".

    Returns:
        The normalized output tensor.
    """
    if name is not None:
        bn_name = name + '_bn'
        conv_name = name + '_conv'
    else:
        bn_name = None
        conv_name = None
    # NOTE(review): the activation is applied inside Conv2D, so the effective
    # order is conv -> relu -> BN; canonical ResNet uses conv -> BN -> relu.
    # Left unchanged to preserve behavior.
    x = Conv2D(nb_filter, kernel_size, padding=padding, strides=strides, activation='relu', name=conv_name)(x)
    # Bug fix: bn_name was computed but never passed to the layer, so the
    # BatchNormalization layer silently fell back to an auto-generated name.
    x = BatchNormalization(axis=3, name=bn_name)(x)
    return x


# The residual trick: a shortcut connection skips over a stack of layers and
# is summed with their output. This mitigates vanishing gradients and lets
# later convolutions progressively refine an increasingly strong signal.
# NOTE: one building block counts as one "layer" in the ResnNet naming scheme.
def identity_block(inpt, nb_filter, kernel_size, strides=(1, 1), with_conv_shortcut=False):
    """Basic two-convolution ResNet residual block.

    When `with_conv_shortcut` is True the input is projected through its own
    conv+BN (needed when the main path changes spatial size or channel count);
    otherwise the raw input is added back as an identity shortcut.
    """
    out = Conv2d_BN(inpt, nb_filter=nb_filter, kernel_size=kernel_size, strides=strides, padding="same")
    out = Conv2d_BN(out, nb_filter=nb_filter, kernel_size=kernel_size, padding='same')
    if not with_conv_shortcut:
        # Shapes already match: plain identity shortcut.
        return add([out, inpt])
    # Projection shortcut so the skip branch matches the main path's shape.
    shortcut = Conv2d_BN(inpt, nb_filter=nb_filter, strides=strides, kernel_size=kernel_size)
    return add([out, shortcut])


# The second of the two ResNet block variants (used by the 50/101/152 layouts)
def bottleneck_Block(inpt, nb_filters, strides=(1, 1), with_conv_shortcut=False):
    """ResNet bottleneck residual block: 1x1 reduce -> 3x3 -> 1x1 expand.

    Bug fix: `strides` was previously applied to all three convolutions, so a
    (2, 2) stride shrank the main path by 8x while the projection shortcut
    shrank by only 2x, and the final `add` failed on mismatched shapes (this
    broke resnet_50's conv3/conv4/conv5 stages). Downsampling now happens only
    in the first 1x1 convolution, matching the shortcut's stride.

    Args:
        inpt: input 4-D tensor.
        nb_filters: (k1, k2, k3) filter counts for the three convolutions.
        strides: stride of the first conv (and of the projection shortcut).
        with_conv_shortcut: project the input with a 1x1 conv+BN instead of
            using the identity shortcut.
    """
    k1, k2, k3 = nb_filters
    x = Conv2d_BN(inpt, nb_filter=k1, kernel_size=1, strides=strides)
    x = Conv2d_BN(x, nb_filter=k2, kernel_size=3, padding='same')
    x = Conv2d_BN(x, nb_filter=k3, kernel_size=1, padding='same')
    if with_conv_shortcut:
        shortcut = Conv2d_BN(inpt, nb_filter=k3, strides=strides, kernel_size=1)
        return add([x, shortcut])
    # Identity shortcut: requires inpt to already have k3 channels.
    return add([x, inpt])


def resnet_34(width, height, channel, classes):
    """Build a ResNet-34 classifier for (width, height, channel) inputs.

    Returns an uncompiled Keras Model ending in a `classes`-way softmax.
    """
    inpt = Input(shape=(width, height, channel))
    x = ZeroPadding2D((3, 3))(inpt)

    # Stem: 7x7/2 conv ('valid' — the zero-padding above supplies the border)
    # followed by a 3x3/2 max pool.
    x = Conv2d_BN(x, nb_filter=64, kernel_size=(7, 7), strides=(2, 2), padding='valid')
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x)

    # Residual stages as (filters, block count). Every stage after the first
    # opens with a strided projection block that halves the spatial size and
    # widens the channels; the remaining blocks are plain identity blocks.
    stages = [(64, 3), (128, 4), (256, 6), (512, 3)]
    for stage_idx, (filters, n_blocks) in enumerate(stages):
        for block_idx in range(n_blocks):
            if stage_idx > 0 and block_idx == 0:
                x = identity_block(x, nb_filter=filters, kernel_size=(3, 3),
                                   strides=(2, 2), with_conv_shortcut=True)
            else:
                x = identity_block(x, nb_filter=filters, kernel_size=(3, 3))

    # Head: 7x7 average pool (global at 224x224 input) -> flatten -> softmax.
    x = AveragePooling2D(pool_size=(7, 7))(x)
    x = Flatten()(x)
    x = Dense(classes, activation="softmax")(x)

    return Model(inputs=inpt, outputs=x)

def resnet_50(width, height, channel, classes):
    """Build a ResNet-50 classifier for (height, width, channel) inputs.

    Returns an uncompiled Keras Model ending in a `classes`-way softmax.
    """
    print("build 模型")
    inpt = Input(shape=(height, width, channel))
    x = ZeroPadding2D((3, 3))(inpt)

    # Stem: 7x7/2 conv ('valid' — the zero-padding above supplies the border)
    # followed by a 3x3/2 max pool.
    x = Conv2d_BN(x, nb_filter=64, kernel_size=(7, 7), strides=(2, 2), padding='valid')
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding="same")(x)

    # Bottleneck stages: (filter triple, block count, stride of first block).
    # Each stage opens with a projection-shortcut block; conv3-conv5 also
    # halve the spatial size there.
    stages = [
        ([64, 64, 256], 3, (1, 1)),
        ([128, 128, 512], 4, (2, 2)),
        ([256, 256, 1024], 6, (2, 2)),
        ([512, 512, 2048], 3, (2, 2)),
    ]
    for filters, n_blocks, first_stride in stages:
        x = bottleneck_Block(x, nb_filters=filters, strides=first_stride,
                             with_conv_shortcut=True)
        for _ in range(n_blocks - 1):
            x = bottleneck_Block(x, nb_filters=filters)

    # Head: 7x7 average pool (global at 224x224 input) -> flatten -> softmax.
    x = AveragePooling2D(pool_size=(7, 7))(x)
    x = Flatten()(x)
    x = Dense(classes, activation='softmax')(x)

    model = Model(inputs=inpt, outputs=x)
    print("build finish!")
    return model


def acc_top2(y_true, y_pred):
    """Top-2 accuracy metric.

    A prediction counts as correct when the true class is among the two
    highest-scoring predicted classes.
    """
    return top_k_categorical_accuracy(y_true, y_pred, k=2)


def check_print():
    """Build, inspect and compile a fresh ResNet-50.

    Prints a layer summary, writes an architecture diagram to 'resnet.png'
    (requires pydot/graphviz), and returns the compiled model.
    """
    model = resnet_50(IM_WIDTH, IM_HEIGHT, 3, NB_CLASS)
    model.summary()
    # Dump the architecture diagram to disk for visual inspection.
    plot_model(model, to_file='resnet.png')
    # Track plain accuracy and top-k (default k=5) accuracy during training.
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc',top_k_categorical_accuracy])
    print('Model Compiled')
    return model


if __name__ == '__main__':
    # Resume from a previous checkpoint when one exists; otherwise build anew.
    if os.path.exists('resnet_50.h5'):
        model = load_model('resnet_50.h5')
    else:
        model = check_print()

    # Bug fix: the step counts were computed with float division, but Keras
    # expects integers. Use ceiling division (-(-n // b)) so the final
    # partial batch of each split is still consumed.
    steps_per_epoch = -(-train_generator.n // batch_size)
    validation_steps = -(-valid_generator.n // batch_size)
    test_steps = -(-test_generator.n // batch_size)

    model.fit_generator(train_generator, validation_data=valid_generator, epochs=EPOCH,
                        steps_per_epoch=steps_per_epoch, validation_steps=validation_steps)
    model.save('resnet_50.h5')
    # compile() used metrics ['acc', top_k_categorical_accuracy], so evaluate
    # returns (loss, accuracy, top-k accuracy).
    loss, acc, top_acc = model.evaluate_generator(test_generator, steps=test_steps)
    print('Test result:loss:%f,acc:%f,top_acc:%f' % (loss, acc, top_acc))
