import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Input image edge length in pixels. AlexNet's canonical input is 227x227,
# but 224 is widely used and works with the 'valid'-padded first conv below.
IMSIZE = 224
# Shared batch size for both data pipelines (previously duplicated inline).
BATCH_SIZE = 16

# Training-set pipeline: pixel rescaling to [0, 1] plus random geometric
# augmentation (rotation, shifts, shear, zoom, horizontal flip) applied on
# the fly, so each epoch sees slightly different images.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest'
)
train_generator = train_datagen.flow_from_directory(
    '../data/flower_learn_data/trains',
    target_size=(IMSIZE, IMSIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical'
)

# Validation pipeline: rescaling only, no augmentation, so reported metrics
# reflect the true, unperturbed images.
validation_datagen = ImageDataGenerator(rescale=1. / 255)
validation_generator = validation_datagen.flow_from_directory(
    '../data/flower_learn_data/tests',
    target_size=(IMSIZE, IMSIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical'
)

# Build AlexNet by hand, train it on the flower data, and report accuracy.
# NOTE(review): the explicit '/GPU:0' pin raises an error on machines with
# no visible GPU -- confirm a GPU is always available, or drop the
# tf.device context and let TensorFlow place ops automatically.
with tf.device('/GPU:0'):
    model = Sequential()

    # Conv block 1: large 11x11 receptive field with stride 4, as in the
    # original AlexNet. BatchNormalization stands in for the paper's
    # local response normalization.
    model.add(Conv2D(96, (11, 11), strides=(4, 4), activation='relu', padding='valid', input_shape=(IMSIZE, IMSIZE, 3)))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))

    # Conv block 2: 5x5 convolution, again normalized and pooled.
    model.add(Conv2D(256, (5, 5), strides=(1, 1), activation='relu', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))

    # Conv layers 3-5: three 3x3 convolutions back to back, pooled once.
    model.add(Conv2D(384, (3, 3), strides=(1, 1), activation='relu', padding='same'))
    model.add(Conv2D(384, (3, 3), strides=(1, 1), activation='relu', padding='same'))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), activation='relu', padding='same'))
    model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))

    # Classifier head: two 4096-unit dense layers, each with 0.5 dropout.
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))

    # Output layer: one softmax unit per class. Derive the class count from
    # the data pipeline instead of hard-coding 5, so the script keeps
    # working if flower classes are added or removed from the dataset.
    model.add(Dense(train_generator.num_classes, activation='softmax'))

    # Plain SGD with categorical cross-entropy for one-hot labels.
    model.compile(optimizer=SGD(learning_rate=0.001), loss='categorical_crossentropy', metrics=['accuracy'])

    # Stop training once validation loss has not improved for 10 epochs,
    # and roll the model back to the best weights seen so far.
    early_stopping = tf.keras.callbacks.EarlyStopping(
        monitor='val_loss',
        patience=10,
        restore_best_weights=True
    )

    # Train for up to 100 epochs; floor division means any final partial
    # batch is skipped each epoch, matching the original behavior.
    history = model.fit(
        train_generator,
        steps_per_epoch=train_generator.samples // train_generator.batch_size,
        epochs=100,
        validation_data=validation_generator,
        validation_steps=validation_generator.samples // validation_generator.batch_size,
        callbacks=[early_stopping]
    )

    # Final evaluation on the validation set; evaluate() returns
    # [loss, accuracy], so index 1 is the accuracy metric.
    accuracy = model.evaluate(validation_generator)[1]
    print("准确率:", accuracy)