import pandas as pd
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns
import os

# Silence TF's C++ INFO-level startup messages.
# NOTE(review): to take full effect this variable should be set *before*
# `import tensorflow`; TF is already imported above, so some early
# messages may still be printed.  (The original set it twice — once here
# and once below — the duplicate is removed.)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'

# Use public API paths instead of the private tensorflow.python.* /
# tensorflow.core.* modules, which are internal and can break between
# TensorFlow releases.
from tensorflow.compat.v1 import ConfigProto, InteractiveSession
from tensorflow.keras.layers import RandomFlip, RandomRotation
from tensorflow.keras.preprocessing import image_dataset_from_directory

# Allocate GPU memory on demand instead of reserving it all up front.
config = ConfigProto()
config.gpu_options.allow_growth = True
session = InteractiveSession(config=config)


# Load the training metadata table and take a first look at it.
train_table = pd.read_csv("input/paddy-disease-classification/train.csv")
print(train_table)

# Exploratory analysis: which disease labels exist?
# np.unique returns the distinct labels in sorted order.
labels = np.unique(train_table['label']).tolist()
print(labels)
# ['bacterial_leaf_blight', 'bacterial_leaf_streak', 'bacterial_panicle_blight',
# 'blast', 'brown_spot', 'dead_heart', 'downy_mildew', 'hispa', 'normal',
# 'tungro']

# Unique counts of the remaining columns, grouped by each column in turn.
for header, group_col in (
    ("Number of images, age and variety grouped by label:\n", 'label'),
    ("\n\nNumber of images, label and variety grouped by age:\n", 'age'),
    ("\n\nNumber of images, label and age grouped by variety:\n", 'variety'),
):
    print(header, train_table.groupby(group_col).nunique())

# Per-label image counts as a histogram.
plt.figure(figsize=(20, 10))
sns.histplot(data=train_table, x='label')
plt.xticks(rotation=45)
plt.show()

# Distribution of paddy disease with respect to plant age.
age_counts = train_table['age'].value_counts()
print("Total Images for each age category:\n", age_counts)

# Scatter-style view of age per label, then the same data as box plots.
sns.catplot(data=train_table, x='label', y='age')
plt.xticks(rotation=45)

sns.catplot(data=train_table, y='label', x='age', kind='box')
plt.xticks(rotation=45)

plt.show()

# Distribution of disease labels across rice varieties.
variety_counts = train_table['variety'].value_counts()
print("Total Images for each variety category:\n", variety_counts)

sns.histplot(data=train_table, y='label', hue='variety')
plt.show()

# NOTE(review): the CSV above is read from "input/..." while the images
# here are read from "kaggle/input/..." — confirm which root is correct
# for this environment.
def _load_split(subset):
    """Return the 'training' or 'validation' subset of the labelled images.

    Both calls share the same seed and validation_split, so the two
    subsets are a consistent, disjoint partition of the directory.
    """
    return tf.keras.preprocessing.image_dataset_from_directory(
        "kaggle/input/paddy-disease-classification/train_images/",
        labels='inferred',
        label_mode='categorical',   # one-hot labels for CategoricalCrossentropy
        class_names=labels,         # pin class order to the labels from the CSV
        color_mode='rgb',
        image_size=(400, 400),
        shuffle=True,
        validation_split=0.1,       # hold out 10% for validation
        subset=subset,
        seed=42)                    # identical seed on both calls -> clean split


train_data = _load_split('training')
val_data = _load_split('validation')

# Lightweight augmentation; applied inside the model, so it only runs in
# training mode.
data_augmentation = tf.keras.Sequential([
    RandomRotation(0.45),   # factor is a fraction of 2*pi (here up to ~+/-162 deg)
    RandomFlip('horizontal'),
])

# EfficientNetB2 backbone with ImageNet weights; pooling='avg' collapses
# the spatial dimensions so the backbone outputs a flat feature vector.
base_model = tf.keras.applications.EfficientNetB2(
    include_top=False, pooling='avg', weights='imagenet',
    input_shape=(400, 400, 3))

# (Removed a dead notebook probe that pulled one batch through the
# backbone just to look at the feature shape — its result was unused.)

# Transfer-learning head: augmentation -> backbone -> two dense blocks.
inputs = tf.keras.Input(shape=(400, 400, 3))
x = data_augmentation(inputs)
# training=False keeps BatchNorm layers in inference mode even once the
# backbone is partially unfrozen for fine-tuning later.
x = base_model(x, training=False)
x = tf.keras.layers.Dense(512, activation='relu')(x)
x = tf.keras.layers.Dropout(0.4)(x)
x = tf.keras.layers.Dense(128, activation='relu')(x)
x = tf.keras.layers.Dropout(0.4)(x)
output = tf.keras.layers.Dense(10, activation='softmax')(x)  # 10 disease classes
model = tf.keras.Model(inputs, output)

model.summary()

# Phase 1: train only the new classification head.
# Freeze the whole backbone in one step — equivalent to looping over
# base_model.layers and clearing each layer's trainable flag.
base_model.trainable = False

model.summary()

# Stop on the *validation* metric: monitoring training accuracy (as the
# original did) makes restore_best_weights nearly always keep the last
# epoch, defeating the purpose of early stopping.
callback1 = tf.keras.callbacks.EarlyStopping(
    monitor='val_accuracy', patience=2, restore_best_weights=True)

model.compile(loss=tf.keras.losses.CategoricalCrossentropy(),
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              metrics=['accuracy'])

history = model.fit(train_data, validation_data=val_data, epochs=1,
                    callbacks=[callback1])  # single epoch: smoke-test run

# Loss/accuracy curves for the head-training phase.
pd.DataFrame(history.history).plot()
plt.xlabel('epochs')
plt.ylabel('loss & accuracy')
plt.show()

# Phase 2: fine-tune the upper part of the backbone with a much smaller
# learning rate.  First clear any container-level freeze (setting
# trainable on the model recursively enables every sublayer), then
# re-freeze the lower blocks — end state: layers[:280] frozen,
# layers[280:] trainable, same as the original.
base_model.trainable = True
for frozen_layer in base_model.layers[:280]:
    frozen_layer.trainable = False

model.summary()

# Recompile so the new trainable flags take effect; the 100x smaller
# learning rate avoids destroying the pretrained features.
model.compile(loss=tf.keras.losses.CategoricalCrossentropy(),
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.00001),
              metrics=['accuracy'])

# As in phase 1, monitor the validation metric so restore_best_weights
# actually guards against overfitting.
callback2 = tf.keras.callbacks.EarlyStopping(
    monitor='val_accuracy', patience=3, restore_best_weights=True)

# initial_epoch resumes the epoch numbering where phase 1 left off.
history_tuned = model.fit(train_data, validation_data=val_data, epochs=1,
                          initial_epoch=history.epoch[-1],
                          callbacks=[callback2])

pd.DataFrame(history_tuned.history).plot()
plt.xlabel('epochs')
plt.ylabel('loss & accuracy')

# Unlabelled test images, loaded in directory order (shuffle=False) so
# predictions can be matched back to file names.  Uses the same public
# tf.keras loader as the train/val splits instead of the name imported
# from the private keras module.
test_data = tf.keras.preprocessing.image_dataset_from_directory(
    "kaggle/input/paddy-disease-classification/test_images/",
    label_mode=None,       # no labels: the dataset yields images only
    color_mode='rgb',
    image_size=(400, 400),
    shuffle=False)

# Display the fine-tuning curves built above.
plt.show()