import tensorflow as tf
import random
import matplotlib.pyplot as plot
import numpy as np
import pathlib

print('Tensorflow version: {}'.format(tf.__version__))

# Root directory containing one sub-directory per class.
data_dir = 'D:/2_class'
data_root = pathlib.Path(data_dir)
for item in data_root.iterdir():
    print(item)

# Collect every image path (class_dir/image) as a string, then shuffle
# so the later sequential train/test split is not ordered by class.
all_img_path = [str(p) for p in data_root.glob('*/*')]
image_count = len(all_img_path)
random.shuffle(all_img_path)

# Map each class-directory name to an integer index, then label every
# image by the name of its parent directory.
label_names = sorted(p.name for p in data_root.glob('*/') if p.is_dir())
label_to_index = {name: index for index, name in enumerate(label_names)}
all_img_label = [label_to_index[pathlib.Path(p).parent.name] for p in all_img_path]

def preprocess(path, size=(256, 256)):
    """Load a JPEG image and return it as a float32 tensor in [0, 1].

    Args:
        path: scalar string tensor (or Python str) with the image file path.
        size: target (height, width) to resize to; defaults to the 256x256
            input expected by the model below.

    Returns:
        A float32 tensor of shape (size[0], size[1], 3) scaled to [0, 1].
    """
    image = tf.io.read_file(path)
    image = tf.image.decode_jpeg(image, channels=3)
    # resize() with the default bilinear method already returns float32,
    # so the previous explicit cast was redundant.
    image = tf.image.resize(image, size)
    return image / 255.0

# Build the (image, label) input pipeline.
path_ds = tf.data.Dataset.from_tensor_slices(all_img_path)
AUTOTUNE = tf.data.experimental.AUTOTUNE
# Decode/resize images in parallel; a single parallel map is enough
# (the pipeline previously mapped `preprocess` twice, discarding the first).
image_ds = path_ds.map(preprocess, num_parallel_calls=AUTOTUNE)
label_ds = tf.data.Dataset.from_tensor_slices(tf.cast(all_img_label, tf.int64))

dataset = tf.data.Dataset.zip((image_ds, label_ds))

# 80/20 train/test split; the path list was already shuffled once globally,
# so take/skip yields a class-mixed split.
test_count = int(image_count * 0.2)
train_count = image_count - test_count
train_data = dataset.skip(test_count)
test_data = dataset.take(test_count)

size = 8  # batch size
# tf.data.experimental.shuffle_and_repeat is deprecated in TF2; the
# chained shuffle().repeat() form is the supported equivalent.
train_data = train_data.shuffle(buffer_size=train_count).repeat()
train_data = train_data.batch(size)
train_data = train_data.prefetch(buffer_size=AUTOTUNE)
test_data = test_data.batch(size)

# Build the model: a sequential CNN with three Conv/BN blocks, global
# average pooling, and a softmax classification head.
model = tf.keras.Sequential()  # sequential (layer-stack) model
model.add(tf.keras.layers.Conv2D(64, (3, 3), input_shape=(256, 256, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.MaxPooling2D())

model.add(tf.keras.layers.Conv2D(128, (3, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Conv2D(128, (3, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.MaxPooling2D())

model.add(tf.keras.layers.Conv2D(256, (3, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Conv2D(256, (3, 3), activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.MaxPooling2D())

model.add(tf.keras.layers.GlobalAveragePooling2D())
model.add(tf.keras.layers.Dense(256, activation='relu'))
model.add(tf.keras.layers.BatchNormalization())
# Size the output layer from the classes actually found on disk instead of
# a hard-coded constant (the previous Dense(6) did not match the two-class
# directory this script loads from).
model.add(tf.keras.layers.Dense(len(label_names), activation='softmax'))

# Compile for integer class-id labels ('acc' tracks training accuracy).
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['acc'],
)

# Number of whole batches per epoch for each split.
steps_per_epoch = train_count // size
validation_steps = test_count // size

history = model.fit(
    train_data,
    epochs=10,
    steps_per_epoch=steps_per_epoch,
    validation_data=test_data,
    validation_steps=validation_steps,
)