import os
import tensorflow as tf
import numpy as np
from tensorflow import keras

# In[1]:
# Fix RNG seeds so weight init and dataset shuffling are reproducible.
tf.random.set_seed(22)
np.random.seed(22)
# NOTE(review): TF_CPP_MIN_LOG_LEVEL only suppresses TF's C++ log spam when it
# is set BEFORE `import tensorflow`; here it is set after the import, so it
# likely has no effect — consider moving it above the imports.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# The script relies on TF 2.x APIs (eager mode, tf.data, keras.Model subclassing).
assert tf.__version__.startswith('2.')

def load_imag(path):
    """Load one image file and derive its binary label from the file path.

    Args:
        path: scalar string tensor holding the path of a .jpg file. Paths
            matching '.*automobile.*' are labelled 1, all others 0.

    Returns:
        (img, label): img is a float32 tensor of shape (32, 32, 3) scaled to
        [0, 1]; label is an int32 scalar.
    """
    # Cast the boolean regex match directly to int32. This is graph-safe inside
    # Dataset.map, unlike the original Python conditional expression on a
    # tensor, which relied on AutoGraph converting the ternary.
    label = tf.cast(tf.strings.regex_full_match(path, '.*automobile.*'), tf.int32)

    img = tf.io.read_file(path)
    # channels=3 guarantees an RGB tensor even for grayscale-encoded JPEGs,
    # matching the (None, 32, 32, 3) shape the model is built with.
    img = tf.image.decode_jpeg(img, channels=3)
    img = tf.image.resize(img, size=(32, 32)) / 255.0

    return img, label

# Input pipelines: decode/resize in parallel, batch, and prefetch so data
# preparation overlaps with training. Only the training set is shuffled.
AUTOTUNE = tf.data.experimental.AUTOTUNE

db_train = tf.data.Dataset.list_files('../data/cifar2/train/*/*.jpg')
db_train = db_train.map(load_imag, AUTOTUNE)
db_train = db_train.shuffle(buffer_size=1000).batch(100).prefetch(AUTOTUNE)

db_test = tf.data.Dataset.list_files('../data/cifar2/test/*/*.jpg')
db_test = db_test.map(load_imag, AUTOTUNE).batch(100).prefetch(AUTOTUNE)

class ConvBNRelu(keras.Model):
    """Conv2D -> BatchNormalization -> ReLU, the basic unit of this network.

    Defaults to a 3x3 kernel, stride 1, and 'same' (zero) padding, producing
    `ch` output channels.
    """

    def __init__(self, ch, kernelsz=3, strides=1, padding='same'):
        super(ConvBNRelu, self).__init__()

        conv = keras.layers.Conv2D(ch, kernelsz, strides=strides, padding=padding)
        norm = keras.layers.BatchNormalization()
        act = keras.layers.ReLU()
        self.model = keras.models.Sequential([conv, norm, act])

    def call(self, x, training=None):
        # Forward `training` so BatchNormalization switches between batch
        # statistics (training) and moving averages (inference).
        return self.model(x, training=training)

class InceptionBlk(keras.Model):
    """Inception-style block: four parallel branches joined on the channel axis.

    Each branch emits `ch` channels, so the output has 4 * ch channels;
    `strides` > 1 down-samples the spatial dimensions in every branch.
    Branches: (1) 3x3 conv, (2) 5x5 conv, (3) 3x3 conv then 1x1 conv,
    (4) 3x3 max-pool then 3x3 conv.
    """

    def __init__(self, ch, strides=1):
        super(InceptionBlk, self).__init__()

        self.ch = ch
        self.strides = strides

        self.conv1 = ConvBNRelu(ch, strides=strides)                # branch 1: 3x3
        self.conv2 = ConvBNRelu(ch, kernelsz=5, strides=strides)    # branch 2: 5x5
        self.conv3_1 = ConvBNRelu(ch, kernelsz=3, strides=strides)  # branch 3: 3x3 ...
        self.conv3_2 = ConvBNRelu(ch, kernelsz=1, strides=1)        # ... then 1x1
        self.pool = keras.layers.MaxPooling2D(3, strides=1, padding='same')  # branch 4
        self.pool_conv = ConvBNRelu(ch, strides=strides)            # ... then 3x3

    def call(self, x, training=None):
        branch1 = self.conv1(x, training=training)
        branch2 = self.conv2(x, training=training)
        branch3 = self.conv3_2(self.conv3_1(x, training=training), training=training)
        branch4 = self.pool_conv(self.pool(x), training=training)

        # Concatenate along channels (NHWC layout -> axis 3).
        return tf.concat([branch1, branch2, branch3, branch4], axis=3)

class Inception(keras.Model):
    """Small Inception-style classifier.

    Architecture: a stem ConvBNRelu, then `num_layers` stages of two
    InceptionBlk's each (the first with stride 2 to down-sample, the second
    with stride 1), doubling the channel count after every stage, followed by
    global average pooling and a sigmoid Dense head.

    Args:
        num_layers: number of inception stages (InceptionBlk pairs).
        num_classes: number of output units in the final Dense head. The head
            uses a sigmoid activation, so pass 1 for binary classification.
        init_ch: channel count of the stem conv and the first stage.
    """

    def __init__(self, num_layers, num_classes, init_ch=16, **kwargs):
        super(Inception, self).__init__(**kwargs)

        self.out_channels = init_ch
        self.num_layers = num_layers

        # Stem: 3x3 ConvBNRelu, keeps the spatial size.
        self.conv1 = ConvBNRelu(init_ch)

        # The stages are stacked dynamically into one Sequential.
        self.blocks = keras.models.Sequential()
        for block_id in range(num_layers):
            for layer_id in range(2):
                if layer_id == 0:
                    # The first block of each stage halves the spatial size.
                    block = InceptionBlk(self.out_channels, strides=2)
                else:
                    block = InceptionBlk(self.out_channels, strides=1)
                self.blocks.add(block)
            # Double the channel width for the next stage.
            self.out_channels *= 2

        # Global average pooling collapses (H, W) into one feature vector.
        self.avg_pool = keras.layers.GlobalAveragePooling2D()
        # BUG FIX: the original hard-coded Dense(1), silently ignoring the
        # num_classes argument. Use the parameter; existing callers pass 1,
        # so behavior is unchanged for them.
        self.fc = keras.layers.Dense(num_classes, activation='sigmoid')

    def call(self, x, training=None):
        out = self.conv1(x, training=training)
        out = self.blocks(out, training=training)
        out = self.avg_pool(out)
        out = self.fc(out)
        return out

# Build the model and optimizer, then train.
# NOTE(review): the input pipelines batch at 100; this variable is currently
# unused and kept only for backward compatibility.
batch_size = 32
epochs = 10  # aligned with the fit() call below (was an unused `epochs = 1`)
model = Inception(2, 1)
# Build with a concrete input shape so summary() can report layer parameters.
model.build(input_shape=(None, 32, 32, 3))
model.summary()

optimizer = keras.optimizers.Adam(learning_rate=0.001)
# BUG FIX: the original constructed an unused CategoricalCrossentropy
# (from_logits=True) while compiling with the 'binary_crossentropy' string.
# The model ends in a 1-unit sigmoid, so binary cross-entropy on
# probabilities (from_logits=False, the default) is the correct loss; passing
# the object to compile() is equivalent to the string it actually trained with.
criteon = keras.losses.BinaryCrossentropy()

model.compile(optimizer=optimizer, loss=criteon, metrics=['accuracy'])
# NOTE(review): db_test is built above but never evaluated — consider
# validation_data=db_test here or a model.evaluate(db_test) afterwards.
model.fit(db_train, epochs=epochs)