import keras
from keras.layers import Input, Conv2D, DepthwiseConv2D, Activation, Reshape, concatenate, BatchNormalization
from keras.activations import relu
from keras.models import Model
from keras.utils import plot_model

import tensorflow as tf

import numpy as np

def Relu6(x, max_value=6):
    """Clipped ReLU activation; with the default cap of 6 this is ReLU6."""
    capped = relu(x, max_value=max_value)
    return capped

# Names of the conv / depthwise-conv layer output tensors recorded while the
# backbone is built (returned as main_model's second value).
backbone_layer_name = []
# Module-level switch: layer names are recorded only while this is True;
# main_model sets it to False before building the detection heads.
save_layer = True
def Conv(x, filter, filter_shape = (3,3), stride=2, has_relu = True):
    """Conv2D -> BatchNorm -> optional ReLU block.

    While the module-level `save_layer` flag is True, the raw conv output's
    tensor name is appended to `backbone_layer_name`.
    """
    out = Conv2D(filter, filter_shape, strides=stride, padding="same")(x)
    if save_layer:
        backbone_layer_name.append(out.name)
    out = BatchNormalization()(out)
    return Activation("relu")(out) if has_relu else out

def dsConv(x, filter_shape = (3, 3), stride=2):
    """DepthwiseConv2D -> BatchNorm -> ReLU block.

    While the module-level `save_layer` flag is True, the raw depthwise conv
    output's tensor name is appended to `backbone_layer_name`.
    """
    out = DepthwiseConv2D(filter_shape, strides=stride, padding="same")(x)
    if save_layer:
        backbone_layer_name.append(out.name)
    normed = BatchNormalization()(out)
    return Activation("relu")(normed)

def backbone(input, detect_branch=None):
    """Build the MobileNet-style backbone (pointwise Conv + depthwise blocks).

    Args:
        input: Keras input tensor.
        detect_branch: optional list that is extended IN PLACE with the four
            intermediate feature maps used by the detection heads. Fixed: this
            was a mutable default argument (`=[]`), which is shared across
            calls — building two models in one process (as __main__ does via
            main_model and class_model) would leak tensors from the first
            build into the second's default list.

    Returns:
        keras.Model mapping `input` to the final backbone feature map.
    """
    if detect_branch is None:
        detect_branch = []

    x = input
    x = Conv(x, 8)
    x = dsConv(x, stride=1)

    x = Conv(x, 16, (1, 1), 1)
    x = dsConv(x)

    x = Conv(x, 16, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 16, (1, 1), 1)
    x = dsConv(x)

    x = Conv(x, 32, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 32, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 32, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 32, (1, 1), 1)
    detect_branch += [x]  # tapped for detection head 0
    x = dsConv(x)

    x = Conv(x, 64, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 64, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 64, (1, 1), 1)
    detect_branch += [x]  # tapped for detection head 1
    x = dsConv(x)

    x = Conv(x, 128, (1, 1), 1)
    x = dsConv(x, stride=1)

    x = Conv(x, 128, (1, 1), 1)
    detect_branch += [x]  # tapped for detection head 2

    x = Conv(x, 32, (1, 1), 1)
    x = dsConv(x)

    x = Conv(x, 128, (1, 1), 1)
    detect_branch += [x]  # tapped for detection head 3

    return Model(input, x)

def class_leaf(input, conv_filter, has_ds=True, num_classes=2):
    """Classification head: optional depthwise block, 1x1 conv (no ReLU),
    reshaped to (num_anchors, num_classes).

    Generalized: the class count in the Reshape was hard-coded to 2 even
    though the conv width is boxes_per_cell * num_classes; it is now a
    parameter whose default preserves the old behavior, so existing callers
    are unchanged. `conv_filter` should equal boxes_per_cell * num_classes.
    """
    x = dsConv(input, stride=1) if has_ds else input
    x = Conv(x, conv_filter, (1, 1), 1, has_relu=False)
    x = Reshape((-1, num_classes))(x)
    return x

def regress_leaf(input, conv_filter, has_ds=True):
    """Box-regression head: optional depthwise block, then a 1x1 conv without
    ReLU, reshaped to (num_anchors, 4) box coordinates."""
    if has_ds:
        x = dsConv(input, stride=1)
    else:
        x = input
    x = Conv(x, conv_filter, (1, 1), 1, has_relu=False)
    return Reshape((-1, 4))(x)

box_nums = [3, 2, 2, 3]

def main_model(shape, num_classes, mode="Train"):
    """Build the SSD-style detection model.

    Args:
        shape: input image shape, e.g. (256, 256, 3).
        num_classes: number of object classes per anchor.
        mode: "Train" concatenates [boxes, class scores] per anchor;
            "Val" runs the TFLite post-process op instead.

    Returns:
        (keras.Model, backbone_layer_name) tuple.

    Raises:
        ValueError: for any mode other than "Train"/"Val". Fixed: previously
        an unknown mode left `output` unbound and crashed with
        UnboundLocalError at the Model(...) call.
    """
    global save_layer
    branch = []

    _in = Input(shape)
    back_bone_model = backbone(_in, branch)

    # Stop recording layer names: only backbone layers belong in the list.
    save_layer = False
    class_concat = []
    regress_concat = []
    for idx, b in enumerate(branch):
        # The last branch is the backbone output itself; no extra depthwise.
        has_ds = False if (idx == len(branch) - 1) else True
        class_concat.append(class_leaf(b, box_nums[idx] * num_classes, has_ds)) # classification
        regress_concat.append(regress_leaf(b, box_nums[idx] * 4, has_ds))  # 4-coordinates

    class_output = concatenate(class_concat, axis = 1)
    class_output = Activation("softmax")(class_output)

    regress_output = concatenate(regress_concat, axis = 1)

    if mode == "Train":
        output = concatenate([regress_output, class_output], axis=-1)
    elif mode == "Val":
        # NOTE(review): TFLite_Detection_PostProcess is neither defined nor
        # imported in this file — "Val" mode raises NameError until it is
        # provided. Confirm where it is supposed to come from.
        output = TFLite_Detection_PostProcess(1)([regress_output, class_output])
    else:
        raise ValueError("mode must be 'Train' or 'Val', got %r" % (mode,))

    return Model(back_bone_model.input, outputs = output), backbone_layer_name

def class_model(input, class_num = 1):
    """Simple classifier on top of the backbone: GAP -> Dense -> softmax.

    Args:
        input: Keras Input tensor.
        class_num: number of output classes.

    Fixed: Model(..., output=x) used the long-removed singular `output`
    keyword (a TypeError on current Keras); it is now `outputs=x`, consistent
    with main_model.
    """
    back_bone_model = backbone(input)
    from keras.layers import GlobalAveragePooling2D, Flatten, Dense
    x = GlobalAveragePooling2D()(back_bone_model.output)
    x = Dense(class_num)(x)
    x = Activation('softmax')(x)

    return Model(back_bone_model.input, outputs = x)

def scheduler(epoch):
    """Learning-rate schedule for keras.callbacks.LearningRateScheduler.

    1e-4 warm-up for epochs 0-4, 1e-3 for epochs 5-19, then 1e-4.

    Fixed: the original fell through and returned None for epoch >= 40,
    which crashes LearningRateScheduler ("output of the schedule function
    should be float") exactly when training runs long enough to reach it.
    """
    if epoch < 5:
        return 0.0001
    elif epoch < 20:
        return 0.001
    return 0.0001

if __name__ == "__main__":
    # Build and inspect the detection model, then save it.
    model = main_model((256, 256, 3), 2, 'Train')[0]
    model.summary()
    plot_model(model, "model_od.png", True)
    model.save("test_od.h5")
    from keras.optimizers import Adam
    from keras.utils import to_categorical
    from sklearn.model_selection import train_test_split

    epoch = 100

    # Train the binary person/background classifier on the backbone.
    model = class_model(Input((192, 256, 3)), 2)
    model.summary()
    model.save("test.h5")

    train_people = np.load("./class_dataset/train_people.npy")
    train_back = np.load("./class_dataset/train_back.npy")
    train_data = np.vstack([train_people, train_back])
    # Fixed: labels must follow the vstack order (people first, then
    # background). The original built [0]*len(back) + [1]*len(people), which
    # mislabelled samples whenever the two sets differ in size, and swapped
    # the two classes even when they matched. Class 1 = people, 0 = background.
    train_label = np.asarray([1] * len(train_people) + [0] * len(train_back))

    train_x, test_x, train_y, test_y = train_test_split(train_data, train_label, test_size=0.2, random_state=0)

    # Cap the dataset at 10k train / 2k test and scale pixels to [-1, 1).
    train_x = (train_x[:10000] / 128.0 - 1).astype("float32")
    test_x = (test_x[:int(10000 * 0.2)] / 128.0 - 1).astype("float32")

    train_y = to_categorical(train_y[:10000], num_classes=2).astype("float32")
    test_y = to_categorical(test_y[:int(10000 * 0.2)], num_classes=2).astype("float32")

    opt = Adam(lr=0.001)
    model.compile(opt, loss="categorical_crossentropy", metrics=["acc"])
    checkpoint = keras.callbacks.ModelCheckpoint(filepath="./models/model_{epoch:02d}_{val_acc:.02f}.h5", save_best_only=True)
    earlystop = keras.callbacks.EarlyStopping(patience=20)
    reduce_lr = keras.callbacks.LearningRateScheduler(scheduler)

    callback = [checkpoint, earlystop, reduce_lr]
    model.fit(train_x, train_y, batch_size=32, epochs=epoch, validation_data=(test_x, test_y), shuffle=True, callbacks=callback)

    print()