import Util as util
from keras.models import Sequential
from keras.layers import Dense
from keras.layers.advanced_activations import LeakyReLU
from keras.layers import Dropout
from keras.layers.normalization import BatchNormalization
import os


# base model
class BaseModelCreator:
    """Base class for model creators.

    Derives every save-file path from the subclass-provided model name and
    ensures the model directory exists. Subclasses must override
    ``modelName`` and ``getModel``; ``updateParamter`` is an optional hook
    for subclasses that take hyper-parameters.
    """

    # init
    def __init__(self, layerCount=0, nx=0, dropout=0):
        # let the subclass capture its hyper-parameters before any path
        # (which may depend on them via modelName) is built
        self.updateParamter(layerCount, nx, dropout)

        # saved file paths, all derived from the model name
        name = self.modelName()
        self.modelFilePath = util.getModelFilePath(name)
        self.weightsFilePath = util.getModelWeightsFilePath(name)
        self.trainLossFilePath = util.getModelTrainLossFilePath(name)
        self.trainLossProgressFilePath = util.getModelTrainLossProgressFilePath(name)
        self.testLossFilePath = util.getModelTestLossFilePath(name)
        # NOTE(review): the TensorBoard log path reuses the *test loss* path
        # helper — this looks like a copy/paste slip; confirm whether util
        # has a dedicated TensorBoard-path helper before changing it.
        self.tensorBoardLogPath = util.getModelTestLossFilePath(name) + '_tbLog'

        # create saved file dir; exist_ok avoids the race between an
        # os.path.exists check and makedirs
        self.modelDir = util.getModelDir(name)
        os.makedirs(self.modelDir, exist_ok=True)

    # parameter update hook (override in subclasses with hyper-parameters)
    def updateParamter(self, layerCount, nx, dropout):
        pass

    # model name (must be overridden; used to derive all file paths)
    def modelName(self):
        pass

    # build or load the model (must be overridden)
    def getModel(self, dataTrainShape):
        pass

# one layer model: a single sigmoid unit (logistic regression)
class OneLayerModelCreator(BaseModelCreator):
    # model name
    def modelName(self):
        return 'one_layer'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch
        model = Sequential()
        model.add(Dense(1, input_shape=(dataTrainShape[1],), activation='sigmoid'))
        return model

# two layer model: 128 LeakyReLU units + batch norm, sigmoid output
class TwoLayerModelCreator(BaseModelCreator):
    # model name
    def modelName(self):
        return 'two_layer'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch
        model = Sequential()
        model.add(Dense(128, input_shape=(dataTrainShape[1],)))
        model.add(LeakyReLU(alpha=0.1))
        model.add(BatchNormalization())
        model.add(Dense(1, activation='sigmoid'))
        return model


# three layer model: two 128-unit relu layers, sigmoid output
class ThreeLayerModelCreator(BaseModelCreator):
    # model name
    def modelName(self):
        return 'three_layer'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch
        model = Sequential()
        model.add(Dense(128, input_shape=(dataTrainShape[1],), activation='relu'))
        model.add(Dense(128, activation='relu'))
        model.add(Dense(1, activation='sigmoid'))
        return model

# three layer model with 4960_512_1 (wide relu layers, sigmoid output)
class ThreeLayerModelCreator_4960_512_1(BaseModelCreator):
    # model name
    def modelName(self):
        return 'three_layer_4960_512_1'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch
        model = Sequential()
        model.add(Dense(4960, input_shape=(dataTrainShape[1],), activation='relu'))
        model.add(Dense(512, activation='relu'))
        model.add(Dense(1, activation='sigmoid'))
        return model

# three layer model with 16_8_1 (small LeakyReLU layers, sigmoid output)
class ThreeLayerModelCreator_16_8_1(BaseModelCreator):
    # model name
    def modelName(self):
        return 'three_layer_16_8_1'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch
        model = Sequential()
        model.add(Dense(16, input_shape=(dataTrainShape[1],)))
        model.add(LeakyReLU(alpha=0.1))
        model.add(Dense(8))
        model.add(LeakyReLU(alpha=0.1))
        model.add(Dense(1, activation='sigmoid'))
        return model

# three layer model with input norm, dropout, 32_8_1
class ThreeLayerModelCreator_DropLayer_Norm_32_8_1(BaseModelCreator):
    # model name
    def modelName(self):
        return 'three_layer_drop_norm_32_8_1'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch: normalize the input, then two
        # dense blocks, then the sigmoid output
        model = Sequential()
        model.add(BatchNormalization(input_shape=(dataTrainShape[1],)))
        model.add(Dense(32))
        model.add(LeakyReLU(alpha=0.1))
        model.add(BatchNormalization())
        model.add(Dropout(0.1))
        model.add(Dense(8))
        model.add(BatchNormalization())
        model.add(Dropout(0.1))
        # NOTE(review): here the activation comes *after* dropout, unlike
        # the first block (activation before norm/dropout) — possibly
        # unintentional; left as-is since reordering changes the model.
        model.add(LeakyReLU(alpha=0.1))
        model.add(Dense(1, activation='sigmoid'))
        return model

# deeper funnel: 64-64-32-32-8-8 relu blocks with dropout + batch norm
class ThreeLayerModelCreator_DropLayer_Norm_64_32_8_1(BaseModelCreator):
    # model name
    def modelName(self):
        return 'three_layer_drop_norm_64_32_8_1'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch: the first block takes the input
        # shape; every later block is batch-normalized, relu + dropout
        model = Sequential()
        model.add(Dense(64, input_shape=(dataTrainShape[1],), activation='relu'))
        model.add(Dropout(0.15))
        for units in (64, 32, 32, 8, 8):
            model.add(BatchNormalization())
            model.add(Dense(units, activation='relu'))
            model.add(Dropout(0.15))

        model.add(BatchNormalization())
        model.add(Dense(1, activation='sigmoid'))
        return model

# configurable creator: layerCount dense relu blocks of nx units, with
# optional dropout, batch norm between blocks, and a sigmoid output
class DynamicLayerModelCreator(BaseModelCreator):

    # hyper-parameters captured at construction time (called by base __init__)
    def updateParamter(self, layerCount, nx, dropout):
        self.layerCount = layerCount
        self.nx = nx
        self.dropout = dropout

    # model name encodes the hyper-parameters so each configuration gets
    # its own set of save files
    def modelName(self):
        return str(self.layerCount) + '_layer_' + str(self.nx) + '_nx_' + str(self.dropout) + '_dropout'

    # model
    def getModel(self, dataTrainShape):
        """Load the saved model if both files exist, else build a fresh one.

        dataTrainShape: training-data shape; dataTrainShape[1] is the
        feature count fed to the input layer.
        """
        # Validate a positive feature dimension. The original assert
        # (`isinstance(x, tuple) or x.shape[1] <= 0`) was contradictory:
        # it skipped all validation for tuples and failed for any valid
        # non-tuple shape.
        assert len(dataTrainShape) >= 2 and dataTrainShape[1] > 0

        # reload a previously trained model when both the architecture and
        # the weights have been saved
        if os.path.exists(self.modelFilePath) and os.path.exists(self.weightsFilePath):
            model = util.loadModel(self.modelFilePath)
            model.load_weights(self.weightsFilePath)
            return model

        # otherwise build from scratch: first block carries the input
        # shape, later blocks are preceded by batch norm; dropout==0
        # means "no dropout layers"
        model = Sequential()
        for i in range(self.layerCount):
            if i == 0:
                model.add(Dense(self.nx, input_shape=(dataTrainShape[1],), activation='relu'))
            else:
                model.add(BatchNormalization())
                model.add(Dense(self.nx, activation='relu'))
            if self.dropout != 0:
                model.add(Dropout(self.dropout))

        model.add(BatchNormalization())
        model.add(Dense(1, activation='sigmoid'))

        return model