import tensorflow as tf
import sys
from tensorflow.keras import regularizers
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv3D, MaxPooling3D, Flatten, Reshape, LSTM, Dense, Dropout, BatchNormalization, Lambda, GlobalAveragePooling1D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.optimizers.schedules import ExponentialDecay

class NetBuilder():
    """Builds a compiled CRNN (3D-Conv + LSTM) Keras classifier from an args namespace.

    Attributes read from ``args``:
        time_steps, n_rois, n_channels, batch_size,
        n_conv_filters, kernel_sizes, stride_sizes (each indexable with >= 3 entries),
        lstm_uints, dropout_rate, n_classes,
        learning_rate (optional; defaults to 0.01, the previous hard-coded value).
    """

    def __init__(self, args):
        self.args = args
        # One sample is (time, roi, roi, channel); Keras adds the batch dim.
        self.input_shape = (args.time_steps, args.n_rois, args.n_rois, args.n_channels)
        # Generalized: honor args.learning_rate when present. The old behavior
        # hard-coded 0.01, which is preserved as the default for compatibility.
        self.optimizer = Adam(learning_rate=getattr(args, 'learning_rate', 0.01))

    def CRNN_LSTM(self) -> tf.Module:
        """Build and compile the 3D-Conv -> LSTM -> Dense softmax classifier.

        Returns:
            A compiled ``tf.keras.Model`` (a ``tf.Module`` subclass) producing a
            softmax distribution over ``args.n_classes`` classes, compiled with
            categorical cross-entropy (expects one-hot labels) and accuracy.
        """
        # NOTE(review): pinning batch_size here makes the graph batch-static, so
        # inference must use the same batch size — confirm this is intended.
        inputs = Input(shape=self.input_shape, batch_size=self.args.batch_size)

        # Three Conv3D+BatchNorm blocks; only the first two are followed by
        # 2x2x2 max pooling, so the time axis is downsampled by 4 overall.
        x = inputs
        for i in range(3):
            x = Conv3D(filters=self.args.n_conv_filters[i],
                       kernel_size=self.args.kernel_sizes[i],
                       strides=self.args.stride_sizes[i],
                       activation='relu', padding='same')(x)
            x = BatchNormalization()(x)
            if i < 2:
                x = MaxPooling3D(pool_size=(2, 2, 2))(x)

        # Collapse the spatial/channel dims per time step to (batch, time,
        # features) — the layout LSTM expects. tf.shape keeps this safe for
        # dynamically-shaped tensors.
        x = Lambda(lambda t: tf.reshape(t, (tf.shape(t)[0], tf.shape(t)[1], -1)))(x)

        # NOTE(review): 'lstm_uints' looks like a typo for 'lstm_units', but the
        # name must match the attribute on args — fix at the argparse side, not here.
        x = LSTM(self.args.lstm_uints, return_sequences=True)(x)

        # Flatten the whole output sequence before the dense head.
        x = Flatten()(x)

        # Dense head: L1L2 regularization on the widest layer, dropout between layers.
        x = Dense(32, activation='relu',
                  kernel_regularizer=regularizers.L1L2(l1=0.01, l2=0.01))(x)
        x = Dropout(self.args.dropout_rate)(x)
        x = Dense(16, activation='relu')(x)
        x = Dropout(self.args.dropout_rate)(x)
        x = Dense(4, activation='relu')(x)

        # Output layer: softmax over the class count.
        outputs = Dense(self.args.n_classes, activation='softmax')(x)
        model = Model(inputs=inputs, outputs=outputs)
        model.compile(optimizer=self.optimizer,
                      loss='categorical_crossentropy', metrics=['accuracy'])
        return model
def buildCrnn(args) -> tf.Module:
    """Construct a CRNN-LSTM model from ``args``, print its summary, and return it."""
    builder = NetBuilder(args)
    model = builder.CRNN_LSTM()
    model.summary()
    return model




    
        