import torch.nn as nn
import torch
import torch.nn as nn

class LSTMDenseNet(nn.Module):
    """LSTM encoder followed by a configurable dense (MLP) head.

    The input sequence is encoded by a single-layer LSTM; the hidden state of
    the last timestep is passed through a stack of Linear/Tanh layers
    interleaved with Dropout ('D') and BatchNorm1d ('B'), ending in a
    sigmoid-activated output layer.

    Args:
        input_dim: number of features per timestep of the input sequence.
        output_dim: number of output units.

    Input shape (forward): (batch, seq_len, input_dim).
    Output shape: (batch, output_dim), values in (0, 1).
    """

    def __init__(self, input_dim, output_dim):
        super(LSTMDenseNet, self).__init__()

        # define hyperparameters
        hidden_size = 16
        # ints = Linear layer widths; 'D' = Dropout(0.01); 'B' = BatchNorm1d
        layer_list = [256, 'D', 128, 'B', 128, 'D', 64, 'B', 32, 'D', 16, 'B']

        # NOTE: the LSTM must be kept OUT of nn.Sequential — it returns a
        # tuple (output, (h_n, c_n)), which Sequential would pass verbatim to
        # the next layer and crash. We run it separately in forward().
        self.lstm = nn.LSTM(input_dim, hidden_size, batch_first=True)
        self.input_layer = nn.Sequential(
            nn.Linear(hidden_size, hidden_size),
            nn.Tanh(),
        )

        # build the dense head; track the running feature width so each
        # Linear/BatchNorm1d gets the correct size
        self.layers = nn.ModuleList()
        in_features = hidden_size
        for layer in layer_list:
            if layer == 'D':
                self.layers.append(nn.Dropout(0.01))
            elif layer == 'B':
                self.layers.append(nn.BatchNorm1d(in_features))
            else:
                self.layers.append(nn.Linear(in_features, layer))
                self.layers.append(nn.Tanh())
                in_features = layer

        self.output_layer = nn.Linear(in_features, output_dim)
        self.output_activation = nn.Sigmoid()

    def forward(self, x):
        # x: (batch, seq_len, input_dim)
        seq_out, _ = self.lstm(x)          # (batch, seq_len, hidden_size)
        x = seq_out[:, -1, :]              # last timestep: (batch, hidden_size)
        x = self.input_layer(x)

        for layer in self.layers:
            x = layer(x)

        x = self.output_layer(x)
        x = self.output_activation(x)

        return x
if __name__ == "__main__":
    # define input and output dimensions
    input_dim = 10
    output_dim = 1
    seq_len = 5
    batch_size = 32

    # instantiate the model
    model = LSTMDenseNet(input_dim, output_dim)

    # print the model summary
    #summary(model, input_size=(input_dim,))

    # generate a random input batch of sequences.
    # The LSTM's input_size is input_dim, so the feature (last) dimension
    # must be input_dim — the old shape (32, input_dim, 1) had feature dim 1
    # and could never match the model.
    x = torch.randn(batch_size, seq_len, input_dim)

    # forward pass
    output = model(x)

    # print the shape of the output tensor — expected: torch.Size([32, 1])
    print(output.shape)