import os
import torch
from torch import nn

class NeuralNetwork(nn.Module):
    """LeNet-style CNN classifier producing 10 logits per sample.

    Architecture: two Conv2d/LeakyReLU/MaxPool stages, then a 3-layer MLP
    head with BatchNorm and Dropout.

    Input: a float tensor of shape (batch, 1, 28, 28) — the conv stack
    reduces 28x28 to a 16x4x4 feature map (24 -> 12 -> 8 -> 4), which is
    what the first Linear layer's in_features assumes.
    NOTE(review): the 28x28 assumption is inferred from the 16*4*4 flatten
    size; confirm against the data pipeline.

    Output: raw (unnormalized) logits of shape (batch, 10).
    """

    def __init__(self):
        super().__init__()
        # MLP head. Attribute definition order matches the original module
        # so existing state_dicts still load.
        self.linear_relu_stack = nn.Sequential(
            # 16*4*4 = flattened conv output (16 channels, 4x4 spatial).
            # Was written as 16*16 — same value (256), but misleading.
            nn.Linear(16 * 4 * 4, 128),
            nn.BatchNorm1d(128),
            nn.LeakyReLU(negative_slope=0.01),
            nn.Dropout(0.5),
            nn.Linear(128, 32),
            nn.BatchNorm1d(32),
            nn.LeakyReLU(negative_slope=0.01),
            nn.Dropout(0.5),
            nn.Linear(32, 10),
        )
        # Conv feature extractor: (1,28,28) -> (6,24,24) -> (6,12,12)
        # -> (16,8,8) -> (16,4,4).
        self.conv_relu_stack = nn.Sequential(
            nn.Conv2d(1, 6, 5),
            nn.LeakyReLU(negative_slope=0.01),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(6, 16, 5),
            nn.LeakyReLU(negative_slope=0.01),
            nn.MaxPool2d(2, 2),
        )

    def forward(self, x):
        """Run the network; returns logits of shape (batch, 10)."""
        features = self.conv_relu_stack(x)
        # flatten(1) keeps the batch axis intact; unlike reshape(-1, 256)
        # it cannot silently merge samples if the input size is wrong.
        features = features.flatten(start_dim=1)
        logits = self.linear_relu_stack(features)
        return logits