import torch
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
torch.set_default_dtype(torch.float32)

class ConvBlock(BaseModel):
    def __init__(self, in_channels, out_channels, bn_check=False, drop_rate=0.5, n_block=2, kernel_size=3, stride=1, padding=1):
        '''
            A stack of n_block repetitions of: Conv2d -> Dropout -> BatchNorm2d (only if bn_check) -> ReLU.
            The first convolution maps in_channels -> out_channels; subsequent ones keep out_channels.
        '''
        super().__init__()

        # Collect the layers first, then wrap them in a Sequential.
        # nn.Sequential(*layers) names children "0", "1", ... — the same
        # consecutive-integer names the original add_module scheme produced,
        # so state_dict keys are unchanged.
        layers = []
        channels = in_channels
        for _ in range(n_block):
            layers.append(nn.Conv2d(channels, out_channels, kernel_size, stride, padding))
            layers.append(nn.Dropout(drop_rate))
            if bn_check:
                layers.append(nn.BatchNorm2d(out_channels))
            layers.append(nn.ReLU(inplace=True))
            # After the first repetition the channel count stays constant.
            channels = out_channels
        self.module = nn.Sequential(*layers)

    def forward(self, x):
        # Apply the whole Conv/Dropout/[BN]/ReLU stack.
        return self.module(x)

class ConvBlock3D(BaseModel):
    def __init__(self, in_channels, out_channels, bn_check=False, drop_rate=0.5, n_block=2, kernel_size=3, stride=1, padding=1):
        '''
            3D variant: n_block repetitions of Conv3d -> Dropout3d -> BatchNorm3d (only if bn_check) -> ReLU.
            The first convolution maps in_channels -> out_channels; subsequent ones keep out_channels.
        '''
        super().__init__()

        # Build a flat layer list and hand it to Sequential.
        # nn.Sequential(*layers) auto-names children "0", "1", ... — identical
        # to the integer names the original add_module indexing generated,
        # so checkpoints remain compatible.
        layers = []
        channels = in_channels
        for _ in range(n_block):
            layers.append(nn.Conv3d(channels, out_channels, kernel_size, stride, padding))
            layers.append(nn.Dropout3d(drop_rate))
            if bn_check:
                layers.append(nn.BatchNorm3d(out_channels))
            layers.append(nn.ReLU(inplace=True))
            # Only the first repetition changes the channel count.
            channels = out_channels
        self.module = nn.Sequential(*layers)

    def forward(self, x):
        # Apply the whole Conv3d/Dropout3d/[BN]/ReLU stack.
        return self.module(x)
