

import torch
import torch.nn as nn 


class Conv(nn.Module):
    """3x3 convolution -> BatchNorm -> ReLU, preserving spatial size."""

    def __init__(self, in_channels, out_channels) -> None:
        super().__init__()
        # NOTE(review): the conv bias is redundant directly before BatchNorm
        # (BN's shift absorbs it); kept as-is to stay checkpoint-compatible.
        self.conv = nn.Sequential(
            nn.Conv2d(
                in_channels,
                out_channels,
                kernel_size=(3, 3),
                stride=(1, 1),
                padding=(1, 1),
                dilation=1,
                groups=1,
                bias=True,
            ),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        )

    def forward(self, x):
        """Apply conv-bn-relu to a (N, C, H, W) tensor; returns (N, out_channels, H, W)."""
        return self.conv(x)

class TwoConv(nn.Module):
    """Two stacked Conv blocks (conv-bn-relu applied twice)."""

    def __init__(self, in_channels, out_channels) -> None:
        super().__init__()
        self.conv1 = Conv(in_channels, out_channels)
        self.conv2 = Conv(out_channels, out_channels)

    def forward(self, x):
        """Run both conv blocks in sequence; spatial size is unchanged."""
        out = self.conv1(x)
        out = self.conv2(out)
        return out

 
class Upsample(nn.Module):
    """Bilinear 2x upsample, skip concatenation, then a TwoConv block.

    Args:
        in_channels: channel count AFTER concatenation (upsampled + skip).
        out_channels: channel count produced by the TwoConv block.
    """

    def __init__(self, in_channels, out_channels) -> None:
        super().__init__()
        # align_corners=False is numerically identical to the unset default
        # for bilinear mode, but silences PyTorch's UserWarning.
        self.ups = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False)
        self.conv_up = TwoConv(in_channels, out_channels)

    def forward(self, x, x_concat):
        """Upsample ``x`` 2x, concatenate ``x_concat`` on dim 1, and fuse."""
        x = self.ups(x)
        # Robustness: odd input sizes can leave the upsampled map one pixel
        # off the skip tensor, which would make torch.cat raise. Resize to
        # the skip's spatial dims; no-op when the sizes already match.
        if x.shape[-2:] != x_concat.shape[-2:]:
            x = nn.functional.interpolate(
                x, size=x_concat.shape[-2:], mode="bilinear", align_corners=False
            )
        x = torch.cat([x, x_concat], dim=1)
        return self.conv_up(x)
    

class BaseDecoder(nn.Module):
    """U-Net-style decoder: fuses five encoder skip tensors into a single
    prediction map at 2x the resolution of the finest skip.

    Args:
        out_channels: channels of the final prediction map (default 1).
    """

    def __init__(self, out_channels=1) -> None:
        super().__init__()
        self.mid1 = TwoConv(256, 128)

        # up_1 does no spatial upsampling: the deepest two skips share a
        # resolution, so a plain TwoConv fuses them after concatenation.
        self.up_1 = TwoConv(128 + 128, 64)
        self.up_2 = Upsample(64 + 64, 32)
        self.up_3 = Upsample(32 + 32, 32)
        self.up_4 = Upsample(32 + 32, 32)
        # align_corners=False is numerically identical to the unset default
        # for bilinear mode, but silences PyTorch's UserWarning.
        self.up_5 = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False)
        # 1x1 projection to the output channel count; bias-free.
        self.out_layer = nn.Conv2d(32, out_channels, 1, 1, 0, bias=False)

    def forward(self, skips):
        """Decode a deepest-first sequence of 5 skip tensors.

        Channel counts implied by the layer definitions: x4=256, x3=128,
        x2=64, x1=32, x0=32. Each skip must be twice the spatial size of
        the previous one, except x4/x3 which share a resolution.
        """
        # Explicit validation instead of `assert` (asserts vanish under -O).
        if len(skips) != 5:
            raise ValueError(f"expected 5 skip tensors, got {len(skips)}")
        x4, x3, x2, x1, x0 = skips

        x = self.mid1(x4)
        x = torch.cat([x, x3], dim=1)
        x = self.up_1(x)

        x = self.up_2(x, x2)
        x = self.up_3(x, x1)
        x = self.up_4(x, x0)
        x = self.up_5(x)

        return self.out_layer(x)
