""" Full assembly of the parts to form the complete network """
import sys,os
curpath=os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(curpath))
from unet_parts import *
from unet_parts import _upsample_like


class UNet(nn.Module):
    """Classic U-Net: a 4-level encoder/decoder with skip connections.

    Args:
        n_channels: number of channels in the input image.
        n_classes: number of output channels (one logit map per class).
        bilinear: if True, decoder upsampling is bilinear and the deepest
            channel counts are halved; otherwise transposed convolutions
            are used at full width.
    """

    def __init__(self, n_channels, n_classes, bilinear=True):
        super().__init__()
        self.n_channels = n_channels
        self.n_classes = n_classes
        self.bilinear = bilinear

        # With bilinear upsampling the Up blocks cannot shrink channels via a
        # transposed conv, so the bottleneck widths are halved instead.
        factor = 2 if bilinear else 1

        self.inc = DoubleConv(n_channels, 64)
        self.down1 = Down(64, 128)
        self.down2 = Down(128, 256)
        self.down3 = Down(256, 512)
        self.down4 = Down(512, 1024 // factor)
        self.up1 = Up(1024, 512 // factor, bilinear)
        self.up2 = Up(512, 256 // factor, bilinear)
        self.up3 = Up(256, 128 // factor, bilinear)
        self.up4 = Up(128, 64, bilinear)
        self.outc = OutConv(64, n_classes)

    def forward(self, x):
        """Encode, then decode with skip connections; return per-pixel logits."""
        # Encoder: keep every intermediate scale for the skip connections.
        skips = [self.inc(x)]
        for down in (self.down1, self.down2, self.down3, self.down4):
            skips.append(down(skips[-1]))

        # Decoder: fuse each upsampled feature with the matching encoder skip,
        # from the deepest scale back up to full resolution.
        out = skips[-1]
        for up, skip in zip((self.up1, self.up2, self.up3, self.up4),
                            reversed(skips[:-1])):
            out = up(out, skip)

        return self.outc(out)


# ---------------------------------------------------------------------------
# U2NET
# ---------------------------------------------------------------------------
class U2NET(nn.Module):
    """U^2-Net: a two-level nested U-structure built from RSU blocks.

    Every encoder/decoder stage is itself a small U-Net (an RSU block).
    Six side heads produce a prediction at each decoder depth; all six are
    upsampled to the input resolution and fused by a 1x1 convolution into
    the final output map.

    Args:
        n_channels: number of channels in the input image.
        n_classes: number of channels in each side/fused output map.
        bilinear: stored for API symmetry with ``UNet``; the RSU blocks and
            ``_upsample_like`` decide the actual upsampling mode.
    """

    def __init__(self, n_channels=3, n_classes=3, bilinear=True):
        super().__init__()
        self.n_channels = n_channels
        self.n_classes = n_classes
        self.bilinear = bilinear

        # --- encoder: RSU depth shrinks as the spatial resolution shrinks ---
        self.stage1 = RSU7(n_channels, 32, 64)
        self.pool12 = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        self.stage2 = RSU6(64, 32, 128)
        self.pool23 = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        self.stage3 = RSU5(128, 64, 256)
        self.pool34 = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        self.stage4 = RSU4(256, 128, 512)
        self.pool45 = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        self.stage5 = RSU4F(512, 256, 512)
        self.pool56 = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        self.stage6 = RSU4F(512, 256, 512)

        # --- decoder: each stage eats cat([upsampled deeper, encoder skip]) ---
        self.stage5d = RSU4F(1024, 256, 512)
        self.stage4d = RSU4(1024, 128, 256)
        self.stage3d = RSU5(512, 64, 128)
        self.stage2d = RSU6(256, 32, 64)
        self.stage1d = RSU7(128, 16, 64)

        # --- side heads: in-channels match the matching decoder stage output ---
        self.side1 = nn.Conv2d(64, n_classes, 3, padding=1)
        self.side2 = nn.Conv2d(64, n_classes, 3, padding=1)
        self.side3 = nn.Conv2d(128, n_classes, 3, padding=1)
        self.side4 = nn.Conv2d(256, n_classes, 3, padding=1)
        self.side5 = nn.Conv2d(512, n_classes, 3, padding=1)
        self.side6 = nn.Conv2d(512, n_classes, 3, padding=1)

        # 1x1 conv fusing the six concatenated side maps into one prediction.
        self.outconv = nn.Conv2d(6 * n_classes, n_classes, 1)

    def forward(self, x):
        # -------------------- encoder --------------------
        enc1 = self.stage1(x)
        enc2 = self.stage2(self.pool12(enc1))
        enc3 = self.stage3(self.pool23(enc2))
        enc4 = self.stage4(self.pool34(enc3))
        enc5 = self.stage5(self.pool45(enc4))
        enc6 = self.stage6(self.pool56(enc5))

        # -------------------- decoder --------------------
        # At each depth: upsample the deeper feature to the skip's size,
        # concatenate along channels, and refine with the decoder RSU block.
        dec5 = self.stage5d(torch.cat([_upsample_like(enc6, enc5), enc5], dim=1))
        dec4 = self.stage4d(torch.cat([_upsample_like(dec5, enc4), enc4], dim=1))
        dec3 = self.stage3d(torch.cat([_upsample_like(dec4, enc3), enc3], dim=1))
        dec2 = self.stage2d(torch.cat([_upsample_like(dec3, enc2), enc2], dim=1))
        dec1 = self.stage1d(torch.cat([_upsample_like(dec2, enc1), enc1], dim=1))

        # ------------------ side outputs ------------------
        # d1 is already at input resolution; deeper maps are scaled up to it.
        d1 = self.side1(dec1)
        d2 = _upsample_like(self.side2(dec2), d1, scale=2)
        d3 = _upsample_like(self.side3(dec3), d1, scale=4)
        d4 = _upsample_like(self.side4(dec4), d1, scale=8)
        d5 = _upsample_like(self.side5(dec5), d1, scale=16)
        d6 = _upsample_like(self.side6(enc6), d1, scale=32)

        # Fuse all six side maps into the final full-resolution prediction.
        # NOTE: the reference U^2-Net applies sigmoid here and also returns
        # the individual side maps for deep supervision; this variant returns
        # raw fused logits only (the sigmoids were deliberately disabled).
        return self.outconv(torch.cat([d1, d2, d3, d4, d5, d6], dim=1))


if __name__ == '__main__':
    # Smoke test: build the network, inspect its parameters, and run a
    # dummy batch through it to confirm the output shape.
    net = U2NET(n_channels=3, n_classes=3)

    # Before any backward pass every parameter's .grad is None, so print the
    # parameter name alongside it — a bare stream of `None` lines (the
    # original behavior, which discarded `tag`) identifies nothing.
    for tag, value in net.named_parameters():
        print(tag, value.grad)

    x = torch.randn(2, 3, 512, 512)
    y = net(x)
    print(y.shape)  # expected: torch.Size([2, 3, 512, 512])


