import torch
import torch.nn as nn


class DoubleConv(nn.Module):
    """Two stacked (3x3 conv -> BatchNorm -> ReLU) stages.

    Spatial size is preserved (padding=1 with a 3x3 kernel); only the
    channel count changes, from ``in_ch`` to ``out_ch`` in the first stage.
    """

    def __init__(self, in_ch, out_ch):
        super(DoubleConv, self).__init__()
        stages = [
            nn.Conv2d(in_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
        ]
        self.conv = nn.Sequential(*stages)

    def forward(self, x):
        """Map a (N, in_ch, H, W) tensor to (N, out_ch, H, W)."""
        return self.conv(x)


class Decoder_1(nn.Module):
    """U-Net-style decoder.

    Repeatedly upsamples the deepest encoder feature map with a transposed
    convolution, concatenates the skip connection from the matching encoder
    level along the channel axis, and fuses the result with a DoubleConv
    block.  A final transposed conv + DoubleConv + 1x1 conv + sigmoid
    produces a single-channel probability map.

    Expected input to :meth:`forward` is a list/tuple of 5 encoder feature
    maps ordered shallow -> deep.  From the concat arithmetic below the
    channel counts must be [64, 64, 240, 240, 240], each level having half
    the spatial resolution of the previous one.
    """

    def __init__(self):
        super(Decoder_1, self).__init__()
        # Each transposed conv doubles the spatial resolution (kernel 2, stride 2).
        self.deconv1 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv2 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv3 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv4 = nn.ConvTranspose2d(in_channels=120, out_channels=60, kernel_size=2, stride=2)
        self.deconv5 = nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=2, stride=2)

        # Following each upsampling step with two convolutions markedly
        # improves results.  in_channels = upsampled channels + skip channels.
        self.conv1 = DoubleConv(360, 240)  # 120 (up) + 240 (skip)
        self.conv2 = DoubleConv(360, 240)  # 120 (up) + 240 (skip)
        self.conv3 = DoubleConv(184, 120)  # 120 (up) + 64 (skip)
        self.conv4 = DoubleConv(124, 64)   # 60 (up) + 64 (skip)
        self.conv5 = DoubleConv(32, 1)
        self.conv6 = nn.Conv2d(1, 1, 1)    # final 1x1 conv refinement

    def forward(self, feats_nerve):
        """Decode encoder features into a single-channel sigmoid map.

        Args:
            feats_nerve: list of 5 feature maps, shallow -> deep
                (feats_nerve[4] is the deepest / smallest).

        Returns:
            Tensor of shape (N, 1, H, W) with values in (0, 1), where H, W
            are 2x the spatial size of feats_nerve[0].
        """
        # Level 1: upsample deepest map x2, fuse with feats_nerve[3].
        up1 = self.deconv1(feats_nerve[4])
        out1 = self.conv1(torch.cat((up1, feats_nerve[3]), dim=1))  # (N, 360, .) -> (N, 240, .)

        # Level 2: fuse with feats_nerve[2].
        up2 = self.deconv2(out1)
        out2 = self.conv2(torch.cat((up2, feats_nerve[2]), dim=1))  # (N, 360, .) -> (N, 240, .)

        # Level 3: fuse with feats_nerve[1].
        up3 = self.deconv3(out2)
        out3 = self.conv3(torch.cat((up3, feats_nerve[1]), dim=1))  # (N, 184, .) -> (N, 120, .)

        # Level 4: fuse with feats_nerve[0].
        up4 = self.deconv4(out3)
        out4 = self.conv4(torch.cat((up4, feats_nerve[0]), dim=1))  # (N, 124, .) -> (N, 64, .)

        # Head: final x2 upsample (64 -> 32 channels), reduce to 1 channel,
        # 1x1 conv, then squash to probabilities.
        out = self.deconv5(out4)
        out = self.conv5(out)
        out = self.conv6(out)
        # Functional sigmoid instead of constructing nn.Sigmoid() every call.
        return torch.sigmoid(out)
