import torch
import torch.nn as nn


class DoubleConv(nn.Module):
    """Two consecutive 3x3 conv -> batch-norm -> ReLU stages.

    Spatial size is preserved (padding=1); channels go in_ch -> out_ch
    in the first stage and stay at out_ch in the second.
    """

    def __init__(self, in_ch, out_ch):
        super(DoubleConv, self).__init__()
        layers = []
        # First stage maps in_ch -> out_ch, second stage out_ch -> out_ch.
        for c_in, c_out in ((in_ch, out_ch), (out_ch, out_ch)):
            layers.append(nn.Conv2d(c_in, c_out, 3, padding=1))
            layers.append(nn.BatchNorm2d(c_out))
            layers.append(nn.ReLU(inplace=True))
        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        """Apply both conv-BN-ReLU stages to x."""
        return self.conv(x)


class Decoder(nn.Module):
    """U-Net-style decoder with two independent sigmoid output heads.

    ``forward`` expects ``feats``, a list of 5 encoder feature maps
    ordered shallow -> deep, with channel counts [64, 64, 128, 256, 512]
    and spatial size halving at each level (e.g. 192/96/48/24/12 for a
    384x384 input) — inferred from the channel arithmetic below; confirm
    against the encoder that produces ``feats``.

    Both heads share the decoder trunk up to the final x2 upsample, then
    each applies its own DoubleConv refinement, a 1x1 projection to one
    channel, and a sigmoid (binary/probability output per head).
    """

    def __init__(self):
        super(Decoder, self).__init__()
        # x2 upsampling transposed convolutions, deepest level first.
        self.deconv1 = nn.ConvTranspose2d(in_channels=512, out_channels=256, kernel_size=2, stride=2)
        self.deconv2 = nn.ConvTranspose2d(in_channels=256, out_channels=128, kernel_size=2, stride=2)
        self.deconv3 = nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=2, stride=2)
        self.deconv4 = nn.ConvTranspose2d(in_channels=64, out_channels=64, kernel_size=2, stride=2)
        self.deconv5 = nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=2, stride=2)

        # Fusion convs after each skip concat; in_ch = upsampled + skip channels.
        self.conv1 = DoubleConv(512, 256)  # 256 (up) + 256 (feats[3])
        self.conv2 = DoubleConv(256, 128)  # 128 (up) + 128 (feats[2])
        self.conv3 = DoubleConv(128, 64)   # 64 (up) + 64 (feats[1])
        self.conv4 = DoubleConv(128, 64)   # 64 (up) + 64 (feats[0])
        # Per-head refinement of the shared full-resolution feature map.
        self.conv5_1 = DoubleConv(32, 32)
        self.conv5_2 = DoubleConv(32, 32)
        # 1x1 convs projecting each head to a single-channel logit map.
        self.conv61 = nn.Conv2d(32, 1, 1)
        self.conv62 = nn.Conv2d(32, 1, 1)
        self.sigmoid1 = nn.Sigmoid()
        self.sigmoid2 = nn.Sigmoid()

    def forward(self, feats):
        """Decode encoder features into two probability maps.

        Args:
            feats: list of 5 feature maps, shallow -> deep (see class doc).

        Returns:
            Tuple ``(out1, out2)`` of tensors shaped (N, 1, H, W), where
            H, W are double the spatial size of ``feats[0]``; values are
            in (0, 1) via sigmoid.
        """
        # Stage 1: upsample deepest features, fuse with feats[3] skip.
        up = self.deconv1(feats[4])                        # e.g. 12 -> 24
        x = self.conv1(torch.cat((up, feats[3]), dim=1))   # -> 256 ch

        up = self.deconv2(x)                               # 24 -> 48
        x = self.conv2(torch.cat((up, feats[2]), dim=1))   # -> 128 ch

        up = self.deconv3(x)                               # 48 -> 96
        x = self.conv3(torch.cat((up, feats[1]), dim=1))   # -> 64 ch

        up = self.deconv4(x)                               # 96 -> 192
        x = self.conv4(torch.cat((up, feats[0]), dim=1))   # -> 64 ch

        # Final x2 upsample shared by both heads: (N, 32, H, W), e.g. 192 -> 384.
        shared = self.deconv5(x)

        # Two independent heads: refine, project to 1 channel, sigmoid.
        out1 = self.sigmoid1(self.conv61(self.conv5_1(shared)))
        out2 = self.sigmoid2(self.conv62(self.conv5_2(shared)))

        return out1, out2
