import torch
import torch.nn as nn
class DoubleConv(nn.Module):
    """Two stacked 3x3 conv -> BatchNorm -> ReLU stages.

    Spatial size is preserved (padding=1); channels go in_ch -> out_ch
    in the first stage and stay at out_ch in the second.
    """

    def __init__(self, in_ch, out_ch):
        super().__init__()

        def stage(c_in, c_out):
            # One conv-bn-relu unit; 3x3 kernel with padding=1 keeps H and W.
            return [
                nn.Conv2d(c_in, c_out, 3, padding=1),
                nn.BatchNorm2d(c_out),
                nn.ReLU(inplace=True),
            ]

        # Attribute kept as `conv` so state_dict keys stay stable.
        self.conv = nn.Sequential(*stage(in_ch, out_ch), *stage(out_ch, out_ch))

    def forward(self, x):
        """Apply both conv stages to x."""
        return self.conv(x)


class SKFusion(nn.Module):
    """Selective-kernel style fusion of `height` same-shaped feature maps.

    Channel-attention weights are computed from the sum of the branches;
    a softmax over the branch axis decides how much of each branch to keep.
    """

    def __init__(self, dim, height=2, reduction=8):
        """
        Args:
            dim: channels of each input branch.
            height: number of branches to fuse.
            reduction: channel-reduction ratio of the attention MLP
                (bottleneck width is clamped to at least 4).
        """
        super(SKFusion, self).__init__()

        self.height = height
        d = max(int(dim / reduction), 4)

        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.mlp = nn.Sequential(
            nn.Conv2d(dim, d, 1, bias=False),
            nn.ReLU(),
            nn.Conv2d(d, dim * height, 1, bias=False)
        )

        self.softmax = nn.Softmax(dim=1)

    def forward(self, in_feats):
        """Fuse a list of `height` tensors, each shaped (B, dim, H, W).

        Returns:
            The attention-weighted sum of the branches, shape (B, dim, H, W).
        """
        B, C, H, W = in_feats[0].shape

        # Stack the branches along a new "height" axis: (B, height, C, H, W).
        in_feats = torch.cat(in_feats, dim=1)
        in_feats = in_feats.view(B, self.height, C, H, W)

        # Global context of the summed branches drives the attention MLP.
        feats_sum = torch.sum(in_feats, dim=1)
        attn = self.mlp(self.avg_pool(feats_sum))
        # Softmax over the branch axis: per-channel weights sum to 1.
        attn = self.softmax(attn.view(B, self.height, C, 1, 1))

        out = torch.sum(in_feats * attn, dim=1)
        return out


class Decoder(nn.Module):
    """U-Net style decoder: five 2x transposed-conv upsampling steps with
    encoder skip connections.

    The deepest skip is fused with SKFusion; the remaining skips are plain
    channel concatenations, each followed by a DoubleConv.
    """

    def __init__(self, embed_dims=(120,)):
        """
        Args:
            embed_dims: per-stage embedding widths; only embed_dims[0] is
                used (channel count fed to SKFusion). Tuple default avoids
                the mutable-default-argument pitfall; indexing is unchanged
                for existing callers.
        """
        super(Decoder, self).__init__()
        # Each deconv doubles the spatial resolution (kernel 2, stride 2).
        self.deconv1 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv2 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv3 = nn.ConvTranspose2d(in_channels=240, out_channels=120, kernel_size=2, stride=2)
        self.deconv4 = nn.ConvTranspose2d(in_channels=120, out_channels=60, kernel_size=2, stride=2)
        self.deconv5 = nn.ConvTranspose2d(in_channels=64, out_channels=32, kernel_size=2, stride=2)

        # Two convolutions after each upsample noticeably improve results.
        self.conv1 = DoubleConv(360, 240)
        self.conv2 = DoubleConv(360, 240)
        self.conv3 = DoubleConv(184, 120)
        self.conv4 = DoubleConv(124, 64)
        self.conv5 = DoubleConv(32, 1)
        self.conv6 = nn.Conv2d(1, 1, 1)  # final 1x1 conv — TODO confirm it is intended

        self.fusion1 = SKFusion(embed_dims[0])

    def forward(self, feats):
        """Decode a list of 5 encoder feature maps ordered shallow -> deep.

        feats[4] is the deepest map, feats[0] the shallowest. Returns a
        single-channel sigmoid probability map at twice the resolution of
        feats[0].

        NOTE(review): conv1 expects 360 input channels, but SKFusion returns
        the per-branch channel count (120 per embed_dims[0]) — the channel
        plan looks inconsistent; confirm against the encoder's outputs.
        """
        up1 = self.deconv1(feats[4])                # e.g. 12 -> 24
        out1 = self.fusion1([up1, feats[3]])        # attention fusion of skip
        out1 = self.conv1(out1)

        up2 = self.deconv2(out1)                    # 24 -> 48
        out2 = torch.cat((up2, feats[2]), dim=1)
        out2 = self.conv2(out2)

        up3 = self.deconv3(out2)                    # 48 -> 96
        out3 = torch.cat((up3, feats[1]), dim=1)
        out3 = self.conv3(out3)

        up4 = self.deconv4(out3)                    # 96 -> 192
        out4 = torch.cat((up4, feats[0]), dim=1)
        out4 = self.conv4(out4)

        out = self.deconv5(out4)                    # 192 -> 384
        out = self.conv5(out)
        out = self.conv6(out)
        # torch.sigmoid avoids allocating a fresh nn.Sigmoid module per call.
        out = torch.sigmoid(out)

        return out


