import torch
import torch.nn as nn
import torch.nn.functional as F


class SDNet(nn.Module):
    """Two-branch fusion network for a pair of single-channel images.

    Each input passes through its own densely-connected feature branch
    (stem -> refine -> two dense stages).  The 8 resulting 16-channel maps
    are concatenated and fused into a single map ``x_fuse`` squashed to
    [-1, 1] by ``Hardtanh``; ``x_fuse`` is then expanded and decoded back
    into one reconstruction per input.  A parallel shallow "edge" path
    (the ``*e`` modules) produces an auxiliary single-channel map that the
    forward pass clamps to [0, 1].

    forward(x1, x2) -> (x_fuse, x1_de, x2_de, edge_map)
        x1, x2: (N, 1, H, W) tensors.  All four outputs are (N, 1, H, W);
        every conv here is stride 1 with "same" padding, so spatial size
        is preserved.
    """

    def __init__(self):
        super().__init__()
        # --- Stems: a 5x5 feature stem and a 1x1 "edge" stem per branch.
        self.conv11 = nn.Sequential(nn.Conv2d(1, 16, 5, 1, 2), nn.LeakyReLU())
        self.conv11e = nn.Sequential(nn.Conv2d(1, 16, 1, 1, 0), nn.ReLU())
        self.conv12 = nn.Sequential(nn.Conv2d(1, 16, 5, 1, 2), nn.LeakyReLU())
        self.conv12e = nn.Sequential(nn.Conv2d(1, 16, 1, 1, 0), nn.ReLU())

        # --- Stage 2: 3x3 refinement for both branches and both edge paths.
        self.conv21 = nn.Sequential(nn.Conv2d(16, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv21e = nn.Sequential(nn.Conv2d(16, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv22 = nn.Sequential(nn.Conv2d(16, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv22e = nn.Sequential(nn.Conv2d(16, 16, 3, 1, 1), nn.LeakyReLU())

        # --- Dense stages: stage 3 sees 32 = 2x16 ch, stage 4 sees 48 = 3x16 ch.
        self.conv31 = nn.Sequential(nn.Conv2d(32, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv32 = nn.Sequential(nn.Conv2d(32, 16, 3, 1, 1), nn.LeakyReLU())

        self.conv41 = nn.Sequential(nn.Conv2d(48, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv42 = nn.Sequential(nn.Conv2d(48, 16, 3, 1, 1), nn.LeakyReLU())

        # --- Fusion heads: 128 = 8 feature maps x 16 ch; 96 = 6 edge maps x 16 ch.
        self.fuse = nn.Sequential(nn.Conv2d(128, 1, 1, 1, 0), nn.Hardtanh())
        self.fuse1 = nn.Sequential(nn.Conv2d(96, 1, 1, 1, 0))

        # --- Decomposition: expand the fused 1-channel map back to feature space.
        self.decom = nn.Sequential(nn.Conv2d(1, 128, 1, 1, 0), nn.LeakyReLU())
        # NOTE(review): decom1 and the conv5e/conv6e/conv7e decoder below are
        # unused in forward() (the edge-decode line is commented out there),
        # but they are kept so existing checkpoints/state_dicts stay loadable.
        self.decom1 = nn.Sequential(nn.Conv2d(1, 96, 1, 1, 0), nn.LeakyReLU())

        self.conv51 = nn.Sequential(nn.Conv2d(128, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv52 = nn.Sequential(nn.Conv2d(128, 16, 3, 1, 1), nn.LeakyReLU())
        self.conv5e = nn.Sequential(nn.Conv2d(96, 16, 3, 1, 1), nn.LeakyReLU())

        self.conv61 = nn.Sequential(nn.Conv2d(16, 4, 3, 1, 1), nn.LeakyReLU())
        self.conv62 = nn.Sequential(nn.Conv2d(16, 4, 3, 1, 1), nn.LeakyReLU())
        self.conv6e = nn.Sequential(nn.Conv2d(16, 4, 3, 1, 1), nn.LeakyReLU())

        self.conv71 = nn.Sequential(nn.Conv2d(4, 1, 3, 1, 1), nn.Tanh())
        self.conv72 = nn.Sequential(nn.Conv2d(4, 1, 3, 1, 1), nn.Tanh())
        self.conv7e = nn.Sequential(nn.Conv2d(4, 1, 3, 1, 1))

    def forward(self, x1, x2):
        """Fuse two (N, 1, H, W) inputs; see the class docstring for outputs."""
        # Branch 1: stem, refine, then two densely-connected stages.
        x11 = self.conv11(x1)
        x11e = self.conv11e(x1)
        x12 = self.conv21(x11)
        x12e = self.conv21e(x11e)
        x13 = self.conv31(torch.cat([x11, x12], dim=1))
        x14 = self.conv41(torch.cat([x11, x12, x13], dim=1))

        # Branch 2: same topology, independent weights.
        x21 = self.conv12(x2)
        x21e = self.conv12e(x2)
        x22 = self.conv22(x21)
        x22e = self.conv22e(x21e)
        x23 = self.conv32(torch.cat([x21, x22], dim=1))
        x24 = self.conv42(torch.cat([x21, x22, x23], dim=1))

        # Fuse all 8 feature maps (Hardtanh bounds x_fuse to [-1, 1]).
        x_fuse = self.fuse(torch.cat([x11, x12, x13, x14, x21, x22, x23, x24], dim=1))
        # Edge fusion: the 4 edge maps plus two max-gated cross products.
        x_fuse1 = self.fuse1(torch.cat(
            [x11e, x12e, x21e, x22e,
             torch.max(x11e, x21e) * x11e * x21e,
             torch.max(x12e, x22e) * x12e * x22e], dim=1))

        # Decompose the fused map back into one reconstruction per input.
        # Computed once and shared — the original ran self.decom(x_fuse)
        # twice for an identical result.
        decomposed = self.decom(x_fuse)
        x1_de = self.conv71(self.conv61(self.conv51(decomposed)))
        x2_de = self.conv72(self.conv62(self.conv52(decomposed)))

        # edge = self.conv7e(self.conv6e(self.conv5e(self.decom1(x_fuse1))))

        return x_fuse, x1_de, x2_de, torch.clamp(x_fuse1, min=0, max=1)

if __name__ == '__main__':
    # Smoke test: both inputs must be single-channel (dim=1) tensors in [0, 1].
    left = torch.rand(1, 1, 256, 256)
    right = torch.rand(1, 1, 256, 256)

    model = SDNet()
    # The fused/decomposed outputs lie in [-1, 1] and need normalisation downstream.
    x_fuse, x1_de, x2_de, edge = model(left, right)

    for tensor in (x_fuse, x1_de, x2_de, edge):
        print(tensor.shape)

    print('y1 - max:', torch.max(x_fuse).item(), 'min:', torch.min(x_fuse).item())
    print('y2 - max:', torch.max(edge).item(), 'min:', torch.min(edge).item())
