from torch import nn
from torch.nn import functional as F


class ConvBlock(nn.Module):
    """Downsampling convolutional stage: two 3x3 convolutions, each
    followed by batch norm and ReLU.

    The first convolution uses stride 2, so the spatial resolution is
    halved while the channel count changes from ``in_channels`` to
    ``out_channels``.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        stages = [
            # Stride-2 conv performs the downsampling (no pooling layer).
            nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            # Second conv refines features at the reduced resolution.
            nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        ]
        self.layers = nn.Sequential(*stages)

    def forward(self, x):
        """Apply the block; output spatial size is ceil(input / 2)."""
        out = self.layers(x)
        return out


class FCN8s(nn.Module):
    """FCN-8s-style semantic segmentation network.

    Encoder: five stride-2 ``ConvBlock`` stages (overall x32 downsampling)
    followed by a 4096-channel conv head. Decoder: three transposed-conv
    upsampling stages with additive skip connections from the x8 and x16
    encoder features (channel-matched by 1x1 convolutions), FCN-8s style.

    The forward pass returns per-pixel class probabilities (softmax over
    the channel dimension) at the input resolution.
    """

    def __init__(self, num_classes):
        super().__init__()
        # Encoder: each block halves the spatial resolution.
        self.conv_block1 = ConvBlock(3, 64)
        self.conv_block2 = ConvBlock(64, 128)
        self.conv_block3 = ConvBlock(128, 256)
        self.conv_block4 = ConvBlock(256, 512)
        self.conv_block5 = ConvBlock(512, 512)

        # Wide conv head at 1/32 resolution (FCN's "fc6" analogue).
        self.conv6 = nn.Sequential(
            nn.Conv2d(512, 4096, 3, 1, 1),
            nn.BatchNorm2d(4096),
            nn.ReLU()
        )

        # Decoder: x2, x2, then x8 transposed-conv upsampling.
        self.up_sampling1 = nn.ConvTranspose2d(4096, num_classes, 2, 2)
        self.up_sampling2 = nn.ConvTranspose2d(num_classes, num_classes, 2, 2)
        self.up_sampling3 = nn.ConvTranspose2d(num_classes, num_classes, 8, 8)

        # 1x1 convs project encoder features to num_classes channels
        # so skip connections can be added elementwise.
        self.skip1 = nn.Conv2d(256, num_classes, 1)
        self.skip2 = nn.Conv2d(512, num_classes, 1)

        # Dropout on the wide head to reduce overfitting.
        self.dropout = nn.Dropout2d(0.5)

        # Channel-wise softmax turns logits into per-pixel probabilities.
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        def resized_to(src, ref):
            # Bilinearly resize `src` to `ref`'s spatial size if they
            # differ (odd input sizes make the strides misalign).
            if src.size()[2:] != ref.size()[2:]:
                src = F.interpolate(src, size=ref.size()[2:], mode='bilinear', align_corners=True)
            return src

        # Encoder; enc3 (1/8) and enc4 (1/16) feed the skip connections.
        enc1 = self.conv_block1(x)
        enc2 = self.conv_block2(enc1)
        enc3 = self.conv_block3(enc2)
        enc4 = self.conv_block4(enc3)
        enc5 = self.conv_block5(enc4)

        head = self.dropout(self.conv6(enc5))

        # First decoder stage: 1/32 -> 1/16, fuse with enc4 projection.
        up16 = resized_to(self.up_sampling1(head), self.skip2(enc4))
        fused16 = up16 + self.skip2(enc4)

        # Second decoder stage: 1/16 -> 1/8, fuse with enc3 projection.
        up8 = resized_to(self.up_sampling2(fused16), self.skip1(enc3))
        fused8 = up8 + self.skip1(enc3)

        # Final x8 upsampling back toward the input resolution.
        logits = self.up_sampling3(fused8)
        logits = resized_to(logits, x)

        return self.softmax(logits)
