import torch
import torch.nn as nn
import torchvision.models as models


class FCN16s(nn.Module):
    """FCN-16s semantic segmentation network on a VGG16 backbone.

    The VGG16 feature extractor is split right after pool4 so the pool4
    activations can be fused (skip connection) with the 2x-upsampled coarse
    scores, which are then upsampled 16x back to the input resolution.

    Args:
        num_classes: number of segmentation classes (output channels).
        pretrained: if True, initialize the backbone with torchvision's
            ImageNet weights (requires a download). Defaults to False,
            which keeps the original behavior of random initialization.
            NOTE(review): the original comment claimed the backbone was
            pretrained, but no weights were ever requested; this flag
            makes the choice explicit without changing the default.
    """

    def __init__(self, num_classes, pretrained=False):
        super(FCN16s, self).__init__()

        # VGG16 backbone; weights are only fetched when explicitly asked for.
        if pretrained:
            vgg16 = models.vgg16(weights=models.VGG16_Weights.IMAGENET1K_V1)
        else:
            vgg16 = models.vgg16()

        # features[:24] ends immediately after pool4 (index 23 in
        # vgg16.features); features[24:] is the conv5 block through pool5.
        self.features_low = nn.Sequential(*list(vgg16.features.children())[:24])
        self.features_high = nn.Sequential(*list(vgg16.features.children())[24:])

        # 1x1 conv mapping pool4's 512 channels to per-class scores.
        self.score_pool4 = nn.Conv2d(512, num_classes, kernel_size=1)

        # VGG's fully-connected layers recast as convolutions.
        # padding=3 keeps the 7x7 conv from shrinking the feature map.
        self.fcn = nn.Sequential(
            nn.Conv2d(512, 4096, kernel_size=7, padding=3),
            nn.ReLU(inplace=True),
            nn.Dropout2d(),
            nn.Conv2d(4096, 4096, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Dropout2d(),
            nn.Conv2d(4096, num_classes, kernel_size=1)
        )

        # Learned upsampling: 2x to pool4 resolution, then 16x back to the
        # input resolution (out = (H-1)*16 - 2*8 + 32 = 16*H).
        self.deconv1 = nn.ConvTranspose2d(num_classes, num_classes, kernel_size=2, stride=2, bias=False)
        self.deconv2 = nn.ConvTranspose2d(num_classes, num_classes, kernel_size=32, stride=16, padding=8, bias=False)

    def forward(self, x):
        """Return per-pixel class scores of shape [N, num_classes, H, W].

        Assumes H and W are divisible by 32 (e.g. 224x224) so the pool4
        scores and the 2x-upsampled coarse scores align for the skip add.
        """
        pool4 = self.features_low(x)        # [N, 512, H/16, W/16]
        pool5 = self.features_high(pool4)   # [N, 512, H/32, W/32]

        # Coarse per-class scores at pool5 resolution.
        fcn_out = self.fcn(pool5)           # [N, C, H/32, W/32]

        # Upsample 2x to match pool4's resolution.
        upscore2 = self.deconv1(fcn_out)    # [N, C, H/16, W/16]

        # Skip connection: fuse pool4 scores element-wise.
        score_pool4 = self.score_pool4(pool4)
        score = upscore2 + score_pool4

        # Upsample 16x back to the input resolution.
        out = self.deconv2(score)           # [N, C, H, W]

        return out


if __name__ == '__main__':
    # Demo: push one dummy batch through the network and show the output shape.
    n_classes = 21  # e.g. 20 foreground classes plus background
    net = FCN16s(n_classes)
    dummy = torch.randn(1, 3, 224, 224)  # batch of 1, RGB, 224x224
    result = net(dummy)
    print(result.shape)  # expected: torch.Size([1, 21, 224, 224])
