'''
 * @Author: Benjay·Shaw
 * @Date: 2024-10-31 17:07:50
 * @LastEditors: Benjay·Shaw
 * @LastEditTime: 2024-10-31 21:43:58
 * @Description: Inference model (SE-ResNet50 backbone with change-detection head)
'''
import paddle
import ssl
from common_module import *
# Download URL for pretrained SE-ResNet50 weights (moskomule/senet.pytorch
# release). NOTE(review): the asset is a PyTorch-era .pkl checkpoint —
# presumably paddle.load can parse it after conversion; verify at load time.
model_urls = {'se_resnet50':
    'https://github.com/moskomule/senet.pytorch/releases/download/archive/seresnet50-60a8950a85b2b.pkl'
    }


class Bottleneck(paddle.nn.Layer):
    """SE-ResNet bottleneck block: 1x1 reduce -> 3x3 -> 1x1 expand, with
    squeeze-and-excitation recalibration before the residual addition.

    The main branch expands channels by ``expansion`` (4); an optional
    ``downsample`` module projects the identity path to the matching shape.
    """
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 dilation=1, reduction=16):
        super(Bottleneck, self).__init__()
        # 1x1 channel reduction.
        self.conv1 = paddle.nn.Conv2D(in_channels=inplanes,
                                      out_channels=planes, kernel_size=1,
                                      bias_attr=False)
        self.bn1 = FixedBatchNorm(planes)
        # 3x3 spatial conv; padding == dilation keeps the feature size
        # (up to the stride).
        self.conv2 = paddle.nn.Conv2D(in_channels=planes,
                                      out_channels=planes, kernel_size=3,
                                      stride=stride, padding=dilation,
                                      dilation=dilation, bias_attr=False)
        self.bn2 = FixedBatchNorm(planes)
        # 1x1 expansion to planes * 4.
        self.conv3 = paddle.nn.Conv2D(in_channels=planes,
                                      out_channels=planes * 4,
                                      kernel_size=1, bias_attr=False)
        self.bn3 = FixedBatchNorm(planes * 4)
        self.relu = paddle.nn.ReLU()
        self.se = SELayer(planes * 4, reduction)
        self.downsample = downsample
        self.stride = stride
        self.dilation = dilation

    def forward(self, x):
        """Return relu(SE(main branch) + identity)."""
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.se(self.bn3(self.conv3(out)))
        out = out + identity
        return self.relu(out)


class SEResNet(paddle.nn.Layer):
    """SE-ResNet backbone returning the stage-4 (layer4) feature map.

    Args:
        block: residual block class (e.g. ``Bottleneck``).
        layers: number of blocks in each of the four stages.
        strides: per-stage strides; ``strides[0]`` is unused because layer1
            always runs at stride 1 after the stem's max-pool.
        dilations: per-stage dilation rates.
    """

    def __init__(self, block, layers, strides=(2, 2, 2, 2),
                 dilations=(1, 1, 2, 4)):
        super(SEResNet, self).__init__()
        self.inplanes = 64
        # Stem: 7x7 stride-2 conv + BN + ReLU + 3x3 stride-2 max-pool.
        self.conv1 = paddle.nn.Conv2D(in_channels=3, out_channels=64,
                                      kernel_size=7, stride=2, padding=3,
                                      bias_attr=False)
        self.bn1 = FixedBatchNorm(64)
        self.relu = paddle.nn.ReLU()
        self.maxpool = paddle.nn.MaxPool2D(kernel_size=3, stride=2,
                                           padding=1)
        # Four residual stages, registered as layer1..layer4.
        stage_planes = (64, 128, 256, 512)
        stage_strides = (1,) + tuple(strides[1:])
        for idx in range(4):
            stage = self._make_layer(block, stage_planes[idx], layers[idx],
                                     stride=stage_strides[idx],
                                     dilation=dilations[idx])
            setattr(self, 'layer{}'.format(idx + 1), stage)
        # Kept as in the reference implementation (not 2048); downstream
        # code may rely on this value.
        self.inplanes = 1024

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
        """Build one residual stage of ``blocks`` blocks."""
        out_channels = planes * block.expansion
        # Project the identity path when spatial size or width changes.
        downsample = None
        if stride != 1 or self.inplanes != out_channels:
            downsample = paddle.nn.Sequential(
                paddle.nn.Conv2D(in_channels=self.inplanes,
                                 out_channels=out_channels, kernel_size=1,
                                 stride=stride, bias_attr=False),
                FixedBatchNorm(out_channels))
        # First block carries the stride/projection and runs at dilation 1
        # (as in the reference code); the rest use the requested dilation.
        stage = [block(self.inplanes, planes, stride, downsample,
                       dilation=1)]
        self.inplanes = out_channels
        stage.extend(block(self.inplanes, planes, dilation=dilation)
                     for _ in range(blocks - 1))
        return paddle.nn.Sequential(*stage)

    def forward(self, x):
        """Return the deepest (layer4) feature map for image batch ``x``."""
        feat = self.maxpool(self.relu(self.bn1(self.conv1(x))))
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            feat = stage(feat)
        return feat


class SE_Resnet50(paddle.nn.Layer):
    """Siamese change-detection network on a shared SE-ResNet50 encoder.

    Two co-registered images (previous / current) are encoded by the same
    backbone, projected to common widths by 1x1 heads, fused at a high and
    a low resolution by ``LocallyAdap`` modules, and decoded into
    ``nums_class`` output channels.
    """

    def __init__(self, nums_class, pretrained=True, **kwargs):
        super(SE_Resnet50, self).__init__()
        backbone = SEResNet(Bottleneck, layers=[3, 4, 6, 3], **kwargs)
        if pretrained:
            # Fetch the released SENet weights and copy over only the
            # entries whose keys also exist in this backbone.
            weight_path = paddle.utils.download.get_weights_path_from_url(
                model_urls['se_resnet50'])
            loaded = paddle.load(weight_path)
            current = backbone.state_dict()
            current.update({k: v for k, v in loaded.items()
                            if k in current})
            backbone.set_state_dict(state_dict=current)
        # Re-expose the backbone as five sequential feature stages.
        self.stage1 = paddle.nn.Sequential(backbone.conv1, backbone.bn1,
                                           backbone.relu, backbone.maxpool)
        self.stage2 = paddle.nn.Sequential(backbone.layer1)
        self.stage3 = paddle.nn.Sequential(backbone.layer2)
        self.stage4 = paddle.nn.Sequential(backbone.layer3)
        self.stage5 = paddle.nn.Sequential(backbone.layer4)
        self.dec = Decoder(320, nums_class)
        # Fusion modules: 768 = 3 x 256 (stages 3-5), 128 = 2 x 64
        # (stages 1-2) after projection.
        self.locally1 = LocallyAdap(768, 3)
        self.locally2 = LocallyAdap(128, 2)
        self.up1 = Upsample(256)
        # 1x1 conv + BN + ReLU heads mapping each stage to a fixed width.
        self.fc_dp1 = self._projection(64, 64)
        self.fc_dp2 = self._projection(256, 64)
        self.fc_dp3 = self._projection(512, 256)
        self.fc_dp4 = self._projection(1024, 256)
        # The stage-5 head also upsamples 2x to match stages 3/4.
        self.fc_dp5 = paddle.nn.Sequential(
            paddle.nn.Conv2D(in_channels=2048, out_channels=256,
                             kernel_size=1, bias_attr=False),
            paddle.nn.BatchNorm2D(num_features=256),
            paddle.nn.Upsample(scale_factor=2, mode='bilinear',
                               align_corners=False),
            paddle.nn.ReLU())

    @staticmethod
    def _projection(cin, cout):
        """1x1 conv + BN + ReLU channel projection head."""
        return paddle.nn.Sequential(
            paddle.nn.Conv2D(in_channels=cin, out_channels=cout,
                             kernel_size=1, bias_attr=False),
            paddle.nn.BatchNorm2D(num_features=cout),
            paddle.nn.ReLU())

    def encoder(self, x):
        """Run ``x`` through the five stages; features returned deepest first."""
        feats = []
        cur = x
        for stage in (self.stage1, self.stage2, self.stage3, self.stage4,
                      self.stage5):
            cur = stage(cur)
            feats.append(cur)
        x1, x2, x3, x4, x5 = feats
        return x5, x4, x3, x2, x1

    def forward(self, x_prev, x_now):
        """Predict the change map for the (previous, current) image pair."""
        x5_p, x4_p, x3_p, x2_p, x1_p = self.encoder(x_prev)
        x5_n, x4_n, x3_n, x2_n, x1_n = self.encoder(x_now)
        # High-level fusion: projected stages 5-3 concatenated on channels.
        # Each projection head still sees prev before now, so training-mode
        # BN statistics match the original ordering.
        high_prev = paddle.concat(
            x=[self.fc_dp5(x5_p), self.fc_dp4(x4_p), self.fc_dp3(x3_p)],
            axis=1)
        high_now = paddle.concat(
            x=[self.fc_dp5(x5_n), self.fc_dp4(x4_n), self.fc_dp3(x3_n)],
            axis=1)
        x_high_up = self.up1(self.locally1(high_prev, high_now))
        # Low-level fusion: projected stages 2-1.
        low_prev = paddle.concat(x=[self.fc_dp2(x2_p), self.fc_dp1(x1_p)],
                                 axis=1)
        low_now = paddle.concat(x=[self.fc_dp2(x2_n), self.fc_dp1(x1_n)],
                                axis=1)
        x_low = self.locally2(low_prev, low_now)
        # Decode the concatenated low/high fusion into nums_class channels.
        return self.dec(paddle.concat(x=[x_low, x_high_up], axis=1))
