import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import resnet18  # stands in for a YOLO backbone; simplified implementation

# Spatial Transformer Network (STN)
class STN(nn.Module):
    def __init__(self):
        super(STN, self).__init__()
        self.localization = nn.Sequential(
            nn.Conv2d(1, 8, kernel_size=7),
            nn.MaxPool2d(2, stride=2),
            nn.ReLU(True),
            nn.Conv2d(8, 10, kernel_size=5),
            nn.MaxPool2d(2, stride=2),
            nn.ReLU(True)
        )
        self.fc_loc = nn.Sequential(
            nn.Linear(10 * 124 * 124, 32),  # 根据输入尺寸调整
            nn.ReLU(True),
            nn.Linear(32, 6)  # 输出6个仿射变换参数
        )
        self.fc_loc[2].bias.data.fill_(0)  # 初始化为单位变换

    def forward(self, x):
        xs = self.localization(x)
        xs = xs.view(-1, 10 * 124 * 124)
        theta = self.fc_loc(xs)
        theta = theta.view(-1, 2, 3)  # 仿射变换矩阵
        grid = torch.nn.functional.affine_grid(theta, x.size())
        x = torch.nn.functional.grid_sample(x, grid)
        return x

class SpatialTransformer(nn.Module):
    """Spatial transformer that predicts and applies a 2x3 affine warp.

    Unlike the previous lazily-built head, ``fc_loc`` is sized at
    construction time from ``img_size`` (the localization stack has a fixed
    overall stride of 16), so its parameters exist — and are registered —
    before any optimizer is created.  A runtime rebuild is kept only as a
    fallback for inputs whose spatial size differs from ``img_size``.

    Args:
        in_channels: number of channels of the input tensor.
        img_size: expected square input side length (default 640).
    """

    def __init__(self, in_channels, img_size=640):
        super().__init__()
        self.localization = nn.Sequential(
            nn.Conv2d(in_channels, 8, 7, 2, 3),   # side -> side/2
            nn.MaxPool2d(2, 2),                   # -> side/4
            nn.ReLU(True),
            nn.Conv2d(8, 10, 5, 2, 2),            # -> side/8
            nn.MaxPool2d(2, 2),                   # -> side/16
            nn.ReLU(True)
        )
        # Build the regression head with the correct input size up front
        # (e.g. img_size=640 -> 40x40 -> 10*40*40 = 16000 features) instead
        # of a dummy Linear replaced on the first forward pass.
        side = self._out_side(img_size)
        self.fc_loc = nn.Linear(10 * side * side, 6)
        self._reset_identity(self.fc_loc)

    @staticmethod
    def _out_side(size):
        """Spatial side length after the localization stack for input ``size``."""
        size = (size + 2 * 3 - 7) // 2 + 1   # Conv2d(k=7, s=2, p=3)
        size = size // 2                     # MaxPool2d(2, 2)
        size = (size + 2 * 2 - 5) // 2 + 1   # Conv2d(k=5, s=2, p=2)
        size = size // 2                     # MaxPool2d(2, 2)
        return size

    @staticmethod
    def _reset_identity(fc):
        """Initialize ``fc`` so it outputs the identity affine transform."""
        fc.weight.data.zero_()
        fc.bias.data.copy_(torch.tensor(
            [1, 0, 0, 0, 1, 0], dtype=torch.float, device=fc.weight.device))

    def forward(self, x):
        """Predict an affine transform for ``x`` and apply it.

        Args:
            x: input tensor of shape (B, in_channels, H, W).

        Returns:
            Tuple ``(x_warped, theta)`` — the warped input (same shape as
            ``x``) and the (B, 2, 3) affine matrices.
        """
        xs = self.localization(x)            # (B, 10, H', W')
        B = xs.size(0)
        feat_dim = xs[0].numel()
        # Fallback: rebuild the head when the input spatial size differs
        # from the one given at construction.  NOTE(review): this discards
        # any trained weights and the new parameters are invisible to an
        # already-created optimizer — prefer passing the right img_size.
        if self.fc_loc.in_features != feat_dim:
            self.fc_loc = nn.Linear(feat_dim, 6).to(xs.device)
            self._reset_identity(self.fc_loc)

        theta = self.fc_loc(xs.view(B, -1)).view(-1, 2, 3)

        grid = F.affine_grid(theta, x.size(), align_corners=True)
        x_warped = F.grid_sample(x, grid, align_corners=True)
        return x_warped, theta