import torch
# from timm.models import create_model
# import utils
import torch.nn as nn
import torch.nn.functional as F
from networks.repvit import repvit_m1_1


def replace_batchnorm(net):
    """Recursively fold/remove BatchNorm layers for inference.

    Children exposing a ``fuse()`` method are replaced by their fused
    counterpart (and the fused module is recursed into, in case it still
    contains BatchNorm); bare ``BatchNorm2d`` children are swapped for
    ``Identity``; all other children are recursed into. Mutates *net*
    in place.
    """
    for name, module in net.named_children():
        fuse_fn = getattr(module, 'fuse', None)
        if fuse_fn is not None:
            merged = fuse_fn()
            setattr(net, name, merged)
            replace_batchnorm(merged)
        elif isinstance(module, torch.nn.BatchNorm2d):
            setattr(net, name, torch.nn.Identity())
        else:
            replace_batchnorm(module)

class upconv(nn.Module):
    """Bilinear upsampling by a fixed scale factor followed by a conv block."""

    def __init__(self, in_dim, out_dim, k_size, scale):
        super(upconv, self).__init__()
        # Spatial magnification applied before the convolution.
        self.scale = scale
        self.conv = conv(in_dim, out_dim, k_size, 1)

    def forward(self, x):
        upsampled = F.interpolate(
            x, scale_factor=self.scale, mode='bilinear', align_corners=True
        )
        return self.conv(upsampled)

class conv(nn.Module):
    """Conv2d -> BatchNorm2d -> ELU block with 'same'-style padding.

    Padding is ``(k_size - 1) // 2`` so odd kernels with stride 1 preserve
    spatial size. Attribute names ``conv`` and ``bn`` are kept stable so
    existing state_dicts remain loadable.
    """

    def __init__(self, in_dim, out_dim, k_size, stride):
        super(conv, self).__init__()
        self.k_size = k_size
        same_pad = (self.k_size - 1) // 2
        self.conv = nn.Conv2d(in_dim, out_dim, k_size, stride, padding=same_pad)
        self.bn = nn.BatchNorm2d(out_dim)

    def forward(self, x):
        out = self.conv(x)
        out = self.bn(out)
        return F.elu(out, inplace=True)


class RepVit(nn.Module):
    """RepViT-m1.1 encoder with a U-Net-style decoder.

    The encoder is assumed to return a list of 5 feature maps at
    decreasing resolution (``out[0]`` .. ``out[4]``); the decoder
    upsamples the deepest map and fuses it with skip connections.
    TODO(review): confirm the exact strides of ``repvit_m1_1`` outputs —
    the comments below (H/16, H/2) mirror the original annotations.
    """

    # Original hard-coded checkpoint location, kept as the default so
    # existing callers see unchanged behavior.
    DEFAULT_PRETRAINED = '/home/liyuke/lyk_work/posfeat_big/pretrained/repvit_m1_1_distill_450e.pth'

    def __init__(self, up_dim=(64, 128, 256, 512), pretrained_path=DEFAULT_PRETRAINED):
        """
        Args:
            up_dim: channel widths used by the decoder, shallow-to-deep.
                (Tuple default avoids the mutable-default-argument pitfall;
                lists still work since it is only indexed.)
            pretrained_path: checkpoint to initialize the encoder from,
                or ``None`` to skip loading (e.g. for unit tests or
                training from scratch).
        """
        super(RepVit, self).__init__()
        self.encoder = repvit_m1_1()
        if pretrained_path is not None:
            # map_location='cpu' so CUDA-saved checkpoints load on any host;
            # load_state_dict moves tensors to the module's device anyway.
            checkpoint = torch.load(pretrained_path, map_location='cpu')['model']
            # Drop the classification head — only the backbone is used here.
            drop_prefixes = ('classifier',)
            backbone_state = {
                name: param
                for name, param in checkpoint.items()
                if not name.startswith(drop_prefixes)
            }
            self.encoder.load_state_dict(backbone_state)

        # 1x1 projections producing the coarse/fine output maps.
        self.conv_coarse = conv(up_dim[2], 128, 1, 1)
        self.conv_fine = conv(64, 128, 1, 1)

        # Decoder: at each level, upsample 2x, concat the skip feature,
        # then fuse with a 3x3 conv.
        self.upconv3 = upconv(up_dim[3], up_dim[2], 3, 2)
        self.iconv3 = conv(up_dim[2] + up_dim[2], up_dim[2], 3, 1)

        self.upconv2 = upconv(up_dim[2], up_dim[1], 3, 2)
        self.iconv2 = conv(up_dim[1] + up_dim[1], up_dim[1], 3, 1)

        self.upconv1 = upconv(up_dim[1], up_dim[0], 3, 2)
        self.iconv1 = conv(up_dim[0] + up_dim[0], up_dim[0], 3, 1)

    def skipconnect(self, x1, x2):
        """Pad x1 to x2's spatial size and concatenate along channels.

        Handles off-by-one size mismatches from odd input resolutions.
        Returns ``cat([x2, x1])`` (skip feature first).
        """
        diffY = x2.size()[2] - x1.size()[2]
        diffX = x2.size()[3] - x1.size()[3]
        x1 = F.pad(x1, (diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2))
        return torch.cat([x2, x1], dim=1)

    def forward(self, x):
        """Run encoder + decoder.

        Returns a dict with:
            ``global_map``: coarse features projected from ``out[3]`` (H/16),
            ``local_map``: fine decoder features (H/2),
            ``local_map_small``: the shallowest encoder feature ``out[0]``.
        """
        out = self.encoder(x)

        x_coarse = self.conv_coarse(out[3])  # H/16

        x = self.upconv3(out[4])
        x = self.skipconnect(out[3], x)
        x = self.iconv3(x)

        x = self.upconv2(x)
        x = self.skipconnect(out[2], x)
        x = self.iconv2(x)

        x = self.upconv1(x)
        x = self.skipconnect(out[1], x)
        x = self.iconv1(x)
        x_fine = self.conv_fine(x)  # H/2

        return {'global_map': x_coarse, 'local_map': x_fine, 'local_map_small': out[0]}