import math
from functools import partial
from itertools import repeat

import torch
import torch.nn as nn
import torch.nn.functional as F
# from torch._six import container_abcs
import torch.utils.checkpoint as checkpoint
from timm.models.layers import DropPath, to_2tuple, trunc_normal_
from timm.models.vision_transformer import Mlp as Mlp_old

from .resnet import ResNet, BasicBlock, Bottleneck
# from .resnet_ibn import ResNet_IBN, Bottleneck_IBN
import copy
from torch.nn import Parameter
# from .part_split import CenterPool


def weights_init_kaiming(m):
    """Kaiming-initialize a module, following the usual re-ID convention.

    Linear layers are initialized with ``mode='fan_out'`` and Conv layers
    with ``mode='fan_in'``; affine BatchNorm layers get weight=1, bias=0.
    Matching is done on the class-name substring so every Conv*/BatchNorm*
    variant (1d/2d/3d) is covered. Modules of any other type are left
    untouched. Intended for use with ``module.apply(weights_init_kaiming)``.
    """
    name = type(m).__name__
    if 'Linear' in name:
        nn.init.kaiming_normal_(m.weight, a=0, mode='fan_out')
        if m.bias is not None:
            nn.init.constant_(m.bias, 0.0)
    elif 'Conv' in name:
        nn.init.kaiming_normal_(m.weight, a=0, mode='fan_in')
        if m.bias is not None:
            nn.init.constant_(m.bias, 0.0)
    elif 'BatchNorm' in name:
        # Non-affine BatchNorm has no learnable weight/bias to set.
        if m.affine:
            nn.init.constant_(m.weight, 1.0)
            nn.init.constant_(m.bias, 0.0)
  


class Presnet(nn.Module):
    """ResNet-50 (Bottleneck, [3, 4, 6, 3]) feature extractor for re-ID.

    Stages conv1..layer2 are grouped into ``self.backbone``; ``layer3`` and
    ``layer4`` are deep copies of the base network's stages. The commented-out
    ``layer3_2``/``layer4_2`` lines suggest a second parallel branch was
    planned but is currently disabled.
    """
    def __init__(self, model_path="",last_stride=1,pre_trained=None,**kwargs):
        """Build the backbone and optionally load ImageNet-pretrained weights.

        Args:
            model_path: path to a pretrained checkpoint; loaded via
                ``self.base.load_param`` only when ``pre_trained == "imagenet"``
                and the path is non-empty.
            last_stride: stride of the last ResNet stage (1 keeps a larger
                final feature map, a common re-ID trick).
            pre_trained: must equal the string ``"imagenet"`` to trigger
                checkpoint loading; any other value skips it.
            **kwargs: accepted for interface compatibility but unused here.
        """
        super().__init__()
        # self.vit_base=vit_base_patch16_224_TransReID(img_size=img_size,stride_size=stride_size,model_path=model_path,**kwargs)
        # self.base = ResNet_IBN(last_stride=last_stride,
        #            block=Bottleneck_IBN,
        #            layers=[3, 4, 6, 3])
        self.base = ResNet(last_stride=last_stride,
                   block=Bottleneck,
                   layers=[3, 4, 6, 3])        
    

        # 1-D pooling to length 1; used only in the tdeep==0 branch of forward().
        self.avgpool = nn.AdaptiveAvgPool1d(1)

        # NOTE: _init_weights runs BEFORE load_param, so pretrained parameters
        # overwrite this init wherever they overlap. It only touches
        # Linear/LayerNorm modules anyway, so the conv backbone keeps the
        # default (or loaded) weights.
        self.apply(self._init_weights)
        if model_path and pre_trained=="imagenet":
            self.base.load_param(model_path)
        # Shared trunk: everything up to and including layer2. The deep copies
        # below are taken AFTER loading, so they start from pretrained weights.
        self.backbone = nn.Sequential(
            self.base.conv1,
            self.base.bn1,
            self.base.relu,
            self.base.maxpool,
            self.base.layer1,
            self.base.layer2,
        )
        self.layer3 = copy.deepcopy(self.base.layer3)
        self.layer4 = copy.deepcopy(self.base.layer4)
        # self.layer3_2 = copy.deepcopy(self.base.layer3)
        # self.layer4_2 = copy.deepcopy(self.base.layer4)

        # self.part_split = CenterPool(pool_type='vertical')
    def _init_weights(self, m):
        # Transformer-style init (truncated normal for Linear, ones/zeros for
        # LayerNorm); all other module types are left untouched.
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    def forward(self, x,tdeep=1):
        """Extract features from an input image batch.

        Args:
            x: input batch; presumably (B, 3, H, W) images — TODO confirm.
            tdeep: when 0, returns a single pooled global feature in a
                one-element list. NOTE(review): for any other value (including
                the default 1) the method falls off the end and returns None —
                this looks truncated/unfinished; confirm against callers.
        """
        B = x.shape[0]  # NOTE(review): unused — likely a leftover.
        x=self.backbone(x)
        global_feat=self.layer3(x)
        global_feat=self.layer4(global_feat)

        if tdeep==0:
            # NOTE(review): layer4 output of a conv ResNet is 4-D (B, C, H, W);
            # transpose(1, 2) yields (B, H, C, W) and AdaptiveAvgPool1d expects
            # 3-D (B, C, L) input. This branch appears copied from a
            # transformer variant with (B, L, C) tokens — verify it is ever
            # reached with the shapes it assumes.
            global_feat = self.avgpool(global_feat.transpose(1, 2))  # B C 1
            global_feat = torch.flatten(global_feat, 1)        
            return [global_feat]






