import torch
from torch import nn
from taa_core.modeling.rpn.utils import cat, permute_and_flatten

def normal_init(module, mean=0, std=1, bias=0):
    """Initialize ``module.weight`` from N(mean, std); optionally fill the bias.

    The bias is set to the constant ``bias`` only when the module defines a
    non-None ``bias`` attribute, so modules built with ``bias=False`` (or with
    no bias at all) pass through untouched.
    """
    nn.init.normal_(module.weight, mean, std)
    bias_param = getattr(module, 'bias', None)
    if bias_param is not None:
        nn.init.constant_(bias_param, bias)


def concat_cls_reg_feats(cls_feats, reg_feats, num_channel=256):
    """Flatten per-level cls/reg feature maps and concatenate across levels.

    Args:
        cls_feats: list of per-level tensors of shape (N, A * num_channel, H, W),
            where A is the number of anchors per spatial location.
        reg_feats: list of per-level tensors, paired level-by-level with
            ``cls_feats``; assumed shape (N, A * 4, H, W) — TODO confirm against
            the head that produces them.
        num_channel: channels per anchor in the classification features.

    Returns:
        cls_feat: tensor of shape (total_anchors, num_channel).
        reg_feat: tensor of shape (total_anchors, 4).
    """
    C = num_channel
    cls_feat_flattened = []
    reg_feat_flattened = []

    for cls_feat_per_level, reg_feat_per_level in zip(
        cls_feats, reg_feats
    ):
        N, AxC, H, W = cls_feat_per_level.shape
        # Anchors per location, inferred from the cls channel count.
        A = AxC // C

        # (N, A*C, H, W) -> (N, H*W*A, C)
        cls_feat_per_level = permute_and_flatten(
            cls_feat_per_level, N, A, C, H, W
        )
        cls_feat_flattened.append(cls_feat_per_level)

        # (N, A*4, H, W) -> (N, H*W*A, 4)
        reg_feat_per_level = permute_and_flatten(
            reg_feat_per_level, N, A, 4, H, W
        )
        reg_feat_flattened.append(reg_feat_per_level)

    cls_feat = cat(cls_feat_flattened, dim=1).reshape(-1, C)
    # Bug fix: each anchor carries 4 regression values (see the flatten above),
    # so the final reshape must use 4, not C. Reshaping with C would raise when
    # the element count is not divisible by C, and silently scramble anchors
    # otherwise.
    reg_feat = cat(reg_feat_flattened, dim=1).reshape(-1, 4)
    return cls_feat, reg_feat
