# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name： dab_transformer
Description :
Author : 'li'
date： 2022/7/3
Change Activity:
2022/7/3:
-------------------------------------------------
"""
from torch import nn

from ml.dl.model.base import BaseModule
from ml.dl.model.modules import DABEncoder
from ml.dl.model.modules.multi_attention.dab_decoder import DABDecoder


class DABTransformer(BaseModule):
    """DAB-DETR style transformer: a standard transformer encoder followed by a
    dynamic-anchor-box (DAB) decoder that refines reference-point box queries.
    """

    def __init__(self, dab_encoder_cfg=None, dab_decoder_cfg=None):
        """
        Args:
            dab_encoder_cfg: dict of keyword arguments forwarded to ``DABEncoder``.
                ``None`` is treated as an empty dict.
            dab_decoder_cfg: dict of keyword arguments forwarded to ``DABDecoder``.
                ``None`` is treated as an empty dict.
        """
        super(DABTransformer, self).__init__()
        # BUG FIX: the declared default of None previously crashed with
        # ``TypeError`` on ``**None``; fall back to an empty kwargs dict.
        self.encoder = DABEncoder(**(dab_encoder_cfg or {}))
        self.decoder = DABDecoder(**(dab_decoder_cfg or {}))
        self._reset_parameters()

    def _reset_parameters(self):
        """Xavier-uniform-initialize all weight matrices; parameters with
        ``dim() <= 1`` (biases, norm scales) keep their module defaults."""
        for p in self.parameters():
            if p.dim() > 1:
                nn.init.xavier_uniform_(p)

    def forward(self, src, mask, bbox_query, pos_embed, label_query, attn_mask=None, mask_dict=None):
        """Run the encoder over flattened image features, then decode queries.

        Args:
            src: feature map of shape ``(batch, channels, height, width)``.
            mask: padding mask of shape ``(batch, height, width)``; flattened to
                ``(batch, height*width)`` and used as the key-padding mask.
            bbox_query: reference-point (anchor box) queries, passed to the
                decoder unsigmoided (``ref_points_unsigmoid``).
            pos_embed: positional embedding, same spatial shape as ``src``.
            label_query: content/label queries for the decoder.
            attn_mask: optional decoder self-attention mask; may be ``None``.
            mask_dict: optional denoising bookkeeping dict, threaded through
                the decoder unchanged in structure.

        Returns:
            Tuple ``(out, mask_dict)`` as produced by the decoder.
        """
        # Flatten NxCxHxW feature maps to the (HW, N, C) layout the
        # sequence-first transformer modules expect.
        bs, c, h, w = src.shape
        src = src.flatten(2).permute(2, 0, 1)  # (b,c,h,w) => (h*w,b,c)
        pos_embed = pos_embed.flatten(2).permute(2, 0, 1).to(src.device)  # (b,c,h,w) => (h*w,b,c)
        mask = mask.flatten(1).to(src.device)  # (b,h,w) => (b,h*w)
        memory = self.encoder(src, src_key_padding_mask=mask, pos=pos_embed)  # standard transformer
        # BUG FIX: ``attn_mask`` defaults to None; the old unconditional
        # ``attn_mask.to(...)`` raised AttributeError on the default.
        if attn_mask is not None:
            attn_mask = attn_mask.to(memory.device)
        out, mask_dict = self.decoder(label_query=label_query, memory=memory, pos=pos_embed, attention_mask=attn_mask, feature_padding_mask=None,
                                      tgt_key_padding_mask=None, memory_key_padding_mask=mask, ref_points_unsigmoid=bbox_query, mask_dict=mask_dict)
        return out, mask_dict
