import torch
# from torch.nn import nn
import torch.nn as nn


class MEAttention(nn.Module):
    """External-attention-style cross-attention block.

    ``src1`` is projected to a query stream and ``src2`` to a key stream;
    the two are concatenated, fused back to ``dim * coef`` channels, split
    into heads, and attended against a small learned external memory of
    ``k`` slots (the ``linear_0`` / ``linear_1`` pair acts as the external
    key/value memory, as in External Attention, Guo et al. 2021).

    Args:
        dim: channel dimension of both inputs.
        configs_head: base number of attention heads; multiplied by the
            internal expansion factor ``coef`` (=4). Default 8.
    """

    def __init__(self, dim, configs_head=8):
        super(MEAttention, self).__init__()
        self.coef = 4
        # Effective head count after the channel-expansion factor.
        self.num_heads = configs_head * self.coef
        # Number of learned external-memory slots (64 for coef=4).
        self.k = 256 // self.coef
        self.query_liner = nn.Linear(dim, dim * self.coef)
        self.key_liner = nn.Linear(dim, dim * self.coef)
        # Fuse the concatenated query/key streams back to dim * coef.
        # NOTE: was hard-coded nn.Linear(1920, 960), which only worked
        # for dim == 240; this form is identical there and works for any dim.
        self.linear = nn.Linear(2 * dim * self.coef, dim * self.coef)
        # linear_0 / linear_1 play the role of the external key / value
        # memory: per-head channels -> k slots, then k slots -> channels.
        self.linear_0 = nn.Linear(dim * self.coef // self.num_heads, self.k)
        self.linear_1 = nn.Linear(self.k, dim * self.coef // self.num_heads)
        self.proj = nn.Linear(dim * self.coef, dim)

    def forward(self, src1, src2):
        """Cross-attend the query stream ``src1`` against the key stream ``src2``.

        Args:
            src1: (B, N, dim) query-side features.
            src2: key-side features; assumed to share src1's batch size and
                sequence length so the streams can be concatenated on the
                channel axis — TODO confirm against callers.

        Returns:
            Tensor of shape (B, N, dim).
        """
        B, N, C = src1.shape

        query = self.query_liner(src1)            # (B, N, dim*coef)
        key = self.key_liner(src2)                # (B, N, dim*coef)

        # Fuse the two streams, then split into heads.
        merged = torch.cat([query, key], dim=2)   # (B, N, 2*dim*coef)
        attn = self.linear(merged)                # (B, N, dim*coef)
        attn = attn.view(B, N, self.num_heads, -1).permute(0, 2, 1, 3)

        # Attention over the external memory: softmax across the token
        # axis, then l1-normalize across the memory slots — the double
        # normalization from the External Attention paper.
        attn = self.linear_0(attn)                # (B, heads, N, k)
        attn = attn.softmax(dim=-2)
        attn = attn / (1e-9 + attn.sum(dim=-1, keepdim=True))

        # Read back from memory, merge heads, project to dim.
        x = self.linear_1(attn).permute(0, 2, 1, 3).reshape(B, N, -1)
        x = self.proj(x)                          # (B, N, dim)

        return x


