# -*- coding: utf-8 -*-
"""
@Time ： 2024/3/30 9:02
@Auth ： fcq
@File ：BERT_JOINT.py
@IDE ：PyCharm
@Motto：ABC(Always Be Coding)
"""

import torch
import torch.nn as nn
import torch.nn.functional as F


class BERT_JOINT(nn.Module):
    """Joint classifier over a BERT encoder.

    Mean-pools the last hidden states separately over the "text" span
    (attention mask minus the [CLS] position) and the "topic" span
    (token_type_ids == 1), concatenates the two mean vectors, and maps
    them through dropout -> linear -> ReLU -> output head.

    Returns both the logits and the concatenated feature vector so the
    caller can attach auxiliary losses on the representation.
    """

    def __init__(self, opt, bert):
        super(BERT_JOINT, self).__init__()
        self.bert = bert
        # The pooled [CLS] output is never used; dropping the pooler saves
        # parameters and avoids its unused forward computation.
        self.bert.pooler = None

        self.dropout = nn.Dropout(opt.dropout)
        self.relu = nn.ReLU()
        # Fuses the concatenated (text_mean, topic_mean) back to bert_dim.
        self.linear = nn.Linear(opt.bert_dim * 2, opt.bert_dim)
        self.out = nn.Linear(opt.bert_dim, opt.num_labels)

    def forward(self, inputs):
        """Run a joint forward pass.

        Args:
            inputs: sequence of (input_ids, token_type_ids, attention_mask),
                each of shape (batch, seq_len). token_type_ids is assumed to
                mark the topic segment with 1s — TODO confirm against caller.

        Returns:
            out: (batch, num_labels) classification logits.
            cat: (batch, 2 * bert_dim) concatenated mean-pooled features.
        """
        x_input_ids, x_seg_ids, x_atten_masks = inputs[0], inputs[1], inputs[2]

        # FIX: no hard-coded .to('cuda') — all derived tensors inherit the
        # inputs' device, so the model runs on CPU or any GPU unchanged.
        last_hidden = self.bert(input_ids=x_input_ids,
                                attention_mask=x_atten_masks,
                                token_type_ids=x_seg_ids)
        hidden = last_hidden[0]  # (batch, seq_len, bert_dim)

        # FIX: operate on a copy — the original mutated the caller's
        # attention-mask tensor in place.
        txt_mask = x_atten_masks.clone()
        txt_mask[:, 0] = 0  # exclude [CLS] from the text average

        # clamp(min=1) guards against div-by-zero on degenerate (all-zero)
        # masks; for any non-empty span the result is identical.
        txt_l = txt_mask.sum(1).clamp(min=1)
        topic_l = x_seg_ids.sum(1).clamp(min=1)
        # FIX: cast in place of the CPU round-trip `.type(torch.FloatTensor)`.
        txt_vec = txt_mask.to(hidden.dtype)
        topic_vec = x_seg_ids.to(hidden.dtype)

        # Masked mean over the sequence dimension for each span.
        txt_mean = torch.einsum('blh,bl->bh', hidden, txt_vec) / txt_l.unsqueeze(1)
        topic_mean = torch.einsum('blh,bl->bh', hidden, topic_vec) / topic_l.unsqueeze(1)

        cat = torch.cat((txt_mean, topic_mean), dim=1)
        query = self.dropout(cat)
        linear = self.relu(self.linear(query))
        out = self.out(linear)

        return out, cat
