"""
@Time: 2020/12/30 10:26 AM
@Author: jinzhuan
@File: bert_rex.py
@Desc: 
"""
import torch
import torch.nn as nn
import torch.nn.functional as f
from transformers import BertModel
from cognlp.models.base.base_function import BaseFunction


class Bert4RexFunction(BaseFunction):
    """Forward / prediction / loss / evaluation logic for BERT-based relation extraction.

    This mixin expects the concrete model (e.g. ``Bert4Rex``) to provide
    ``self.bert``, ``self.dropout``, ``self.classifier`` and ``self.device``.
    """

    def __init__(self):
        super().__init__()

    def forward(
            self,
            batch=None,
    ):
        """Encode a batch with BERT and score every gold relation mention.

        Args:
            batch: tuple of ``(input_ids, attention_mask, segment_ids,
                head_indexes, relation_mentions, relation_mentions_mask)``.
                NOTE(review): ``segment_ids`` is unpacked but never passed to
                BERT as ``token_type_ids`` — confirm this is intentional.

        Returns:
            Tuple ``(prediction, golden_labels)``: classifier logits of shape
            ``(num_mentions, label_size)`` and a ``LongTensor`` of gold label
            ids on ``self.device``.
        """
        input_ids, attention_mask, segment_ids, head_indexes, relation_mentions, relation_mentions_mask = batch
        sequence_output = self.bert(input_ids=input_ids, attention_mask=attention_mask)[0]
        batch_size, max_len, feat_dim = sequence_output.shape
        # Re-align sub-word token vectors to word-head positions (in-place, per row).
        for i in range(batch_size):
            sequence_output[i] = torch.index_select(sequence_output[i], 0, head_indexes[i])
        sequence_output = self.dropout(sequence_output)
        entity_pairs = []
        relation_labels = []
        for i in range(batch_size):
            # The mask is read as a contiguous prefix of 1s: its length is the
            # number of valid relation mentions for this example.
            valid_len = 0
            for j in range(max_len):
                if relation_mentions_mask[i][j].item() == 1:
                    valid_len += 1
                else:
                    break
            for j in range(valid_len):
                # relation_mention layout (from the indexing below):
                # [subj_start, subj_end, obj_start, obj_end, label]
                relation_mention = relation_mentions[i][j]
                subject_entity = sequence_output[i][relation_mention[0].item():relation_mention[1].item()]
                subject_entity = torch.mean(subject_entity, dim=0)
                object_entity = sequence_output[i][relation_mention[2].item():relation_mention[3].item()]
                object_entity = torch.mean(object_entity, dim=0)
                # Pair representation: mean-pooled subject span ++ object span.
                object_subject = torch.cat([subject_entity, object_entity], 0)
                entity_pairs.append(object_subject)
                relation_labels.append(relation_mention[4].item())
        # NOTE(review): torch.stack raises on an empty sequence — a batch with
        # zero valid mentions crashes here; confirm upstream guarantees >= 1.
        entity_pairs = torch.stack(tuple(entity_pairs), dim=0)
        prediction = self.classifier(entity_pairs)
        golden_labels = torch.LongTensor(relation_labels).to(self.device)
        return prediction, golden_labels

    def predict(
            self,
            batch=None,
    ):
        """Return the argmax relation label id for every mention in the batch."""
        # Fix: removed the dead unpacking of `batch` (forward() unpacks it itself).
        prediction, golden_labels = self.forward(batch)
        # argmax over log-probabilities equals argmax over raw logits;
        # log_softmax is kept for parity with evaluate().
        prediction_labels = torch.argmax(f.log_softmax(prediction, dim=1), dim=1)
        return prediction_labels

    def loss(
            self,
            batch=None,
            loss_function=None,
    ):
        """Compute ``loss_function(logits, gold_labels)`` for one batch.

        Args:
            batch: same tuple layout as ``forward``.
            loss_function: callable taking ``(prediction, golden_labels)``,
                e.g. ``nn.CrossEntropyLoss()``.
        """
        # Fix: removed dead code — the batch unpacking and the
        # log_softmax/argmax result were computed but never used here.
        prediction, golden_labels = self.forward(batch)
        loss = loss_function(prediction, golden_labels)
        return loss

    def evaluate(
            self,
            batch=None,
            metrics=None,
    ):
        """Feed predicted vs. gold labels into the ``metrics`` accumulator."""
        # Fix: removed the dead unpacking of `batch`.
        prediction, golden_labels = self.forward(batch)
        prediction_labels = torch.argmax(f.log_softmax(prediction, dim=1), dim=1)
        metrics.evaluate(prediction_labels, golden_labels)


class Bert4Rex(Bert4RexFunction, nn.Module):
    """BERT relation-extraction model: pretrained encoder + dropout + linear head.

    The classifier scores concatenated (subject, object) span embeddings,
    hence the default ``embedding_size`` of twice BERT's hidden size.
    """

    def __init__(
            self,
            vocabulary,
            embedding_size=768 * 2,
            hidden_dropout_prob=0.1,
            bert_model='bert-base-cased',
            device=torch.device("cuda"),
    ):
        super().__init__()
        # Configuration bookkeeping — one output class per vocabulary entry.
        self.vocabulary = vocabulary
        self.embedding_size = embedding_size
        self.label_size = len(vocabulary)
        self.hidden_dropout_prob = hidden_dropout_prob
        self.device = device
        # Sub-modules: pretrained encoder, classification head, dropout.
        self.bert = BertModel.from_pretrained(bert_model)
        self.classifier = nn.Linear(embedding_size, self.label_size)
        self.dropout = nn.Dropout(hidden_dropout_prob)


class Bert4RexParallel(nn.DataParallel, Bert4RexFunction):
    """Multi-GPU wrapper that keeps ``Bert4RexFunction``'s helper methods available."""

    def __init__(self, module, device_ids):
        # Call DataParallel's initializer directly (not super()) so the mixin
        # MRO does not trigger BaseFunction's cooperative __init__ chain.
        nn.DataParallel.__init__(self, module=module, device_ids=device_ids)
        # Mirror the wrapped model's metadata (DataParallel stores `module`
        # as self.module, so these read the same object as the original).
        self.label_size = module.label_size
        self.device = module.device
