#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File  : test.py
# @Author: Richard Chiming Xu
# @Date  : 2024/3/3
# @Desc  :
from loguru import logger


class Config:
    """Hyperparameters, data paths and runtime switches for the SimBert model."""

    # --- data loading ---
    max_seq_len: int = 512  # maximum tokenized sequence length
    train_data: str = 'data/simbert_训练集.csv'
    val_data: str = 'data/simbert_验证集.csv'
    test_data: str = 'data/rag_相似度测试集.csv'
    model_savedir: str = 'result/model'

    # --- model ---
    model_path: str = '/home/jxbd/python/bertmodel/bertmodel/hfl/chinese-roberta-wwm-ext'  # local pretrained model path
    tokenizer = None  # tokenizer instance, populated at runtime
    load_model: bool = False  # load an already-trained model for prediction
    save_model: bool = True  # persist the model after training

    # --- training ---
    device: str = 'cpu'
    learning_rate: float = 1e-5
    batch_size: int = 8  # samples per batch
    epochs: int = 3  # number of training epochs
    print_loss: int = 200  # report the loss every N steps
    num_labels: int = 2  # number of output classes


from transformers import  AutoModel, AutoConfig, RobertaForSequenceClassification
import torch
from torch import nn


class ClassificationHead(nn.Module):
    """Sentence-pair classification head over concatenated encoder outputs.

    Expects ``features`` of shape ``(batch, seq_len, 2 * hidden_size)`` —
    the two towers' sequence outputs concatenated along the hidden
    dimension — and maps the first-token representation to
    ``num_labels`` logits.
    """

    def __init__(self, bert_config):
        """
        Args:
            bert_config: a transformers config providing ``hidden_size``,
                ``num_labels``, ``hidden_dropout_prob`` and (optionally)
                ``classifier_dropout``.
        """
        super().__init__()
        # Input width is doubled: two towers concatenated on the hidden dim.
        self.dense = nn.Linear(bert_config.hidden_size * 2, bert_config.hidden_size)
        # Fall back to the generic hidden dropout when no classifier-specific
        # dropout is configured (mirrors HF's RobertaClassificationHead).
        classifier_dropout = (
            bert_config.classifier_dropout
            if bert_config.classifier_dropout is not None
            else bert_config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.out_proj = nn.Linear(bert_config.hidden_size, bert_config.num_labels)

    def forward(self, features, **kwargs):
        """Return logits of shape ``(batch, num_labels)``."""
        x = features[:, 0, :]  # take <s> token (equiv. to [CLS])
        # NOTE: removed leftover debug logging (logger.info of the tensor
        # size) that fired at INFO level on every forward pass.
        x = self.dropout(x)
        x = self.dense(x)
        x = torch.tanh(x)
        x = self.dropout(x)
        x = self.out_proj(x)
        return x


class SimBert(nn.Module):
    """Twin-tower (bi-encoder) BERT for sentence-pair similarity classification.

    Each sentence of a pair is encoded by its own BERT tower; the two
    sequence outputs are concatenated along the hidden dimension and fed
    to a ``ClassificationHead``.
    """

    def __init__(self, config: Config, num_labels: int = 2):
        """
        Args:
            config: project ``Config`` providing ``model_path``.
            num_labels: number of output classes.
        """
        super(SimBert, self).__init__()
        bert_config = AutoConfig.from_pretrained(config.model_path)
        bert_config.num_labels = num_labels
        # BUG FIX: self.num_labels was never stored, so the loss reshape in
        # forward() raised AttributeError whenever a label was supplied.
        self.num_labels = num_labels
        # Twin towers: one encoder per side of the sentence pair.
        self.support_bert = AutoModel.from_pretrained(config.model_path)
        self.query_bert = AutoModel.from_pretrained(config.model_path)
        # Classifier over the concatenated tower outputs.
        self.classifier = ClassificationHead(bert_config)

    def forward(self, input_ids_a, attention_mask_a, input_ids_b, attention_mask_b, label=None):
        """Encode both sentences and classify the pair.

        Args:
            input_ids_a / attention_mask_a: tokenized first sentence.
            input_ids_b / attention_mask_b: tokenized second sentence.
            label: optional class indices; when given, a cross-entropy
                loss is computed.

        Returns:
            (logits, loss) — ``loss`` is ``None`` when ``label`` is ``None``.
        """
        outputs_a = self.support_bert(input_ids=input_ids_a, attention_mask=attention_mask_a)
        # BUG FIX: the second sentence must go through the query tower; the
        # original routed both sides through support_bert, leaving
        # query_bert entirely unused (its parameters never saw a gradient).
        outputs_b = self.query_bert(input_ids=input_ids_b, attention_mask=attention_mask_b)
        # last_hidden_state of each tower: (batch, seq, hidden)
        sequence_output_a = outputs_a[0]
        sequence_output_b = outputs_b[0]
        # Concatenate along the hidden dimension -> (batch, seq, 2*hidden).
        sequence_output = torch.cat([sequence_output_a, sequence_output_b], dim=2)
        logits = self.classifier(sequence_output)
        loss = None
        if label is not None:
            loss_fct = nn.CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), label.view(-1))
        return logits, loss


