import numpy as np
import torch
from torch import nn
from transformers import BertModel, BertTokenizer
from torch.utils.tensorboard import SummaryWriter
import warnings
warnings.filterwarnings("ignore")

class Classifier(nn.Module):
    """Binary classification head on top of a BERT encoder.

    The pooled [CLS] representation produced by BERT is passed through a
    small MLP (768 -> 512 -> 2) and returned as log-probabilities.
    """

    def __init__(self, bert):
        # `bert` is expected to behave like transformers.BertModel: calling
        # it with (input_ids, attention_mask=..., return_dict=False) yields
        # (sequence_output, pooled_output).
        super().__init__()
        self.bert = bert
        self.fc1 = nn.Linear(768, 512)        # 768 = BERT hidden size
        self.fc2 = nn.Linear(512, 2)          # two output classes
        self.dropout = nn.Dropout(0.1)
        self.relu = nn.ReLU()
        self.softmax = nn.LogSoftmax(dim=1)   # normalize over the class axis

    def forward(self, sent_id, mask):
        """Return (batch, 2) log-probabilities for token ids and attention mask."""
        # Only the pooled [CLS] state is used; the per-token states are discarded.
        _, pooled = self.bert(sent_id, attention_mask=mask, return_dict=False)
        hidden = self.dropout(self.relu(self.fc1(pooled)))
        return self.softmax(self.fc2(hidden))


if __name__ == '__main__':
    writer = SummaryWriter('../logs')
    model_name = '../bert-base-chinese'
    model_path = '../bert-base-chinese'
    bert_model = BertModel.from_pretrained(model_path)
    model = Classifier(bert_model)
    tokenizer = BertTokenizer.from_pretrained(model_name)
    text = "你无敌了[PAD],我没事,怎么撒s[SEP] ， 。 看得见我吗[PAD]"
    sent_id = tokenizer.encode(text,
                               add_special_tokens=True,
                               # add the [CLS] and [SEP] special tokens
                               truncation=True,
                               # cap the sequence at max_length tokens
                               max_length=100
                               )
    # Attention mask: 0 for [PAD] (token id 0), 1 for every real token.
    att_mask = [int(tok > 0) for tok in sent_id]
    # print the integer id sequence
    print("整数序列: {}".format(sent_id))
    # map the ids back to their token strings
    print("标记化文本:", tokenizer.convert_ids_to_tokens(sent_id))
    decoded = tokenizer.decode(sent_id)
    print("解码字符串: {}".format(decoded))
    print("注意力掩码:", att_mask)
    # convert the lists to tensors
    sent_id = torch.tensor(sent_id)
    att_mask = torch.tensor(att_mask)
    # reshape to (batch_size, seq_len):
    # unsqueeze adds a size-1 dimension, squeeze would remove one
    sent_id = sent_id.unsqueeze(0)
    att_mask = att_mask.unsqueeze(0)
    print("sent_id.shape():", sent_id.shape)
    print("att_mask.shape():", att_mask.shape)
    # Call the module itself, not .forward(), so nn.Module hooks run;
    # no_grad() skips autograd bookkeeping for pure inference.
    with torch.no_grad():
        print(model(sent_id, att_mask))

    writer.add_graph(model, input_to_model=[sent_id, att_mask])
    with torch.no_grad():
        outputs = bert_model(sent_id, attention_mask=att_mask, return_dict=False)
    hidden_states = outputs[0]
    # outputs[1] is the pooled hidden state of the first token ([CLS])
    CLS_hidden_state = outputs[1]
    print("outputs:", len(outputs))
    print("outputs[0].shape:", outputs[0].shape)
    print("outputs[1]--[CLS]shape:", outputs[1].shape)

    print("----------------计算模型参数------------------")
    total_params = sum(p.numel() for p in model.parameters())
    print("Total parameters:", total_params)

    # Flush and close the TensorBoard writer (previously leaked).
    writer.close()
