# Sample Python script (PyCharm template).

# Press Shift+F10 to run, or replace this with your own code.
# Double-press Shift to search classes, files, tool windows, actions, and settings everywhere.
import random

# from transformers import BertModel, BertConfig, BertTokenizer
import torch
from data_util.data_generate import Dataset
from data_util.data_pro import get_data
# from model.bert_atten import Bert_Attention
from train import train
from torch import nn
import torch.nn.functional as F
# from torch.nn import CrossEntropyLoss
# from transformers import AdamW, get_linear_schedule_with_warmup
from model.longformer_model import LongForm
import model.config as conf
from torch.utils.data import DataLoader
from model.bigbird import BigBird
import matplotlib
matplotlib.use('Agg')  # 替换为你希望使用的后端
import matplotlib.pyplot as plt
import os
# bert_model = BertModel.from_pretrained('bert_base_chinese')
# config = BertConfig.from_pretrained('bert_base_chinese')
# tokenizer = BertTokenizer.from_pretrained('bert_base_chinese')
# def print_hi(name):
#     # 在下面的代码行中使用断点来调试脚本。
#     print(f'Hi, {name}')  # 按 Ctrl+F8 切换断点。
#



class FocalLoss(nn.Module):
    """Focal loss for multi-class classification (Lin et al., 2017).

    Scales the per-sample cross-entropy by ``alpha * (1 - p_t) ** gamma`` so
    that easy, high-confidence examples contribute less to the total loss —
    useful when the label distribution is heavily imbalanced.
    """

    def __init__(self, alpha=1, gamma=2, reduction='mean'):
        super(FocalLoss, self).__init__()
        self.alpha = alpha          # global scaling factor
        self.gamma = gamma          # focusing exponent; 0 recovers plain CE
        self.reduction = reduction  # 'mean', 'sum', or anything else -> per-sample

    def forward(self, inputs, targets):
        # Per-sample CE; exp(-CE) recovers p_t, the predicted probability
        # assigned to the true class.
        per_sample_ce = F.cross_entropy(inputs, targets, reduction='none')
        prob_true = torch.exp(-per_sample_ce)
        scaled = self.alpha * (1 - prob_true) ** self.gamma * per_sample_ce

        if self.reduction == 'sum':
            return scaled.sum()
        if self.reduction == 'mean':
            return scaled.mean()
        return scaled



# 按间距中的绿色按钮以运行脚本。
if __name__ == '__main__':
    # Train a BigBird text classifier and plot loss/accuracy curves.
    torch.manual_seed(123)  # fixed seed for reproducible runs

    # Load train/validation splits and wrap them in shuffled DataLoaders.
    train_df, train_list = get_data(conf.train_path)
    valid_df, valid_list = get_data(conf.valid_path)
    train_dataset = Dataset(train_list)
    valid_dataset = Dataset(valid_list)
    train_dataloader = DataLoader(train_dataset, batch_size=conf.batch_size, shuffle=True)
    valid_dataloader = DataLoader(valid_dataset, batch_size=conf.batch_size, shuffle=True)

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    bigbird_model = BigBird(n_model=conf.n_model, cls_nu=len(conf.cls))
    bigbird_model.to(device)

    # Binary classification with imbalanced labels: up-weight positives 8x
    # inside BCEWithLogitsLoss instead of resampling the data.
    pos_weight = torch.tensor([8.0]).to(device)
    criterion = nn.BCEWithLogitsLoss(pos_weight=pos_weight)
    # criterion = FocalLoss()  # alternative loss for class imbalance

    optimizer = torch.optim.Adam(bigbird_model.parameters(), lr=conf.lr)

    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(conf.checkpoint, exist_ok=True)
    train_loss_list, valid_loss_list, acc_pred_list = train(
        bigbird_model, train_dataloader, valid_dataloader, device, criterion, optimizer)

    # Plot training curves. Create the output directory first — previously
    # savefig raised FileNotFoundError when 'image/' did not exist.
    os.makedirs('image', exist_ok=True)
    x_list = range(conf.epochs)
    plt.title('Result')
    plt.plot(x_list, train_loss_list, label='train loss')
    plt.plot(x_list, valid_loss_list, label='valid loss')
    plt.plot(x_list, acc_pred_list, label='acc')
    plt.legend(loc='best')
    plt.savefig('image/grid_3.png')
# Visit https://www.jetbrains.com/help/pycharm/ for PyCharm help.
