

import torch
from data_util.data_generate import Dataset
from data_util.data_pro import get_data
# from model.bert_atten import Bert_Attention
from train import evaluate
from torch import nn
# from torch.nn import CrossEntropyLoss
# from transformers import AdamW, get_linear_schedule_with_warmup
from model.longformer_model import LongForm
import model.config as conf
from torch.utils.data import DataLoader
from model.bigbird import BigBird
import matplotlib.pyplot as plt
# bert_model = BertModel.from_pretrained('bert_base_chinese')
# config = BertConfig.from_pretrained('bert_base_chinese')
# tokenizer = BertTokenizer.from_pretrained('bert_base_chinese')
# def print_hi(name):
#     # Use a breakpoint in the code line below to debug your script.
#     print(f'Hi, {name}')  # Press Ctrl+F8 to toggle the breakpoint.
#
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # Build the evaluation pipeline: read the held-out test split and wrap
    # it in a DataLoader. shuffle=False — evaluation metrics do not depend
    # on sample order, and a fixed order keeps runs reproducible/comparable.
    test_list = get_data(conf.test_path)
    test_dataset = Dataset(test_list)
    test_dataloader = DataLoader(test_dataset, batch_size=conf.batch_size, shuffle=False)

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Rebuild the BigBird classifier and restore the trained weights.
    # map_location=device lets a checkpoint saved on GPU load on a CPU-only
    # machine instead of raising a CUDA deserialization error.
    bigbird_model = BigBird(n_model=conf.n_model, cls_nu=len(conf.cls))
    bigbird_model.load_state_dict(
        torch.load('bigbird_checkpoint/7_0.5452488687782805.pt', map_location=device)
    )
    bigbird_model.to(device)

    # Evaluation only: the CrossEntropyLoss criterion and Adam optimizer
    # previously constructed here were never passed to evaluate() and have
    # been removed as dead code.
    evaluate(bigbird_model, test_dataloader, device)

