import datasets
import torch
import torch.nn as nn
from transformers import BertModel

# Evaluation hyper-parameters.
EVAL_BATCH = 128                   # batch size for the evaluation DataLoader
EVAL_EPOCH = 2000                  # upper bound on the number of batches evaluated
CRITERION = nn.CrossEntropyLoss()  # loss recorded per batch during evaluation

# Compatibility shim: older/newer `datasets` releases may not define
# config.REPOCARD_FILENAME; default it so downstream code can rely on it.
try:
    datasets.config.REPOCARD_FILENAME
except AttributeError:
    datasets.config.REPOCARD_FILENAME = 'README.md'

from ClassifyModel import ClassifyModel
import configs
import data

# Build the classifier: a pretrained BERT encoder wrapped by the project's
# ClassifyModel (argument meanings inferred from the config names below).
bert: BertModel = BertModel.from_pretrained(configs.PRETRAINED_BERT_PATH)
model = ClassifyModel(
    bert,
    configs.lstm_hidden_size,    # LSTM hidden size
    configs.lstm_n_layers,       # number of LSTM layers
    configs.stat_n_features,     # number of statistical input features
    configs.fusion_n_out,        # fusion layer output width
    configs.classifier_dropout,  # classifier dropout probability
    configs.out_n_classes        # number of output classes
)
# Restore trained weights from the evaluation checkpoint.
# NOTE(review): torch.load without map_location assumes the checkpoint's
# saved device is available here — consider map_location='cpu' if this
# script may run on a CPU-only machine; confirm against the training setup.
model.load_custom_dict(torch.load(configs.eval_model_path))
model.eval()  # inference mode: disables dropout, uses running batch-norm stats

# Build the evaluation dataset and its DataLoader.
testDs = configs.getEvalDataset()
dataloader = data.getLoader(testDs, EVAL_BATCH)
# Diagnostic: dataset size and (fractional) number of batches per full pass.
print(len(testDs), len(testDs) / EVAL_BATCH)

loss_rec = []  # per-batch CrossEntropy losses; summed/averaged after the loop
epc = 1        # batch counter (despite the name, it counts batches, not epochs)
# (model is already in eval mode — set once right after loading the checkpoint,
#  so the redundant second model.eval() call was dropped)

# Confusion-matrix accumulators; the loop below treats class index 1 as the
# positive class (binary classification assumed — TODO confirm out_n_classes == 2).
total_tp, total_fp, total_tn, total_fn = 0, 0, 0, 0

# Evaluation loop: no gradients needed, cap the number of batches at EVAL_EPOCH.
with torch.no_grad():
    for epc, batch in enumerate(dataloader, start=1):
        if epc > EVAL_EPOCH:
            break

        input_ids, attention_mask, stats, labels = batch[:4]

        outputs = model(input_ids, attention_mask, stats)

        # Record the loss for this batch.
        loss_rec.append(CRITERION(outputs, labels).item())

        # Predicted / target class indices (labels appear to be one-hot encoded).
        preds = outputs.argmax(dim=1)
        target_labels = labels.argmax(dim=1)

        # Confusion matrix: class 1 is positive, class 0 is negative.
        pred_pos, pred_neg = preds == 1, preds == 0
        true_pos, true_neg = target_labels == 1, target_labels == 0

        total_tp += (pred_pos & true_pos).sum().item()
        total_fp += (pred_pos & true_neg).sum().item()
        total_tn += (pred_neg & true_neg).sum().item()
        total_fn += (pred_neg & true_pos).sum().item()


# ---- Aggregate metrics over all evaluated batches ----
total_samples = total_tp + total_fp + total_tn + total_fn

# Guard the empty-run case (no batches evaluated): previously this raised
# ZeroDivisionError, while precision/recall below already guard with an
# epsilon. Non-degenerate results are unchanged.
accuracy = (total_tp + total_tn) / total_samples if total_samples else 0.0

precision = total_tp / (total_tp + total_fp + 1e-10)
recall = total_tp / (total_tp + total_fn + 1e-10)

res = {
    "accuracy": accuracy,
    "precision": precision,
    "recall": recall,
    "confusion_matrix": (total_tp, total_fp, total_tn, total_fn)
}

# Total and mean loss over recorded batches (0.0 if nothing was recorded).
avg_loss = sum(loss_rec) / len(loss_rec) if loss_rec else 0.0
print("sum and avg:", sum(loss_rec), avg_loss)
print(res)
