"""Train a BERT-LSTM-CRF NER model on CLUENER and log metrics to wandb.

Pipeline: wrap CLUENER splits -> tokenize into CRF datasets -> dataloaders
-> BERT+LSTM+CRF model -> epoch loop that trains, validates, logs to wandb
and keeps the best checkpoint by validation F1.
"""
import wandb
from torch.utils.data.dataloader import DataLoader
from transformers import BertModel, BertTokenizerFast

from dataset.dataset import CRFDataset
from dataset.wrapper import CLUENERWrapper
from model.model import BertLstmCrf
from trainer.trainer import CrfTrainer
from utils.saver import BestCheckpointSaver

# Hyperparameters kept in one place so the wandb config, the dataloaders
# and the training loop cannot silently drift apart.
PRETRAINED_PATH = "pretrained/chinese-bert-wwm"
MAX_SEQ_LEN = 256
BATCH_SIZE = 64
# NOTE(review): lr is only logged to wandb here; CrfTrainer appears to pick
# its own optimizer/lr internally — confirm it matches this value.
LEARNING_RATE = 1e-5
EPOCHS = 100

# Data: CLUENER train/dev splits wrapped for CRF-style sequence tagging.
train_data = CLUENERWrapper()
val_data = CLUENERWrapper(data_type="dev")

# Fast tokenizer is required by CRFDataset for offset mapping (typo
# "toeknizer" fixed).
tokenizer = BertTokenizerFast.from_pretrained(PRETRAINED_PATH)
train_dataset = CRFDataset(tokenizer, train_data, MAX_SEQ_LEN)
val_dataset = CRFDataset(tokenizer, val_data, MAX_SEQ_LEN)

# Validation uses batch size 1; both loaders share the project collate fn.
train_dataloader = DataLoader(train_dataset, BATCH_SIZE, shuffle=True,
                              collate_fn=CRFDataset.collect_function)
val_dataloader = DataLoader(val_dataset, 1, shuffle=False,
                            collate_fn=CRFDataset.collect_function)

# Model: pretrained Chinese BERT (whole-word-masking) backbone + LSTM + CRF
# head sized to the label inventory of the training split.
bert = BertModel.from_pretrained(PRETRAINED_PATH)
model = BertLstmCrf(len(train_data.id2type), bert)

trainer = CrfTrainer(model, train_data.id2type, train_dataloader, val_dataloader)

logger = wandb.init(
    project="LSTM-CRF-NER",
    name="202211291402",
    config={"batch_size": BATCH_SIZE, "lr": LEARNING_RATE, "epoch": EPOCHS},
)

saver = BestCheckpointSaver()
try:
    for epoch in range(EPOCHS):
        train_metrics = trainer.train(epoch)
        val_metrics = trainer.validate(epoch)
        # Metric keys fixed: "precison" -> "precision" (previous typo meant
        # the dashboard columns were misspelled).
        logger.log({
            "train_loss": train_metrics["loss"],
            "train_precision": train_metrics["precision"],
            "train_recall": train_metrics["recall"],
            "train_f1": train_metrics["f1-score"],
            "val_loss": val_metrics["loss"],
            "val_precision": val_metrics["precision"],
            "val_recall": val_metrics["recall"],
            "val_f1": val_metrics["f1-score"],
        })
        # Keep only the checkpoint with the best validation F1 so far.
        saver.update(epoch, trainer, val_metrics["f1-score"])
finally:
    # Close the wandb run even if training crashes mid-epoch.
    logger.finish()