"""
@Time: 2020/12/4 下午 4:45
@Author: jinzhuan
@File: ner_tester_test.py
@Desc: 
"""
import sys

sys.path.append('/data/zhuoran/code/cognlp')

from cognlp.core.trainer import Tester
import torch
import torch.nn as nn
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from torch.utils.data import RandomSampler
from transformers import BertTokenizer

from cognlp.io.loader.ner import Conll2003NERLoader
from cognlp.io.processor.ner.conll2003 import NERProcessor
from cognlp.core.metrics import SpanFPreRecMetric
from cognlp.core.dataset import NerDataset
from cognlp.models.ner.bert_ner import Bert4Ner
from cognlp.config.ner.conll2003 import args

if __name__ == '__main__':
    # NOTE(review): GPU 4 is pinned here, but the Tester below receives
    # device=None and device_ids=[4, 5, 6] — confirm which setting Tester
    # actually honours; as written these three values disagree.
    torch.cuda.set_device(4)
    device = torch.device("cuda")

    # Build a minimal one-sentence dev set with all-'O' labels, just to
    # exercise the inference/metric path end to end.
    sentence = ['But', 'China', 'saw', 'their', 'luck', 'desert', '.']
    label = ['O'] * len(sentence)
    dev_set = {'sentence': [sentence], 'label': [label]}
    # To evaluate on the real corpus instead of the synthetic sentence:
    #   loader = Conll2003NERLoader()
    #   train_set, dev_set, test_set = loader.load(args.input_dir)

    # The processor owns the label vocabulary used by both model and metric.
    process = NERProcessor(path="../data/ner/conll2003/data")
    dev_data = NerDataset(process.process(dev_set, device=device))
    # NOTE(review): RandomSampler shuffles an evaluation set of size 1 —
    # harmless here, but a SequentialSampler would be the usual choice.
    dev_sampler = RandomSampler(dev_data)

    model = Bert4Ner(len(process.vocabulary), device=device)
    metric = SpanFPreRecMetric(tag_vocab=process.vocabulary)
    # Removed unused objects from the original script: BertTokenizer, the
    # CrossEntropyLoss, Adam optimizer and MultiStepLR scheduler were built
    # but never passed to Tester — evaluation needs none of them.

    tester = Tester(model,
                    model_path='../data/ner/conll2003/model/2020-12-04-18:10:39-model.pkl',
                    batch_size=1, sampler=dev_sampler,
                    drop_last=False, num_workers=0, print_every=1000,
                    dev_data=dev_data, metrics=metric, metric_key=None,
                    use_tqdm=True, device=None, callbacks=None,
                    check_code_level=0, device_ids=[4, 5, 6])
    tester.test()

