import torch

from common.ner_tools import convert_examples_to_features, format_list_line
from database.ner.cner.interface import get_data
from torch.utils.data import Dataset, DataLoader
from functools import partial


def get_dataset():
    """Return the (Dataset, DataLoader) class pair used for the CNER corpus."""
    return (CnerDataset, CnerDataLoader)


class CnerDataset(Dataset):
    """Dataset for the CNER (Chinese resume NER) corpus.

    Loads raw examples via ``get_data`` and converts them into padded tensor
    features with ``convert_examples_to_features``. Labels use BIO tagging.
    """

    def __init__(self, config, mode, tokenizer, **kwargs):
        """
        Args:
            config: object providing ``train_max_seq_length`` and
                ``eval_max_seq_length`` attributes.
            mode: data split name; 'train' selects the training max length,
                anything else the eval max length.
            tokenizer: tokenizer forwarded to the feature converter.
        """
        self.data_type = mode
        self.label_list = self.get_labels()
        self.max_seq_length = config.train_max_seq_length if mode == 'train' \
            else config.eval_max_seq_length
        # Bind the fixed conversion arguments once so the data source only
        # needs to supply raw examples.  (Previously get_labels() was called
        # a second time here; reuse the cached list instead.)
        convert_tensor = partial(convert_examples_to_features, label_list=self.label_list,
                                 tokenizer=tokenizer, max_seq_length=self.max_seq_length)
        callback = partial(format_list_line, callback=convert_tensor)
        self.data = get_data(mode, callback)
        self.num_labels = len(self.label_list)
        self.id2label = {i: label for i, label in enumerate(self.label_list)}
        self.label2id = {label: i for i, label in enumerate(self.label_list)}
        self.markup = "bio"  # "bios" or "bio"; the "s" variant marks single-token entities

    @classmethod
    def get_labels(cls):
        # TODO: what is "X"? Presumably a padding/sub-token placeholder label — confirm.
        return ["X", 'B-CONT', 'B-EDU', 'B-LOC', 'B-NAME', 'B-ORG', 'B-PRO', 'B-RACE', 'B-TITLE',
                'I-CONT', 'I-EDU', 'I-LOC', 'I-NAME', 'I-ORG', 'I-PRO', 'I-RACE', 'I-TITLE',
                'O']

    def __getitem__(self, index):
        """Return one example as five long tensors:
        (input_ids, input_mask, segment_ids, input_len, label_ids)."""
        f = self.data[index]
        all_input_ids = torch.tensor(f.input_ids, dtype=torch.long)
        all_input_mask = torch.tensor(f.input_mask, dtype=torch.long)
        all_segment_ids = torch.tensor(f.segment_ids, dtype=torch.long)
        all_label_ids = torch.tensor(f.label_ids, dtype=torch.long)
        all_lens = torch.tensor(f.input_len, dtype=torch.long)
        return all_input_ids, all_input_mask, all_segment_ids, all_lens, all_label_ids

    def __len__(self):
        return len(self.data)

    @classmethod
    def collate_fn(cls, batch):
        """Stack a list of (input_ids, input_mask, segment_ids, input_len,
        label_ids) tuples into batch tensors, truncating the sequence
        dimension to the longest real length in the batch.

        Returns:
            (input_ids, attention_mask, token_type_ids, labels, lens)
        """
        all_input_ids, all_attention_mask, all_token_type_ids, all_lens, all_labels = map(torch.stack, zip(*batch))
        # Drop padding columns beyond the longest real sequence to save
        # downstream compute; lens is returned untruncated.
        max_len = all_lens.max().item()
        all_input_ids = all_input_ids[:, :max_len]
        all_attention_mask = all_attention_mask[:, :max_len]
        all_token_type_ids = all_token_type_ids[:, :max_len]
        all_labels = all_labels[:, :max_len]
        return all_input_ids, all_attention_mask, all_token_type_ids, all_labels, all_lens


class CnerDataLoader(DataLoader):
    """DataLoader preconfigured with the module-level CNER ``collate_fn``."""

    def __init__(self, dataset, batch_size, num_workers=0, shuffle=True):
        super().__init__(dataset,
                         batch_size=batch_size,
                         shuffle=shuffle,
                         collate_fn=collate_fn,
                         num_workers=num_workers)


def collate_fn(batch):
    """Stack a list of (input_ids, input_mask, segment_ids, input_len,
    label_ids) tuples into batch tensors, truncating the sequence dimension
    to the longest real length in the batch.

    NOTE(review): this duplicates ``CnerDataset.collate_fn``; the previous
    docstring's claim that sequences are "sorted from longest to shortest"
    was wrong — no sorting happens, only truncation.

    Args:
        batch: list of 5-tuples of equal-length padded 1-D long tensors plus
            a scalar length tensor, in the order produced by
            ``CnerDataset.__getitem__``.

    Returns:
        (input_ids, attention_mask, token_type_ids, labels, lens) — the first
        four truncated to the batch max length; ``lens`` untouched.
    """
    all_input_ids, all_attention_mask, all_token_type_ids, all_lens, all_labels = map(
        torch.stack, zip(*batch))
    # Drop padding columns beyond the longest real sequence.
    max_len = all_lens.max().item()
    all_input_ids = all_input_ids[:, :max_len]
    all_attention_mask = all_attention_mask[:, :max_len]
    all_token_type_ids = all_token_type_ids[:, :max_len]
    all_labels = all_labels[:, :max_len]
    return all_input_ids, all_attention_mask, all_token_type_ids, all_labels, all_lens
