import json
from abc import ABC

from torch.utils.data import Dataset
from constants import UTTERANCES, LABEL, DOMAIN_SLOT_TAG, VALUE_SLOT_TAG


def convert_labels_to_generative(labels):
    """Serialize a collection of "domain_slot-value" labels into one tagged string.

    Labels are sorted so the generative target is deterministic regardless of
    input order. Each label is split on its FIRST hyphen only, so values that
    themselves contain hyphens are preserved intact (the original split('-')
    raised ValueError for such labels).

    Args:
        labels: iterable of strings, each of the form "domain_slot-value".

    Returns:
        A single string: DOMAIN_SLOT_TAG + domain_slot + VALUE_SLOT_TAG + value
        concatenated for every label, in sorted label order.
    """
    parts = []
    for label in sorted(labels):
        # maxsplit=1: only the first '-' separates domain_slot from value.
        domain_slot, value = label.split('-', 1)
        parts.append(DOMAIN_SLOT_TAG + domain_slot + VALUE_SLOT_TAG + value)

    # join once instead of quadratic += accumulation
    return ''.join(parts)


class TripleExtraDataset(Dataset, ABC):
    """Dataset of dialogue windows for generative triple extraction.

    Loads a JSON corpus of documents, flattens each document's windows into a
    single example list, and tokenizes utterances (source) and labels (target)
    on access.
    """

    def __init__(self, args, f_name, tokenizer):
        """Load and flatten the corpus.

        Args:
            args: namespace carrying ``max_source_length`` and
                ``max_target_length``.
            f_name: path to a JSON file shaped as a list of documents, each a
                list of window dicts keyed by UTTERANCES and LABEL.
            tokenizer: a HuggingFace-style tokenizer callable.
        """
        super().__init__()
        self.args = args
        # `with` ensures the file handle is closed (the original leaked it).
        with open(f_name, 'r', encoding='utf-8') as f:
            corpus = json.load(f)
        # Flatten: one example per window across all documents.
        self.corpus = [window for doc in corpus for window in doc]
        self.tokenizer = tokenizer

    def __len__(self):
        """Return the number of flattened windows."""
        return len(self.corpus)

    def __getitem__(self, index):
        """Tokenize one example.

        Returns:
            A tuple ``(batch, labels)`` where ``batch`` is the tokenizer
            encoding of the concatenated utterances and ``labels`` is the
            ``input_ids`` tensor of the generative label string.
        """
        example = self.corpus[index]
        utterances, labels = example[UTTERANCES], example[LABEL]
        # Concatenate the window's utterances into a single source string.
        utterances = "".join(utterances)
        batch = self.tokenizer(utterances,
                               max_length=self.args.max_source_length,
                               padding='max_length',
                               truncation=True,
                               return_tensors='pt')
        labels = convert_labels_to_generative(labels)
        labels = self.tokenizer(labels,
                                max_length=self.args.max_target_length,
                                padding='max_length',
                                truncation=True,
                                return_tensors='pt')['input_ids']

        return batch, labels
