from torch.utils.data import Dataset, DataLoader
from cio import JsonLReader
from torch.nn.utils.rnn import pad_sequence
from config.Config import BASE_DIR, PAD_TOKEN, BATCH_SIZE, DEVICE
from utils.vocabulary import big_vocab as vocab
import torch


class ConDataset(Dataset):
    """Dataset over pre-tokenized records stored as JSON Lines.

    Each item is whatever ``JsonLReader`` yields for one line — presumably a
    dict with 'method'/'summary'/'context' token-id lists (that is what
    ``pad_collate`` reads); confirm against the jsonl files.
    """

    # Split name -> jsonl filename; replaces the duplicated if/elif branches.
    _SPLIT_FILES = {
        'train': 'context_words_id.train.jsonl',
        'test': 'context_words_id.test.jsonl',
    }

    def __init__(self, mod='train'):
        """Load the requested split fully into memory.

        :param mod: dataset split, either ``'train'`` or ``'test'``
        :raises ValueError: if *mod* is not a known split
        """
        super().__init__()
        try:
            filename = self._SPLIT_FILES[mod]
        except KeyError:
            # Clearer than the original "No train or test" message.
            raise ValueError(f"mod must be 'train' or 'test', got {mod!r}") from None
        self.jsonl = JsonLReader(BASE_DIR / 'resource' / 'data' / 'consum' / filename).read()

    def __getitem__(self, index):
        """Return the raw record at position *index*."""
        return self.jsonl[index]

    def __len__(self):
        """Number of records in the loaded split."""
        return len(self.jsonl)


def pad_collate(batch: list):
    """Collate a batch of records into padded LongTensors plus per-sample lengths.

    Each record is expected to be a dict with 'method', 'summary' and 'context'
    keys holding token-id sequences. Per the original note, the summaries have
    already had their BOS token removed upstream.

    :param batch: list of record dicts as produced by ``ConDataset``
    :return: dict with batch-first padded tensors under 'methods'/'summaries'/
             'contexts' and the pre-padding lengths under the 'src_*_len' keys
    """
    # Hoisted: the pad id is loop-invariant; the original looked it up 3 times.
    pad_id = vocab[PAD_TOKEN]

    def _field_tensors(key):
        # One LongTensor per sample for the given field, placed on DEVICE.
        return [torch.tensor(each[key], dtype=torch.long, device=DEVICE) for each in batch]

    methods = _field_tensors('method')
    summaries = _field_tensors('summary')  # BOS already stripped from the target side
    contexts = _field_tensors('context')

    return {
        # pad_sequence takes a list[Tensor]; within a batch all lengths must match
        "methods": pad_sequence(methods, batch_first=True, padding_value=pad_id),
        "summaries": pad_sequence(summaries, batch_first=True, padding_value=pad_id),
        "contexts": pad_sequence(contexts, batch_first=True, padding_value=pad_id),
        "src_methods_len": [len(t) for t in methods],
        "src_summaries_len": [len(t) for t in summaries],
        "src_contexts_len": [len(t) for t in contexts],
    }


# Training-split loader; yields dicts of padded tensors plus lengths (see
# pad_collate). Built at import time, so importing this module reads the
# training jsonl from disk as a side effect.
data_loader = DataLoader(ConDataset(mod='train'),
                         batch_size=BATCH_SIZE,
                         shuffle=True,
                         collate_fn=pad_collate,  # lengths inside a batch must match, so pad per batch
                         drop_last=True)

# NOTE(review): shuffle=True and drop_last=True on the *test* loader mean the
# evaluation order is random and up to BATCH_SIZE-1 test samples are silently
# dropped each pass — confirm this is intended for evaluation.
test_loader = DataLoader(ConDataset(mod='test'),
                         batch_size=BATCH_SIZE,
                         shuffle=True,
                         collate_fn=pad_collate,  # lengths inside a batch must match, so pad per batch
                         drop_last=True)
