import torch
from torch.utils.data import Dataset, DataLoader
from torch.nn.utils.rnn import pad_sequence
from torch import nn

class TextClassificationDataset(Dataset):
    """Map-style dataset of token-id sequences with optional labels.

    Each example is a variable-length sequence of token ids, truncated to
    ``max_size``. When ``labels`` is None the dataset operates in inference
    mode and yields bare tensors; otherwise it yields ``(text, label)`` pairs.
    """

    def __init__(self, data, labels, padding_idx, max_size):
        """
        Args:
            data: sequence of token-id sequences (one per example).
            labels: sequence of integer class labels aligned with ``data``,
                or None for unlabeled (inference) data.
            padding_idx: token id used to pad batches in ``collate_fn``.
            max_size: maximum sequence length; longer texts are truncated.
        """
        # Fail fast on misaligned inputs instead of raising a confusing
        # IndexError later inside __getitem__ during training.
        if labels is not None and len(data) != len(labels):
            raise ValueError(
                f"data and labels length mismatch: {len(data)} vs {len(labels)}"
            )
        self.data = data
        self.labels = labels
        self.padding_idx = padding_idx
        self.max_size = max_size

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        # Slicing is a no-op for sequences already within max_size,
        # so no explicit length check is needed.
        text = torch.tensor(self.data[idx][:self.max_size], dtype=torch.long)

        if self.labels is not None:
            return text, self.labels[idx]
        return text

    def collate_fn(self, batch):
        """Pad a batch of examples to a common length.

        Returns a (padded_texts, labels) pair in labeled mode, otherwise
        just the padded text tensor of shape (batch, max_len_in_batch).
        """
        if self.labels is not None:
            texts = [item[0] for item in batch]
        else:
            texts = batch

        padded_texts = pad_sequence(
            texts, batch_first=True, padding_value=self.padding_idx
        )
        if self.labels is not None:
            labels = torch.tensor([item[1] for item in batch], dtype=torch.long)
            return padded_texts, labels
        return padded_texts
        

def build_dataloader(data, labels, batch_size, num_workers, padding_idx, max_size, shuffle=None):
    """Build a DataLoader over a TextClassificationDataset.

    Args:
        data: sequence of token-id sequences.
        labels: aligned class labels, or None for inference data.
        batch_size: examples per batch.
        num_workers: DataLoader worker process count.
        padding_idx: token id used for batch padding.
        max_size: maximum sequence length (longer texts truncated).
        shuffle: override shuffling. Defaults to None, which keeps the
            original behavior: shuffle labeled data, don't shuffle
            unlabeled data. Pass False explicitly for labeled
            validation/test loaders.

    Returns:
        A DataLoader yielding padded batches via the dataset's collate_fn.
    """
    if shuffle is None:
        # Original heuristic: labeled data is assumed to be a training set.
        shuffle = labels is not None

    dataset = TextClassificationDataset(data, labels, padding_idx, max_size)
    return DataLoader(
        dataset,
        batch_size=batch_size,
        num_workers=num_workers,
        pin_memory=True,
        shuffle=shuffle,
        collate_fn=dataset.collate_fn,
    )


