import random

import torch
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Dataset
# 5. Dataset utilities

class SequenceDataset(Dataset):
    """Dataset of random variable-length integer sequences.

    Each sample is a 1-D ``torch.long`` tensor whose length is drawn
    uniformly from ``[min_len, max_len]`` and whose values are drawn
    uniformly from ``[1, max_value]``. All samples are generated eagerly
    at construction time and held in memory as plain Python lists.
    """

    def __init__(self, num_samples=10000, min_len=2, max_len=8, max_value=9):
        self.num_samples = num_samples
        self.min_len = min_len
        self.max_len = max_len
        self.max_value = max_value
        self.data = self.generate_data()

    def generate_data(self):
        """Draw ``num_samples`` random sequences (lists of ints)."""
        # For each sample: pick the length first, then fill with values —
        # the order of random.randint calls matters for reproducibility
        # under a fixed seed.
        return [
            [random.randint(1, self.max_value)
             for _ in range(random.randint(self.min_len, self.max_len))]
            for _ in range(self.num_samples)
        ]

    def __len__(self):
        return self.num_samples

    def __getitem__(self, idx):
        # Convert lazily so storage stays as lightweight Python lists.
        return torch.tensor(self.data[idx], dtype=torch.long)

def preprocess_sequence(sequence, start_token=0, end_token=10):
    """Build the (encoder_input, decoder_input, target) triple for one sequence.

    Args:
        sequence: 1-D integer tensor of token ids.
        start_token: id prepended to the decoder input (teacher forcing start).
        end_token: id appended to the target. Default fixed from 9 to 10:
            9 collides with legal data values (SequenceDataset emits 1..9)
            and was inconsistent with collate_fn's end_token=10 default.

    Returns:
        Tuple of three 1-D tensors:
            encoder_input: copy of ``sequence`` (clone, so callers may pad in place),
            decoder_input: ``[start_token] + sequence``,
            target: ``sequence + [end_token]``.
    """
    encoder_input = sequence.clone()
    # Match the input's dtype so mixed-dtype concat errors can't occur.
    decoder_input = torch.cat(
        [torch.tensor([start_token], dtype=sequence.dtype), sequence])
    target = torch.cat(
        [sequence, torch.tensor([end_token], dtype=sequence.dtype)])
    return encoder_input, decoder_input, target


def collate_fn(batch, start_token=0, end_token=10, pad_token=11):
    """Collate variable-length 1-D tensors into padded batch tensors.

    Args:
        batch: list of 1-D ``torch.long`` tensors of varying length.
        start_token: id prepended to each decoder input.
        end_token: id appended to each target.
        pad_token: fill value used to right-pad each group to its max length.

    Returns:
        Tuple of three long tensors:
            encoder inputs, shape (batch_size, max_enc_len)
            decoder inputs, shape (batch_size, max_dec_len)
            targets,        shape (batch_size, max_target_len)
    """
    processed = [preprocess_sequence(seq, start_token, end_token) for seq in batch]
    encoder_inputs, decoder_inputs, targets = zip(*processed)

    # pad_sequence right-pads each group to that group's own max length,
    # replacing the hand-rolled max()/torch.cat/torch.full padding loop.
    return (
        pad_sequence(encoder_inputs, batch_first=True, padding_value=pad_token),
        pad_sequence(decoder_inputs, batch_first=True, padding_value=pad_token),
        pad_sequence(targets, batch_first=True, padding_value=pad_token),
    )






if __name__ == '__main__':
    # Smoke test: build a dataset with defaults, report its size and
    # show the first sample tensor.
    demo_dataset = SequenceDataset()
    print(len(demo_dataset))
    print(demo_dataset[0])