import torch
from torch.utils.data import Sampler
from typing import List, Iterator


class AccedingSequenceLengthBatchSampler(Sampler[List[int]]):
    """Batch sampler that yields index batches ordered by ascending item length.

    Items are sorted by ``len(item)`` so each batch groups elements of similar
    length — useful to minimize padding work in a collate function.

    NOTE(review): the class name looks like a typo for "Ascending"; kept
    unchanged for backward compatibility with existing callers.
    """

    def __init__(self, data: List[str], batch_size: int) -> None:
        """
        :param data: sized items (anything supporting ``len()``, e.g. strings).
        :param batch_size: maximum number of indices per yielded batch.
        :raises ValueError: if ``batch_size`` is not positive.
        """
        if batch_size <= 0:
            raise ValueError(f'batch_size must be positive, got {batch_size}')
        self.data = data
        self.batch_size = batch_size

    def __len__(self) -> int:
        # Number of batches: ceil(len(data) / batch_size).
        return (len(self.data) + self.batch_size - 1) // self.batch_size

    def __iter__(self) -> Iterator[List[int]]:
        # Empty dataset yields no batches (the original torch.chunk call
        # raised here because it was asked for zero chunks).
        if not len(self.data):
            return
        sizes = torch.tensor([len(x) for x in self.data])
        # torch.split guarantees every batch has exactly batch_size indices
        # except possibly the last one.  The previous torch.chunk(t, len(self))
        # could instead produce uniformly undersized batches, e.g. 9 items
        # with batch_size=4 gave batches of 3,3,3 rather than 4,4,1.
        for batch in torch.split(torch.argsort(sizes), self.batch_size):
            yield batch.tolist()


class StringDataset(torch.utils.data.Dataset):
    """A trivial map-style dataset that serves raw strings by index."""

    def __init__(self, strings):
        # Keep a reference to the backing sequence of strings.
        self.strings = strings

    def __len__(self):
        """Return how many strings the dataset holds."""
        return len(self.strings)

    def __getitem__(self, index):
        """Return the string stored at *index*."""
        return self.strings[index]
    
    
def my_collate_fn(batch):
    """Right-pad every string in *batch* with ``'_'`` to the longest length.

    :param batch: non-empty list of strings.
    :return: list of equal-length strings, in the original order.
    """
    width = max(len(s) for s in batch)
    return [s.ljust(width, '_') for s in batch]
        
    
if '__main__' == __name__:

    from PyCmpltrtok.common import sep

    BATCH_SIZE = 4

    data = [
        'Apple',
        'Banana',
        'Car',
        'Dog',
        "Dao",
        'Egg',
        'Fighter',
        'Good',
        'Hill',
        'Integrated',
        'Japan',
    ]
    print(data)

    def show_batches(dataloader, num_epochs=2):
        # Run the loader for a few epochs and print every batch,
        # so the batching/shuffling behavior is visible per epoch.
        for epoch in range(num_epochs):
            for i, batch in enumerate(dataloader):
                print(epoch, i, batch)

    sep('sizes')
    # torch.tensor infers an int dtype here; the legacy torch.Tensor(...)
    # constructor silently produced floats.
    sizes = torch.tensor([len(x) for x in data])
    print(sizes)

    sep('argsort')
    # Indices of `data` ordered by ascending string length.
    xsorted = torch.argsort(sizes)
    print(xsorted)
    xresult = xsorted.tolist()
    print(xresult)

    sep('dataloader with custom sampler')
    dataset = StringDataset(data)
    sampler = AccedingSequenceLengthBatchSampler(dataset, BATCH_SIZE)
    dataloader = torch.utils.data.DataLoader(dataset, batch_sampler=sampler)
    show_batches(dataloader)

    sep('dataloader with constructed sampler')
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=BATCH_SIZE)
    show_batches(dataloader)

    sep('dataloader with constructed sampler with shuffle')
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=True)
    show_batches(dataloader)

    # Same three setups again, now with the padding collate_fn applied.
    sep('With collate_fn')

    sep('dataloader with custom sampler')
    sampler = AccedingSequenceLengthBatchSampler(dataset, BATCH_SIZE)
    dataloader = torch.utils.data.DataLoader(dataset, batch_sampler=sampler, collate_fn=my_collate_fn)
    show_batches(dataloader)

    sep('dataloader with constructed sampler')
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=BATCH_SIZE, collate_fn=my_collate_fn)
    show_batches(dataloader)

    sep('dataloader with constructed sampler with shuffle')
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=True, collate_fn=my_collate_fn)
    show_batches(dataloader)