import torch
from torch.utils.data import Sampler
from typing import List, Iterator


class AccedingSequenceLengthSampler(Sampler[int]):
    """Sampler that yields dataset indices ordered by ascending item length.

    Fix: the previous ``__iter__`` reversed the argsort result with ``[::-1]``,
    which produced *descending* lengths and contradicted the class name
    ("acceding" i.e. ascending). The reversal is removed so the iteration
    order matches the name.
    """

    def __init__(self, data: List[str]) -> None:
        """
        :param data: sequence of sized items (e.g. strings). Only ``len()``
            is called on each item, so any sized type works.
        """
        self.data = data

    def __len__(self) -> int:
        # Number of indices one full pass over the sampler yields.
        return len(self.data)

    def __iter__(self) -> Iterator[int]:
        # Length of every item; argsort returns the indices that sort
        # these lengths in ascending order.
        sizes = torch.tensor([len(x) for x in self.data])
        yield from torch.argsort(sizes).tolist()


class StringDataset(torch.utils.data.Dataset):
    """Minimal map-style dataset that serves raw strings by integer index."""

    def __init__(self, strings):
        # Keep a reference to the backing sequence; no copy is made.
        self.strings = strings

    def __len__(self):
        # Dataset size equals the length of the backing sequence.
        return len(self.strings)

    def __getitem__(self, index):
        # Delegate lookup straight to the underlying sequence.
        return self.strings[index]
    
    
if '__main__' == __name__:

    # NOTE(review): PyCmpltrtok is a project-local package; `sep` presumably
    # prints a titled separator line between demo sections -- confirm there.
    from PyCmpltrtok.common import sep
    import numpy as np

    # Fixed seed so any NumPy-driven randomness below is reproducible.
    np.random.seed(666)

    # Toy corpus: words of varying length, used by every demo below.
    data = [
        'Apple',
        'Banana',
        'Car',
        'Dog',
        "Dao",
        'Egg',
        'Fighter',
        'Good',
        'Hill',
        'Integrated',
        'Japan',
    ]
    print(data)
    data_np = np.array(data, dtype=object)
    print(data_np)
    # Deliberately dead branch: flip `if 0` to `if 1` to randomly permute
    # the corpus before running the demos.
    if 0:
        rnd_ind = np.random.permutation(range(len(data_np)))
        print(rnd_ind)
        data_np = data_np[rnd_ind]
        print(data_np)

    sep('sizes')
    # Length of each string as a tensor (torch.Tensor defaults to float32).
    sizes = torch.Tensor([len(x) for x in data])
    print(sizes)

    sep('argsort')
    # Indices that would sort the lengths in ascending order -- the same
    # computation the custom sampler performs internally.
    xsorted = torch.argsort(sizes)
    print(xsorted)
    xresult = xsorted.tolist()
    print(xresult)

    sep('dataloader with custom sampler')
    dataset = StringDataset(data)
    # The sampler is handed the dataset itself; it only calls len() on each
    # item, which works because items are plain strings.
    sampler = AccedingSequenceLengthSampler(dataset)
    sep('individual sample')
    # batch_size=None disables automatic batching: each yielded item is a
    # bare string, in the order the sampler dictates.
    dataloader = torch.utils.data.DataLoader(dataset, sampler=sampler, batch_size=None)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)
    sep('batch_size default to 1 (automatic batching)')
    # Default batch_size=1: each item arrives wrapped by the default
    # collate function (a list containing one string).
    dataloader = torch.utils.data.DataLoader(dataset, sampler=sampler)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)

    sep('dataloader with constructed sampler')
    # No sampler argument: DataLoader builds its own default sampler
    # (sequential order, since shuffle is not requested).
    sep('individual sample')
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=None)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)
    sep('batch_size default to 1 (automatic batching)')
    dataloader = torch.utils.data.DataLoader(dataset)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)

    sep('dataloader with constructed sampler with shuffle')
    # shuffle=True makes DataLoader construct a random sampler internally;
    # order differs per epoch.
    sep('individual sample')
    dataloader = torch.utils.data.DataLoader(dataset, shuffle=True, batch_size=None)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)
    sep('batch_size default to 1 (automatic batching)')
    dataloader = torch.utils.data.DataLoader(dataset, shuffle=True)
    for epoch in range(2):
        for i, batch in enumerate(dataloader):
            print(epoch, i, batch)
