from torch.utils.data import DataLoader
from torch.utils.data.dataloader import default_collate

from .registry import DATALOADERS
from .base import BaseDataloader


@DATALOADERS.register_module
class BatchBalanceDataloader(BaseDataloader):
    """Dataloader that composes each batch from several datasets at fixed ratios.

    One inner ``torch.utils.data.DataLoader`` is built per sub-dataset; its
    per-batch size is the global ``batch_size`` scaled by the matching entry of
    ``each_batch_ratio`` (floored at 1 sample). The resulting loaders and their
    iterators are appended to ``self.data_loader_list`` /
    ``self.dataloader_iter_list`` (both presumably initialized by
    ``BaseDataloader.__init__`` — confirm in base class).

    Args:
        dataset: sequence of datasets, one per branch.
        batch_size (int): total batch size to split across branches.
        each_batch_ratio (list): per-dataset fraction of ``batch_size``.
        each_usage (list): per-dataset usage setting, forwarded to the base class.
        shuffle (bool): forwarded to each inner ``DataLoader``.
        num_workers (int): worker processes per inner ``DataLoader``.
        pin_memory (bool): forwarded to each inner ``DataLoader``.
        sampler / batch_sampler / collate_fn / drop_last / worker_init_fn:
            forwarded verbatim to each inner ``DataLoader``.
        time_out (int): forwarded as the ``DataLoader`` ``timeout`` argument.

    Raises:
        TypeError: if ``each_batch_ratio`` or ``each_usage`` is not a list.
        ValueError: if the three sequences differ in length.
    """

    def __init__(self, dataset, batch_size, each_batch_ratio, each_usage, shuffle=False, num_workers=4, pin_memory=True,
                 sampler=None, batch_sampler=None, collate_fn=default_collate, drop_last=False, time_out=0,
                 worker_init_fn=None):
        # Validate with explicit exceptions instead of `assert`, which is
        # silently stripped when Python runs with -O.
        if not isinstance(each_batch_ratio, list):
            raise TypeError('each_batch_ratio must be a list, got '
                            f'{type(each_batch_ratio).__name__}')
        if not isinstance(each_usage, list):
            raise TypeError('each_usage must be a list, got '
                            f'{type(each_usage).__name__}')
        if not (len(dataset) == len(each_batch_ratio) == len(each_usage)):
            raise ValueError(
                'dataset, each_batch_ratio and each_usage must have the same '
                f'length, got {len(dataset)}, {len(each_batch_ratio)} and '
                f'{len(each_usage)}')

        super(BatchBalanceDataloader, self).__init__(dataset=dataset,
                                                     batch_size=batch_size,
                                                     num_workers=num_workers,
                                                     shuffle=shuffle,
                                                     each_usage=each_usage,
                                                     )

        # One inner DataLoader per sub-dataset; each contributes
        # round(batch_size * ratio) samples to a combined batch, floored at 1
        # so every dataset is always represented.
        for i, ratio in enumerate(each_batch_ratio):
            sub_dataset = self.dataset[i]
            sub_batch_size = max(round(batch_size * float(ratio)), 1)
            _dataloader = DataLoader(
                sub_dataset, batch_size=sub_batch_size,
                shuffle=shuffle, num_workers=num_workers,
                pin_memory=pin_memory, sampler=sampler, batch_sampler=batch_sampler,
                collate_fn=collate_fn, drop_last=drop_last,
                timeout=time_out, worker_init_fn=worker_init_fn,
            )
            self.data_loader_list.append(_dataloader)
            self.dataloader_iter_list.append(iter(_dataloader))
