import math,torch
from torch.utils.data import IterableDataset,DataLoader,TensorDataset,Dataset


class MyIterableDataset(torch.utils.data.IterableDataset):
    """Iterable-style dataset that yields rows of a fresh random (10, 5) int tensor.

    NOTE(review): this class is redefined with the same name later in the
    file; this first definition is shadowed and kept only as a reference copy.
    """

    def __init__(self, start, end):
        # BUG FIX: the original `super(MyIterableDataset).__init__()` built an
        # *unbound* super proxy, so the parent initializer was never invoked.
        super().__init__()
        # Message now matches the condition actually enforced (strict >).
        assert end > start, "this example code only works with end > start"
        self.start = start  # inclusive lower bound of the nominal range
        self.end = end      # exclusive upper bound of the nominal range

    def __iter__(self):
        worker_info = torch.utils.data.get_worker_info()
        if worker_info is None:
            # Single-process data loading: cover the full [start, end) range.
            iter_start = self.start
            iter_end = self.end
        else:
            # Worker process: split [start, end) into near-equal chunks.
            per_worker = int(math.ceil((self.end - self.start) / float(worker_info.num_workers)))
            worker_id = worker_info.id
            iter_start = self.start + worker_id * per_worker
            iter_end = min(iter_start + per_worker, self.end)
        # NOTE(review): iter_start/iter_end are computed but deliberately not
        # used below — the demo returns rows of a fresh random tensor, so with
        # num_workers > 0 every worker would emit its own independent 10 rows.
        return iter(torch.randint(0, 10, (10, 5)))
    
    
# # should give same set of data as range(3, 7), i.e., [3, 4, 5, 6].
# ds = MyIterableDataset(start=3, end=7)
# for y in ds:
#     print(y)
# # Single-process loading
# s=DataLoader(ds, num_workers=0,batch_size=4,shuffle=False)
# for _ in s:
#     print(_)
#     break

'''
Build the same small random dataset in several different ways
(iterable-style dataset, map-style dataset, TensorDataset / plain list).
'''

# Approach 1: IterableDataset


class MyIterableDataset(IterableDataset):
    """Approach 1: iterable-style dataset.

    Iterating an instance yields the rows of a fresh random (10, 5) tensor of
    ints drawn uniformly from [0, 10). Shadows the identically named class
    defined earlier in this file.
    """

    def __init__(self, start, end):
        # BUG FIX: the original `super(MyIterableDataset).__init__()` built an
        # *unbound* super proxy, so the parent initializer was never invoked.
        super().__init__()
        # Message now matches the condition actually enforced (strict >).
        assert end > start, "this example code only works with end > start"
        self.start = start  # inclusive lower bound of the nominal range
        self.end = end      # exclusive upper bound of the nominal range

    def __iter__(self):
        worker_info = torch.utils.data.get_worker_info()
        if worker_info is None:
            # Single-process data loading: cover the full [start, end) range.
            iter_start = self.start
            iter_end = self.end
        else:
            # Worker process: split [start, end) into near-equal chunks.
            per_worker = int(math.ceil((self.end - self.start) / float(worker_info.num_workers)))
            worker_id = worker_info.id
            iter_start = self.start + worker_id * per_worker
            iter_end = min(iter_start + per_worker, self.end)
        # NOTE(review): iter_start/iter_end are computed but deliberately not
        # used below — the demo returns rows of a fresh random tensor, so with
        # num_workers > 0 every worker would emit its own independent 10 rows.
        return iter(torch.randint(0, 10, (10, 5)))
    
    
# Approach 2: map-style Dataset
class MyDataset(Dataset):
    """Map-style dataset over a fixed random integer tensor.

    Each item is an (input, target) pair where both elements are the same
    row of a (10, 5) tensor of ints drawn uniformly from [0, 10).
    """

    def __init__(self):
        # Materialize the data once; indexing is then deterministic.
        self.data = torch.randint(0, 10, (10, 5))

    def __len__(self):
        return self.data.shape[0]

    def __getitem__(self, idx):
        row = self.data[idx]
        return row, row



# Approach 3: a sequence of (input, target) tuples
# TensorDataset zips the given tensors row-wise: item i is (d[i], d[i]).
d = torch.randint(0, 10, (10, 5))
MyTensorDataset = TensorDataset(d, d)


# A plain list of (input, target) tuples also satisfies the map-style
# dataset protocol DataLoader expects (len() plus integer indexing).
dataset1 = [(torch.randn(12), 3) for _ in range(10)]


if __name__ == '__main__':
    # Demo: a plain list of (tensor, label) tuples can be fed straight to
    # DataLoader; the default collate_fn stacks the tensors into a batch and
    # turns the int labels into a label tensor.
    dataloader = DataLoader(dataset1, batch_size=2, shuffle=True, num_workers=0)
    for batch_idx, (data, label) in enumerate(dataloader):
        print(data, label)
        print(data.shape, label.shape)