import torchvision
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# Load the CIFAR10 training split, converting each PIL image to a
# (C, H, W) float tensor; downloads the archive on first run.
train_dataset = torchvision.datasets.CIFAR10(
    root='./CIFAR10',
    train=True,
    transform=torchvision.transforms.ToTensor(),
    download=True,
)

# Peek at the first sample: an image tensor and its integer class label.
data0, label = train_dataset[0]
print(data0.shape, label)
"""
早期版本的torch, num_workers 不取0时，在windows上有可能会报错
drop_last: 在不能取到完整的batch时（即数据集的数量除不尽时），不进行读取
shuffle: 是否随机取数
"""
dataloader = DataLoader(train_dataset, batch_size=64, shuffle=False, num_workers=0, drop_last=False)

# Visualize a limited number of batches per epoch in TensorBoard.
writer = SummaryWriter('../logs')

try:
    for epoch in range(2):
        # Log only steps 0..50 (51 batches) of each epoch.
        for step, (images, labels) in enumerate(dataloader):
            if step > 50:
                break
            print(type(images), images.shape, labels)
            # add_images takes an (N, C, H, W) batch; one tag per epoch.
            writer.add_images("Epoch:{}".format(epoch), images, step)
finally:
    # Bug fix: the writer was never closed, so buffered event records
    # could be lost when the script exits.
    writer.close()
