import torch 
from torch.utils.data import DataLoader, Dataset, TensorDataset

## Besides convenience, a custom Dataset has a bigger benefit: it avoids
## memory blow-up — items (or just their paths/URLs) can be loaded lazily.
class MyDataset(Dataset):
    """Paired source/target dataset yielding one ``(src, tgt)`` example per index.

    Nothing is loaded eagerly here, so the same pattern can hold file
    paths/URLs instead of raw data and defer real loading to
    ``__getitem__`` — that is what keeps memory bounded for large corpora.

    Args:
        sents_src: sequence of source items.
        sents_tgt: sequence of target items, parallel to ``sents_src``.

    Raises:
        ValueError: if the two sequences have different lengths (catching
            this up front beats an IndexError mid-epoch).
    """

    def __init__(self, sents_src, sents_tgt):
        super().__init__()
        if len(sents_src) != len(sents_tgt):
            raise ValueError(
                "sents_src and sents_tgt must have the same length: "
                f"{len(sents_src)} != {len(sents_tgt)}"
            )
        self.sents_src = sents_src
        self.sents_tgt = sents_tgt

    def __getitem__(self, i):
        """Return the ``(src, tgt)`` pair at position ``i``."""
        return self.sents_src[i], self.sents_tgt[i]

    def __len__(self):
        """Number of examples in the dataset."""
        return len(self.sents_src)

def collect_fn(batch):
    """Collate a list of ``(src, tgt)`` pairs into two parallel lists.

    DataLoader passes ``batch`` as a list of dataset items; the default
    collation would try to tensor-stack them, which fails for raw strings,
    so we simply transpose the pairs instead.

    Args:
        batch: list of ``(src, tgt)`` tuples.

    Returns:
        Tuple ``(src_list, tgt_list)``.
    """
    # NOTE: the original left a debug ``print(batch)`` here, which spammed
    # stdout once per batch — removed.
    src = [pair[0] for pair in batch]
    tgt = [pair[1] for pair in batch]
    return src, tgt


if __name__ == "__main__":

    # Demo 1: TensorDataset wraps tensors directly (accepts tensors only).
    sample = torch.rand((10, 5, 5))
    tensor_ds = TensorDataset(sample)
    print(len(tensor_ds))
    for batch in DataLoader(tensor_ds, batch_size=2):
        print(batch[0].shape)

    # Demo 2: any Dataset subclass plugs into DataLoader; collate_fn
    # controls how individual items are merged into a batch.
    sents_src = ["hello", "world", "fine", "python", "go"]
    sents_tgt = ["hello1", "world2", "fine3", "python4", "go5"]
    string_loader = DataLoader(
        MyDataset(sents_src, sents_tgt),
        batch_size=2,
        collate_fn=collect_fn,
    )
    for src, tgt in string_loader:
        # print(src)
        # print(tgt)
        print("~~~~~~~~~~~")




