import numpy as np
from datasets import load_dataset
from torch.utils.data import DataLoader

# Load the "station_test" configuration of the local dataset script,
# forcing a fresh download of the test split.
quakeflow_nc = load_dataset(
    "./quakeflow_sc.py",
    name="station_test",
    split="test",
    download_mode="force_redownload",
)
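
# If the dataset is also published on the Hugging Face Hub, it can be loaded by
# repository id instead of the local script path. The id below is an assumption;
# substitute the actual Hub repository name if it differs.
# quakeflow_nc = load_dataset("AI4EPS/quakeflow_sc", name="station_test", split="test")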

# Iterate over the split and inspect the first example: print its keys, the shape
# of the waveform array under "data", and the values of the remaining fields.
for example in quakeflow_nc:
    print("\nIterable test\n")
    print(example.keys())
    for key in example.keys():
        if key == "data":
            print(key, np.array(example[key]).shape)
        else:
            print(key, example[key])
    break
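
# For reference: because the split above is loaded as a regular (map-style) Dataset
# rather than a streaming IterableDataset, a single example can also be fetched by
# index instead of iterating. "first_example" is just an illustrative local name.
first_example = quakeflow_nc[0]
print(np.array(first_example["data"]).shape)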

# Switch the dataset's output format to PyTorch tensors and wrap it in a DataLoader.
# The identity collate_fn keeps each batch as a plain list of example dicts.
quakeflow_nc = quakeflow_nc.with_format("torch")
dataloader = DataLoader(quakeflow_nc, batch_size=8, num_workers=0, collate_fn=lambda x: x)

# Inspect the first batch.
for batch in dataloader:
    print("\nDataloader test\n")
    print(f"Batch size: {len(batch)}")
    print(batch[0].keys())
    for key in batch[0].keys():
        if key == "data":
            print(key, np.array(batch[0][key]).shape)
        else:
            print(key, batch[0][key])
    break
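
# The identity collate_fn above returns each batch as a list of dicts. The sketch
# below is one possible alternative (not part of the dataset script): it stacks the
# "data" field of every example into a single tensor, assuming all waveforms share
# the same shape, and keeps the remaining fields as plain lists.
import torch


def stack_data_collate(batch):
    collated = {key: [example[key] for example in batch] for key in batch[0].keys()}
    collated["data"] = torch.stack(collated["data"])
    return collated


stacked_loader = DataLoader(quakeflow_nc, batch_size=8, num_workers=0, collate_fn=stack_data_collate)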