# %%
import numpy as np
from datasets import load_dataset
from torch.utils.data import DataLoader

# quakeflow_nc = load_dataset("AI4EPS/quakeflow_nc", name="station_test", split="test")
quakeflow_nc = load_dataset(
    "./quakeflow_nc.py",
    name="station_test",
    # name="event_test",
    split="test",
    download_mode="force_redownload",
)
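# Hedged alternative (untested sketch): passing streaming=True should return a true
# IterableDataset, so examples are yielded on the fly instead of being materialized
# on disk first.
# quakeflow_nc = load_dataset(
#     "./quakeflow_nc.py",
#     name="station_test",
#     split="test",
#     streaming=True,
# )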

# print the keys and the waveform shape of the first example
for example in quakeflow_nc:
    print("\nIterable test\n")
    print(example.keys())
    for key in example.keys():
        if key == "data":
            print(key, np.array(example[key]).shape)
        else:
            print(key, example[key])
    break
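
# %%
# Optional sketch: quick look at the first example's waveform. Assumes
# example["data"] is a (channel, time) array, as the shape printed above suggests;
# matplotlib is an extra dependency here.
import matplotlib.pyplot as plt

waveform = np.array(example["data"])
fig, axes = plt.subplots(waveform.shape[0], 1, sharex=True, figsize=(10, 6))
for ax, trace in zip(axes, waveform):
    ax.plot(trace)
axes[-1].set_xlabel("sample index")
fig.suptitle("first example waveform")
plt.show()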

# %%
# convert examples to torch tensors and wrap the dataset in a DataLoader;
# the identity collate_fn keeps each batch as a plain list of example dicts
quakeflow_nc = quakeflow_nc.with_format("torch")
dataloader = DataLoader(quakeflow_nc, batch_size=8, num_workers=0, collate_fn=lambda x: x)

for batch in dataloader:
    print("\nDataloader test\n")
    print(f"Batch size: {len(batch)}")
    print(batch[0].keys())
    for key in batch[0].keys():
        if key == "data":
            print(key, np.array(batch[0][key]).shape)
        else:
            print(key, batch[0][key])
    break

# %%
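# Optional sketch: a collate_fn that stacks waveforms into a single tensor instead
# of returning a list of dicts. Assumes every example's "data" tensor has the same
# (channel, time) shape, as suggested by the shapes printed above.
import torch


def stack_waveforms(examples):
    # examples is a list of example dicts produced by the torch-formatted dataset
    return torch.stack([torch.as_tensor(ex["data"]) for ex in examples])


stacked_loader = DataLoader(quakeflow_nc, batch_size=8, num_workers=0, collate_fn=stack_waveforms)

for batch in stacked_loader:
    print("\nStacked batch test\n")
    print("data", batch.shape)
    break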