""" |
|
|
EIT Dataset Loader - Direct Python Class (No HuggingFace script loading) |
|
|
|
|
|
This loader provides direct access to the EIT dataset stored in HDF5 format. |
|
|
Can be used standalone or wrapped for HuggingFace datasets compatibility. |
|
|
""" |
import h5py
import numpy as np
from pathlib import Path
from typing import Dict, Tuple

from torch.utils.data import DataLoader, Dataset


class EITDataset(Dataset):
    """
    PyTorch Dataset for EIT (Electrical Impedance Tomography) data.

    Args:
        data_dir: Base directory containing the dataset
        subset: Which dataset to load ("CirclesOnly" or "FourObjects")
        split: Which split to load ("train", "val", or "test")
        image_resolution: Image resolution ("32_log", "64_log", "128_log", or "256")
        load_to_memory: If True, load all data to RAM (faster but memory intensive)
    """

    def __init__(
        self,
        data_dir: str,
        subset: str = "CirclesOnly",
        split: str = "train",
        image_resolution: str = "128_log",
        load_to_memory: bool = False,
    ):
        self.data_dir = Path(data_dir)
        self.subset = subset
        self.split = split
        self.image_resolution = image_resolution
        self.load_to_memory = load_to_memory

        self.subset_path = self.data_dir / subset
        self.h5_path = self.subset_path / "dataset.h5"

        split_map = {"train": "train.txt", "val": "val.txt", "test": "test.txt"}
        self.split_file = self.subset_path / "parameters" / split_map[split]

        self._load_split_indices()

        if self.load_to_memory:
            self._load_to_memory()
        else:
            self.cached_data = None

    def _load_split_indices(self):
        """Load the indices for this split."""
        with open(self.split_file, 'r') as f:
            self.indices = [int(line.strip()) for line in f if line.strip()]

    def _load_to_memory(self):
        """Load all data for this split into memory."""
        # Rough per-sample cost at the default resolution (an estimate): a
        # 128x128 float32 conductivity map is 128 * 128 * 4 bytes ~= 64 KiB,
        # plus 256 * 4 bytes = 1 KiB of voltages; "256" images are ~256 KiB each.
        print(f"Loading {len(self.indices)} samples to memory...")
        self.cached_data = []

        with h5py.File(self.h5_path, "r") as h5_file:
            # "volt/16" holds the voltage measurements: 256 values per sample
            # (see get_statistics), presumably 16 electrodes x 16 measurements.
            voltage_data = h5_file["volt"]["16"]
            image_data = h5_file["image"][self.image_resolution]

            # For "256" images, fall back to the "128_log" graph group
            # (the same fallback is used in __getitem__ below).
            graph_key = self.image_resolution if self.image_resolution != "256" else "128_log"
            has_graph = graph_key in h5_file["graph"]

            for sample_idx in self.indices:
                voltage = voltage_data[:, sample_idx].astype(np.float32)
                image = image_data[:, :, sample_idx].astype(np.float32)

                sample = {
                    'voltage_measurements': voltage,
                    'conductivity_map': image,
                    'sample_id': sample_idx
                }

                if has_graph:
                    graph = h5_file["graph"][graph_key][:, sample_idx].astype(np.float32)
                    sample['graph_representation'] = graph

                self.cached_data.append(sample)

        print("Data loaded to memory!")

    def __len__(self) -> int:
        return len(self.indices)

    def __getitem__(self, idx: int) -> Dict[str, np.ndarray]:
        """Get a single sample."""
        if self.cached_data is not None:
            return self.cached_data[idx]

        sample_idx = self.indices[idx]

        # Lazy path: open the HDF5 file on every access (simple and safe with
        # multi-worker DataLoaders, at the cost of repeated file opens).
        with h5py.File(self.h5_path, "r") as h5_file:
            voltage = h5_file["volt"]["16"][:, sample_idx].astype(np.float32)
            image = h5_file["image"][self.image_resolution][:, :, sample_idx].astype(np.float32)

            sample = {
                'voltage_measurements': voltage,
                'conductivity_map': image,
                'sample_id': sample_idx
            }

            # For "256" images, fall back to the "128_log" graph group.
            graph_key = self.image_resolution if self.image_resolution != "256" else "128_log"
            if graph_key in h5_file["graph"]:
                graph = h5_file["graph"][graph_key][:, sample_idx].astype(np.float32)
                sample['graph_representation'] = graph

        return sample

    def get_image_shape(self) -> Tuple[int, int]:
        """Get the shape of conductivity maps."""
        resolution_map = {
            "32_log": (32, 32),
            "64_log": (64, 64),
            "128_log": (128, 128),
            "256": (256, 256)
        }
        return resolution_map.get(self.image_resolution, (128, 128))

    def get_statistics(self) -> Dict:
        """Calculate dataset statistics."""
        print("Calculating statistics...")
        # 256 voltage channels per sample (the "volt/16" measurement layout).
        voltage_sum = np.zeros(256, dtype=np.float64)
        voltage_sq_sum = np.zeros(256, dtype=np.float64)
        image_sum = 0.0
        image_sq_sum = 0.0
        n_samples = len(self)

        with h5py.File(self.h5_path, "r") as h5_file:
            voltage_data = h5_file["volt"]["16"]
            image_data = h5_file["image"][self.image_resolution]

            for sample_idx in self.indices:
                voltage = voltage_data[:, sample_idx]
                image = image_data[:, :, sample_idx]

                voltage_sum += voltage
                voltage_sq_sum += voltage ** 2
                image_sum += np.sum(image)
                image_sq_sum += np.sum(image ** 2)

        n_pixels = n_samples * self.get_image_shape()[0] * self.get_image_shape()[1]

        # Standard deviations computed as sqrt(E[x^2] - E[x]^2).
        stats = {
            'voltage_mean': voltage_sum / n_samples,
            'voltage_std': np.sqrt(voltage_sq_sum / n_samples - (voltage_sum / n_samples) ** 2),
            'image_mean': image_sum / n_pixels,
            'image_std': np.sqrt(image_sq_sum / n_pixels - (image_sum / n_pixels) ** 2),
            'n_samples': n_samples
        }

        return stats
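

# The module docstring mentions wrapping this loader for HuggingFace `datasets`
# compatibility. Below is a minimal sketch of one way to do that; the function
# name `to_hf_dataset` is illustrative (not part of any existing API) and it
# assumes the optional `datasets` package is installed.
def to_hf_dataset(eit_dataset: EITDataset):
    """Materialize an EITDataset as a HuggingFace `datasets.Dataset` (sketch)."""
    from datasets import Dataset as HFDataset  # lazy import: optional dependency

    def gen():
        # Yield plain dicts of numpy arrays / ints; `datasets` infers the features.
        for i in range(len(eit_dataset)):
            yield eit_dataset[i]

    return HFDataset.from_generator(gen)
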
class EITDataModule:
    """
    Convenience class to manage all splits of the EIT dataset.

    Args:
        data_dir: Base directory containing the dataset
        subset: Which dataset to load ("CirclesOnly" or "FourObjects")
        image_resolution: Image resolution ("32_log", "64_log", "128_log", or "256")
        batch_size: Batch size for DataLoaders
        num_workers: Number of workers for DataLoaders
        load_to_memory: If True, load all data to RAM
    """

    def __init__(
        self,
        data_dir: str,
        subset: str = "CirclesOnly",
        image_resolution: str = "128_log",
        batch_size: int = 32,
        num_workers: int = 4,
        load_to_memory: bool = False,
    ):
        self.data_dir = data_dir
        self.subset = subset
        self.image_resolution = image_resolution
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.load_to_memory = load_to_memory

        self.train_dataset = EITDataset(
            data_dir, subset, "train", image_resolution, load_to_memory
        )
        self.val_dataset = EITDataset(
            data_dir, subset, "val", image_resolution, load_to_memory
        )
        self.test_dataset = EITDataset(
            data_dir, subset, "test", image_resolution, load_to_memory
        )

    def train_dataloader(self, **kwargs):
        """Get training DataLoader."""
        return DataLoader(
            self.train_dataset,
            batch_size=kwargs.get('batch_size', self.batch_size),
            shuffle=True,
            num_workers=kwargs.get('num_workers', self.num_workers),
            pin_memory=True
        )

    def val_dataloader(self, **kwargs):
        """Get validation DataLoader."""
        return DataLoader(
            self.val_dataset,
            batch_size=kwargs.get('batch_size', self.batch_size),
            shuffle=False,
            num_workers=kwargs.get('num_workers', self.num_workers),
            pin_memory=True
        )

    def test_dataloader(self, **kwargs):
        """Get test DataLoader."""
        return DataLoader(
            self.test_dataset,
            batch_size=kwargs.get('batch_size', self.batch_size),
            shuffle=False,
            num_workers=kwargs.get('num_workers', self.num_workers),
            pin_memory=True
        )

    def get_statistics(self):
        """Get statistics for all splits."""
        return {
            'train': self.train_dataset.get_statistics(),
            'val': self.val_dataset.get_statistics(),
            'test': self.test_dataset.get_statistics()
        }
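

# Example use of the per-split statistics returned above (a sketch; the epsilon
# guarding against zero-variance channels is an assumption, not part of the data):
#
#   stats = data_module.get_statistics()
#   v = sample["voltage_measurements"]
#   v_norm = (v - stats["train"]["voltage_mean"]) / (stats["train"]["voltage_std"] + 1e-8)
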
if __name__ == "__main__": |
|
|
print("="*60) |
|
|
print("EIT Dataset Loader - Example Usage") |
|
|
print("="*60) |
|
|
|
|
|
|
|
|
data_dir = "https://huggingface.co/datasets/AymanAmeen/SimEIT-dataset" |
print("\n1. Creating datasets...") |
|
|
train_dataset = EITDataset( |
|
|
data_dir=data_dir, |
|
|
subset="CirclesOnly", |
|
|
split="train", |
|
|
image_resolution="128_log", |
|
|
load_to_memory=False |
|
|
) |
|
|
|
|
|
print(f" Train dataset size: {len(train_dataset)}") |
|
|
print(f" Image shape: {train_dataset.get_image_shape()}") |
|
|
|
|
|
|
|
|
print("\n2. Loading a sample...") |
|
|
sample = train_dataset[0] |
|
|
print(f" Keys: {list(sample.keys())}") |
|
|
print(f" Voltage measurements shape: {sample['voltage_measurements'].shape}") |
|
|
print(f" Conductivity map shape: {sample['conductivity_map'].shape}") |
|
|
if 'graph_representation' in sample: |
|
|
print(f" Graph representation shape: {sample['graph_representation'].shape}") |
|
|
print(f" Sample ID: {sample['sample_id']}") |
|
|
|
|
|
|
|
|
print("\n3. Creating EITDataModule...") |
|
|
data_module = EITDataModule( |
|
|
data_dir=data_dir, |
|
|
subset="CirclesOnly", |
|
|
image_resolution="128_log", |
|
|
batch_size=4, |
|
|
num_workers=0 |
|
|
) |
|
|
|
|
|
print(f" Train samples: {len(data_module.train_dataset)}") |
|
|
print(f" Val samples: {len(data_module.val_dataset)}") |
|
|
print(f" Test samples: {len(data_module.test_dataset)}") |
|
|
|
|
|
|
|
|
print("\n4. Creating DataLoader and getting a batch...") |
|
|
train_loader = data_module.train_dataloader() |
|
|
batch = next(iter(train_loader)) |
|
|
print(f" Batch voltage shape: {batch['voltage_measurements'].shape}") |
|
|
print(f" Batch image shape: {batch['conductivity_map'].shape}") |
|
|
print(f" Batch IDs: {batch['sample_id'].tolist()}") |
|
|
|
|
|
|
|
|
print("\n5. Testing different resolutions...") |
|
|
for resolution in ["32_log", "64_log", "128_log", "256"]: |
|
|
try: |
|
|
ds = EITDataset(data_dir, "CirclesOnly", "train", resolution) |
|
|
print(f" {resolution}: {len(ds)} samples, shape: {ds.get_image_shape()}") |
|
|
except Exception as e: |
|
|
print(f" {resolution}: Error - {e}") |
|
|
|
|
|
print("\n" + "="*60) |
|
|
print("All tests completed successfully!") |
|
|
print("="*60) |
|
|
|