#!/usr/bin/env python3
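"""prepare_dataset.py: prepare the FAFO dataset.

Standardizes sensor (LiDAR, GPS, IMU), image, 3D point-cloud, and task
data from a raw input tree into a mirrored output tree, then writes
dataset_info.json with per-type sample counts.

Usage:
    python prepare_dataset.py --input_dir <raw_data_dir> --output_dir <processed_dir>
"""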
import argparse
import json
from pathlib import Path
from typing import Dict

import open3d as o3d
from PIL import Image
from tqdm import tqdm

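# Expected input layout (mirrored into --output_dir):
#   <input_dir>/data/sensor_data/{lidar,gps,imu}/*.json
#   <input_dir>/data/image_data/*.jpg
#   <input_dir>/data/3d_data/*.pcd
#   <input_dir>/data/task_data/*.json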
class FAFODatasetPreprocessor:
"""Preprocessor for FAFO dataset"""
def __init__(self, input_dir: str, output_dir: str):
self.input_dir = Path(input_dir)
self.output_dir = Path(output_dir)
self.metadata = {
'num_samples': 0,
'data_types': {
'sensor_data': {'lidar': 0, 'gps': 0, 'imu': 0},
'image_data': 0,
'3d_data': 0,
'task_data': 0
}
}
def prepare_dataset(self):
"""Prepare the complete dataset"""
print("Preparing FAFO dataset...")
# Create directory structure
self._create_directories()
# Process each data type
self._process_sensor_data()
self._process_image_data()
self._process_3d_data()
self._process_task_data()
# Save metadata
self._save_metadata()
print("Dataset preparation completed!")
def _create_directories(self):
"""Create dataset directory structure"""
directories = [
'data/sensor_data/lidar',
'data/sensor_data/gps',
'data/sensor_data/imu',
'data/image_data',
'data/3d_data',
'data/task_data'
]
for dir_path in directories:
(self.output_dir / dir_path).mkdir(parents=True, exist_ok=True)
def _process_sensor_data(self):
"""Process all sensor data"""
sensor_types = ['lidar', 'gps', 'imu']
for sensor_type in sensor_types:
print(f"Processing {sensor_type} data...")
input_dir = self.input_dir / f'data/sensor_data/{sensor_type}'
output_dir = self.output_dir / f'data/sensor_data/{sensor_type}'
if not input_dir.exists():
print(f"Warning: {input_dir} does not exist")
continue
for file_path in tqdm(list(input_dir.glob('*.json'))):
try:
# Load and process data
with open(file_path, 'r') as f:
data = json.load(f)
# Process based on sensor type
if sensor_type == 'lidar':
data = self._process_lidar_data(data)
elif sensor_type == 'gps':
data = self._process_gps_data(data)
elif sensor_type == 'imu':
data = self._process_imu_data(data)
# Save processed data
output_path = output_dir / file_path.name
with open(output_path, 'w') as f:
json.dump(data, f, indent=2)
self.metadata['data_types']['sensor_data'][sensor_type] += 1
except Exception as e:
print(f"Error processing {file_path}: {e}")
def _process_image_data(self):
"""Process image data"""
print("Processing image data...")
input_dir = self.input_dir / 'data/image_data'
output_dir = self.output_dir / 'data/image_data'
if not input_dir.exists():
print(f"Warning: {input_dir} does not exist")
return
for file_path in tqdm(list(input_dir.glob('*.jpg'))):
try:
# Load and process image
image = Image.open(file_path)
# Standardize image
image = self._process_image(image)
# Save processed image
output_path = output_dir / file_path.name
image.save(output_path, quality=95)
self.metadata['data_types']['image_data'] += 1
except Exception as e:
print(f"Error processing {file_path}: {e}")
def _process_3d_data(self):
"""Process 3D point cloud data"""
print("Processing 3D data...")
input_dir = self.input_dir / 'data/3d_data'
output_dir = self.output_dir / 'data/3d_data'
if not input_dir.exists():
print(f"Warning: {input_dir} does not exist")
return
for file_path in tqdm(list(input_dir.glob('*.pcd'))):
try:
# Load and process point cloud
pcd = o3d.io.read_point_cloud(str(file_path))
# Process point cloud
pcd = self._process_point_cloud(pcd)
# Save processed point cloud
output_path = output_dir / file_path.name
o3d.io.write_point_cloud(str(output_path), pcd)
self.metadata['data_types']['3d_data'] += 1
except Exception as e:
print(f"Error processing {file_path}: {e}")
def _process_task_data(self):
"""Process task data"""
print("Processing task data...")
input_dir = self.input_dir / 'data/task_data'
output_dir = self.output_dir / 'data/task_data'
if not input_dir.exists():
print(f"Warning: {input_dir} does not exist")
return
for file_path in tqdm(list(input_dir.glob('*.json'))):
try:
# Load and process task data
with open(file_path, 'r') as f:
data = json.load(f)
# Process task data
data = self._process_task_definition(data)
# Save processed data
output_path = output_dir / file_path.name
with open(output_path, 'w') as f:
json.dump(data, f, indent=2)
self.metadata['data_types']['task_data'] += 1
except Exception as e:
print(f"Error processing {file_path}: {e}")
    def _process_lidar_data(self, data: Dict) -> Dict:
        """Process LiDAR data"""
        # Convert ranges to meters (assumes raw values are in centimeters)
        if 'ranges' in data:
            data['ranges'] = [x / 100.0 for x in data['ranges']]
        # Ensure all required fields: timestamp defaults to a scalar,
        # the per-beam fields to empty lists
        if 'timestamp' not in data:
            data['timestamp'] = 0.0
        for field in ['ranges', 'intensities', 'angles']:
            if field not in data:
                data[field] = []
        return data
def _process_gps_data(self, data: Dict) -> Dict:
"""Process GPS data"""
# Ensure all required fields
required_fields = ['timestamp', 'latitude', 'longitude', 'altitude']
for field in required_fields:
if field not in data:
data[field] = 0.0
return data
    def _process_imu_data(self, data: Dict) -> Dict:
        """Process IMU data"""
        # Ensure all required fields: timestamp defaults to a scalar,
        # the motion fields to 3-vectors (orientation as Euler angles)
        if 'timestamp' not in data:
            data['timestamp'] = 0.0
        for field in ['acceleration', 'angular_velocity', 'orientation']:
            if field not in data:
                data[field] = [0.0, 0.0, 0.0]
        return data
    def _process_image(self, image: Image.Image) -> Image.Image:
        """Process image data"""
        # Convert to RGB first (e.g. palette or RGBA inputs) so
        # resampling operates on RGB pixels
        if image.mode != 'RGB':
            image = image.convert('RGB')
        # Resize to the standard 640x480 resolution
        image = image.resize((640, 480), Image.Resampling.LANCZOS)
        return image
    def _process_point_cloud(self, pcd: o3d.geometry.PointCloud) -> o3d.geometry.PointCloud:
        """Process point cloud data"""
        # Remove statistical outliers (20 nearest neighbors, 2.0 std-dev cutoff)
        pcd, _ = pcd.remove_statistical_outlier(nb_neighbors=20, std_ratio=2.0)
        # Downsample on a 5 cm voxel grid (assumes coordinates in meters)
        pcd = pcd.voxel_down_sample(voxel_size=0.05)
        return pcd
    def _process_task_definition(self, data: Dict) -> Dict:
        """Process task definition data"""
        # Ensure all required fields: task_type defaults to an empty
        # string, parameters and annotations to empty mappings
        if 'task_type' not in data:
            data['task_type'] = ''
        for field in ['parameters', 'annotations']:
            if field not in data:
                data[field] = {}
        return data
def _save_metadata(self):
"""Save dataset metadata"""
# Update total samples
self.metadata['num_samples'] = sum([
sum(self.metadata['data_types']['sensor_data'].values()),
self.metadata['data_types']['image_data'],
self.metadata['data_types']['3d_data'],
self.metadata['data_types']['task_data']
])
# Save metadata
metadata_path = self.output_dir / 'dataset_info.json'
with open(metadata_path, 'w') as f:
json.dump(self.metadata, f, indent=2)
print(f"Dataset statistics:")
print(f"Total samples: {self.metadata['num_samples']}")
print("Data types:")
for data_type, count in self.metadata['data_types'].items():
if isinstance(count, dict):
for subtype, subcount in count.items():
print(f" - {data_type}/{subtype}: {subcount}")
else:
print(f" - {data_type}: {count}")
def main():
parser = argparse.ArgumentParser(description='Prepare FAFO dataset')
parser.add_argument('--input_dir', type=str, required=True,
help='Input directory containing raw data')
parser.add_argument('--output_dir', type=str, required=True,
help='Output directory for processed dataset')
args = parser.parse_args()
preprocessor = FAFODatasetPreprocessor(args.input_dir, args.output_dir)
preprocessor.prepare_dataset()
if __name__ == '__main__':
main()