# + Group: data/demo_8
# - Dataset: data/demo_8/actions, shape: (79, 7), dtype: float64
# - Dataset: data/demo_8/dones, shape: (79,), dtype: uint8
# + Group: data/demo_8/obs
# - Dataset: data/demo_8/obs/agentview_rgb, shape: (79, 128, 128, 3), dtype: uint8
# - Dataset: data/demo_8/obs/ee_ori, shape: (79, 3), dtype: float64
# - Dataset: data/demo_8/obs/ee_pos, shape: (79, 3), dtype: float64
# - Dataset: data/demo_8/obs/ee_states, shape: (79, 6), dtype: float64
# - Dataset: data/demo_8/obs/eye_in_hand_rgb, shape: (79, 128, 128, 3), dtype: uint8
# - Dataset: data/demo_8/obs/gripper_states, shape: (79, 2), dtype: float64
# - Dataset: data/demo_8/obs/joint_states, shape: (79, 7), dtype: float64
# - Dataset: data/demo_8/rewards, shape: (79,), dtype: uint8
# - Dataset: data/demo_8/robot_states, shape: (79, 9), dtype: float64
# - Dataset: data/demo_8/states, shape: (79, 92), dtype: float64
# + Group: data/demo_9
# - Dataset: data/demo_9/actions, shape: (89, 7), dtype: float64
# - Dataset: data/demo_9/dones, shape: (89,), dtype: uint8
# + Group: data/demo_9/obs
# - Dataset: data/demo_9/obs/agentview_rgb, shape: (89, 128, 128, 3), dtype: uint8
# - Dataset: data/demo_9/obs/ee_ori, shape: (89, 3), dtype: float64
# - Dataset: data/demo_9/obs/ee_pos, shape: (89, 3), dtype: float64
# - Dataset: data/demo_9/obs/ee_states, shape: (89, 6), dtype: float64
# - Dataset: data/demo_9/obs/eye_in_hand_rgb, shape: (89, 128, 128, 3), dtype: uint8
# - Dataset: data/demo_9/obs/gripper_states, shape: (89, 2), dtype: float64
# - Dataset: data/demo_9/obs/joint_states, shape: (89, 7), dtype: float64
# - Dataset: data/demo_9/rewards, shape: (89,), dtype: uint8
# - Dataset: data/demo_9/robot_states, shape: (89, 9), dtype: float64
# - Dataset: data/demo_9/states, shape: (89, 92), dtype: float64
# The above is the structure of each HDF5 file. Read every HDF5 file in the directory and compute the
# mean, std, min, max, q01, and q99 of actions, obs/ee_states, obs/gripper_states, and obs/joint_states across all files.
import h5py
import numpy as np
import os
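# Optional helper (not part of the original task; a minimal sketch): the group/dataset
# listing in the comment block above can be reproduced with h5py's visititems.
def print_structure(hdf5_path):
    """Print every group and dataset in an HDF5 file, mirroring the listing above."""
    with h5py.File(hdf5_path, 'r') as f:
        def visitor(name, obj):
            if isinstance(obj, h5py.Dataset):
                print(f"  - Dataset: {name}, shape: {obj.shape}, dtype: {obj.dtype}")
            else:
                print(f"  + Group: {name}")
        f.visititems(visitor)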
def calculate_statistics(hdf5_path):
    """Compute per-file statistics over all demos in a single HDF5 file."""
    actions = []
    ee_states = []
    gripper_states = []
    joint_states = []
    with h5py.File(hdf5_path, 'r') as f:
        # Collect every demo's trajectories, then concatenate along the time axis.
        for demo in f['data']:
            actions.append(f[f'data/{demo}/actions'][:])
            ee_states.append(f[f'data/{demo}/obs/ee_states'][:])
            gripper_states.append(f[f'data/{demo}/obs/gripper_states'][:])
            joint_states.append(f[f'data/{demo}/obs/joint_states'][:])
    actions = np.concatenate(actions, axis=0)
    ee_states = np.concatenate(ee_states, axis=0)
    gripper_states = np.concatenate(gripper_states, axis=0)
    joint_states = np.concatenate(joint_states, axis=0)
    stats = {
        'actions': {
            'mean': np.mean(actions, axis=0),
            'std': np.std(actions, axis=0),
            'min': np.min(actions, axis=0),
            'max': np.max(actions, axis=0),
            'q01': np.percentile(actions, 1, axis=0),
            'q99': np.percentile(actions, 99, axis=0)
        },
        'ee_states': {
            'mean': np.mean(ee_states, axis=0),
            'std': np.std(ee_states, axis=0),
            'min': np.min(ee_states, axis=0),
            'max': np.max(ee_states, axis=0),
            'q01': np.percentile(ee_states, 1, axis=0),
            'q99': np.percentile(ee_states, 99, axis=0)
        },
        'gripper_states': {
            'mean': np.mean(gripper_states, axis=0),
            'std': np.std(gripper_states, axis=0),
            'min': np.min(gripper_states, axis=0),
            'max': np.max(gripper_states, axis=0),
            'q01': np.percentile(gripper_states, 1, axis=0),
            'q99': np.percentile(gripper_states, 99, axis=0)
        },
        'joint_states': {
            'mean': np.mean(joint_states, axis=0),
            'std': np.std(joint_states, axis=0),
            'min': np.min(joint_states, axis=0),
            'max': np.max(joint_states, axis=0),
            'q01': np.percentile(joint_states, 1, axis=0),
            'q99': np.percentile(joint_states, 99, axis=0)
        }
    }
    return stats
def process_directory(directory):
    """Aggregate per-file statistics over every .hdf5 file in a directory."""
    all_stats = {
        'actions': [],
        'ee_states': [],
        'gripper_states': [],
        'joint_states': []
    }
    for filename in os.listdir(directory):
        if filename.endswith('.hdf5'):
            hdf5_path = os.path.join(directory, filename)
            stats = calculate_statistics(hdf5_path)
            for key in all_stats:
                all_stats[key].append(stats[key])
    # Combine the per-file statistics into overall statistics.
    overall_stats = {}
    for key, values in all_stats.items():
        # values is a list of per-file stat dicts for this key.
        means = np.array([v['mean'] for v in values])
        stds = np.array([v['std'] for v in values])
        mins = np.array([v['min'] for v in values])
        maxs = np.array([v['max'] for v in values])
        q01s = np.array([v['q01'] for v in values])
        q99s = np.array([v['q99'] for v in values])
        # Note: averaging per-file statistics is an approximation. The mean is
        # exact only if every file contributes the same number of samples, and
        # the pooled std and 1%/99% quantiles can only be computed exactly from
        # the concatenated raw data (see the exact variant below).
        overall_stats[key] = {
            'mean': np.mean(means, axis=0),
            'std': np.mean(stds, axis=0),
            'min': np.min(mins, axis=0),
            'max': np.max(maxs, axis=0),
            'q01': np.mean(q01s, axis=0),
            'q99': np.mean(q99s, axis=0)
        }
    return overall_stats
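# Sketch of an exact alternative (not in the original script): instead of averaging
# per-file statistics, pool the raw arrays from every file and compute each statistic
# once over the concatenated data. Assumes the same directory layout and keys as above.
def process_directory_exact(directory):
    pooled = {'actions': [], 'ee_states': [], 'gripper_states': [], 'joint_states': []}
    for filename in os.listdir(directory):
        if not filename.endswith('.hdf5'):
            continue
        with h5py.File(os.path.join(directory, filename), 'r') as f:
            for demo in f['data']:
                pooled['actions'].append(f[f'data/{demo}/actions'][:])
                pooled['ee_states'].append(f[f'data/{demo}/obs/ee_states'][:])
                pooled['gripper_states'].append(f[f'data/{demo}/obs/gripper_states'][:])
                pooled['joint_states'].append(f[f'data/{demo}/obs/joint_states'][:])
    exact_stats = {}
    for key, chunks in pooled.items():
        arr = np.concatenate(chunks, axis=0)
        exact_stats[key] = {
            'mean': np.mean(arr, axis=0),
            'std': np.std(arr, axis=0),
            'min': np.min(arr, axis=0),
            'max': np.max(arr, axis=0),
            'q01': np.percentile(arr, 1, axis=0),
            'q99': np.percentile(arr, 99, axis=0)
        }
    return exact_stats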
if __name__ == "__main__":
    directory = '/home2/czhang/datasets/LIBERO/libero_spatial'
    stats = process_directory(directory)
    for key, value in stats.items():
        print(f"{key}:")
        for stat_name, stat_value in value.items():
            print(f"  {stat_name}: {stat_value}")
    print("Statistics calculated successfully.")