Commit 9135070
Parent(s): c7c65cb
update README.md

README.md CHANGED

```python
semantic = sample["semantic"]
instance = sample["instance"]
```

### 🔄 Recover Original Values from TIFF Images

The dataset uses the .tiff format for all dense outputs (depth, semantic, and instance masks) to preserve precision while keeping the files viewable as ordinary images. Here's how to convert them back to their original values:

```python
from datasets import load_dataset
from huggingface_hub import snapshot_download
from PIL import Image
import numpy as np
import json
import os

# Load sample
dataset = load_dataset("jagennath-hari/nyuv2", split="train")
sample = dataset[0]

# Download and load scaling metadata
local_dir = snapshot_download(
    repo_id="jagennath-hari/nyuv2",
    repo_type="dataset",
    allow_patterns="scaling_factors.json"
)
with open(os.path.join(local_dir, "scaling_factors.json")) as f:
    scale = json.load(f)

depth_scale = scale["depth_scale"]          # e.g., 1000.0
label_max = scale["label_max_value"]        # e.g., 894
instance_max = scale["instance_max_value"]  # e.g., 37

# === Unscale depth (mm → m)
depth_img = np.array(sample["depth"])
depth_m = depth_img.astype(np.float32) / depth_scale

# === Unscale semantic mask
sem_scaled = np.array(sample["semantic"])
semantic_labels = np.round(
    sem_scaled.astype(np.float32) * (label_max / 65535.0)
).astype(np.uint16)

# === Unscale instance mask
inst_scaled = np.array(sample["instance"])
instance_ids = np.round(
    inst_scaled.astype(np.float32) * (instance_max / 65535.0)
).astype(np.uint16)
```
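
If you plan to decode many samples, the same arithmetic is easy to factor into a small helper. The sketch below is only a convenience wrapper, not part of the dataset tooling: it assumes the column names (`depth`, `semantic`, `instance`) and the `scaling_factors.json` keys shown above, reuses the `dataset` and `scale` objects from the previous snippet, and the `recover_sample` name is made up for illustration.

```python
import numpy as np

def recover_sample(sample, scale):
    """Undo the TIFF scaling for one dataset row (hypothetical helper).

    `sample` is a row from the Hugging Face dataset (TIFFs decoded to PIL
    images) and `scale` is the parsed scaling_factors.json dict, both
    obtained as in the snippet above.
    """
    depth_m = np.array(sample["depth"]).astype(np.float32) / scale["depth_scale"]
    semantic = np.round(
        np.array(sample["semantic"]).astype(np.float32)
        * (scale["label_max_value"] / 65535.0)
    ).astype(np.uint16)
    instance = np.round(
        np.array(sample["instance"]).astype(np.float32)
        * (scale["instance_max_value"] / 65535.0)
    ).astype(np.uint16)
    return {"depth_m": depth_m, "semantic": semantic, "instance": instance}

# Continuing from the snippet above, where `dataset` and `scale` are defined:
recovered = recover_sample(dataset[0], scale)
print(recovered["depth_m"].max(), recovered["semantic"].max())
```

Keeping all three conversions behind one function makes it less likely that depth, semantic, and instance outputs get unscaled inconsistently elsewhere in your code.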

## 📄 Citation

If you use this dataset, please cite the original authors: