Tasks: Other
Languages: English
Multilinguality: monolingual
Size Categories: 10K<n<100K
Language Creators: found
Annotations Creators: expert-generated
Source Datasets: extended|mnist
cristiano.pizzamiglio committed
Commit b594a16 · 1 Parent: 5239a0b

feat: setup repo

.gitignore ADDED
@@ -0,0 +1,134 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # poetry
+ poetry.lock
+
+ # pdm
+ pdm.lock
+ .pdm.toml
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Idea project settings
+ .idea
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) [year] [fullname]
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
dataset/test_labels.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff7e84b144c037e7215dfa787d6773550c5db83029d9a4e7bae6e90f605f081d
+ size 10128
dataset/test_point_clouds.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30fc5923e8c438bae6b0c467ed1642ac81ca11308c6563bb66a838bdb080a047
+ size 11580128
dataset/train_labels.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5dd4d822cab3e20099239bc9d433d587ae3ce00e084d191079dd30b38380b336
+ size 60128
dataset/train_point_clouds.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bbfb3577c041af905cb3a3f0910e604df3aa65b5303a1f71bb7ea8f31598dbb8
+ size 69480128
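
The four `.npy` files above are stored through Git LFS. As a hedged loading sketch: the pointer sizes are consistent with float16 point-cloud arrays of shape (60000, 193, 3) and (10000, 193, 3) plus uint8 label arrays, but treat those shapes as an inference from the file sizes, not a spec:

    import numpy as np

    # Load the LFS-resolved arrays from a local clone of the repository.
    train_point_clouds = np.load("dataset/train_point_clouds.npy")
    train_labels = np.load("dataset/train_labels.npy")

    print(train_point_clouds.shape, train_point_clouds.dtype)  # inferred: (60000, 193, 3) float16
    print(train_labels.shape, train_labels.dtype)              # inferred: (60000,) uint8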
images/0.png ADDED

Git LFS Details

  • SHA256: 316602d50f445f9f8e87a3d0bbaef17459087bec4f31aa2f53853bfcb3dd7592
  • Pointer size: 130 Bytes
  • Size of remote file: 66.8 kB
images/0_side_view.PNG ADDED

Git LFS Details

  • SHA256: 3f10c4dbd2281e8b4eb20c71e5a7f2d244104bbf9ec6596b15b735f25627658c
  • Pointer size: 131 Bytes
  • Size of remote file: 102 kB
images/0_top_view.PNG ADDED

Git LFS Details

  • SHA256: 153c26c9f26c6f7982b4fd6589f33808d080573cfd7fa1a7c1c0249043675051
  • Pointer size: 130 Bytes
  • Size of remote file: 66.3 kB
images/1.png ADDED

Git LFS Details

  • SHA256: a80ed7d82a3c30e3bc34631bc6fdea5f9cf2903a715d8eee8ccc05acd256cc16
  • Pointer size: 130 Bytes
  • Size of remote file: 42.8 kB
images/2.png ADDED

Git LFS Details

  • SHA256: 05b6bbdaf8dce20b04f320544d08872565785e009d627c600338aa3f7409b938
  • Pointer size: 130 Bytes
  • Size of remote file: 56.9 kB
images/3.png ADDED

Git LFS Details

  • SHA256: eaaa42120c0472bd05463112fb7e9f388b91a24629de833d433e9e5ead14723c
  • Pointer size: 130 Bytes
  • Size of remote file: 69.2 kB
images/4.png ADDED

Git LFS Details

  • SHA256: 62bc6f6bcd8ab64da1ade76162826abde79ad7f047686c92f2cbd746b24762d3
  • Pointer size: 130 Bytes
  • Size of remote file: 60.5 kB
images/5.png ADDED

Git LFS Details

  • SHA256: ac47416f68c962551e7a5a995e0ed8a4f19c5997a46c3ee5d1250953df34f23f
  • Pointer size: 130 Bytes
  • Size of remote file: 61.2 kB
images/6.png ADDED

Git LFS Details

  • SHA256: cc4779e1b820a0d15e1f42d9ee82a2b2bc61cfdebf7cb1a814bca54f8c8de2ba
  • Pointer size: 130 Bytes
  • Size of remote file: 65.1 kB
images/7.png ADDED

Git LFS Details

  • SHA256: ece258678d5aced8725f7d4c00e8af5edc28ed1d0dfcdaf2dd37a74f9f42fbca
  • Pointer size: 130 Bytes
  • Size of remote file: 56.5 kB
images/8.png ADDED

Git LFS Details

  • SHA256: 182225d53236e7c9cd93d881d168665ce299227230cd7700461bc42edb6bc2a4
  • Pointer size: 130 Bytes
  • Size of remote file: 69.4 kB
images/9.png ADDED

Git LFS Details

  • SHA256: 7800490b01eb012205be5eb269de4509a75ce18f45ccd5f7df8c1b5ed1b93147
  • Pointer size: 130 Bytes
  • Size of remote file: 52.6 kB
images/non_zero_intensity_distribution_boxplot.png ADDED

Git LFS Details

  • SHA256: 80e8e7b7315d49c1015d36e5a8e0b2328b1f2afd7c300d6856faf96ca1e7dc3d
  • Pointer size: 130 Bytes
  • Size of remote file: 17.3 kB
images/test_image_pixel_intensity_distribution_0.png ADDED

Git LFS Details

  • SHA256: fe723367cacc60adfd12044859d72e0d03fa75bb83430a38bf72bb114250345b
  • Pointer size: 129 Bytes
  • Size of remote file: 3.16 kB
images/test_images_pixel_intensity_distribution.png ADDED

Git LFS Details

  • SHA256: 6e26f6f611250c3551cf5f96fe654ddb97c363d641621491035f47ccd3cbe52e
  • Pointer size: 130 Bytes
  • Size of remote file: 18.3 kB
images/train_images_pixel_intensity_distribution.png ADDED

Git LFS Details

  • SHA256: 920dcec46129290b27ae7fc74e4c8802638267cfc7b80ac3658760570a9ac4ae
  • Pointer size: 130 Bytes
  • Size of remote file: 18.2 kB
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ matplotlib==3.8.2
+ numpy==1.24.3
+ pydantic==2.7.1
+ PyYAML==6.0.1
+ tensorflow==2.13.0
+ typing_extensions==4.11.0
src/mnist3d/__init__.py ADDED
File without changes
src/mnist3d/io_.py ADDED
@@ -0,0 +1,54 @@
+ from __future__ import annotations
+
+ from pathlib import Path
+ from typing import Tuple
+
+ import numpy as np
+
+
+ def import_dataset(
+     dir_path: Path,
+ ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+     """
+     Import the dataset.
+
+     Parameters
+     ----------
+     dir_path : Path
+         Directory containing the four dataset ``.npy`` files.
+
+     Returns
+     -------
+     Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
+         Train point clouds, train labels, test point clouds, test labels.
+
+     """
+     # Path arithmetic keeps the code portable across operating systems.
+     return (
+         np.load(dir_path / "train_point_clouds.npy"),
+         np.load(dir_path / "train_labels.npy"),
+         np.load(dir_path / "test_point_clouds.npy"),
+         np.load(dir_path / "test_labels.npy"),
+     )
+
+
+ def export_dataset(
+     train_point_clouds: np.ndarray,
+     train_labels: np.ndarray,
+     test_point_clouds: np.ndarray,
+     test_labels: np.ndarray,
+     dir_path: Path,
+ ) -> None:
+     """
+     Export the dataset as NumPy arrays.
+
+     Parameters
+     ----------
+     train_point_clouds : np.ndarray
+     train_labels : np.ndarray
+     test_point_clouds : np.ndarray
+     test_labels : np.ndarray
+     dir_path : Path
+         Output directory; it must already exist.
+
+     """
+     np.save(dir_path / "train_point_clouds.npy", train_point_clouds)
+     np.save(dir_path / "train_labels.npy", train_labels)
+     np.save(dir_path / "test_point_clouds.npy", test_point_clouds)
+     np.save(dir_path / "test_labels.npy", test_labels)
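
A minimal usage sketch of the two helpers above, run from `src/mnist3d` so the script-style imports resolve; the output directory is hypothetical and must exist before saving:

    from pathlib import Path

    from io_ import export_dataset, import_dataset

    train_pcs, train_labels, test_pcs, test_labels = import_dataset(
        dir_path=Path("../../dataset")
    )

    out_dir = Path("/tmp/mnist3d")  # hypothetical scratch directory
    out_dir.mkdir(parents=True, exist_ok=True)
    export_dataset(train_pcs, train_labels, test_pcs, test_labels, dir_path=out_dir)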
src/mnist3d/main.py ADDED
@@ -0,0 +1,255 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass, InitVar, field
+ from pathlib import Path
+ from typing import Tuple
+
+ import numpy as np
+ import tensorflow as tf
+
+ from io_ import export_dataset
+ from parameters import Parameters, import_parameters
+
+ np.random.seed(42)
+
+ IMAGE_SIZE = 28
+
+
+ def main(
+     parameters: Parameters,
+ ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+     """
+     Load the original MNIST dataset and convert images to point clouds.
+
+     Parameters
+     ----------
+     parameters : Parameters
+
+     Returns
+     -------
+     Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
+
+     """
+     mnist = tf.keras.datasets.mnist
+     (train_images, train_labels), (test_images, test_labels) = mnist.load_data()
+
+     binary_intensities = compute_binary_intensities(
+         train_images, parameters.pixel_intensity_threshold
+     )
+     point_count = compute_point_count(binary_intensities)
+
+     train_point_clouds = convert_images_to_point_clouds(
+         train_images,
+         point_count,
+         parameters.pixel_intensity_threshold,
+         parameters.noise_standard_deviation,
+     )
+     test_point_clouds = convert_images_to_point_clouds(
+         test_images,
+         point_count,
+         parameters.pixel_intensity_threshold,
+         parameters.noise_standard_deviation,
+     )
+     return train_point_clouds, train_labels, test_point_clouds, test_labels
+
+
+ @dataclass
+ class ActivePixelStats:
+     """
+     Active pixel (i.e. intensity = 1) statistics.
+
+     Parameters
+     ----------
+     binary_intensities : np.ndarray
+         Binary pixel intensities (i.e. 0 or 1).
+
+     Attributes
+     ----------
+     counts : np.ndarray
+     first_quartile : int
+     third_quartile : int
+     median : int
+     iqr : float
+         Interquartile range.
+     minimum : int
+         Outliers excluded.
+     maximum : int
+         Outliers excluded.
+
+     """
+
+     binary_intensities: InitVar[np.ndarray]
+     counts: np.ndarray = field(init=False)
+     first_quartile: int = field(init=False)
+     third_quartile: int = field(init=False)
+     median: int = field(init=False)
+     iqr: float = field(init=False)
+     minimum: int = field(init=False)
+     maximum: int = field(init=False)
+
+     def __post_init__(self, binary_intensities: np.ndarray) -> None:
+         self.counts = np.sum(binary_intensities, axis=1).astype(int)
+         self.first_quartile = np.percentile(self.counts, 25).astype(int)
+         self.third_quartile = np.percentile(self.counts, 75).astype(int)
+         self.median = np.median(self.counts).astype(int)
+         self.iqr = self.third_quartile - self.first_quartile
+         iqr_factor = 1.5
+         self.minimum = self.counts[
+             self.counts >= self.first_quartile - iqr_factor * self.iqr
+         ].min()
+         self.maximum = self.counts[
+             self.counts <= self.third_quartile + iqr_factor * self.iqr
+         ].max()
+
+
+ def create_xy_grid(image_size: int) -> np.ndarray:
+     """
+     Create x-y grid.
+
+     Parameters
+     ----------
+     image_size : int
+         Pixel count per side (the image is square).
+
+     Returns
+     -------
+     np.ndarray
+
+     """
+     x = np.tile(np.linspace(0.0, 1.0, image_size), image_size)
+     y = np.repeat(np.linspace(0.0, 1.0, image_size), image_size)
+     return np.column_stack((x, y))
+
+
+ def convert_images_to_point_clouds(
+     images: np.ndarray,
+     point_count: int,
+     pixel_intensity_threshold: int,
+     noise_standard_deviation: float,
+ ) -> np.ndarray:
+     """
+     Convert images to point clouds.
+
+     Parameters
+     ----------
+     images : np.ndarray
+     point_count : int
+     pixel_intensity_threshold : int
+     noise_standard_deviation : float
+
+     Returns
+     -------
+     np.ndarray
+
+     """
+     binary_intensities = compute_binary_intensities(images, pixel_intensity_threshold)
+
+     xy_grid = create_xy_grid(image_size=IMAGE_SIZE)
+     xy_grids = np.tile(xy_grid, (images.shape[0], 1, 1))
+     point_clouds = np.concatenate(
+         (xy_grids, binary_intensities[:, :, np.newaxis]), axis=2
+     )
+
+     point_clouds_resized = np.array(
+         [resize_point_cloud(point_cloud, point_count) for point_cloud in point_clouds]
+     )
+     point_clouds_resized_noisy = np.array(
+         [
+             add_noise(point_cloud, noise_standard_deviation)
+             for point_cloud in point_clouds_resized
+         ]
+     )
+     return point_clouds_resized_noisy.astype(np.float16)
+
+
+ def compute_binary_intensities(
+     images: np.ndarray, pixel_intensity_threshold: int
+ ) -> np.ndarray:
+     """
+     Compute binary pixel intensities (i.e. 0 or 1).
+
+     Parameters
+     ----------
+     images : np.ndarray
+     pixel_intensity_threshold : int
+
+     Returns
+     -------
+     np.ndarray
+
+     """
+     images = (images > pixel_intensity_threshold).astype(int)
+     return images.reshape(images.shape[0], images.shape[1] * images.shape[2])
+
+
+ def compute_point_count(binary_intensities: np.ndarray) -> int:
+     """
+     Compute the number of points as the maximum of the boxplot (excluding any outliers).
+
+     Parameters
+     ----------
+     binary_intensities : np.ndarray
+
+     Returns
+     -------
+     int
+
+     """
+     active_pixel_stats = ActivePixelStats(binary_intensities)
+     return active_pixel_stats.maximum
+
+
+ def resize_point_cloud(point_cloud: np.ndarray, point_count: int) -> np.ndarray:
+     """
+     Resize point cloud to have `point_count` points.
+
+     Parameters
+     ----------
+     point_cloud : np.ndarray
+     point_count : int
+
+     Returns
+     -------
+     np.ndarray
+
+     """
+     # Keep active pixels only, then duplicate or subsample at random to hit
+     # the target point count.
+     point_cloud = point_cloud[point_cloud[:, 2] > 0]
+     if len(point_cloud) < point_count:
+         missing_count = point_count - len(point_cloud)
+         indices = np.random.choice(len(point_cloud), missing_count)
+         return np.concatenate((point_cloud, point_cloud[indices, :]), axis=0)
+     elif len(point_cloud) > point_count:
+         indices = np.random.choice(len(point_cloud), point_count)
+         return point_cloud[indices, :]
+     else:
+         return point_cloud
+
+
+ def add_noise(point_cloud: np.ndarray, standard_deviation: float) -> np.ndarray:
+     """
+     Add Gaussian noise.
+
+     Parameters
+     ----------
+     point_cloud : np.ndarray
+     standard_deviation : float
+
+     Returns
+     -------
+     np.ndarray
+
+     """
+     # Active pixels have z = 1 after resizing; shift them to z = 0 so the
+     # noise is centred on the image plane.
+     point_cloud[:, 2] = point_cloud[:, 2] - 1.0
+     noise = np.random.normal(0.0, standard_deviation, point_cloud.shape)
+     return point_cloud + noise
+
+
+ if __name__ == "__main__":
+     parameters_ = import_parameters()
+     train_point_clouds, train_labels, test_point_clouds, test_labels = main(parameters_)
+     export_dataset(
+         train_point_clouds,
+         train_labels,
+         test_point_clouds,
+         test_labels,
+         dir_path=Path("../../dataset"),
+     )
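
A small sanity check of `resize_point_cloud` on a synthetic cloud, run from `src/mnist3d`. The target of 193 points matches what the LFS file sizes suggest `compute_point_count` returns for the default threshold, but treat that value as an assumption:

    import numpy as np

    from main import resize_point_cloud

    n = 50  # fewer points than the target, so duplicates are drawn
    cloud = np.column_stack((np.random.rand(n), np.random.rand(n), np.ones(n)))

    resized = resize_point_cloud(cloud, point_count=193)
    print(resized.shape)  # (193, 3): 143 points sampled with replacement and appended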
src/mnist3d/parameters.py ADDED
@@ -0,0 +1,38 @@
+ from __future__ import annotations
+
+ import yaml
+ from pydantic import BaseModel, Field
+ from typing_extensions import Annotated
+
+
+ class Parameters(BaseModel):
+     """
+     Parameters.
+
+     Attributes
+     ----------
+     pixel_intensity_threshold : int
+     noise_standard_deviation : float
+
+     """
+
+     pixel_intensity_threshold: Annotated[int, Field(strict=True, gt=0, lt=255)]
+     noise_standard_deviation: Annotated[float, Field(strict=True, gt=0.0, lt=1.0)]
+
+
+ def import_parameters() -> Parameters:
+     """
+     Import parameters.
+
+     Returns
+     -------
+     Parameters
+
+     """
+     with open("params.yaml", "r") as file:
+         parameter_to_value = yaml.safe_load(file.read())
+
+     return Parameters(
+         pixel_intensity_threshold=parameter_to_value["pixel_intensity_threshold"],
+         noise_standard_deviation=parameter_to_value["noise_standard_deviation"],
+     )
src/mnist3d/params.yaml ADDED
@@ -0,0 +1,5 @@
+ # Pixel intensity threshold (Integer)
+ pixel_intensity_threshold: 128
+
+ # Gaussian noise standard deviation (Float)
+ noise_standard_deviation: 0.01
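
Because the model fields are declared with strict `Field` constraints, out-of-range values in `params.yaml` fail fast at load time. A quick sketch of the rejection path (run from `src/mnist3d`):

    from pydantic import ValidationError

    from parameters import Parameters

    try:
        # 300 violates the gt=0, lt=255 bound on pixel_intensity_threshold.
        Parameters(pixel_intensity_threshold=300, noise_standard_deviation=0.01)
    except ValidationError as error:
        print(error)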
src/mnist3d/plotter.py ADDED
@@ -0,0 +1,144 @@
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ import numpy as np
+ import tensorflow as tf
+ from matplotlib import pyplot as plt
+
+ from io_ import import_dataset
+ from main import compute_binary_intensities, ActivePixelStats
+ from parameters import Parameters, import_parameters
+
+
+ def main(parameters: Parameters) -> None:
+     """
+     Plot image pixel intensity distributions, the active pixel count boxplot, and
+     images with their corresponding point clouds.
+
+     Parameters
+     ----------
+     parameters : Parameters
+
+     """
+     mnist = tf.keras.datasets.mnist
+     (train_images, train_labels), (test_images, test_labels) = mnist.load_data()
+     plot_image_pixel_intensity_distributions(
+         train_images, train_labels, title="Train Images"
+     )
+     plot_image_pixel_intensity_distributions(
+         test_images, test_labels, title="Test Images"
+     )
+
+     binary_intensities = compute_binary_intensities(
+         train_images, parameters.pixel_intensity_threshold
+     )
+     active_pixel_stats = ActivePixelStats(binary_intensities)
+     plot_active_pixel_count_boxplot(active_pixel_stats)
+
+     train_point_clouds, train_labels, test_point_clouds, test_labels = import_dataset(
+         dir_path=Path("../../dataset")
+     )
+     label_count = 10
+     label_to_indices = {
+         index: np.where(train_labels == index)[0] for index in range(label_count)
+     }
+     indices = [np.random.choice(indices) for indices in label_to_indices.values()]
+     for label, index in enumerate(indices):
+         plot_point_cloud_image(train_point_clouds[index], train_images[index], label)
+
+
+ def plot_point_cloud_image(
+     point_cloud: np.ndarray, image: np.ndarray, label: int
+ ) -> None:
+     """
+     Plot a point cloud and its corresponding image.
+
+     Parameters
+     ----------
+     point_cloud : np.ndarray
+     image : np.ndarray
+     label : int
+
+     """
+     figure = plt.figure(figsize=(12, 6))
+
+     axis_point_cloud = figure.add_subplot(121, projection="3d")
+     axis_point_cloud.scatter(
+         point_cloud[:, 0], point_cloud[:, 1], point_cloud[:, 2], s=40
+     )
+     axis_point_cloud.set_xlim(0.0, 1.0)
+     axis_point_cloud.set_ylim(0.0, 1.0)
+     axis_point_cloud.set_zlim(-0.1, 0.1)
+     axis_point_cloud.view_init(elev=-90, azim=-85)
+
+     axis_image = figure.add_subplot(122)
+     axis_image.imshow(image, cmap="gray")
+     axis_image.set_xticks([])
+     axis_image.set_yticks([])
+
+     figure.suptitle(f"Label: {label}")
+     plt.tight_layout()
+     plt.show()
+
+
+ def plot_image_pixel_intensity_distributions(
+     images: np.ndarray, labels: np.ndarray, title: str
+ ) -> None:
+     """
+     Plot image pixel intensity distributions.
+
+     Parameters
+     ----------
+     images : np.ndarray
+     labels : np.ndarray
+     title : str
+
+     """
+     label_count = 10
+     figure, axis = plt.subplots(2, 5, figsize=(12, 5))
+     for label in range(label_count):
+         indices = np.where(labels == label)[0]
+         intensities = images[indices].flatten()
+         i = 0 if label < label_count // 2 else 1
+         j = label if label < label_count // 2 else (label - label_count // 2)
+         axis[i, j].hist(intensities, bins=40)
+         axis[i, j].set_title(label)
+         axis[i, j].set_xticks(np.arange(0, 256, 85))
+         axis[i, j].get_yaxis().set_visible(False)
+
+     figure.suptitle(f"{title} - Pixel Intensity Distributions")
+     plt.tight_layout()
+     plt.show()
+
+
+ def plot_active_pixel_count_boxplot(active_pixel_stats: ActivePixelStats) -> None:
+     """
+     Plot the active pixel count boxplot.
+
+     Parameters
+     ----------
+     active_pixel_stats : ActivePixelStats
+
+     """
+     figure, axis = plt.subplots(figsize=(5, 6))
+     axis.boxplot(active_pixel_stats.counts)
+
+     # The stats are already scalars; no further reduction is needed.
+     median = active_pixel_stats.median
+     minimum = active_pixel_stats.minimum
+     maximum = active_pixel_stats.maximum
+
+     axis.annotate(f"Median: {median}", xy=(1, median), xytext=(1.1, median))
+     axis.annotate(f"Minimum: {minimum}", xy=(1, minimum), xytext=(1.1, minimum))
+     axis.annotate(f"Maximum: {maximum}", xy=(1, maximum), xytext=(1.1, maximum))
+
+     plt.xlabel("Intensity = 1")
+     plt.ylabel("Count")
+     plt.tight_layout()
+     plt.show()
+
+
+ if __name__ == "__main__":
+     parameters_ = import_parameters()
+     main(parameters_)
src/mnist3d/size_calculator.py ADDED
@@ -0,0 +1,44 @@
+ from __future__ import annotations
+
+ import os
+ import zipfile
+ from pathlib import Path
+ from typing import List
+
+ DIR_PATH = Path("../../dataset")
+
+
+ def compute_dataset_sizes() -> List[int]:
+     """
+     Compute the size of each dataset file in bytes.
+
+     Returns
+     -------
+     List[int]
+
+     """
+     return [
+         os.path.getsize(Path(DIR_PATH, filename)) for filename in os.listdir(DIR_PATH)
+     ]
+
+
+ def compute_download_size() -> int:
+     """
+     Compute the download size in bytes.
+
+     Returns
+     -------
+     int
+
+     """
+     archive_path = Path(DIR_PATH, "archive.zip")
+     with zipfile.ZipFile(archive_path, "w", zipfile.ZIP_LZMA) as zipf:
+         for filename in os.listdir(DIR_PATH):
+             zipf.write(os.path.join(DIR_PATH, filename), arcname=filename)
+     return os.path.getsize(archive_path)
+
+
+ if __name__ == "__main__":
+     dataset_sizes = compute_dataset_sizes()
+     dataset_size = sum(dataset_sizes)
+     download_size = compute_download_size()
+     print(f"Dataset size: {dataset_size} bytes, download size: {download_size} bytes")