Updates notebooks.
- .gitignore +129 -0
- load_dataset.ipynb +0 -0
- upload/upload_layer_frames.ipynb +196 -0
- upload/upload_part_section_frames.ipynb +201 -0
.gitignore
ADDED
@@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
load_dataset.ipynb
ADDED
The diff for this file is too large to render. See raw diff.
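Since this diff is not rendered here, below is a minimal sketch of what loading one of the uploaded configurations could look like. The config name comes from the upload notebooks; the default `train` split created by `push_to_hub` is an assumption.

```python
from datasets import load_dataset

# Minimal sketch: load the "nist_overhangs_layer_1" config pushed by
# upload/upload_layer_frames.ipynb. The "train" split name is assumed to be
# the default split that push_to_hub creates.
dataset = load_dataset(
    "ppak10/Melt-Pool-Thermal-Images",
    "nist_overhangs_layer_1",
    split="train",
)

# Each record holds one thermal frame plus the per-layer metadata fields.
print(dataset[0]["raw_frame_number"], dataset[0]["resolution"])
```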
upload/upload_layer_frames.ipynb
ADDED
@@ -0,0 +1,196 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pickle\n",
    "\n",
    "from datasets import Dataset, Split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [],
   "source": [
    "with open(\"../../NIST-In-Situ-IN625-LPBF-Overhangs/layer/base/1.pkl\", \"rb\") as f:\n",
    "    layer = pickle.load(f)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "build_time (3, 377)\n",
      "build_time (377, 3)\n",
      "raw_frame_number (1, 377)\n",
      "raw_frame_number (377, 1)\n",
      "resolution [[49.8]\n",
      " [33.5]]\n",
      "resolution [49.8 33.5]\n"
     ]
    }
   ],
   "source": [
    "layer_transposed = {}\n",
    "for key, value in layer.items():\n",
    "    if (key in [\"build_time\", \"raw_frame_number\"]):\n",
    "        print(key, value.shape)\n",
    "        value_transposed = value.transpose(1, 0)\n",
    "        print(key, value_transposed.shape)\n",
    "        if (key == \"raw_frame_number\"):\n",
    "            layer_transposed[key] = value_transposed.flatten()\n",
    "        else:\n",
    "            layer_transposed[key] = value_transposed\n",
    "    elif (key == \"resolution\"):\n",
    "        print(key, value)\n",
    "        value_flatten = value.flatten()\n",
    "        print(key, value_flatten)\n",
    "        layer_transposed[key] = value_flatten\n",
    "    else:\n",
    "        layer_transposed[key] = value\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(377, 126, 360)\n",
      "377\n"
     ]
    }
   ],
   "source": [
    "frames = layer_transposed[\"radiant_temp\"]\n",
    "print(frames.shape)\n",
    "print(len(frames))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'folder_layer_range': '001-005', 'part': 'OverhangPart', 'part_section': 'BASE', 'process': 'LPBFthermography', 'source': 'NIST', 'supports': 'N/A', 'layer_number': 1, 'build_time': array([0., 0., 0.], dtype=float32), 'contact_email': 'jarred.heigel@nist.gov', 'file_name': '20180801_OverhangStudy_Layer01.mat', 'hatch_spacing': 100, 'laser_power': 195, 'layer_thickness': 20, 'material': 'IN625', 'radiant_temp': array([[0, 0, 0, ..., 0, 0, 0],\n",
      " [0, 0, 0, ..., 0, 0, 0],\n",
      " [0, 0, 0, ..., 0, 0, 0],\n",
      " ...,\n",
      " [0, 0, 0, ..., 0, 0, 0],\n",
      " [0, 0, 0, ..., 0, 0, 0],\n",
      " [0, 0, 0, ..., 0, 0, 0]], dtype=uint16), 'raw_frame_number': 5024, 'resolution': array([49.8, 33.5], dtype=float32), 's_hvariable__a': 2.655, 's_hvariable__b': -800.7, 's_hvariable__c': 1940000.0, 'scan_speed': 800, 'website': 'nist.gov/el/lpbf-thermography/3D-part-builds/OverhangPart-IN625'}\n"
     ]
    }
   ],
   "source": [
    "frames_list = []\n",
    "for frame_index, frame in enumerate(frames):\n",
    "    frame_dict = {}\n",
    "    for key, value in layer_transposed.items():\n",
    "        if (key in [\"radiant_temp\", \"build_time\", \"raw_frame_number\"]):\n",
    "            frame_dict[key] = value[frame_index]\n",
    "        else:\n",
    "            frame_dict[key] = value\n",
    "    \n",
    "    frames_list.append(frame_dict)\n",
    "\n",
    "print(frames_list[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "metadata": {},
   "outputs": [],
   "source": [
    "dataset = Dataset.from_list(frames_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Creating parquet from Arrow format: 100%|██████████| 1/1 [00:00<00:00, 1.45ba/s]\n",
      "Uploading the dataset shards: 100%|██████████| 1/1 [00:01<00:00, 1.33s/it]\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CommitInfo(commit_url='https://huggingface.co/datasets/ppak10/Melt-Pool-Thermal-Images/commit/7012f060899646abef1d05262df89ba69167e4b7', commit_message='Upload dataset', commit_description='', oid='7012f060899646abef1d05262df89ba69167e4b7', pr_url=None, pr_revision=None, pr_num=None)"
      ]
     },
     "execution_count": 69,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.push_to_hub(\n",
    "    \"ppak10/Melt-Pool-Thermal-Images\",\n",
    "    config_name = \"nist_overhangs_layer_1\",\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print(layer)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
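The per-layer reshaping in the cells above is repeated in upload_part_section_frames.ipynb below. As a minimal sketch (not part of the commit; names are illustrative), the same layer-to-frame conversion could be factored into a helper:

```python
from datasets import Dataset


def layer_to_frame_records(layer):
    """Turn one layer's dict of arrays into a list of per-frame records.

    Mirrors the notebook cells above: "build_time" and "raw_frame_number"
    arrive as (channels, frames) arrays and are transposed to frames-first,
    "raw_frame_number" and "resolution" are flattened, and "radiant_temp" is
    a (frames, height, width) stack indexed one frame at a time.
    """
    transposed = {}
    for key, value in layer.items():
        if key in ("build_time", "raw_frame_number"):
            value = value.transpose(1, 0)
            transposed[key] = value.flatten() if key == "raw_frame_number" else value
        elif key == "resolution":
            transposed[key] = value.flatten()
        else:
            transposed[key] = value

    per_frame_keys = ("radiant_temp", "build_time", "raw_frame_number")
    return [
        {key: (value[i] if key in per_frame_keys else value)
         for key, value in transposed.items()}
        for i in range(len(transposed["radiant_temp"]))
    ]


# Usage mirroring the notebook: one unpickled layer -> one Dataset.
# dataset = Dataset.from_list(layer_to_frame_records(layer))
```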
upload/upload_part_section_frames.ipynb
ADDED
@@ -0,0 +1,201 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/media/ppak/Storage/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      " from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "import pickle\n",
    "import re\n",
    "\n",
    "from datasets import Dataset, concatenate_datasets\n",
    "from tqdm import tqdm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "data_dir = \"../../NIST-In-Situ-IN625-LPBF-Overhangs/layer/base/\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def numerical_sort(value):\n",
    "    # Extract numerical part from the string\n",
    "    numbers = re.compile(r'(\\d+)')\n",
    "    parts = numbers.split(value)\n",
    "    parts[1::2] = map(int, parts[1::2])\n",
    "    return parts"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 99/99 [03:09<00:00, 1.91s/it]\n"
     ]
    }
   ],
   "source": [
    "dataset = None\n",
    "layer_frames_list = []\n",
    "# for layer_file in sorted(os.listdir(data_dir), key=numerical_sort)[0:2]:\n",
    "for layer_file in tqdm(sorted(os.listdir(data_dir), key=numerical_sort)):\n",
    "    with open(f\"{data_dir}/{layer_file}\", \"rb\") as f:\n",
    "        layer = pickle.load(f)\n",
    "\n",
    "    # Transpose specific layer values\n",
    "    layer_transposed = {}\n",
    "    for key, value in layer.items():\n",
    "        if (key in [\"build_time\", \"raw_frame_number\"]):\n",
    "            # print(key, value.shape)\n",
    "            value_transposed = value.transpose(1, 0)\n",
    "            # print(key, value_transposed.shape)\n",
    "            if (key == \"raw_frame_number\"):\n",
    "                layer_transposed[key] = value_transposed.flatten()\n",
    "            else:\n",
    "                layer_transposed[key] = value_transposed\n",
    "        elif (key == \"resolution\"):\n",
    "            # print(key, value)\n",
    "            value_flatten = value.flatten()\n",
    "            # print(key, value_flatten)\n",
    "            layer_transposed[key] = value_flatten\n",
    "        else:\n",
    "            layer_transposed[key] = value\n",
    "\n",
    "    frames_list = []\n",
    "    for frame_index, frame in enumerate(layer_transposed[\"radiant_temp\"]):\n",
    "        frame_dict = {}\n",
    "        frame_dict[\"frame_index\"] = frame_index\n",
    "        for key, value in layer_transposed.items():\n",
    "            if (key in [\"radiant_temp\", \"build_time\", \"raw_frame_number\"]):\n",
    "                frame_dict[key] = value[frame_index]\n",
    "            else:\n",
    "                frame_dict[key] = value\n",
    "        \n",
    "        frames_list.append(frame_dict)\n",
    "    \n",
    "    layer_dataset = Dataset.from_list(frames_list)\n",
    "    if dataset == None:\n",
    "        dataset = layer_dataset\n",
    "    else:\n",
    "        dataset = concatenate_datasets([dataset, layer_dataset])\n",
    "\n",
    "    # layer_frames_list += frames_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.25s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.18s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.25s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.30s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.34s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:06<00:00, 1.10s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:09<00:00, 1.52s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.32s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:06<00:00, 1.03s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:06<00:00, 1.04s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.25s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.22s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.45s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:10<00:00, 1.71s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.17s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.37s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.26s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.46s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:06<00:00, 1.08s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.32s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:09<00:00, 1.61s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.37s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.28s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.43s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:09<00:00, 1.65s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:08<00:00, 1.49s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:09<00:00, 1.57s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:06<00:00, 1.12s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.25s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.31s/ba]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:07<00:00, 1.18s/ba]\n",
      "Uploading the dataset shards: 100%|██████████| 31/31 [05:00<00:00, 9.70s/it]\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CommitInfo(commit_url='https://huggingface.co/datasets/ppak10/Melt-Pool-Thermal-Images/commit/3252ef78aceb07c7bbea05c43eda0f5c7070f869', commit_message='Upload dataset', commit_description='', oid='3252ef78aceb07c7bbea05c43eda0f5c7070f869', pr_url=None, pr_revision=None, pr_num=None)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.push_to_hub(\n",
    "    \"ppak10/Melt-Pool-Thermal-Images\",\n",
    "    config_name = \"nist_overhangs_base_frame_data\",\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
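The loop in this notebook grows the dataset by calling concatenate_datasets once per layer. A possible alternative, sketched below under the assumptions that the pickles follow the same layout, that the installed `datasets` version provides `Dataset.from_generator`, and reusing the hypothetical `layer_to_frame_records` helper sketched earlier, is to stream records through a generator so the frames are written to the Arrow cache instead of accumulating in memory before the upload:

```python
import os
import pickle
import re

from datasets import Dataset

data_dir = "../../NIST-In-Situ-IN625-LPBF-Overhangs/layer/base/"


def frame_records():
    # Layer pickles are assumed to be named by layer number, e.g. "1.pkl".
    layer_files = sorted(
        os.listdir(data_dir),
        key=lambda name: int(re.search(r"\d+", name).group()),
    )
    for layer_file in layer_files:
        with open(os.path.join(data_dir, layer_file), "rb") as f:
            layer = pickle.load(f)
        # layer_to_frame_records is the hypothetical helper sketched after
        # upload_layer_frames.ipynb above.
        for frame_index, record in enumerate(layer_to_frame_records(layer)):
            record["frame_index"] = frame_index
            yield record


dataset = Dataset.from_generator(frame_records)
dataset.push_to_hub(
    "ppak10/Melt-Pool-Thermal-Images",
    config_name="nist_overhangs_base_frame_data",
)
```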