LeoZhangzaolin committed
Commit • 7f97f62 • 1 parent: 3c33491
Update Graptolodiea-Speciemens_Imaging.py

Graptolodiea-Speciemens_Imaging.py CHANGED
@@ -1,7 +1,11 @@
 """Graptoloidea Specimens dataset."""
 
-import
-import
+import os
+import random
+from typing import List
+import datasets
+import pandas as pd
+import numpy as np
 
 _CITATION = """\
 
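Note, not part of the commit: the _split_generators method added in the last hunk below calls zipfile.ZipFile, but zipfile is not among the imports introduced here, so the script as committed would hit a NameError when building splits. A minimal sketch of the import block with that one module added and the rest copied from the hunk above:

import os
import random
import zipfile  # used by _split_generators below; not added in this commit
from typing import List
import datasets
import pandas as pd
import numpy as np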
@@ -28,9 +32,9 @@ _HOMEPAGE = "https://zenodo.org/records/6194943"
 _license = ""
 
 
-_URL = ""
 _URLS = {
-
+    "part1": "https://zenodo.org/records/6194943/files/graptolite%20specimens%20with%20scale.zip.001?download=1",
+    "part2": "https://zenodo.org/records/6194943/files/graptolite%20specimens%20with%20scale.zip.002?download=1",
 }
 
 
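The two _URLS entries point at the halves of a split zip archive on Zenodo (graptolite specimens with scale.zip.001 and .002); the next hunk reassembles them by plain byte concatenation before extracting. A standalone sketch of that reassembly, assuming the parts only need to be concatenated in order (which is what the committed code assumes as well); the file names here are placeholders, and shutil.copyfileobj streams the copy instead of reading each part fully into memory:

import shutil
import zipfile

# Placeholder paths; in the loader these come from dl_manager.download(_URLS).
parts = ["graptolite_specimens.zip.001", "graptolite_specimens.zip.002"]
combined = "combined.zip"

with open(combined, "wb") as f_out:
    for part in parts:
        with open(part, "rb") as f_in:
            shutil.copyfileobj(f_in, f_out)  # stream copy, constant memory use

# Quick sanity check that the reassembled archive is readable.
with zipfile.ZipFile(combined) as zf:
    print(len(zf.namelist()), "entries in the reassembled archive")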
@@ -60,7 +64,32 @@ class GraptoloideaSpecimensDataset(datasets.GeneratorBasedBuilder):
             citation=_CITATION,
         )
 
+    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
+        downloaded_files = dl_manager.download(_URLS)
+        combined_zip_path = os.path.join(dl_manager.manual_dir, 'combined.zip')
+        with open(combined_zip_path, 'wb') as f_out:
+            for part in ['part1', 'part2']:
+                with open(downloaded_files[part], 'rb') as f_in:
+                    f_out.write(f_in.read())
+        with zipfile.ZipFile(combined_zip_path, 'r') as zip_ref:
+            zip_ref.extractall(dl_manager.manual_dir)
+        all_files = [os.path.join(dl_manager.manual_dir, f) for f in os.listdir(dl_manager.manual_dir)
+                     if os.path.isfile(os.path.join(dl_manager.manual_dir, f))]
+        random.shuffle(all_files)
+        split_index = int(0.8 * len(all_files))
+        train_files = all_files[:split_index]
+        validation_files = all_files[split_index:]
+        return [
+            datasets.SplitGenerator(
+                name=datasets.Split.TRAIN,
+                gen_kwargs={"filepath": train_files}),
+            datasets.SplitGenerator(
+                name=datasets.Split.VALIDATION,
+                gen_kwargs={"filepath": validation_files}),
+        ]
 
+
+
 
 
 
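The new _split_generators downloads both parts, concatenates them into combined.zip, extracts the archive, shuffles the extracted files, and hands an 80/20 train/validation split to the two SplitGenerators through gen_kwargs={"filepath": ...}. Two hedged observations on this hunk: dl_manager.manual_dir is normally set only when the dataset is loaded with an explicit data_dir, so writing combined.zip there may fail in the default case (dl_manager.download_and_extract or a cache directory would be alternatives); and the gen_kwargs imply a _generate_examples(filepath) counterpart that this diff does not show. A hypothetical sketch of such a generator, with field names that are assumptions rather than anything taken from the commit:

import os

def _generate_examples(self, filepath):
    """Yield (key, example) pairs from a list of extracted file paths."""
    image_exts = (".jpg", ".jpeg", ".png", ".tif", ".tiff")
    for idx, path in enumerate(sorted(filepath)):
        if not path.lower().endswith(image_exts):
            continue  # skip combined.zip and any other non-image files
        yield idx, {
            "image": path,                        # assumed datasets.Image() feature
            "file_name": os.path.basename(path),  # assumed string feature
        }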