Datasets:
Tasks:
Image Classification
Sub-tasks:
multi-class-classification
Languages:
English
Size:
10K<n<100K
License:
Francisco Castillo
committed on
Commit
•
23d78a5
1
Parent(s):
ce2244b
wip
Browse files
fashion_mnist_label_drift.py
CHANGED
@@ -129,6 +129,7 @@ class FashionMNISTLabelDrift(datasets.GeneratorBasedBuilder):
|
|
129 |
# dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
|
130 |
# It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
|
131 |
# By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
|
|
|
132 |
extracted_paths = dl_manager.download_and_extract(_URLS)
|
133 |
print(extracted_paths)
|
134 |
return [
|
@@ -160,6 +161,7 @@ class FashionMNISTLabelDrift(datasets.GeneratorBasedBuilder):
|
|
160 |
def _generate_examples(self, filepath):
|
161 |
# This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
|
162 |
# The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
|
|
|
163 |
with open(filepath, 'rb') as pkl_file:
|
164 |
data = pickle.load(pkl_file, encoding='bytes')
|
165 |
prediction_ts=data['prediction_ts']
|
|
|
129 |
# dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
|
130 |
# It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
|
131 |
# By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
|
132 |
+
print("EXTRACTED PATHS=",extracted_paths)
|
133 |
extracted_paths = dl_manager.download_and_extract(_URLS)
|
134 |
print(extracted_paths)
|
135 |
return [
|
|
|
161 |
def _generate_examples(self, filepath):
|
162 |
# This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
|
163 |
# The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
|
164 |
+
print("FILEPATH=",filepath)
|
165 |
with open(filepath, 'rb') as pkl_file:
|
166 |
data = pickle.load(pkl_file, encoding='bytes')
|
167 |
prediction_ts=data['prediction_ts']
|