srivarra committed
Commit ab31001
1 Parent(s): 453973a

added datasets for nb2

ark_example.py CHANGED
@@ -17,15 +17,10 @@ This dataset contains example data for running through the multiplexed imaging d
 Ark Analysis: https://github.com/angelolab/ark-analysis
 """
 
-import json
 import os
-
 import datasets
 import pathlib
 import glob
-import tifffile
-import xarray as xr
-import numpy as np
 
 # Find for instance the citation on arxiv or on the dataset repo/website
 _CITATION = """\
@@ -52,7 +47,20 @@ _LICENSE = "https://github.com/angelolab/ark-analysis/blob/main/LICENSE"
 _URL_REPO = "https://huggingface.co/datasets/angelolab/ark_example/resolve/main"
 
 
-_URLS = {"base_dataset": f"{_URL_REPO}/data/input_data.zip"}
+_URLS = {
+    "input_data": f"{_URL_REPO}/data/input_data.zip",
+    "segmentation/cell_table": f"{_URL_REPO}/data/segmentation/cell_table.zip",
+    "segmentation/deepcell_output": f"{_URL_REPO}/data/segmentation/deepcell_output.zip",
+}
+
+_URL_DATASET_CONFIGS = {
+    "nb1": {"input_data": _URLS["input_data"]},
+    "nb2": {
+        "input_data": _URLS["input_data"],
+        "segmentation/cell_table": _URLS["segmentation/cell_table"],
+        "segmentation/deepcell_output": _URLS["segmentation/deepcell_output"],
+    },
+}
 
 """
 Dataset Fov renaming:
@@ -74,7 +82,7 @@ TMA24_R9C1 -> fov10
 class ArkExample(datasets.GeneratorBasedBuilder):
     """The Dataset consists of 11 FOVs"""
 
-    VERSION = datasets.Version("0.0.1")
+    VERSION = datasets.Version("0.0.2")
 
     # This is an example of a dataset with multiple configurations.
     # If you don't want/need to define several sub-sets in your dataset,
@@ -85,45 +93,30 @@ class ArkExample(datasets.GeneratorBasedBuilder):
     # BUILDER_CONFIG_CLASS = MyBuilderConfig
 
     # You will be able to load one or the other configurations in the following list with
-    # data = datasets.load_dataset('my_dataset', 'base_dataset')
-    # data = datasets.load_dataset('my_dataset', 'dev_dataset')
+    # data = datasets.load_dataset('my_dataset', 'nb1')
+    # data = datasets.load_dataset('my_dataset', 'nb2')
     BUILDER_CONFIGS = [
         datasets.BuilderConfig(
-            name="base_dataset",
+            name="nb1",
             version=VERSION,
-            description="This dataset contains only the 12 FOVs.",
+            description="This dataset contains only the 12 FOVs, and their 22 channels.",
         ),
         datasets.BuilderConfig(
-            name="dev_dataset",
+            name="nb2",
             version=VERSION,
-            description="This dataset is a superset of the base_dataset, and contains intermediate data for all notebooks. \
+            description="This dataset is a superset of the nb1 and contains data from notebook 1 in order to start with notebook 2. \
 Therefore you can start at any notebook with this dataset.",
         ),
     ]
 
-    DEFAULT_CONFIG_NAME = (
-        "base_dataset"  # It's not mandatory to have a default configuration. Just use one if it make sense.
-    )
-
     def _info(self):
         # This is the name of the configuration selected in BUILDER_CONFIGS above
-        if self.config.name == "base_dataset":
-            features = datasets.Features(
-                {
-                    "Channel Data": datasets.Sequence(datasets.Image()),
-                    "Channel Names": datasets.Sequence(datasets.Value("string")),
-                    "Data Path": datasets.Value("string"),
-                }
-            )
-        else:  # This is an example to show how to have different features for "first_domain" and "second_domain"
-            features = datasets.Features(
-                {
-                    "sentence": datasets.Value("string"),
-                    "option2": datasets.Value("string"),
-                    "second_domain_answer": datasets.Value("string")
-                    # These are the features of your dataset like images, labels ...
-                }
-            )
+        if self.config.name == "nb1":
+            features = datasets.Features({"Data Path": datasets.Value("string")})
+        elif self.config.name == "nb2":
+            features = datasets.Features({"Data Path": datasets.Value("string")})
+        else:
+            features = datasets.Features({"Data Path": datasets.Value("string")})
         return datasets.DatasetInfo(
             # This is the description that will appear on the datasets page.
             description=_DESCRIPTION,
@@ -147,12 +140,12 @@ class ArkExample(datasets.GeneratorBasedBuilder):
         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
         # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
-        urls = _URLS[self.config.name]
+        urls = _URL_DATASET_CONFIGS[self.config.name]
        data_dir = dl_manager.download_and_extract(urls)
 
         return [
             datasets.SplitGenerator(
-                name="base_dataset",
+                name=self.config.name,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={"filepath": pathlib.Path(data_dir)},
             ),
@@ -167,24 +160,10 @@ class ArkExample(datasets.GeneratorBasedBuilder):
         # Loop over all the TMAs
         for fp in file_paths:
 
-            # Get the TMA FOV Name
-            fov_name = fp.stem
-
-            # Get all channels per TMA FOV
-            channel_paths = fp.glob("*.tiff")
-
-            chan_data = []
-            chan_names = []
-            for chan in channel_paths:
-                chan_name = chan.stem
-                chan_image: np.ndarray = tifffile.imread(chan)
-
-                chan_data.append(chan_image)
-                chan_names.append(chan_name)
+            # Get the file Name
+            fn = fp.stem
 
-            if self.config.name == "base_dataset":
-                yield fov_name, {
-                    "Channel Data": chan_data,
-                    "Channel Names": chan_names,
+            if self.config.name == "fovs":
+                yield fn, {
                     "Data Path": filepath.as_posix(),
                 }
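
For reference, a minimal sketch of loading the two configurations defined above (the repo id is taken from _URL_REPO in the script; exact splits returned depend on the datasets library version you run):

import datasets

# "nb1": raw FOV channel data only (the notebook 1 inputs)
nb1 = datasets.load_dataset("angelolab/ark_example", "nb1")

# "nb2": superset that also pulls the segmentation cell table and
# Deepcell output archives, so notebook 2 can be started directly
nb2 = datasets.load_dataset("angelolab/ark_example", "nb2")
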
data/segmentation/cell_table.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a537cf42788feb05b7efe91c4f6867930b99a029ae5a68b374d09c2cca0a34c4
+size 10366535
data/segmentation/deepcell_output.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06d50ac056dd766909c4d66e7de3305e18e7dfa4326692395353fea30cd2a8d0
+size 916593
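
The two LFS archives added above are the ones _URLS points at. As the comments in _split_generators note, dl_manager.download_and_extract mirrors whatever structure it is given, so for the nb2 configuration a dict of URLs goes in and a dict of local extracted-folder paths comes back with the same keys. A rough, self-contained sketch (cache paths are whatever the datasets library chooses locally):

from datasets import DownloadManager

_URL_REPO = "https://huggingface.co/datasets/angelolab/ark_example/resolve/main"
urls = {
    "input_data": f"{_URL_REPO}/data/input_data.zip",
    "segmentation/cell_table": f"{_URL_REPO}/data/segmentation/cell_table.zip",
    "segmentation/deepcell_output": f"{_URL_REPO}/data/segmentation/deepcell_output.zip",
}

dl_manager = DownloadManager()
# A dict of URLs in, a dict of extracted local paths out (keys unchanged).
paths = dl_manager.download_and_extract(urls)
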