Spatial Analysis Datasets

#5 by srivarra - opened
ark_example.py CHANGED
@@ -57,8 +57,6 @@ _LICENSE = "https://github.com/angelolab/ark-analysis/blob/main/LICENSE"
 
 # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
 # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
-# _URL_REPO = "https://huggingface.co/datasets/angelolab/ark_example"
-
 
 _URL_DATA = {
     "image_data": "./data/image_data.zip",
@@ -66,6 +64,8 @@ _URL_DATA = {
     "deepcell_output": "./data/segmentation/deepcell_output.zip",
     "example_pixel_output_dir": "./data/pixie/example_pixel_output_dir.zip",
     "example_cell_output_dir": "./data/pixie/example_cell_output_dir.zip",
+    "spatial_lda": "./data/spatial_analysis/spatial_lda.zip",
+    "post_clustering": "./data/post_clustering.zip"
 }
 
 _URL_DATASET_CONFIGS = {
@@ -87,6 +87,29 @@ _URL_DATASET_CONFIGS = {
         "deepcell_output": _URL_DATA["deepcell_output"],
         "example_cell_output_dir": _URL_DATA["example_cell_output_dir"],
     },
+    "fiber_segmentation": {
+        "image_data": _URL_DATA["image_data"],
+    },
+    "LDA_preprocessing": {
+        "image_data": _URL_DATA["image_data"],
+        "cell_table": _URL_DATA["cell_table"],
+    },
+    "LDA_training_inference": {
+        "image_data": _URL_DATA["image_data"],
+        "cell_table": _URL_DATA["cell_table"],
+        "spatial_lda": _URL_DATA["spatial_lda"],
+    },
+    "neighborhood_analysis": {
+        "image_data": _URL_DATA["image_data"],
+        "cell_table": _URL_DATA["cell_table"],
+        "deepcell_output": _URL_DATA["deepcell_output"],
+    },
+    "pairwise_spatial_enrichment": {
+        "image_data": _URL_DATA["image_data"],
+        "cell_table": _URL_DATA["cell_table"],
+        "deepcell_output": _URL_DATA["deepcell_output"],
+        "post_clustering": _URL_DATA["post_clustering"],
+    }
 }
 
 
@@ -94,15 +117,7 @@ _URL_DATASET_CONFIGS = {
 class ArkExample(datasets.GeneratorBasedBuilder):
     """The Dataset consists of 11 FOVs"""
 
-    VERSION = datasets.Version("0.0.3")
-
-    # This is an example of a dataset with multiple configurations.
-    # If you don't want/need to define several sub-sets in your dataset,
-    # just remove the BUILDER_CONFIG_CLASS and the BUILDER_CONFIGS attributes.
-
-    # If you need to make complex sub-parts in the datasets with configurable options
-    # You can create your own builder configuration class to store attribute, inheriting from datasets.BuilderConfig
-    # BUILDER_CONFIG_CLASS = MyBuilderConfig
+    VERSION = datasets.Version("0.0.4")
 
     # You will be able to load one or the other configurations in the following list with
     BUILDER_CONFIGS = [
@@ -126,6 +141,31 @@ class ArkExample(datasets.GeneratorBasedBuilder):
             version=VERSION,
             description="This configuration contains data used by notebook 4 - Post Clustering.",
         ),
+        datasets.BuilderConfig(
+            name="fiber_segmentation",
+            version=VERSION,
+            description="This configuration contains data used by the Fiber Segmentation Notebook.",
+        ),
+        datasets.BuilderConfig(
+            name="LDA_preprocessing",
+            version=VERSION,
+            description="This configuration contains data used by the Spatial LDA - Preprocessing Notebook."
+        ),
+        datasets.BuilderConfig(
+            name="LDA_training_inference",
+            version=VERSION,
+            description="This configuration contains data used by the Spatial LDA - Training and Inference Notebook."
+        ),
+        datasets.BuilderConfig(
+            name="neighborhood_analysis",
+            version=VERSION,
+            description="This configuration contains data used by the Neighborhood Analysis Notebook."
+        ),
+        datasets.BuilderConfig(
+            name="pairwise_spatial_enrichment",
+            version=VERSION,
+            description="This configuration contains data used by the Pairwise Spatial Enrichment Notebook."
+        )
     ]
 
     def _info(self):
@@ -135,6 +175,11 @@ class ArkExample(datasets.GeneratorBasedBuilder):
             "cluster_pixels",
            "cluster_cells",
             "post_clustering",
+            "fiber_segmentation",
+            "LDA_preprocessing",
+            "LDA_training_inference",
+            "neighborhood_analysis",
+            "pairwise_spatial_enrichment",
         ]:
             features = datasets.Features(
                 {f: datasets.Value("string") for f in _URL_DATASET_CONFIGS[self.config.name].keys()}
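The hunks above only touch the URL mapping, the builder configs, and the feature setup in `_info`; the `_split_generators` method that actually consumes `_URL_DATASET_CONFIGS` is not part of this diff. As a rough sketch of the standard `GeneratorBasedBuilder` pattern it presumably follows (the `dataset_paths` keyword is a placeholder name, not necessarily what ark_example.py uses):

```python
# Sketch only: ark_example.py's real _split_generators is not shown in this diff.
# This assumes the usual GeneratorBasedBuilder pattern for per-config downloads.
def _split_generators(self, dl_manager):
    # Download and unpack every archive listed for the selected configuration.
    urls = _URL_DATASET_CONFIGS[self.config.name]
    extracted_paths = dl_manager.download_and_extract(urls)  # dict: key -> local path

    return [
        datasets.SplitGenerator(
            name=datasets.Split.TRAIN,
            gen_kwargs={"dataset_paths": extracted_paths},  # placeholder kwarg name
        )
    ]
```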
data/post_clustering.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:639326276833f53e4e831b72c835ea9b2376a964b04504f5d6629f4439a46883
+size 9719468
data/spatial_analysis/spatial_lda.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7028b6b132fa51eee9ebb99f9f79106e9a6f1505cd92604cc1a6dce6f99aee7a
+size 3486371
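Once merged, each of the new spatial analysis configurations should be loadable by name, downloading only the archives listed for it in `_URL_DATASET_CONFIGS`. A quick usage sketch (the split name and feature layout depend on the rest of the loading script):

```python
from datasets import load_dataset

# Example: "neighborhood_analysis" pulls image_data, cell_table, and deepcell_output.
ds = load_dataset("angelolab/ark_example", "neighborhood_analysis")
print(ds)
```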