Meehai committed on
Commit
384ca1b
1 Parent(s): 6d95ae3

semantic mapper script and multitask reader fixes

Browse files
dronescapes_reader/multitask_dataset.py CHANGED
@@ -107,7 +107,10 @@ class MultiTaskDataset(Dataset):
107
  if self._statistics is not None:
108
  for task_name, task in self.name_to_task.items():
109
  if not task.is_classification:
110
- task.set_normalization(self.normalization[task_name], self._statistics[task_name])
 
 
 
111
 
112
  # Public methods and properties
113
 
@@ -264,7 +267,7 @@ class MultiTaskDataset(Dataset):
264
  if self.cache_task_stats and cache_path.exists():
265
  res = np.load(cache_path, allow_pickle=True)["arr_0"].item()
266
  logger.info(f"Loaded task statistics: { {k: tuple(v[0].shape) for k, v in res.items()} } from {cache_path}")
267
- missing_tasks = list(set(self.task_names).difference(res.keys()))
268
  if len(missing_tasks) == 0:
269
  return res
270
  logger.info(f"Computing global task statistics (dataset len {len(self)}) for {missing_tasks}")
@@ -287,27 +290,25 @@ class MultiTaskDataset(Dataset):
287
  assert not new_mean.isnan().any() and not new_M2.isnan().any(), (mean, new_mean, counts, counts_delta)
288
  return new_count, new_mean, new_M2
289
 
290
- missing_tasks_no_classif = [t for t in missing_tasks if not self.name_to_task[t].is_classification]
 
291
  ch = {k: v[-1] if len(v) == 3 else 1 for k, v in self.data_shape.items()}
292
- counts = {task_name: tr.zeros(ch[task_name]).long() for task_name in missing_tasks_no_classif}
293
- mins = {task_name: tr.zeros(ch[task_name]).type(tr.float64) + 10**10 for task_name in missing_tasks_no_classif}
294
- maxs = {task_name: tr.zeros(ch[task_name]).type(tr.float64) - 10**10 for task_name in missing_tasks_no_classif}
295
- means_vec = {task_name: tr.zeros(ch[task_name]).type(tr.float64) for task_name in missing_tasks_no_classif}
296
- M2s_vec = {task_name: tr.zeros(ch[task_name]).type(tr.float64) for task_name in missing_tasks_no_classif}
297
 
298
  old_names, old_normalization = self.task_names, self.normalization
299
- self.task_names, self.normalization = missing_tasks_no_classif, None # for self[ix]
300
-
301
- if len(missing_tasks_no_classif) == 0:
302
- return {}
303
-
304
  res = {}
305
  BS = min(len(self), self.batch_size_stats)
306
  n = (len(self) // BS) + (len(self) % BS != 0)
 
307
  logger.debug(f"Global task statistics. Batch size: {BS}. N iterations: {n}.")
308
  for ix in trange(n, disable=os.getenv("STATS_PBAR", "0") == "0", desc="Computing stats"):
309
  item = self[ix * BS: min(len(self), (ix + 1) * BS)][0]
310
- for task in missing_tasks_no_classif:
311
  item_flat_ch = item[task].reshape(-1, ch[task])
312
  item_no_nan = item_flat_ch.nan_to_num(0).type(tr.float64)
313
  mins[task] = tr.minimum(mins[task], item_no_nan.min(0)[0])
@@ -316,7 +317,7 @@ class MultiTaskDataset(Dataset):
316
  counts[task], means_vec[task], M2s_vec[task] = \
317
  update(counts[task], counts_delta, means_vec[task], M2s_vec[task], item_no_nan)
318
 
319
- for task in missing_tasks_no_classif:
320
  res[task] = (mins[task], maxs[task], means_vec[task], (M2s_vec[task] / counts[task]).sqrt())
321
  assert not any(x[0].isnan().any() for x in res[task]), (task, res[task])
322
  self.task_names, self.normalization = old_names, old_normalization
 
107
  if self._statistics is not None:
108
  for task_name, task in self.name_to_task.items():
109
  if not task.is_classification:
110
+ try:
111
+ task.set_normalization(self.normalization[task_name], self._statistics[task_name])
112
+ except:
113
+ breakpoint()
114
 
115
  # Public methods and properties
116
 
 
267
  if self.cache_task_stats and cache_path.exists():
268
  res = np.load(cache_path, allow_pickle=True)["arr_0"].item()
269
  logger.info(f"Loaded task statistics: { {k: tuple(v[0].shape) for k, v in res.items()} } from {cache_path}")
270
+ missing_tasks = [t for t in set(self.task_names).difference(res) if not self.name_to_task[t].is_classification]
271
  if len(missing_tasks) == 0:
272
  return res
273
  logger.info(f"Computing global task statistics (dataset len {len(self)}) for {missing_tasks}")
 
290
  assert not new_mean.isnan().any() and not new_M2.isnan().any(), (mean, new_mean, counts, counts_delta)
291
  return new_count, new_mean, new_M2
292
 
293
+ assert not any(mt := [self.name_to_task[t].is_classification for t in missing_tasks]), mt
294
+ assert len(missing_tasks) > 0, missing_tasks
295
  ch = {k: v[-1] if len(v) == 3 else 1 for k, v in self.data_shape.items()}
296
+ counts = {task_name: tr.zeros(ch[task_name]).long() for task_name in missing_tasks}
297
+ mins = {task_name: tr.zeros(ch[task_name]).type(tr.float64) + 10**10 for task_name in missing_tasks}
298
+ maxs = {task_name: tr.zeros(ch[task_name]).type(tr.float64) - 10**10 for task_name in missing_tasks}
299
+ means_vec = {task_name: tr.zeros(ch[task_name]).type(tr.float64) for task_name in missing_tasks}
300
+ M2s_vec = {task_name: tr.zeros(ch[task_name]).type(tr.float64) for task_name in missing_tasks}
301
 
302
  old_names, old_normalization = self.task_names, self.normalization
303
+ self.task_names, self.normalization = missing_tasks, None # for self[ix]
 
 
 
 
304
  res = {}
305
  BS = min(len(self), self.batch_size_stats)
306
  n = (len(self) // BS) + (len(self) % BS != 0)
307
+
308
  logger.debug(f"Global task statistics. Batch size: {BS}. N iterations: {n}.")
309
  for ix in trange(n, disable=os.getenv("STATS_PBAR", "0") == "0", desc="Computing stats"):
310
  item = self[ix * BS: min(len(self), (ix + 1) * BS)][0]
311
+ for task in missing_tasks:
312
  item_flat_ch = item[task].reshape(-1, ch[task])
313
  item_no_nan = item_flat_ch.nan_to_num(0).type(tr.float64)
314
  mins[task] = tr.minimum(mins[task], item_no_nan.min(0)[0])
 
317
  counts[task], means_vec[task], M2s_vec[task] = \
318
  update(counts[task], counts_delta, means_vec[task], M2s_vec[task], item_no_nan)
319
 
320
+ for task in missing_tasks:
321
  res[task] = (mins[task], maxs[task], means_vec[task], (M2s_vec[task] / counts[task]).sqrt())
322
  assert not any(x[0].isnan().any() for x in res[task]), (task, res[task])
323
  self.task_names, self.normalization = old_names, old_normalization
scripts/dronescapes_viewer.ipynb CHANGED
The diff for this file is too large to render. See raw diff
 
scripts/semantic_mapper.ipynb ADDED
The diff for this file is too large to render. See raw diff