# Copyright (c) Owkin, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import pickle
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import datasets
import numpy as np
import torch
from sklearn.metrics import roc_auc_score
from torch.utils.data.dataloader import default_collate


def pad_collate_fn(
batch: List[Tuple[torch.Tensor, Any]],
batch_first: bool = True,
max_len: Optional[int] = None,
) -> Tuple[torch.Tensor, torch.BoolTensor, Any]:
"""Pad together sequences of arbitrary lengths.
Add a mask of the padding to the samples that can later be used
to ignore padding in activation functions.
Expected to be used in combination of a torch.utils.datasets.DataLoader.
Expect the sequences to be padded to be the first one in the sample tuples.
Others members will be batched using ``torch.utils.data.dataloader.default_collate``.
Parameters
----------
batch: List[Tuple[torch.Tensor, Any]]
List of tuples (features, Any). Features have shape (N_slides_tiles, F)
with ``N_slides_tiles`` being specific to each slide depending on the
number of extractable tiles in the tissue matter. ``F`` is the feature
extractor output dimension.
    batch_first: bool = True
        Whether to return (B, N_TILES, F) (``True``) or (N_TILES, B, F)
        (``False``).
    max_len: Optional[int] = None
        Pre-defined maximum length for elements inside a batch.

    Returns
-------
    padded_sequences, masks, Any: Tuple[torch.Tensor, torch.BoolTensor, Any]
        - if batch_first: Tuple[(B, N_TILES, F), (B, N_TILES, 1), ...]
        - else: Tuple[(N_TILES, B, F), (N_TILES, B, 1), ...]
        with N_TILES = max_len if max_len is not None, or the maximum
        sequence length in the batch otherwise. Masks are ``True`` on
        padded positions and ``False`` on valid ones.
"""
    # The sequences to be padded are expected to come first in each sample tuple
sequences = []
others = []
for sample in batch:
sequences.append(sample[0])
others.append(sample[1:])
if max_len is None:
max_len = max([s.size(0) for s in sequences])
trailing_dims = sequences[0].size()[1:]
if batch_first:
padded_dims = (len(sequences), max_len) + trailing_dims
masks_dims = (len(sequences), max_len, 1)
else:
padded_dims = (max_len, len(sequences)) + trailing_dims
masks_dims = (max_len, len(sequences), 1)
    padded_sequences = sequences[0].new_zeros(padded_dims)
masks = torch.ones(*masks_dims, dtype=torch.bool)
for i, tensor in enumerate(sequences):
length = tensor.size(0)
# use index notation to prevent duplicate references to the tensor
if batch_first:
padded_sequences[i, :length, ...] = tensor[:max_len, ...]
masks[i, :length, ...] = False
else:
padded_sequences[:length, i, ...] = tensor[:max_len, ...]
masks[:length, i, ...] = False
# Batching other members of the tuple using default_collate
others = default_collate(others)
return (padded_sequences, masks, *others)
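

# Illustrative usage sketch (not part of the original script): wiring
# ``pad_collate_fn`` into a DataLoader over variable-length bags of tile
# features. The toy bags below are assumptions for demonstration; a plain
# list of (features, label) tuples is a valid map-style dataset.
#
#     from torch.utils.data import DataLoader
#
#     bags = [(torch.randn(n, 2048), torch.tensor(float(n % 2)))
#             for n in (10, 37, 5)]
#     loader = DataLoader(bags, batch_size=3, collate_fn=pad_collate_fn)
#     features, masks, labels = next(iter(loader))
#     # features: (3, 37, 2048), masks: (3, 37, 1), labels: (3,)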


def auc(labels: np.ndarray, logits: np.ndarray) -> float:
    """ROC AUC score for binary classification.

    Parameters
    ----------
    labels: np.ndarray
        Binary labels of the outcome.
    logits: np.ndarray
        Raw logits, mapped to probabilities with a sigmoid before scoring.
    """
preds = 1.0 / (1.0 + np.exp(-logits))
return roc_auc_score(labels, preds)
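
# Toy example (values assumed for illustration): since the sigmoid is
# monotonic, ranking by logits or by probabilities yields the same ROC AUC.
#
#     labels = np.array([0, 1, 1, 0, 1])
#     logits = np.array([-1.2, 0.8, -0.5, 0.3, 0.5])
#     auc(labels, logits)  # 5 of 6 positive/negative pairs ranked
#                          # correctly -> 0.8333...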


def get_cv_metrics(
    cv_metrics: List[Dict[str, List[float]]], epoch: int = -1
) -> Dict[str, str]:
    """Get mean and std from cross-validation metrics at a given epoch.

    Each fold dictionary is expected to map a metric name to a list of
    per-epoch values; the result maps each metric name to a
    ``"mean ± std"`` string.
    """
cv_mean_metrics = {}
metrics_names = cv_metrics[0].keys()
for m_name in metrics_names:
values = [fold_metrics[m_name][epoch] for fold_metrics in cv_metrics]
mean_metric, std_metric = np.mean(values), np.std(values)
cv_mean_metrics[m_name] = f"{mean_metric:.4f} ± {std_metric:.4f}"
return cv_mean_metrics
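

# Illustrative example (numbers assumed): each fold stores one value per
# epoch, so the default ``epoch=-1`` aggregates the last epoch across folds.
#
#     cv_metrics = [
#         {"auc": [0.70, 0.78, 0.81]},  # fold 0
#         {"auc": [0.68, 0.75, 0.79]},  # fold 1
#     ]
#     get_cv_metrics(cv_metrics)  # {"auc": "0.8000 ± 0.0100"}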