# leaderboards/utils.py
# Metrics for which lower values are better; leaderboard columns for these
# are sorted in ascending order.
ascending_metrics = {
    "wer",
    "cer",
    "loss",
    "mae",
    "mahalanobis",
    "mse",
    "perplexity",
    "ter",
}
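
# A minimal usage sketch (hypothetical helper, not part of the original
# module): the metrics above are "lower is better", so a leaderboard column
# holding one of them would be sorted ascending, and every other metric
# descending.
def is_ascending(metric_name):
    """Return True if smaller values of this metric are better."""
    return metric_name in ascending_metrics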
# Known (min, max) value ranges for each metric.
metric_ranges = {
    "accuracy": (0, 1),
    "precision": (0, 1),
    "recall": (0, 1),
    "macro f1": (0, 1),
    "micro f1": (0, 1),
    "pearson": (-1, 1),
    "matthews_correlation": (-1, 1),
    "spearmanr": (-1, 1),
    "google_bleu": (0, 1),
    "precision@10": (0, 1),
    "mae": (0, 1),
    "mauve": (0, 1),
    "frontier_integral": (0, 1),
    "mean_iou": (0, 1),
    "mean_accuracy": (0, 1),
    "overall_accuracy": (0, 1),
    "meteor": (0, 1),
    "mse": (0, 1),
    "perplexity": (0, float("inf")),
"rogue1": (0, 1),
"rogue2": (0, 1),
"sari": (0, 100),
}
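
# A minimal sketch (hypothetical helper, assuming scores are rescaled for
# display): metric_ranges maps a metric name to its known (min, max) value
# range, which can be used to bring raw scores onto a common [0, 1] scale.
# Metrics with an unknown or unbounded range are returned unchanged.
def normalize_score(metric_name, value):
    """Linearly rescale value into [0, 1] using the metric's known range."""
    if metric_name not in metric_ranges:
        return value
    low, high = metric_ranges[metric_name]
    if high == float("inf"):
        # Unbounded metrics (e.g. perplexity) cannot be linearly rescaled.
        return value
    return (value - low) / (high - low)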