# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TODO: Add a description here."""
import evaluate
import datasets
from sklearn.metrics import precision_recall_curve, auc
_CITATION = """\
@article{scikit-learn,
    title={Scikit-learn: Machine Learning in {P}ython},
    author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V.
            and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P.
            and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and
            Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.},
    journal={Journal of Machine Learning Research},
    volume={12},
    pages={2825--2830},
    year={2011}
}
"""

_DESCRIPTION = """\
Computes the area under the precision-recall curve (PR AUC) from prediction scores and binary
reference labels, using scikit-learn's `precision_recall_curve` and `auc`. Implementation details
taken from https://sinyi-chou.github.io/python-sklearn-precision-recall/
"""

_KWARGS_DESCRIPTION = """
Computes the area under the precision-recall curve for binary classification.
Args:
    prediction_scores (`list` of `float`): Predicted scores for the positive class
        (e.g. probabilities or decision-function values produced by a model).
    references (`list` of `int`): Ground-truth binary labels (0 or 1), one per prediction.
Returns:
    pr_auc (`float`): Area under the precision-recall curve.
Examples:
    >>> pr_auc = evaluate.load("pr_auc")  # adjust to the path or Hub id of this script
    >>> results = pr_auc.compute(references=[0, 1, 1, 0], prediction_scores=[0.1, 0.9, 0.8, 0.3])
    >>> print(round(results["pr_auc"], 2))
    1.0
"""

@evaluate.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class PRAUC(evaluate.Metric):
def _info(self):
return evaluate.MetricInfo(
# This is the description that will appear on the modules page.
module_type="metric",
description=_DESCRIPTION,
citation=_CITATION,
inputs_description=_KWARGS_DESCRIPTION,
# This defines the format of each prediction and reference
features=datasets.Features({
'prediction_scores': datasets.Value("float"),
'references': datasets.Value('int32'),
}),
            # Homepage of the module for documentation
            homepage="https://sinyi-chou.github.io/python-sklearn-precision-recall/",
            # Additional links to references
            reference_urls=[
                "https://scikit-learn.org/stable/modules/generated/sklearn.metrics.precision_recall_curve.html",
                "https://scikit-learn.org/stable/modules/generated/sklearn.metrics.auc.html",
            ],
)

    def _compute(self, prediction_scores, references):
        """Computes the area under the precision-recall curve."""
        # Precision/recall pairs at each score threshold; the thresholds themselves
        # are not needed for the area computation.
        precision, recall, _ = precision_recall_curve(references, prediction_scores)
        # Trapezoidal integration of precision over recall.
        auc_precision_recall = auc(recall, precision)
        return {
            "pr_auc": auc_precision_recall,
        }
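

# A minimal usage sketch (assumption: this script is saved as "pr_auc.py" in the
# current working directory; evaluate.load also accepts a Hub id). On this toy
# data every positive is scored above every negative, so PR AUC is exactly 1.0.
if __name__ == "__main__":
    pr_auc = evaluate.load("pr_auc.py")
    results = pr_auc.compute(
        references=[0, 1, 1, 0, 1],
        prediction_scores=[0.2, 0.8, 0.7, 0.4, 0.9],
    )
    print(results)  # {'pr_auc': 1.0}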