"""BirdSet: The General Avian Monitoring Evaluation Benchmark""" |
|
|
|
import os |
|
import datasets |
|
import pandas as pd |
|
|
|
from .classes import BIRD_NAMES_BIRDDB, BIRD_NAMES_NIPS4BPLUS, BIRD_NAMES_AMAZON_BASIN, BIRD_NAMES_HAWAII, \
    BIRD_NAMES_HIGH_SIERRAS, BIRD_NAMES_SIERRA_NEVADA, BIRD_NAMES_POWDERMILL_NATURE, BIRD_NAMES_SAPSUCKER, \
    BIRD_NAMES_COLUMBIA_COSTA_RICA, BIRD_NAMES_XENOCANTO
|
|
|
from .descriptions import _BIRD_DB_DESCRIPTION, _BIRD_DB_CITATION, _NIPS4BPLUS_CITATION, _NIPS4BPLUS_DESCRIPTION, \
    _HIGH_SIERRAS_DESCRIPTION, _HIGH_SIERRAS_CITATION, _SIERRA_NEVADA_DESCRIPTION, _SIERRA_NEVADA_CITATION, \
    _POWDERMILL_NATURE_DESCRIPTION, _POWDERMILL_NATURE_CITATION, _AMAZON_BASIN_DESCRIPTION, _AMAZON_BASIN_CITATION, \
    _SAPSUCKER_WOODS_DESCRIPTION, _SAPSUCKER_WOODS_CITATION, _COLUMBIA_COSTA_RICA_CITATION, \
    _COLUMBIA_COSTA_RICA_DESCRIPTION, _HAWAIIAN_ISLANDS_CITATION, _HAWAIIAN_ISLANDS_DESCRIPTION
|
|
|
|
|
_BIRDSET_CITATION = """\
@article{rauch2024,
  title={BirdSet: A Multi-Task Benchmark For Avian Diversity Monitoring},
  author={Rauch, Lukas and Schwinger, Raphael and Wirth, Moritz and Lange, Jonas and Heinrich, René},
  year={2024}
}
""" |
|
_BIRDSET_DESCRIPTION = """\
BirdSet offers a unified, well-structured platform for avian bioacoustics that spans several tasks. \
By combining these tasks, BirdSet yields an overall performance score for models and exposes their \
limitations in specific areas.
Note that each dataset contained in BirdSet has its own citation; please consult the source for the \
correct citation of each contained dataset.
""" |
|
|
|
base_url = "https://huggingface.co/datasets/DBD-research-group/gadme/resolve/data" |
|
|
class BirdSetConfig(datasets.BuilderConfig):
    """BuilderConfig for one BirdSet subset, bundling its feature schema and citation."""
|
def __init__( |
|
self, |
|
name, |
|
citation, |
|
class_list, |
|
**kwargs): |
|
super().__init__(version=datasets.Version("0.0.1"), name=name, **kwargs) |
|
|
|
features = datasets.Features({ |
|
"audio": datasets.Audio(sampling_rate=32_000, mono=True, decode=True), |
|
"filepath": datasets.Value("string"), |
|
"start_time": datasets.Value("float64"), |
|
"end_time": datasets.Value("float64"), |
|
"low_freq": datasets.Value("int64"), |
|
"high_freq": datasets.Value("int64"), |
|
"ebird_code": datasets.ClassLabel(names=class_list), |
|
"ebird_code_multiclass": datasets.Sequence(datasets.ClassLabel(names=class_list)), |
|
"ebird_code_secondary": datasets.Sequence(datasets.Value("string")), |
|
"call_type": datasets.Value("string"), |
|
"sex": datasets.Value("string"), |
|
"lat": datasets.Value("float64"), |
|
"long": datasets.Value("float64"), |
|
"length": datasets.Value("int64"), |
|
"microphone": datasets.Value("string"), |
|
"license": datasets.Value("string"), |
|
"source": datasets.Value("string"), |
|
"local_time": datasets.Value("string"), |
|
"detected_events": datasets.Sequence(datasets.Sequence(datasets.Value("float64"))), |
|
"event_cluster": datasets.Sequence(datasets.Value("int64")), |
|
"quality": datasets.Value("string"), |
|
"recordist": datasets.Value("string"), |
|
}) |
|
|
|
self.features = features |
|
self.citation = citation |
|
|
|
|
|
class BirdSet(datasets.GeneratorBasedBuilder): |
|
"""TODO: Short description of my dataset.""" |
|
|
|
DEFAULT_WRITER_BATCH_SIZE = 500 |
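
    # Configuration naming scheme (resolved in _split_generators below):
    #   "<ID>"        -> train, test, and test_5s splits
    #   "<ID>_xc"     -> train split only (presumably the focal xeno-canto recordings)
    #   "<ID>_scape"  -> test and test_5s splits only (presumably the soundscape recordings)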
|
|
|
BUILDER_CONFIGS = [ |
|
BirdSetConfig( |
|
name="SSW", |
|
description=_SAPSUCKER_WOODS_DESCRIPTION, |
|
citation=_SAPSUCKER_WOODS_CITATION, |
|
data_dir=f"{base_url}/SSW", |
|
class_list=BIRD_NAMES_SAPSUCKER, |
|
), |
|
BirdSetConfig( |
|
name="SSW_xc", |
|
description=_SAPSUCKER_WOODS_DESCRIPTION, |
|
citation=_SAPSUCKER_WOODS_CITATION, |
|
data_dir=f"{base_url}/SSW", |
|
class_list=BIRD_NAMES_SAPSUCKER, |
|
), |
|
BirdSetConfig( |
|
name="SSW_scape", |
|
description=_SAPSUCKER_WOODS_DESCRIPTION, |
|
citation=_SAPSUCKER_WOODS_CITATION, |
|
data_dir=f"{base_url}/SSW", |
|
class_list=BIRD_NAMES_SAPSUCKER, |
|
), |
|
BirdSetConfig( |
|
name="PER", |
|
description=_AMAZON_BASIN_DESCRIPTION, |
|
citation=_AMAZON_BASIN_CITATION, |
|
data_dir=f"{base_url}/PER", |
|
class_list=BIRD_NAMES_AMAZON_BASIN, |
|
), |
|
BirdSetConfig( |
|
name="PER_xc", |
|
description=_AMAZON_BASIN_DESCRIPTION, |
|
citation=_AMAZON_BASIN_CITATION, |
|
data_dir=f"{base_url}/PER", |
|
class_list=BIRD_NAMES_AMAZON_BASIN, |
|
), |
|
BirdSetConfig( |
|
name="PER_scape", |
|
description=_AMAZON_BASIN_DESCRIPTION, |
|
citation=_AMAZON_BASIN_CITATION, |
|
data_dir=f"{base_url}/PER", |
|
class_list=BIRD_NAMES_AMAZON_BASIN, |
|
), |
|
BirdSetConfig( |
|
name="UHH", |
|
description=_HAWAIIAN_ISLANDS_DESCRIPTION, |
|
citation=_HAWAIIAN_ISLANDS_CITATION, |
|
data_dir=f"{base_url}/UHH", |
|
class_list=BIRD_NAMES_HAWAII, |
|
), |
|
BirdSetConfig( |
|
name="UHH_xc", |
|
description=_HAWAIIAN_ISLANDS_DESCRIPTION, |
|
citation=_HAWAIIAN_ISLANDS_CITATION, |
|
data_dir=f"{base_url}/UHH", |
|
class_list=BIRD_NAMES_HAWAII, |
|
), |
|
BirdSetConfig( |
|
name="UHH_scape", |
|
description=_HAWAIIAN_ISLANDS_DESCRIPTION, |
|
citation=_HAWAIIAN_ISLANDS_CITATION, |
|
data_dir=f"{base_url}/UHH", |
|
class_list=BIRD_NAMES_HAWAII, |
|
), |
|
BirdSetConfig( |
|
name="SNE", |
|
description=_SIERRA_NEVADA_DESCRIPTION, |
|
citation=_SIERRA_NEVADA_CITATION, |
|
data_dir=f"{base_url}/SNE", |
|
class_list=BIRD_NAMES_SIERRA_NEVADA, |
|
), |
|
BirdSetConfig( |
|
name="SNE_xc", |
|
description=_SIERRA_NEVADA_DESCRIPTION, |
|
citation=_SIERRA_NEVADA_CITATION, |
|
data_dir=f"{base_url}/SNE", |
|
class_list=BIRD_NAMES_SIERRA_NEVADA, |
|
), |
|
BirdSetConfig( |
|
name="SNE_scape", |
|
description=_SIERRA_NEVADA_DESCRIPTION, |
|
citation=_SIERRA_NEVADA_CITATION, |
|
data_dir=f"{base_url}/SNE", |
|
class_list=BIRD_NAMES_SIERRA_NEVADA, |
|
), |
|
BirdSetConfig( |
|
name="POW", |
|
description=_POWDERMILL_NATURE_DESCRIPTION, |
|
citation=_POWDERMILL_NATURE_CITATION, |
|
data_dir=f"{base_url}/POW", |
|
class_list=BIRD_NAMES_POWDERMILL_NATURE, |
|
), |
|
BirdSetConfig( |
|
name="POW_xc", |
|
description=_POWDERMILL_NATURE_DESCRIPTION, |
|
citation=_POWDERMILL_NATURE_CITATION, |
|
data_dir=f"{base_url}/POW", |
|
class_list=BIRD_NAMES_POWDERMILL_NATURE, |
|
), |
|
BirdSetConfig( |
|
name="POW_scape", |
|
description=_POWDERMILL_NATURE_DESCRIPTION, |
|
citation=_POWDERMILL_NATURE_CITATION, |
|
data_dir=f"{base_url}/POW", |
|
class_list=BIRD_NAMES_POWDERMILL_NATURE, |
|
), |
|
BirdSetConfig( |
|
name="HSN", |
|
description=_HIGH_SIERRAS_DESCRIPTION, |
|
citation=_HIGH_SIERRAS_CITATION, |
|
data_dir=f"{base_url}/HSN", |
|
class_list=BIRD_NAMES_HIGH_SIERRAS, |
|
), |
|
BirdSetConfig( |
|
name="HSN_xc", |
|
description=_HIGH_SIERRAS_DESCRIPTION, |
|
citation=_HIGH_SIERRAS_CITATION, |
|
data_dir=f"{base_url}/HSN", |
|
class_list=BIRD_NAMES_HIGH_SIERRAS, |
|
), |
|
BirdSetConfig( |
|
name="HSN_scape", |
|
description=_HIGH_SIERRAS_DESCRIPTION, |
|
citation=_HIGH_SIERRAS_CITATION, |
|
data_dir=f"{base_url}/HSN", |
|
class_list=BIRD_NAMES_HIGH_SIERRAS, |
|
), |
|
BirdSetConfig( |
|
name="NES", |
|
description=_COLUMBIA_COSTA_RICA_DESCRIPTION, |
|
citation=_COLUMBIA_COSTA_RICA_CITATION, |
|
data_dir=f"{base_url}/NES", |
|
class_list=BIRD_NAMES_COLUMBIA_COSTA_RICA, |
|
), |
|
BirdSetConfig( |
|
name="NES_xc", |
|
description=_COLUMBIA_COSTA_RICA_DESCRIPTION, |
|
citation=_COLUMBIA_COSTA_RICA_CITATION, |
|
data_dir=f"{base_url}/NES", |
|
class_list=BIRD_NAMES_COLUMBIA_COSTA_RICA, |
|
), |
|
BirdSetConfig( |
|
name="NES_scape", |
|
description=_COLUMBIA_COSTA_RICA_DESCRIPTION, |
|
citation=_COLUMBIA_COSTA_RICA_CITATION, |
|
data_dir=f"{base_url}/NES", |
|
class_list=BIRD_NAMES_COLUMBIA_COSTA_RICA, |
|
), |
|
BirdSetConfig( |
|
name="NIPS", |
|
description=_NIPS4BPLUS_DESCRIPTION, |
|
citation=_NIPS4BPLUS_CITATION, |
|
data_dir=f"{base_url}/NIPS", |
|
class_list=BIRD_NAMES_NIPS4BPLUS, |
|
), |
|
BirdSetConfig( |
|
name="NIPS_xc", |
|
description=_NIPS4BPLUS_DESCRIPTION, |
|
citation=_NIPS4BPLUS_CITATION, |
|
data_dir=f"{base_url}/NIPS", |
|
class_list=BIRD_NAMES_NIPS4BPLUS, |
|
), |
|
BirdSetConfig( |
|
name="NIPS_scape", |
|
description=_NIPS4BPLUS_DESCRIPTION, |
|
citation=_NIPS4BPLUS_CITATION, |
|
data_dir=f"{base_url}/NIPS", |
|
class_list=BIRD_NAMES_NIPS4BPLUS, |
|
), |
|
BirdSetConfig( |
|
name="xenocanto", |
|
description="TODO", |
|
citation="TODO", |
|
data_dir=f"{base_url}/xenocanto", |
|
class_list=BIRD_NAMES_XENOCANTO, |
|
), |
|
] |
|
|
|
def _info(self): |
|
return datasets.DatasetInfo( |
|
description=_BIRDSET_DESCRIPTION + self.config.description, |
|
features=self.config.features, |
|
citation=self.config.citation + "\n" + _BIRDSET_CITATION, |
|
) |
|
|
|
def _split_generators(self, dl_manager): |
|
ds_name = self.config.name |
|
        # Number of downloadable .tar.gz audio shards per subset.
        train_files = {"PER": 10,
|
"NES": 12, |
|
"UHH": 4, |
|
"HSN": 6, |
|
"NIPS": 14, |
|
"POW": 36, |
|
"SSW": 27, |
|
"SNE": 20} |
|
|
|
test_files = {"PER": 3, |
|
"NES": 8, |
|
"UHH": 7, |
|
"HSN": 3, |
|
"NIPS": 1, |
|
"POW": 3, |
|
"SSW": 36, |
|
"SNE": 5} |
|
|
|
        if self.config.name.endswith("_xc"):
            ds_name = ds_name[:-3]  # strip the "_xc" suffix to recover the base subset name
|
dl_dir = dl_manager.download({ |
|
"train": [os.path.join(self.config.data_dir, f"{ds_name}_train_shard_{n:04d}.tar.gz") for n in range(1, train_files[ds_name] + 1)], |
|
"metadata": os.path.join(self.config.data_dir, f"{ds_name}_metadata_train.parquet"), |
|
}) |
|
|
|
        elif self.config.name.endswith("_scape"):
            ds_name = ds_name[:-6]  # strip the "_scape" suffix to recover the base subset name
|
dl_dir = dl_manager.download({ |
|
"test": [os.path.join(self.config.data_dir, f"{ds_name}_test_shard_{n:04d}.tar.gz") for n in range(1, test_files[ds_name] + 1)], |
|
"metadata": os.path.join(self.config.data_dir, f"{ds_name}_metadata_test.parquet"), |
|
"metadata_5s": os.path.join(self.config.data_dir, f"{ds_name}_metadata_test_5s.parquet"), |
|
}) |
|
|
|
        elif self.config.name == "xenocanto":
            # NOTE: "xenocanto" has no entry in the train_files table above, so the shard
            # range below would raise a KeyError until a shard count is added for it.
            dl_dir = dl_manager.download({
                "train": [os.path.join(self.config.data_dir, f"{ds_name}_shard_{n:04d}.tar.gz") for n in range(1, train_files[ds_name] + 1)],
|
"metadata": os.path.join(self.config.data_dir, f"{ds_name}_metadata_metadata.parquet"), |
|
}) |
|
|
|
        elif self.config.name in train_files:
|
dl_dir = dl_manager.download({ |
|
"train": [os.path.join(self.config.data_dir, f"{ds_name}_train_shard_{n:04d}.tar.gz") for n in range(1, train_files[ds_name] + 1)], |
|
"test": [os.path.join(self.config.data_dir, f"{ds_name}_test_shard_{n:04d}.tar.gz") for n in range(1, test_files[ds_name] + 1)], |
|
"meta_train": os.path.join(self.config.data_dir, f"{ds_name}_metadata_train.parquet"), |
|
"meta_test": os.path.join(self.config.data_dir, f"{ds_name}_metadata_test.parquet"), |
|
"meta_test_5s": os.path.join(self.config.data_dir, f"{ds_name}_metadata_test_5s.parquet"), |
|
}) |
|
|
|
        # Archives are only extracted when not streaming; in streaming mode the audio bytes
        # are read directly from the tar members in _generate_examples.
        local_audio_archives_paths = dl_manager.extract(dl_dir) if not dl_manager.is_streaming else None
|
|
|
if self.config.name == "xenocanto" or self.config.name.endswith("_xc"): |
|
return [ |
|
datasets.SplitGenerator( |
|
name=datasets.Split.TRAIN, |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["train"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["train"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["metadata"], |
|
"split": datasets.Split.TRAIN, |
|
}, |
|
), |
|
] |
|
|
|
elif self.config.name.endswith("_scape"): |
|
return [ |
|
datasets.SplitGenerator( |
|
name=datasets.Split.TEST, |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["test"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["test"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["metadata"], |
|
"split": datasets.Split.TEST, |
|
}, |
|
), |
|
datasets.SplitGenerator( |
|
name="test_5s", |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["test"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["test"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["metadata_5s"], |
|
"split": "test_multiclass" |
|
}, |
|
), |
|
] |
|
|
|
return [ |
|
datasets.SplitGenerator( |
|
name=datasets.Split.TRAIN, |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["train"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["train"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["meta_train"], |
|
"split": datasets.Split.TRAIN, |
|
}, |
|
), |
|
datasets.SplitGenerator( |
|
name=datasets.Split.TEST, |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["test"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["test"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["meta_test"], |
|
"split": datasets.Split.TEST, |
|
}, |
|
), |
|
datasets.SplitGenerator( |
|
name="test_5s", |
|
gen_kwargs={ |
|
"audio_archive_iterators": [dl_manager.iter_archive(archive_path) for archive_path in dl_dir["test"]], |
|
"local_audio_archives_paths": local_audio_archives_paths["test"] if local_audio_archives_paths else None, |
|
"metapath": dl_dir["meta_test_5s"], |
|
"split": "test_multiclass" |
|
}, |
|
), |
|
] |
|
|
|
    def _generate_examples(self, audio_archive_iterators, local_audio_archives_paths, metapath, split):
        """Yield examples by joining the audio archives with the parquet metadata of the given split."""
        metadata = pd.read_parquet(metapath)
|
idx = 0 |
|
for i, audio_archive_iterator in enumerate(audio_archive_iterators): |
|
for audio_path_in_archive, audio_file in audio_archive_iterator: |
|
                file_name = os.path.split(audio_path_in_archive)[-1]
                # Training metadata is indexed by the numeric recording id, obtained here by
                # stripping the two-letter prefix and the extension from the file name
                # (presumably xeno-canto style names such as "XC123456.ogg"); metadata for
                # the test splits is indexed by the file name itself.
                rows = metadata.loc[[int(file_name[2:].split(".")[0])] if split == "train" else [file_name]]
                audio_path = os.path.join(local_audio_archives_paths[i], audio_path_in_archive) if local_audio_archives_paths else audio_path_in_archive

                # Pass a local file path when the archives were extracted; otherwise (streaming)
                # pass the raw audio bytes read from the tar member.
                audio = audio_path if local_audio_archives_paths else audio_file.read()
                for _, row in rows.iterrows():
                    idx += 1
                    yield file_name if split == "train" else idx, {
|
"audio": audio, |
|
"filepath": audio_path, |
|
"start_time": row["start_time"], |
|
"end_time": row["end_time"], |
|
"low_freq": row["low_freq"], |
|
"high_freq": row["high_freq"], |
|
"ebird_code": row["ebird_code"] if split != "test_multiclass" else None, |
|
"ebird_code_multiclass": None if split != "test_multiclass" else row.get("ebird_code_multiclass", None), |
|
"ebird_code_secondary": row.get("ebird_code_multiclass", None), |
|
"call_type": row["call_type"], |
|
"sex": row["sex"], |
|
"lat": row["lat"], |
|
"long": row["long"], |
|
"length": row.get("length", None), |
|
"microphone": row["microphone"], |
|
"license": row.get("license", None), |
|
"source": row["source"], |
|
"local_time": row["local_time"], |
|
"detected_events": row.get("detected_events", None), |
|
"event_cluster": row.get("event_cluster", None), |
|
"quality": row.get("quality", None), |
|
"recordist": row.get("recordist", None) |
|
} |
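

# Downstream sketch (illustrative only): turning the "test_5s" annotations into a multi-hot
# label vector for evaluation. The repository id is an assumption, as in the usage sketch at
# the top of this file, and the snippet is not executed by the loading script.
#
#   import numpy as np
#   from datasets import load_dataset
#
#   ds = load_dataset("DBD-research-group/BirdSet", "HSN", trust_remote_code=True)
#   classes = ds["test_5s"].features["ebird_code_multiclass"].feature.names
#
#   def to_multi_hot(example):
#       target = np.zeros(len(classes), dtype=np.float32)
#       target[example["ebird_code_multiclass"]] = 1.0
#       return {"target": target}
#
#   ds["test_5s"] = ds["test_5s"].map(to_multi_hot)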
|
|