yangwang825 committed on
Commit
b6746bc
•
1 Parent(s): 24d16fc

Create audioset.py

Files changed (1)
  1. audioset.py +181 -0
audioset.py ADDED
@@ -0,0 +1,181 @@
# coding=utf-8

"""AudioSet sound event classification dataset."""


import os
import json
import gzip
import joblib
import shutil
import pathlib
import logging
import zipfile
import textwrap
import datasets
import requests
import itertools
import typing as tp
import pandas as pd
from pathlib import Path
from copy import deepcopy
from tqdm.auto import tqdm
from rich.logging import RichHandler

logger = logging.getLogger(__name__)
logger.addHandler(RichHandler())
logger.setLevel(logging.INFO)


DATA_DIR_STRUCTURE = """
audios/
├── balanced_train_segments [20550 entries]
├── eval_segments [18887 entries]
└── unbalanced_train_segments
    ├── unbalanced_train_segments_part00 [46940 entries]
    ...
    └── unbalanced_train_segments_part40 [9844 entries]
"""

# Placeholder definitions: SAMPLE_RATE=None keeps each clip's native sampling rate,
# and CLASSES must be filled with the 527 AudioSet label display names used by the
# ClassLabel feature below.
SAMPLE_RATE = None
CLASSES: tp.List[str] = []


class AudioSetConfig(datasets.BuilderConfig):
    """BuilderConfig for AudioSet."""

    def __init__(self, features, **kwargs):
        super().__init__(version=datasets.Version("0.0.1", ""), **kwargs)
        self.features = features


class AudioSet(datasets.GeneratorBasedBuilder):

    BUILDER_CONFIGS = [
        AudioSetConfig(
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=SAMPLE_RATE),
                    "sound": datasets.Sequence(datasets.Value("string")),
                    "label": datasets.Sequence(datasets.features.ClassLabel(names=CLASSES)),
                }
            ),
            name="balanced",
            description="",
        ),
        AudioSetConfig(
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=SAMPLE_RATE),
                    "sound": datasets.Sequence(datasets.Value("string")),
                    "label": datasets.Sequence(datasets.features.ClassLabel(names=CLASSES)),
                }
            ),
            name="unbalanced",
            description="",
        ),
    ]

    def _info(self):
        return datasets.DatasetInfo(
            description="",
            features=self.config.features,
            supervised_keys=None,
            homepage="",
            citation="",
            task_templates=None,
        )

    @property
    def manual_download_instructions(self):
        return (
            "To use AudioSet you have to download it manually. "
            "Please download the dataset from https://huggingface.co/datasets/confit/audioset-full \n"
            "Then extract all files into one folder called `audios` and load the dataset with: "
            "`datasets.load_dataset('confit/audioset', 'balanced', data_dir='path/to/folder')`\n"
            "The tree structure of the downloaded data looks like: \n"
            f"{DATA_DIR_STRUCTURE}"
        )

    def _split_generators(self, dl_manager):

        data_dir = os.path.abspath(os.path.expanduser(dl_manager.manual_dir))

        if not os.path.exists(data_dir):
            raise FileNotFoundError(
                f"{data_dir} does not exist. Make sure you pass a manual dir via "
                f"`datasets.load_dataset('confit/audioset', 'balanced', data_dir=...)` that includes files unzipped from all the zip files. "
                f"Manual download instructions: {self.manual_download_instructions}"
            )

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"split": "train", "data_dir": data_dir}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"split": "test", "data_dir": data_dir}),
        ]

    def _generate_examples(self, split, data_dir):
        """Generate examples from AudioSet"""
        # Iterate over the data directory contents to extract the relevant information
        extensions = ['.wav']

        if split == 'train':
            if self.config.name == 'balanced':
                archive_path = os.path.join(data_dir, 'audios', 'balanced_train_segments')
                metadata_url = 'https://huggingface.co/datasets/confit/audioset/resolve/main/metadata/audioset-20k.jsonl'
            elif self.config.name == 'unbalanced':
                archive_path = os.path.join(data_dir, 'audios', 'unbalanced_train_segments')
                metadata_url = 'https://huggingface.co/datasets/confit/audioset/resolve/main/metadata/audioset-2m.jsonl'
        elif split == 'test':
            archive_path = os.path.join(data_dir, 'audios', 'eval_segments')
            metadata_url = 'https://huggingface.co/datasets/confit/audioset/resolve/main/metadata/audioset-eval.jsonl'

        response = requests.get(metadata_url)
        if response.status_code == 200:
            # Split the content by lines and parse each line as JSON
            # Each line is like {"filename":"YN6UbMsh-q1c.wav","label":["Vehicle","Car"]}
            data_list = [json.loads(line) for line in response.text.splitlines()]
            fileid2labels = {item['filename']: item['label'] for item in data_list}
        else:
            fileid2labels = {}
            logger.error(f"Failed to retrieve metadata: Status code {response.status_code}")

        _, wav_paths = fast_scandir(archive_path, extensions, recursive=True)

        for guid, wav_path in enumerate(wav_paths):
            fileid = Path(wav_path).name
            sound = fileid2labels.get(fileid)
            if sound is None:
                # Skip clips that have no entry in the metadata file
                continue
            # Yield only the keys declared in the config features
            yield guid, {
                "file": wav_path,
                "audio": wav_path,
                "sound": sound,
                "label": sound,
            }


def fast_scandir(path: str, extensions: tp.List[str], recursive: bool = False):
    # Scan files recursively faster than glob
    # From github.com/drscotthawley/aeiou/blob/main/aeiou/core.py
    subfolders, files = [], []

    try:  # hope to avoid 'permission denied' by this try
        for f in os.scandir(path):
            try:  # 'hope to avoid too many levels of symbolic links' error
                if f.is_dir():
                    subfolders.append(f.path)
                elif f.is_file():
                    if os.path.splitext(f.name)[1].lower() in extensions:
                        files.append(f.path)
            except Exception:
                pass
    except Exception:
        pass

    if recursive:
        for subfolder in list(subfolders):
            sf, f = fast_scandir(subfolder, extensions, recursive=recursive)
            subfolders.extend(sf)
            files.extend(f)  # type: ignore

    return subfolders, files
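
Once the audio archives have been extracted into the `audios/` folder described in `manual_download_instructions`, the script is loaded through `datasets.load_dataset` as stated there. The snippet below is a minimal usage sketch, not part of the committed file: `/path/to/folder` is a placeholder for the directory containing `audios/`, the placeholder `CLASSES` list must be filled in first, and newer versions of `datasets` may additionally require `trust_remote_code=True` for script-based datasets.

from datasets import load_dataset

# Assumes /path/to/folder holds the extracted `audios/` tree shown in DATA_DIR_STRUCTURE
dataset = load_dataset('confit/audioset', 'balanced', data_dir='/path/to/folder')

sample = dataset['train'][0]
print(sample['file'], sample['sound'])  # wav path and its list of AudioSet label names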