wikipedia_bn / wikipedia_bn.py

"""Bengali wikipedia from 03/20/2021"""
import os

import pyarrow.parquet as pq

import datasets
# Module-level logger, following the standard `datasets` library convention.
logger = datasets.logging.get_logger(__name__)
# BibTeX entry citing the Wikimedia dumps as the data source.
_CITATION = """\
@ONLINE {wikidump,
    author = {Wikimedia Foundation},
    title  = {Wikimedia Downloads},
    url    = {https://dumps.wikimedia.org}
}
"""
# Human-readable dataset card description.
_DESCRIPTION = """\
Bengali Wikipedia from the dump of 03/20/2021.
The data was processed using the huggingface datasets wikipedia script early april 2021.
The dataset was built from the Wikipedia dump (https://dumps.wikimedia.org/).
Each example contains the content of one full Wikipedia article with cleaning to strip
markdown and unwanted sections (references, etc.).
"""
# CC BY-SA 3.0 — the license Wikipedia content is distributed under.
_LICENSE = (
    "This work is licensed under the Creative Commons Attribution-ShareAlike "
    "3.0 Unported License. To view a copy of this license, visit "
    "http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to "
    "Creative Commons, PO Box 1866, Mountain View, CA 94042, USA."
)
# Number of parquet shards the 2021-03-20 dump was split into.
_N_SHARDS = 10

# Repository-relative paths of the parquet shards for each split.
_URLS = {
    "train": [f"data/20210320/shard-{i:05d}-of-{_N_SHARDS:05d}.parquet" for i in range(_N_SHARDS)],
}
class WikipediaBn(datasets.ArrowBasedBuilder):
    """Bengali Wikipedia from the dump of 03/20/2021, served from parquet shards."""

    def _info(self):
        """Return the dataset metadata: features, homepage, citation and license."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "title": datasets.Value("string"),
                    "text": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
            homepage="https://dumps.wikimedia.org",
            citation=_CITATION,
            # _LICENSE was defined at module level but never wired into the
            # dataset metadata; expose it so the Hub shows the correct license.
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        """Download the parquet shards and declare the single train split."""
        downloaded_files = dl_manager.download(_URLS)

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepaths": downloaded_files["train"]}),
        ]

    def _generate_tables(self, filepaths):
        """Yield ``(key, pyarrow.Table)`` pairs, one table per parquet row group.

        Keys have the form ``"<shard file name>_<row group index>"`` and are
        therefore unique across shards.
        """
        for filepath in filepaths:
            logger.info("generating examples from = %s", filepath)
            filepath_id = os.path.basename(filepath)
            with open(filepath, "rb") as f:
                pf = pq.ParquetFile(f)
                for group_index in range(pf.num_row_groups):
                    yield f"{filepath_id}_{group_index}", pf.read_row_group(group_index)