Wikidepia committed
Commit: b3e24c7
Parent: 5590c33

Add Indonesian Wikipedia from 20-Nov-2023 dump

dataset_dict.json ADDED
@@ -0,0 +1 @@
+ {"splits": ["train"]}
train/data-00000-of-00003.arrow ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:311d8a25874ba07869dc697627e5dbd34e1af35a3232ce6389556734ce9f1b91
+ size 499433456
train/data-00001-of-00003.arrow ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d38c389ef19fc47532f01b21c02432ff3eced4cace9ff935305c10f3bd4bc19a
+ size 308310856
train/data-00002-of-00003.arrow ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b98ee98b5dba4ab7d662fa2ceb764ce189b879c437dd33cf5e28909ce65f04ba
+ size 322556280
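The three `.arrow` entries above are Git LFS pointer files, not the data itself: each records the pointer spec version, the SHA-256 of the real payload, and its size in bytes. After `git lfs pull` fetches the actual shards, they can be checked against the pointers. A verification sketch in Python (relative paths assumed to match the file names in this commit):

```python
import hashlib
from pathlib import Path

# (expected sha256, expected size) copied from the LFS pointers above.
EXPECTED = {
    "train/data-00000-of-00003.arrow":
        ("311d8a25874ba07869dc697627e5dbd34e1af35a3232ce6389556734ce9f1b91", 499433456),
    "train/data-00001-of-00003.arrow":
        ("d38c389ef19fc47532f01b21c02432ff3eced4cace9ff935305c10f3bd4bc19a", 308310856),
    "train/data-00002-of-00003.arrow":
        ("b98ee98b5dba4ab7d662fa2ceb764ce189b879c437dd33cf5e28909ce65f04ba", 322556280),
}

for name, (oid, size) in EXPECTED.items():
    path = Path(name)
    h = hashlib.sha256()
    with path.open("rb") as f:
        # Hash in 1 MiB chunks so large shards are never fully in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert path.stat().st_size == size, f"size mismatch for {name}"
    assert h.hexdigest() == oid, f"sha256 mismatch for {name}"
    print(f"{name}: OK")
```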
train/dataset_info.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "builder_name": "wikipedia",
+   "citation": "@ONLINE {wikidump,\n author = {Wikimedia Foundation},\n title = {Wikimedia Downloads},\n url = {https://dumps.wikimedia.org}\n}\n",
+   "config_name": "20231120.id",
+   "dataset_name": "wikipedia",
+   "dataset_size": 667812,
+   "description": "Wikipedia dataset containing cleaned articles of all languages.\nThe datasets are built from the Wikipedia dump\n(https://dumps.wikimedia.org/) with one split per language. Each example\ncontains the content of one full Wikipedia article with cleaning to strip\nmarkdown and unwanted sections (references, etc.).\n",
+   "download_checksums": {
+     "https://dumps.wikimedia.org/idwiki/20231120/dumpstatus.json": {
+       "num_bytes": 14097,
+       "checksum": null
+     },
+     "https://dumps.wikimedia.org/idwiki/20231120/idwiki-20231120-pages-articles-multistream.xml.bz2": {
+       "num_bytes": 961997021,
+       "checksum": null
+     }
+   },
+   "download_size": 962011118,
+   "features": {
+     "id": {
+       "dtype": "string",
+       "_type": "Value"
+     },
+     "url": {
+       "dtype": "string",
+       "_type": "Value"
+     },
+     "title": {
+       "dtype": "string",
+       "_type": "Value"
+     },
+     "text": {
+       "dtype": "string",
+       "_type": "Value"
+     }
+   },
+   "homepage": "https://dumps.wikimedia.org",
+   "license": "",
+   "size_in_bytes": 962678930,
+   "splits": {
+     "train": {
+       "name": "train",
+       "num_bytes": 667812,
+       "num_examples": 667812,
+       "dataset_name": "wikipedia"
+     }
+   },
+   "version": {
+     "version_str": "2.0.0",
+     "description": "",
+     "major": 2,
+     "minor": 0,
+     "patch": 0
+   }
+ }
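The "features" block above describes a flat schema of four string columns per article. As a cross-check, the same schema can be built programmatically; this is a sketch of the equivalent `datasets.Features` object, not code shipped in the commit:

```python
from datasets import Features, Value

# Mirrors the "features" section of dataset_info.json:
# four string-valued columns per Wikipedia article.
features = Features({
    "id": Value("string"),
    "url": Value("string"),
    "title": Value("string"),
    "text": Value("string"),
})
print(features)
```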
train/state.json ADDED
@@ -0,0 +1,19 @@
+ {
+   "_data_files": [
+     {
+       "filename": "data-00000-of-00003.arrow"
+     },
+     {
+       "filename": "data-00001-of-00003.arrow"
+     },
+     {
+       "filename": "data-00002-of-00003.arrow"
+     }
+   ],
+   "_fingerprint": "7184339ee075001f",
+   "_format_columns": null,
+   "_format_kwargs": {},
+   "_format_type": null,
+   "_output_all_columns": false,
+   "_split": "train"
+ }
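state.json lists the Arrow shards that make up the split, in order, along with a content fingerprint and formatting state; `load_from_disk` consumes it when reassembling the "train" split. The shards are also plain Arrow files, so they can be opened directly without going through state.json. A sketch, assuming the shards sit in a local `train/` directory:

```python
from datasets import Dataset, concatenate_datasets

# Each shard is a standalone Arrow file; _data_files in state.json
# gives the order in which they should be concatenated.
shards = [
    Dataset.from_file(f"train/data-{i:05d}-of-00003.arrow")
    for i in range(3)
]
train = concatenate_datasets(shards)
print(train.num_rows)  # expected: 667812, per dataset_info.json
```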