dchaplinsky committed on
Commit
ef8552d
1 Parent(s): a0e7121

Create malyuk.py

Files changed (1)
  1. malyuk.py +99 -0
malyuk.py ADDED
@@ -0,0 +1,99 @@
+ import json
+ import datasets
+
+ logger = datasets.logging.get_logger(__name__)
+
+
+ _DESCRIPTION = """\
+ Malyuk — a large, compiled corpus of Ukrainian-language texts.
+ 113GB of text data in JSONL format.
+ A combination of UberText 2.0, the Ukrainian part of OSCAR, and Ukrainian News.
+
+ Nothing is guaranteed. Use at your own risk.
+ """
+
+
+ _URLS = [
+     "combined_ds_pt00.jsonlines",
+     "combined_ds_pt01.jsonlines",
+     "combined_ds_pt02.jsonlines",
+     "combined_ds_pt03.jsonlines",
+     "combined_ds_pt04.jsonlines",
+     "combined_ds_pt05.jsonlines",
+     "combined_ds_pt06.jsonlines",
+     "combined_ds_pt07.jsonlines",
+     "combined_ds_pt08.jsonlines",
+     "combined_ds_pt09.jsonlines",
+     "combined_ds_pt10.jsonlines",
+     "combined_ds_pt11.jsonlines",
+     "combined_ds_pt12.jsonlines",
+     "combined_ds_pt13.jsonlines",
+     "combined_ds_pt14.jsonlines",
+     "combined_ds_pt15.jsonlines",
+     "combined_ds_pt16.jsonlines",
+     "combined_ds_pt17.jsonlines",
+     "combined_ds_pt18.jsonlines",
+     "combined_ds_pt19.jsonlines",
+     "combined_ds_pt20.jsonlines",
+     "combined_ds_pt21.jsonlines",
+     "combined_ds_pt22.jsonlines",
+ ]
+
+
+ class MalyukDataset(datasets.GeneratorBasedBuilder):
+     """
+     Malyuk Dataset
+
+ ~^^^^^^:::^?7!!!!!~~~~~~~~~~~~~~^^^^^^::::::.............:::::::::^^:::::::^^^^^~~~~~~~~~~!:........
+ ~^^^^^^:::^7!~!!!!!~~~~~~~^^^^^^^~~~~^^^^^^^::::::::::::^^^^^^^^^^:::::::::::^^~~~~~!!!~~!!:........
+ ~~~^^^^:::^7!~!!!~~~~~~^^^^^^^^^~~~~~~~~~~~~~~~~~~~!!~~~~!~~~~~~~~~~~~~~~^^:::^^~~~~~~!!~!7^........
+ ~~~^^^^^^:~7!!!~~!~~~~~~~~!!!!!!!!777777777777!!!~~!777???JJYY55YYJ?7?777777!~~~^~~~~~!77??^........
+ !~~~^^^^^:~?!!!!!!!!!!7?Y55555555PBGPPPPPPPP5YJJ?777JY5PPPG########BBBBGGGGGP5Y?!~!~~~~77??:........
+ 7!~~^^^^^:^7!!!!!!!7?YPGBBGGGGGGGBB########BBGGPP5YY5GB####&&&&&##BGGGBB####BBG5?!!~~~~~!?7.........
+ 7!~~^~~~~~^!!!!!!!!?YPGGGP55PPGBB####&&&&&&##BBPJ7!~!75##&&&&&#BB###BBGGGB###BGPY?7~~~~^~7~:::^^^...
+ ?7~~^JPJ7!7?~!!!77?J5PPPGBB#BGG&&@&#PG#&&&##BBGJ!^::::!5BBB###GY5#&&&#GG##BBBBGP5?7~~^^^^~~^~!?P5:..
+ J7~~^JBGJ??7~!!!!7?JJ5PGGBBBG5J5GBG5Y5GBBGGGPPJ7!~::.:^!J555GPP5JY555JJ5GGGP55Y?!~~^^^^^^~!JY5B#?...
+ J7!~~7PY?J??7!!!!!!7?JYYYYY5555555PPPP55YYYYYJ?!~~^:::^~!?JJJJY55YY555555Y?7!~~^^^^^^~~~^7Y55JYP~...
+ J7!~~!J7?YYJ7!!!!!!!!777???JJY55555YYJJ????????7!~^:::^~!777!!!7??????77!~^^::::::^^^~!~~7PP5J7?~...
+ J7!~~~!?Y5PG?7!!!!~~~~~~~~!!777777??777777????77!!~^::^!!777!~~^~~!!~~^:::...::::^^~~~!!!7GGPY!~:...
+ ?7!!~~~7YPGGJ7!!!!!~~~~~~~~~~~~~~!!~~~~!77????77!!~^^:^~!77?7!~^^::^^^^^^^:^^^^^^~~~!!!77?PPY7~^....
+ ?77!~~~!?5PPJ77!!!!!!~!!!!~~~~~~~^^^~~~!7?J?777!!~^^:::^!!7?J!~^^::::::^^~~~~~~~~~!!!!7??7YY?~~^....
+     """
+
+     VERSION = datasets.Version("1.0.0")
+     DEFAULT_CONFIG_NAME = "default"
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(name="default", version=VERSION, description=""),
+     ]
+
+     def _info(self):
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=datasets.Features(
+                 {
+                     "id": datasets.Value("string"),
+                     "compound_id": datasets.Value("string"),
+                     "text": datasets.Value("string"),
+                 }
+             ),
+         )
+
+     def _split_generators(self, dl_manager):
+         downloaded_files = dl_manager.download(_URLS)
+
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN, gen_kwargs={"filepaths": downloaded_files}
+             )
+         ]
+
+     def _generate_examples(self, filepaths):
+         """This function returns the examples in the raw (text) form."""
+         logger.info("generating examples from = %s", filepaths)
+         key = 0
+         for path in filepaths:
+             with open(path, encoding="utf-8") as f:
+                 for instruction_str in f:
+                     instruction = json.loads(instruction_str)
+                     yield key, instruction
+                     key += 1
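
A minimal usage sketch, assuming this loading script and the combined_ds_pt*.jsonlines shards are published together in a Hugging Face dataset repository. The repository id below is a placeholder, streaming is used because the corpus is around 113GB, and newer versions of the datasets library may additionally require trust_remote_code=True for script-based repositories.

import datasets

# Placeholder repository id -- replace with the actual Hub path that hosts
# malyuk.py and the combined_ds_pt*.jsonlines shards.
ds = datasets.load_dataset(
    "username/malyuk",           # assumed repository id
    split="train",
    streaming=True,              # avoid materializing ~113GB locally
    # trust_remote_code=True,    # may be required on newer datasets versions
)

# Each record exposes the three declared features: id, compound_id, text.
for record in ds:
    print(record["id"], record["compound_id"], record["text"][:80])
    break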