ai-forever committed on
Commit
63f847c
1 Parent(s): d058807

Upload georeview-classification.py

Browse files
Files changed (1) hide show
  1. georeview-classification.py +85 -0
georeview-classification.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import datasets
3
+
4
+
5
# BibTeX citation for the dataset (left empty here — TODO: fill in).
_CITATION = """\
"""

# License text for the dataset (left empty here — TODO: fill in).
_LICENSE = """\
"""

# Short human-readable description shown in the dataset card/info.
_DESCRIPTION = """\
Review classification based on Yandex Georeview dataset.
"""

# Languages covered by the dataset: language code -> English name.
_LANGUAGES = {
    "ru": "Russian"
}
_HOMEPAGE_URL = ""
# Relative path template for the per-split data files; `split` is
# formatted with "train", "validation" or "test".
_DOWNLOAD_URL = "{split}.jsonl"
_VERSION = "1.0.0"
21
+
22
+
23
class GeoreviewClassConfig(datasets.BuilderConfig):
    """BuilderConfig for GeoreviewClassConfig."""

    def __init__(self, languages=None, **kwargs):
        """Create a config for the Georeview classification dataset.

        Args:
            languages: language code(s) this config covers (e.g. "ru").
            **kwargs: forwarded to the base ``datasets.BuilderConfig``
                (name, description, ...).
        """
        # NOTE: the original line ended with a stray trailing comma, which
        # built and immediately discarded a one-element tuple; removed.
        super(GeoreviewClassConfig, self).__init__(version=datasets.Version(_VERSION, ""), **kwargs)
        self.languages = languages
29
+
30
+
31
class GeoreviewClass(datasets.GeneratorBasedBuilder):
    """The Georeview Corpus: review classification based on the
    Yandex Georeview dataset."""

    BUILDER_CONFIGS = [
        GeoreviewClassConfig(
            name='ru',
            languages='ru',
            description="Review classification based on Yandex Georeview dataset",
        )
    ]
    BUILDER_CONFIG_CLASS = GeoreviewClassConfig
    DEFAULT_CONFIG_NAME = 'ru'

    def _info(self):
        """Return dataset metadata and the feature schema.

        Each example carries a string ``id``, the review ``text``, an
        integer ``label`` and its string form ``label_text``.
        """
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "text": datasets.Value("string"),
                    "label": datasets.Value("int32"),
                    "label_text": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
            license=_LICENSE,
            homepage=_HOMEPAGE_URL,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download the per-split JSONL files and map them to splits.

        Args:
            dl_manager: ``datasets.DownloadManager`` used to resolve the
                ``_DOWNLOAD_URL`` template into local file paths.
        """
        train_urls = [_DOWNLOAD_URL.format(split="train")]
        dev_urls = [_DOWNLOAD_URL.format(split="validation")]
        test_urls = [_DOWNLOAD_URL.format(split="test")]

        train_paths = dl_manager.download_and_extract(train_urls)
        dev_paths = dl_manager.download_and_extract(dev_urls)
        test_paths = dl_manager.download_and_extract(test_urls)
        # (Removed a leftover debug print of the resolved paths.)

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"file_paths": train_paths}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"file_paths": dev_paths}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"file_paths": test_paths}),
        ]

    def _generate_examples(self, file_paths):
        """Yield ``(key, example)`` pairs from the given JSONL files.

        Keys are a running row index across all files. Each line is
        parsed as one JSON object; it is assumed to match the feature
        schema declared in ``_info`` — TODO confirm against the data.
        """
        row_count = 0
        for file_path in file_paths:
            with open(file_path, "r", encoding="utf-8") as f:
                for line in f:
                    yield row_count, json.loads(line)
                    row_count += 1