nouamanetazi committed
Commit bbdafef
1 Parent(s): 594ce93

Create mtop-intent.py

Files changed (1)
  1. mtop-intent.py +65 -0
mtop-intent.py ADDED
@@ -0,0 +1,65 @@
+import json
+import datasets
+
+_DESCRIPTION = "MTOP: Multilingual Task-Oriented Semantic Parsing"
+_LANGUAGES = ["en", "de", "es", "fr", "hi", "th"]
+
+URL = ""  # https://huggingface.co/datasets/mteb/mtop/resolve/main/
+_URLs = {
+    lang: {
+        "train": URL + f"{lang}/train.jsonl",
+        "test": URL + f"{lang}/test.jsonl",
+        "validation": URL + f"{lang}/validation.jsonl",
+    }
+    for lang in _LANGUAGES
+}
+
+
+class MTOP(datasets.GeneratorBasedBuilder):
+    """MTOP Dataset."""
+
+    BUILDER_CONFIGS = [
+        datasets.BuilderConfig(name=name, description=f"This config covers the {name} portion of the MTOP dataset.")
+        for name in _LANGUAGES
+    ]
+
+    DEFAULT_CONFIG_NAME = "en"
+
+    def _info(self):
+        return datasets.DatasetInfo(
+            description=_DESCRIPTION,
+            features=datasets.Features(
+                {
+                    "id": datasets.Value("int64"),
+                    "text": datasets.Value("string"),
+                    "label": datasets.Value("int32"),
+                    "label_text": datasets.Value("string"),
+                }
+            ),
+            supervised_keys=None,
+        )
+
+    def _split_generators(self, dl_manager):
+        """Returns SplitGenerators."""
+        my_urls = _URLs[self.config.name]
+        data_dir = dl_manager.download_and_extract(my_urls)
+        return [
+            datasets.SplitGenerator(
+                name=datasets.Split.TRAIN,
+                # These kwargs will be passed to _generate_examples
+                gen_kwargs={"text_path": data_dir["train"]},
+            ),
+            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"text_path": data_dir["validation"]}),
+            datasets.SplitGenerator(
+                name=datasets.Split.TEST,
+                # These kwargs will be passed to _generate_examples
+                gen_kwargs={"text_path": data_dir["test"]},
+            ),
+        ]
+
+    def _generate_examples(self, text_path):
+        """Yields examples, one per JSON line."""
+        with open(text_path, encoding="utf-8") as f:
+            texts = f.readlines()
+            for i, text in enumerate(texts):
+                yield i, json.loads(text)
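
This is a standard `datasets.GeneratorBasedBuilder` loading script: each per-language file is expected to be JSON Lines, one object per line whose keys match the features declared in `_info()`, and the empty `URL` suggests the relative paths are meant to resolve against the hosting dataset repository itself. A minimal usage sketch, assuming the script lives in a Hub dataset repo with the id `mteb/mtop` (taken from the commented-out URL above; substitute the real repo id if it differs):

import datasets

# "mteb/mtop" and the split/config names below are assumptions based on the script;
# trust_remote_code=True may be needed on recent `datasets` versions to run a
# script-based loader like this one.
ds = datasets.load_dataset("mteb/mtop", "en", split="train", trust_remote_code=True)

# Each example carries the four declared features; hypothetical values shown:
# {"id": 0, "text": "set an alarm for 7 am", "label": 3, "label_text": "alarm"}
print(ds[0])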