Qiguang Chen committed on
Commit fceca7c · 1 Parent(s): acc4f88

update mix data and script

OpenSLU.py ADDED
@@ -0,0 +1,183 @@
+ import json
+ import os
+
+ import datasets
+
+ _OPEN_SLU_CITATION = """\
+ xxx"""
+
+ _OPEN_SLU_DESCRIPTION = """\
+ xxx"""
+
+ _ATIS_DESCRIPTION = """\
+ @article{wang2019superglue,
+ title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},
+ author={Wang, Alex and Pruksachatkun, Yada and Nangia, Nikita and Singh, Amanpreet and Michael, Julian and Hill, Felix and Levy, Omer and Bowman, Samuel R},
+ journal={arXiv preprint arXiv:1905.00537},
+ year={2019}
+ }
+ Note that each SuperGLUE dataset has its own citation. Please see the source to
+ get the correct citation for each contained dataset.
+ """
+
+ _BOOLQ_CITATION = """\
+ @inproceedings{clark2019boolq,
+ title={BoolQ: Exploring the Surprising Difficulty of Natural Yes/No Questions},
+ author={Clark, Christopher and Lee, Kenton and Chang, Ming-Wei, and Kwiatkowski, Tom and Collins, Michael, and Toutanova, Kristina},
+ booktitle={NAACL},
+ year={2019}
+ }"""
+
+
+ class OpenSLUConfig(datasets.BuilderConfig):
+     """BuilderConfig for OpenSLU."""
+
+     def __init__(self, features, data_url, citation, url, intent_label_classes=None, slot_label_classes=None, **kwargs):
+         """BuilderConfig for OpenSLU.
+         Args:
+           features: `list[string]`, list of the features that will appear in the
+             feature dict. Should not include "label".
+           data_url: `string`, url to download the zip file from.
+           citation: `string`, citation for the data set.
+           url: `string`, url for information about the data set.
+           intent_label_classes: `list[string]`, the list of classes for the intent label.
+           slot_label_classes: `list[string]`, the list of classes for the slot label.
+           **kwargs: keyword arguments forwarded to super.
+         """
+         # Version history:
+         # 0.0.1: Initial version.
+         super(OpenSLUConfig, self).__init__(version=datasets.Version("0.0.1"), **kwargs)
+         self.features = features
+         self.intent_label_classes = intent_label_classes
+         self.slot_label_classes = slot_label_classes
+         self.data_url = data_url
+         self.citation = citation
+         self.url = url
+
+
+ class OpenSLU(datasets.GeneratorBasedBuilder):
+     """The OpenSLU benchmark."""
+
+     BUILDER_CONFIGS = [
+         OpenSLUConfig(
+             name="atis",
+             description=_ATIS_DESCRIPTION,
+             features=["text"],
+             data_url="https://huggingface.co/datasets/LightChen2333/OpenSLU/resolve/main/atis.tar.gz",
+             citation="",
+             url="",
+         ),
+         OpenSLUConfig(
+             name="snips",
+             description=_ATIS_DESCRIPTION,
+             features=["text"],
+             data_url="https://huggingface.co/datasets/LightChen2333/OpenSLU/resolve/main/snips.tar.gz",
+             citation="",
+             url="",
+         ),
+         OpenSLUConfig(
+             name="mix-atis",
+             description=_ATIS_DESCRIPTION,
+             features=["text"],
+             data_url="https://huggingface.co/datasets/LightChen2333/OpenSLU/resolve/main/mix_atis.tar.gz",
+             citation="",
+             url="",
+         ),
+         OpenSLUConfig(
+             name="mix-snips",
+             description=_ATIS_DESCRIPTION,
+             features=["text"],
+             data_url="https://huggingface.co/datasets/LightChen2333/OpenSLU/resolve/main/mix_snips.tar.gz",
+             citation="",
+             url="",
+         ),
+     ]
+
+     def _info(self):
+         features = {feature: datasets.Value("string") for feature in self.config.features}
+         features["slot"] = datasets.Sequence(datasets.Value("string"))
+         features["intent"] = datasets.Value("string")
+
+         return datasets.DatasetInfo(
+             description=_OPEN_SLU_DESCRIPTION + self.config.description,
+             features=datasets.Features(features),
+             homepage=self.config.url,
+             citation=self.config.citation + "\n" + _OPEN_SLU_CITATION,
+         )
+
+     def _split_generators(self, dl_manager):
+         print(self.config.data_url)
+         dl_dir = dl_manager.download_and_extract(self.config.data_url) or ""
+
+         task_name = _get_task_name_from_data_url(self.config.data_url)
+         print(dl_dir)
+         print(task_name)
+         dl_dir = os.path.join(dl_dir, task_name)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 gen_kwargs={
+                     "data_file": os.path.join(dl_dir, "train.jsonl"),
+                     "split": datasets.Split.TRAIN,
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.VALIDATION,
+                 gen_kwargs={
+                     "data_file": os.path.join(dl_dir, "dev.jsonl"),
+                     "split": datasets.Split.VALIDATION,
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 gen_kwargs={
+                     "data_file": os.path.join(dl_dir, "test.jsonl"),
+                     "split": datasets.Split.TEST,
+                 },
+             ),
+         ]
+
+     def _generate_examples(self, data_file, split):
+         with open(data_file, encoding="utf-8") as f:
+             for index, line in enumerate(f):
+                 row = json.loads(line)
+                 yield index, row
+
+
+ def _cast_label(label):
+     """Converts the label into the appropriate string version."""
+     if isinstance(label, str):
+         return label
+     elif isinstance(label, bool):
+         return "True" if label else "False"
+     elif isinstance(label, int):
+         assert label in (0, 1)
+         return str(label)
+     else:
+         raise ValueError("Invalid label format.")
+
+
+ def _get_record_entities(passage):
+     """Returns the unique set of entities."""
+     text = passage["text"]
+     entity_spans = list()
+     for entity in passage["entities"]:
+         entity_text = text[entity["start"]: entity["end"] + 1]
+         entity_spans.append({"text": entity_text, "start": entity["start"], "end": entity["end"] + 1})
+     entity_spans = sorted(entity_spans, key=lambda e: e["start"])  # sort by start index
+     entity_texts = set(e["text"] for e in entity_spans)  # for backward compatibility
+     return entity_texts, entity_spans
+
+
+ def _get_record_answers(qa):
+     """Returns the unique set of answers."""
+     if "answers" not in qa:
+         return []
+     answers = set()
+     for answer in qa["answers"]:
+         answers.add(answer["text"])
+     return sorted(answers)
+
+
+ def _get_task_name_from_data_url(data_url):
+     return data_url.split("/")[-1].split(".")[0]
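For context, the sketch below shows how the four configs registered by this script could be consumed with the `datasets` library. It is a usage sketch, not part of this commit: the repository id "LightChen2333/OpenSLU" is taken from the data_url fields above, and the trust_remote_code flag is only needed on recent `datasets` versions.

# Usage sketch (assumption: the script is hosted as "LightChen2333/OpenSLU" on the Hub).
from datasets import load_dataset

# Any config registered in BUILDER_CONFIGS works: "atis", "snips", "mix-atis", "mix-snips".
data = load_dataset("LightChen2333/OpenSLU", "mix-atis", trust_remote_code=True)

# _split_generators maps train.jsonl / dev.jsonl / test.jsonl to these splits.
print(data)
print(data["train"][0])  # expected keys, per _info(): "text", "slot", "intent"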
atis.tar.gz CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:181196f7f92104a6b649b479b7c986c4bf1d356d1acca26d9b09284fd37101e6
- size 129044
+ oid sha256:80063f7b2046a7e80ed06e5870a08e7e9d22c224aaaa69ee14114b254bab00bc
+ size 134507
atis.tar → mix_atis.tar.gz RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bc5befdeecbb2e78cd213dd024cafbf9e803ef79a32b4280043abbe44c15a94a
- size 1216512
+ oid sha256:398dbda06def10deca858aabbb557c5b7ab11f710d89ac74105e467a852c8a43
+ size 602622
snips.tar → mix_snips.tar.gz RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:32de64ed2f3ee2daf704b8e4bf4822b6ce4facbe355e232bc37fef7f2367259b
- size 2461696
+ oid sha256:48933496e422a498f03969fbdf8899efdbf047d3c29bb97af430478d2a8903f8
+ size 2476548
snips.tar.gz CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:679df168f809e83ef2191a13591d7dfc2c753891e2afb59e6a1670b3ba5222dd
- size 378851
+ oid sha256:7fd1f889cb516df34d1200f2db7e644c5e218b9239eb74fd5a2073a94ac4cf0b
+ size 395478
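The archives above are stored via Git LFS, so only their pointers change in this diff. Based on _split_generators and _get_task_name_from_data_url in OpenSLU.py, each archive is expected to unpack into a directory named after the file (e.g. mix_atis/) containing train.jsonl, dev.jsonl, and test.jsonl, one JSON object per line. The record below is hypothetical, shaped only by the features declared in _info(); the actual contents of the archives may differ.

import json

# Hypothetical record; field names follow _info() ("text", "slot", "intent").
record = {
    "text": "show me flights from denver to boston",
    "slot": ["O", "O", "O", "O", "B-fromloc.city_name", "O", "B-toloc.city_name"],
    "intent": "atis_flight",
}

# _generate_examples reads one such object per line from train.jsonl / dev.jsonl / test.jsonl.
print(json.dumps(record))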