Rams901 committed on
Commit
d539682
1 Parent(s): 68c153f

Delete OpenSLU_Clone.py

Browse files
Files changed (1) hide show
  1. OpenSLU_Clone.py +0 -156
OpenSLU_Clone.py DELETED
@@ -1,156 +0,0 @@
1
- import json
2
- import os
3
-
4
- import datasets
5
-
6
# Citation for the OpenSLU toolkit itself ("xxx" is placeholder text in the
# original script — TODO: fill in the real BibTeX entry).
_OPEN_SLU_CITATION = """\
xxx"""

# Description shown for every config, prepended to the per-config description
# ("xxx" is placeholder text in the original script).
_OPEN_SLU_DESCRIPTION = """\
xxx"""

# BibTeX citation for the ATIS corpus (ACL Anthology H90-1021).
_ATIS_CITATION = """\
@inproceedings{hemphill1990atis,
    title = "The {ATIS} Spoken Language Systems Pilot Corpus",
    author = "Hemphill, Charles T.  and
      Godfrey, John J.  and
      Doddington, George R.",
    booktitle = "Speech and Natural Language: Proceedings of a Workshop Held at Hidden Valley, {P}ennsylvania, June 24-27,1990",
    year = "1990",
    url = "https://aclanthology.org/H90-1021",
}
"""

# Short human-readable description of the ATIS corpus.
_ATIS_DESCRIPTION = """\
A widely used SLU corpus for single-intent SLU.
"""
27
-
28
-
29
class OpenSLUConfig(datasets.BuilderConfig):
    """BuilderConfig describing one OpenSLU sub-dataset."""

    def __init__(self, features, data_url, citation, url, intent_label_classes=None, slot_label_classes=None, **kwargs):
        """Create a config for one OpenSLU corpus.

        Args:
            features: `list[string]`, feature names that will appear in the
                feature dict (the label features are added separately).
            data_url: `string`, url to download the archive from.
            citation: `string`, citation for the data set.
            url: `string`, url for information about the data set.
            intent_label_classes: `list[string]`, classes for the intent label.
            slot_label_classes: `list[string]`, classes for the slot label.
            **kwargs: keyword arguments forwarded to super.
        """
        # Version history:
        # 0.0.1: Initial version.
        super().__init__(version=datasets.Version("0.0.1"), **kwargs)
        # Where to fetch the data and how to credit it.
        self.data_url = data_url
        self.citation = citation
        self.url = url
        # What the examples look like.
        self.features = features
        self.intent_label_classes = intent_label_classes
        self.slot_label_classes = slot_label_classes
53
-
54
-
55
class OpenSLU(datasets.GeneratorBasedBuilder):
    """Dataset builder for SLU corpora hosted in the OpenSLU_Clone repo.

    Note: the original docstring said "The SuperGLUE benchmark", a copy-paste
    leftover from the script this loader was adapted from.
    """

    BUILDER_CONFIGS = [
        OpenSLUConfig(
            name="products",
            description=_ATIS_DESCRIPTION,
            features=["text"],
            data_url="https://huggingface.co/datasets/rams901/OpenSLU_Clone/resolve/main/prods.tar.gz",
            citation=_ATIS_CITATION,
            url="https://aclanthology.org/H90-1021",
        ),
    ]

    def _info(self):
        """Build the DatasetInfo: each configured feature is a string sequence,
        plus a `slot` sequence and a scalar `intent` string."""
        features = {feature: datasets.Sequence(datasets.Value("string")) for feature in self.config.features}
        features["slot"] = datasets.Sequence(datasets.Value("string"))
        features["intent"] = datasets.Value("string")

        return datasets.DatasetInfo(
            description=_OPEN_SLU_DESCRIPTION + self.config.description,
            features=datasets.Features(features),
            homepage=self.config.url,
            citation=self.config.citation + "\n" + _OPEN_SLU_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the archive and map train/dev/test JSONL files to splits.

        The leftover debug `print()` calls from the original were removed.
        """
        # `or ""` guards against a falsy return from download_and_extract
        # (kept from the original — presumably for streaming mode; confirm).
        dl_dir = dl_manager.download_and_extract(self.config.data_url) or ""
        # The archive extracts into a directory named after the file stem
        # (e.g. prods.tar.gz -> prods/).
        task_name = _get_task_name_from_data_url(self.config.data_url)
        dl_dir = os.path.join(dl_dir, task_name)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "data_file": os.path.join(dl_dir, "train.jsonl"),
                    "split": datasets.Split.TRAIN,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "data_file": os.path.join(dl_dir, "dev.jsonl"),
                    "split": datasets.Split.VALIDATION,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "data_file": os.path.join(dl_dir, "test.jsonl"),
                    "split": datasets.Split.TEST,
                },
            ),
        ]

    def _generate_examples(self, data_file, split):
        """Yield (index, example) pairs, one JSON object per line of *data_file*."""
        with open(data_file, encoding="utf-8") as f:
            for index, line in enumerate(f):
                row = json.loads(line)
                yield index, row
118
-
119
-
120
- def _cast_label(label):
121
- """Converts the label into the appropriate string version."""
122
- if isinstance(label, str):
123
- return label
124
- elif isinstance(label, bool):
125
- return "True" if label else "False"
126
- elif isinstance(label, int):
127
- assert label in (0, 1)
128
- return str(label)
129
- else:
130
- raise ValueError("Invalid label format.")
131
-
132
-
133
- def _get_record_entities(passage):
134
- """Returns the unique set of entities."""
135
- text = passage["text"]
136
- entity_spans = list()
137
- for entity in passage["entities"]:
138
- entity_text = text[entity["start"]: entity["end"] + 1]
139
- entity_spans.append({"text": entity_text, "start": entity["start"], "end": entity["end"] + 1})
140
- entity_spans = sorted(entity_spans, key=lambda e: e["start"]) # sort by start index
141
- entity_texts = set(e["text"] for e in entity_spans) # for backward compatability
142
- return entity_texts, entity_spans
143
-
144
-
145
- def _get_record_answers(qa):
146
- """Returns the unique set of answers."""
147
- if "answers" not in qa:
148
- return []
149
- answers = set()
150
- for answer in qa["answers"]:
151
- answers.add(answer["text"])
152
- return sorted(answers)
153
-
154
-
155
- def _get_task_name_from_data_url(data_url):
156
- return data_url.split("/")[-1].split(".")[0]