lawinsiderdocs committed on
Commit
4f661d8
1 Parent(s): 81f2fcc

Add dataset script

Browse files
Files changed (1) hide show
  1. uk_legal_ner.py +125 -0
uk_legal_ner.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2020 HuggingFace Datasets Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ # Lint as: python3
17
+
18
+ import os
19
+
20
+ import datasets
21
+
22
+
23
+ logger = datasets.logging.get_logger(__name__)
24
+
25
+
26
# TODO(review): citation and description are empty placeholders — fill these
# in before publishing the dataset.
_CITATION = """\
"""

_DESCRIPTION = """\
"""

# NOTE(review): "https://../.zip" is a placeholder, not a valid download URL —
# replace with the real archive location before use.
_URL = "https://../.zip"
# Split files expected inside the extracted archive. The names end in .json,
# but _generate_examples below parses CoNLL-style text — confirm the format.
_TRAINING_FILE = "train.json"
_DEV_FILE = "valid.json"
_TEST_FILE = "test.json"
36
+
37
+
38
class UKlegalNERConfig(datasets.BuilderConfig):
    """BuilderConfig for the uk_legal_ner dataset."""

    def __init__(self, **kwargs):
        """Create a uk_legal_ner config.

        Args:
            **kwargs: keyword arguments forwarded to ``datasets.BuilderConfig``.
        """
        # Zero-argument super() — equivalent under Python 3 (file is
        # "Lint as: python3").
        super().__init__(**kwargs)
49
+
50
class UKlegalNER(datasets.GeneratorBasedBuilder):
    """uk_legal_ner dataset: token-level NER over UK legal contract text."""

    BUILDER_CONFIGS = [
        UKlegalNERConfig(
            name="uk_legal_ner",
            version=datasets.Version("0.0.1"),
            description="uk-legal-ner dataset",
        ),
    ]

    def _info(self):
        """Return dataset metadata: each example has an id, tokens, and per-token NER tags."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "ner_tags": datasets.Sequence(
                        datasets.features.ClassLabel(
                            names=[
                                "O",
                                "CLAUSE_NUMBER",
                                "CLAUSE_TITLE",
                                "CONTRACT_TYPE",
                                "DEFINITION_TITLE",
                            ]
                        )
                    ),
                }
            ),
            supervised_keys=None,
            homepage="",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download and extract the archive, then return train/validation/test split generators."""
        downloaded_file = dl_manager.download_and_extract(_URL)
        data_files = {
            "train": os.path.join(downloaded_file, _TRAINING_FILE),
            "dev": os.path.join(downloaded_file, _DEV_FILE),
            "test": os.path.join(downloaded_file, _TEST_FILE),
        }

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_files["train"]}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": data_files["dev"]}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": data_files["test"]}),
        ]

    def _generate_examples(self, filepath):
        """Yield (guid, example) pairs parsed from a CoNLL-style split file.

        Blank lines and ``-DOCSTART-`` lines separate examples; every other
        line is space-separated columns with the token in column 1 and the
        NER tag in column 4 (conll2003 layout).

        Args:
            filepath: path to the split file to parse.

        Yields:
            Tuples of (int guid, dict with keys "id", "tokens", "ner_tags").
        """
        logger.info("⏳ Generating examples from = %s", filepath)
        with open(filepath, encoding="utf-8") as f:
            guid = 0
            tokens = []
            ner_tags = []
            for line in f:
                # Fix: the original tested `line == ""`, which can never match
                # while iterating a text file (lines keep their trailing
                # newline). `not line.strip()` covers "" and "\n" and also
                # treats whitespace-only lines as example boundaries instead
                # of letting them fall through and append empty tokens.
                if line.startswith("-DOCSTART-") or not line.strip():
                    if tokens:
                        yield guid, {
                            "id": str(guid),
                            "tokens": tokens,
                            "ner_tags": ner_tags,
                        }
                        guid += 1
                        tokens = []
                        ner_tags = []
                else:
                    # conll2003-style: columns are space separated; the NER
                    # tag is assumed to sit in column 4 (splits[3]) — TODO
                    # confirm against the actual data files, whose names
                    # (*.json) suggest a different format.
                    splits = line.split(" ")
                    tokens.append(splits[0])
                    ner_tags.append(splits[3].rstrip())
            # Flush the last example when the file does not end with a blank line.
            if tokens:
                yield guid, {
                    "id": str(guid),
                    "tokens": tokens,
                    "ner_tags": ner_tags,
                }