DipakBundheliya committed
Commit 4ec75e3
1 Parent(s): 4ae9f75

Upload file to convert a CoNLL file to NER input text

Files changed (1)
  1. convert_text_for_ner.py +128 -0
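
The script added in this commit reads CoNLL-style text in which each non-empty line carries one token and its tag separated by a single space, and a blank line ends a sentence. As a rough illustration, here is a minimal, self-contained sketch of that grouping step; the sample tokens are made up, the tags are taken from the script's label set, and the real logic lives in _generate_examples in the file below.

# Hypothetical sample of the expected layout: "token<space>tag", blank line = sentence break.
sample = """ACME B-ORG
Logistics I-ORG
New B-LOCATION
York I-LOCATION

01/02/2024 B-DATE
"""

sentences = []
tokens, tags = [], []
for line in sample.splitlines():
    line = line.rstrip()
    if line:
        token, tag = line.split(" ")  # same single-space split the loading script performs
        tokens.append(token)
        tags.append(tag)
    elif tokens:
        sentences.append({"tokens": tokens, "ner_tags": tags})
        tokens, tags = [], []
if tokens:  # flush the final sentence, as the script does after its read loop
    sentences.append({"tokens": tokens, "ner_tags": tags})

print(sentences)  # two sentences: one ORG + LOCATION sentence, one DATE-only sentence
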
convert_text_for_ner.py ADDED
@@ -0,0 +1,128 @@
# coding=utf-8
# Copyright 2024 HuggingFace Datasets Authors.
# Lint as: python3
"""The Shipping Label Dataset."""

import datasets

logger = datasets.logging.get_logger(__name__)


_CITATION = """
"""

_DESCRIPTION = """
The goal of this task is to provide a dataset for named entity recognition."""

_URL = "https://raw.githubusercontent.com/SanghaviHarshPankajkumar/shipping_label_project/main/NER/data/"
_TRAINING_FILE = "train.txt"
_VAL_FILE = "val.txt"
_TEST_FILE = "test.txt"


class ShippingLabelsConfig(datasets.BuilderConfig):
    """BuilderConfig for the Shipping Label NER dataset."""

    def __init__(self, **kwargs):
        """BuilderConfig for Shipping Label data.

        Args:
            **kwargs: keyword arguments forwarded to super.
        """
        super(ShippingLabelsConfig, self).__init__(**kwargs)


class ShippingLabelNer(datasets.GeneratorBasedBuilder):
    """Shipping Label dataset for NER."""

    BUILDER_CONFIGS = [
        ShippingLabelsConfig(
            name="shipping_label_ner",
            version=datasets.Version("1.0.0"),
            description="Shipping Label Dataset for NER",
        ),
    ]

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "ner_tags": datasets.Sequence(
                        datasets.features.ClassLabel(
                            names=[
                                "O",
                                "B-GCNUM",
                                "I-GCNUM",
                                "B-BGNUM",
                                "I-BGNUM",
                                "B-DATE",
                                "I-DATE",
                                "B-ORG",
                                "I-ORG",
                                "B-LOCATION",
                                "I-LOCATION",
                                "B-NAME",
                                "I-NAME",
                                "B-BARCODE",
                                "I-BARCODE",
                            ]
                        )
                    ),
                }
            ),
            supervised_keys=None,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        urls_to_download = {
            "train": f"{_URL}{_TRAINING_FILE}",
            "test": f"{_URL}{_TEST_FILE}",
            "val": f"{_URL}{_VAL_FILE}",
        }
        downloaded_files = dl_manager.download_and_extract(urls_to_download)

        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": downloaded_files["train"]}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": downloaded_files["test"]}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": downloaded_files["val"]}),
        ]

    def _generate_examples(self, filepath):
        """Yields (id, example) pairs parsed from a CoNLL-style file."""
        logger.info("⏳ Generating examples from = %s", filepath)
        with open(filepath, encoding="utf-8") as f:
            current_tokens = []
            current_labels = []
            sentence_counter = 0
            for row in f:
                row = row.rstrip()
                if row:
                    # Each non-empty line holds one token and its label, separated by a single space.
                    token, label = row.split(" ")
                    current_tokens.append(token)
                    current_labels.append(label)
                else:
                    # An empty line marks the end of a sentence.
                    if not current_tokens:
                        # Consecutive empty lines would otherwise produce empty sentences.
                        continue
                    assert len(current_tokens) == len(current_labels), "💔 mismatch between number of tokens and labels"
                    sentence = (
                        sentence_counter,
                        {
                            "id": str(sentence_counter),
                            "tokens": current_tokens,
                            "ner_tags": current_labels,
                        },
                    )
                    sentence_counter += 1
                    current_tokens = []
                    current_labels = []
                    yield sentence
            # Don't forget the last sentence in the dataset 🧐
            if current_tokens:
                yield sentence_counter, {
                    "id": str(sentence_counter),
                    "tokens": current_tokens,
                    "ner_tags": current_labels,
                }
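
For reference, once the script is saved (e.g. next to your code as convert_text_for_ner.py), it can be consumed through datasets.load_dataset. This is a hypothetical usage sketch, not part of the commit; depending on your datasets version you may also need to pass trust_remote_code=True to run a script-based loader.

from datasets import load_dataset

# The loader downloads the train/val/test files from the GitHub URL defined in the script.
ds = load_dataset("convert_text_for_ner.py")
print(ds)  # DatasetDict with train, test and validation splits

example = ds["train"][0]
print(example["tokens"])    # list of token strings for the first sentence
print(example["ner_tags"])  # integer class ids

# Map the class ids back to the BIO label names declared in _info().
label_names = ds["train"].features["ner_tags"].feature.names
print([label_names[i] for i in example["ner_tags"]])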