vickt committed
Commit 3632bc0
1 Parent(s): 41ea574

Upload model

Files changed (2)
  1. config.json +258 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,258 @@
+ {
+   "_name_or_path": "chpt_addi2b2/formal_roberta-large/output/checkpoint-66430",
+   "architectures": [
+     "RobertaModel"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "classifier_dropout": null,
+   "eos_token_id": 2,
+   "finetuning_task": "ner",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "B-AGE",
+     "1": "B-BIOID",
+     "2": "B-CITY",
+     "3": "B-COUNTRY",
+     "4": "B-DATE",
+     "5": "B-DEPARTMENT",
+     "6": "B-DEVICE",
+     "7": "B-DOCTOR",
+     "8": "B-DURATION",
+     "9": "B-EMAIL",
+     "10": "B-FAX",
+     "11": "B-HEALTHPLAN",
+     "12": "B-HOSPITAL",
+     "13": "B-IDNUM",
+     "14": "B-LOCATION-OTHER",
+     "15": "B-MEDICALRECORD",
+     "16": "B-ORGANIZATION",
+     "17": "B-PATIENT",
+     "18": "B-PHONE",
+     "19": "B-PROFESSION",
+     "20": "B-ROOM",
+     "21": "B-SET",
+     "22": "B-STATE",
+     "23": "B-STREET",
+     "24": "B-TIME",
+     "25": "B-URL",
+     "26": "B-USERNAME",
+     "27": "B-ZIP",
+     "28": "I-AGE",
+     "29": "I-BIOID",
+     "30": "I-CITY",
+     "31": "I-COUNTRY",
+     "32": "I-DATE",
+     "33": "I-DEPARTMENT",
+     "34": "I-DEVICE",
+     "35": "I-DOCTOR",
+     "36": "I-DURATION",
+     "37": "I-EMAIL",
+     "38": "I-FAX",
+     "39": "I-HEALTHPLAN",
+     "40": "I-HOSPITAL",
+     "41": "I-IDNUM",
+     "42": "I-LOCATION-OTHER",
+     "43": "I-MEDICALRECORD",
+     "44": "I-ORGANIZATION",
+     "45": "I-PATIENT",
+     "46": "I-PHONE",
+     "47": "I-PROFESSION",
+     "48": "I-ROOM",
+     "49": "I-SET",
+     "50": "I-STATE",
+     "51": "I-STREET",
+     "52": "I-TIME",
+     "53": "I-URL",
+     "54": "I-USERNAME",
+     "55": "I-ZIP",
+     "56": "L-AGE",
+     "57": "L-BIOID",
+     "58": "L-CITY",
+     "59": "L-COUNTRY",
+     "60": "L-DATE",
+     "61": "L-DEPARTMENT",
+     "62": "L-DEVICE",
+     "63": "L-DOCTOR",
+     "64": "L-DURATION",
+     "65": "L-EMAIL",
+     "66": "L-FAX",
+     "67": "L-HEALTHPLAN",
+     "68": "L-HOSPITAL",
+     "69": "L-IDNUM",
+     "70": "L-LOCATION-OTHER",
+     "71": "L-MEDICALRECORD",
+     "72": "L-ORGANIZATION",
+     "73": "L-PATIENT",
+     "74": "L-PHONE",
+     "75": "L-PROFESSION",
+     "76": "L-ROOM",
+     "77": "L-SET",
+     "78": "L-STATE",
+     "79": "L-STREET",
+     "80": "L-TIME",
+     "81": "L-URL",
+     "82": "L-USERNAME",
+     "83": "L-ZIP",
+     "84": "O",
+     "85": "U-AGE",
+     "86": "U-BIOID",
+     "87": "U-CITY",
+     "88": "U-COUNTRY",
+     "89": "U-DATE",
+     "90": "U-DEPARTMENT",
+     "91": "U-DEVICE",
+     "92": "U-DOCTOR",
+     "93": "U-DURATION",
+     "94": "U-EMAIL",
+     "95": "U-FAX",
+     "96": "U-HEALTHPLAN",
+     "97": "U-HOSPITAL",
+     "98": "U-IDNUM",
+     "99": "U-LOCATION-OTHER",
+     "100": "U-MEDICALRECORD",
+     "101": "U-ORGANIZATION",
+     "102": "U-PATIENT",
+     "103": "U-PHONE",
+     "104": "U-PROFESSION",
+     "105": "U-ROOM",
+     "106": "U-SET",
+     "107": "U-STATE",
+     "108": "U-STREET",
+     "109": "U-TIME",
+     "110": "U-URL",
+     "111": "U-USERNAME",
+     "112": "U-ZIP"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "B-AGE": 0,
+     "B-BIOID": 1,
+     "B-CITY": 2,
+     "B-COUNTRY": 3,
+     "B-DATE": 4,
+     "B-DEPARTMENT": 5,
+     "B-DEVICE": 6,
+     "B-DOCTOR": 7,
+     "B-DURATION": 8,
+     "B-EMAIL": 9,
+     "B-FAX": 10,
+     "B-HEALTHPLAN": 11,
+     "B-HOSPITAL": 12,
+     "B-IDNUM": 13,
+     "B-LOCATION-OTHER": 14,
+     "B-MEDICALRECORD": 15,
+     "B-ORGANIZATION": 16,
+     "B-PATIENT": 17,
+     "B-PHONE": 18,
+     "B-PROFESSION": 19,
+     "B-ROOM": 20,
+     "B-SET": 21,
+     "B-STATE": 22,
+     "B-STREET": 23,
+     "B-TIME": 24,
+     "B-URL": 25,
+     "B-USERNAME": 26,
+     "B-ZIP": 27,
+     "I-AGE": 28,
+     "I-BIOID": 29,
+     "I-CITY": 30,
+     "I-COUNTRY": 31,
+     "I-DATE": 32,
+     "I-DEPARTMENT": 33,
+     "I-DEVICE": 34,
+     "I-DOCTOR": 35,
+     "I-DURATION": 36,
+     "I-EMAIL": 37,
+     "I-FAX": 38,
+     "I-HEALTHPLAN": 39,
+     "I-HOSPITAL": 40,
+     "I-IDNUM": 41,
+     "I-LOCATION-OTHER": 42,
+     "I-MEDICALRECORD": 43,
+     "I-ORGANIZATION": 44,
+     "I-PATIENT": 45,
+     "I-PHONE": 46,
+     "I-PROFESSION": 47,
+     "I-ROOM": 48,
+     "I-SET": 49,
+     "I-STATE": 50,
+     "I-STREET": 51,
+     "I-TIME": 52,
+     "I-URL": 53,
+     "I-USERNAME": 54,
+     "I-ZIP": 55,
+     "L-AGE": 56,
+     "L-BIOID": 57,
+     "L-CITY": 58,
+     "L-COUNTRY": 59,
+     "L-DATE": 60,
+     "L-DEPARTMENT": 61,
+     "L-DEVICE": 62,
+     "L-DOCTOR": 63,
+     "L-DURATION": 64,
+     "L-EMAIL": 65,
+     "L-FAX": 66,
+     "L-HEALTHPLAN": 67,
+     "L-HOSPITAL": 68,
+     "L-IDNUM": 69,
+     "L-LOCATION-OTHER": 70,
+     "L-MEDICALRECORD": 71,
+     "L-ORGANIZATION": 72,
+     "L-PATIENT": 73,
+     "L-PHONE": 74,
+     "L-PROFESSION": 75,
+     "L-ROOM": 76,
+     "L-SET": 77,
+     "L-STATE": 78,
+     "L-STREET": 79,
+     "L-TIME": 80,
+     "L-URL": 81,
+     "L-USERNAME": 82,
+     "L-ZIP": 83,
+     "O": 84,
+     "U-AGE": 85,
+     "U-BIOID": 86,
+     "U-CITY": 87,
+     "U-COUNTRY": 88,
+     "U-DATE": 89,
+     "U-DEPARTMENT": 90,
+     "U-DEVICE": 91,
+     "U-DOCTOR": 92,
+     "U-DURATION": 93,
+     "U-EMAIL": 94,
+     "U-FAX": 95,
+     "U-HEALTHPLAN": 96,
+     "U-HOSPITAL": 97,
+     "U-IDNUM": 98,
+     "U-LOCATION-OTHER": 99,
+     "U-MEDICALRECORD": 100,
+     "U-ORGANIZATION": 101,
+     "U-PATIENT": 102,
+     "U-PHONE": 103,
+     "U-PROFESSION": 104,
+     "U-ROOM": 105,
+     "U-SET": 106,
+     "U-STATE": 107,
+     "U-STREET": 108,
+     "U-TIME": 109,
+     "U-URL": 110,
+     "U-USERNAME": 111,
+     "U-ZIP": 112
+   },
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 514,
+   "model_type": "roberta",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.30.2",
+   "type_vocab_size": 1,
+   "use_cache": true,
+   "vocab_size": 50265
+ }
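
The config above describes a roberta-large-sized encoder (24 layers, hidden size 1024, 16 attention heads) fine-tuned for NER: its id2label/label2id maps follow a BILOU tagging scheme (B-, I-, L-, U- prefixes plus O) over 28 PHI categories, 113 labels in total, and the checkpoint path suggests i2b2 de-identification data. Below is a minimal sketch, assuming a local clone of this repository, of inspecting the config with the transformers library; note that "architectures" lists only RobertaModel, so the uploaded weights may contain just the encoder rather than a token-classification head.

```python
# Minimal sketch (not part of this commit): inspect the uploaded config with
# Hugging Face transformers. "./local-clone" is a placeholder for a checkout
# of this repository containing config.json and pytorch_model.bin.
from transformers import AutoConfig, AutoModel

config = AutoConfig.from_pretrained("./local-clone")
print(config.model_type)   # "roberta"
print(config.num_labels)   # 113 = 28 PHI types x {B, I, L, U} + "O"
print(config.id2label[84]) # "O"

# "architectures" lists RobertaModel, so AutoModel loads the bare encoder;
# predicting the labels above requires attaching a token-classification head
# (e.g. AutoModelForTokenClassification), which may be freshly initialized if
# the checkpoint stores only encoder weights.
encoder = AutoModel.from_pretrained("./local-clone")
```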
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20f6fb698fbd2dd1f560ce9ce6eb61b231b86a85553a02a5fff1e3517b159c97
+ size 1421566897
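
pytorch_model.bin is tracked with Git LFS, so the diff above shows only the pointer file (spec version, sha256 oid, and size in bytes); the roughly 1.4 GB of weights are fetched separately. A minimal sketch, assuming the real file has been downloaded into the working directory, of verifying a download against this pointer:

```python
# Minimal sketch (not part of this commit): check that a downloaded
# pytorch_model.bin matches the Git LFS pointer above (size and sha256 oid).
import hashlib
from pathlib import Path

expected_sha256 = "20f6fb698fbd2dd1f560ce9ce6eb61b231b86a85553a02a5fff1e3517b159c97"
expected_size = 1421566897  # bytes, from the pointer file

path = Path("pytorch_model.bin")  # assumption: file fetched next to this script
assert path.stat().st_size == expected_size, "size mismatch"

h = hashlib.sha256()
with path.open("rb") as f:
    # Hash in 1 MiB chunks to avoid loading the whole file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == expected_sha256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```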