hsohn3 committed
Commit: 925e60b
Parent: 1177c46

Training in progress epoch 0

Files changed (7)
  1. README.md +52 -0
  2. config.json +25 -0
  3. special_tokens_map.json +7 -0
  4. tf_model.h5 +3 -0
  5. tokenizer.json +486 -0
  6. tokenizer_config.json +13 -0
  7. vocab.json +1 -0
README.md ADDED
@@ -0,0 +1,52 @@
+ ---
+ license: apache-2.0
+ tags:
+ - generated_from_keras_callback
+ model-index:
+ - name: hsohn3/mayo-bert-visit-uncased-wordlevel-block512-batch4-ep10
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information Keras had access to. You should
+ probably proofread and complete it, then remove this comment. -->
+
+ # hsohn3/mayo-bert-visit-uncased-wordlevel-block512-batch4-ep10
+
+ This model is a fine-tuned version of [bert-base-uncased](https://huggingface.co/bert-base-uncased) on an unknown dataset.
+ It achieves the following results during training:
+ - Train Loss: 4.1298
+ - Epoch: 0
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training (a reconstruction sketch follows this list):
+ - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': 2e-05, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False, 'weight_decay_rate': 0.01}
+ - training_precision: float32
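The optimizer dictionary above maps onto the `AdamWeightDecay` class shipped with the TensorFlow side of Transformers. The snippet below is a minimal sketch that rebuilds it from the logged values; the original training script is not part of this commit, so treat it as a reconstruction rather than the exact code that was run.

```python
from transformers import AdamWeightDecay

# Rebuild the optimizer from the hyperparameters logged above.
# (Reconstruction only; the actual training script is not included here.)
optimizer = AdamWeightDecay(
    learning_rate=2e-05,
    weight_decay_rate=0.01,
    beta_1=0.9,
    beta_2=0.999,
    epsilon=1e-07,
    amsgrad=False,
)
```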
+
+ ### Training results
+
+ | Train Loss | Epoch |
+ |:----------:|:-----:|
+ | 4.1298 | 0 |
+
+
+ ### Framework versions
+
+ - Transformers 4.20.1
+ - TensorFlow 2.8.2
+ - Datasets 2.3.2
+ - Tokenizers 0.12.1
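Since the repository ships TensorFlow weights (`tf_model.h5`) and the config below declares a `BertForMaskedLM` head, a minimal loading sketch could look like the following. The input string is an invented example in the visit-token format defined in `tokenizer.json` further down, not a sample from the (undisclosed) training data.

```python
import tensorflow as tf
from transformers import AutoTokenizer, TFAutoModelForMaskedLM

repo_id = "hsohn3/mayo-bert-visit-uncased-wordlevel-block512-batch4-ep10"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = TFAutoModelForMaskedLM.from_pretrained(repo_id)

# Invented "measurement=level" sequence with one masked slot.
text = "hr=n rr=n sbp=n [MASK] temp=n"
inputs = tokenizer(text, return_tensors="tf")

logits = model(**inputs).logits
mask_pos = int(tf.where(inputs["input_ids"][0] == tokenizer.mask_token_id)[0, 0])
predicted_id = int(tf.argmax(logits[0, mask_pos]))
print(tokenizer.decode([predicted_id]))
```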
config.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "_name_or_path": "bert-base-uncased",
+ "architectures": [
+ "BertForMaskedLM"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "position_embedding_type": "absolute",
+ "transformers_version": "4.20.1",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 30522
+ }
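For reference, this is the stock `bert-base-uncased` architecture (12 layers, hidden size 768, 512 position embeddings, matching the `block512` in the repository name), with the vocabulary size left at the inherited 30522 even though the word-level tokenizer below defines far fewer entries. A small sketch for inspecting the file locally, assuming it has been saved as `config.json`:

```python
from transformers import BertConfig

# Load the configuration shown above (local path is an assumption).
config = BertConfig.from_json_file("config.json")

print(config.model_type)               # bert
print(config.num_hidden_layers)        # 12
print(config.max_position_embeddings)  # 512
print(config.vocab_size)               # 30522, inherited from bert-base-uncased
```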
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cff3b5264707b6324b217cacd2391512847524d6016f1feaeea794ef78f69f52
+ size 533687680
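This blob is a Git LFS pointer rather than the weights themselves; the actual `tf_model.h5` (about 534 MB, with the SHA-256 listed above) lives on the LFS backend. A minimal sketch for fetching the real file with the `huggingface_hub` client:

```python
from huggingface_hub import hf_hub_download

# Resolves the LFS pointer and downloads the ~534 MB TensorFlow checkpoint
# into the local cache, returning the path to the actual tf_model.h5.
weights_path = hf_hub_download(
    repo_id="hsohn3/mayo-bert-visit-uncased-wordlevel-block512-batch4-ep10",
    filename="tf_model.h5",
)
print(weights_path)
```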
tokenizer.json ADDED
@@ -0,0 +1,486 @@
+ {
+ "version": "1.0",
+ "truncation": null,
+ "padding": null,
+ "added_tokens": [
+ {
+ "id": 0,
+ "content": "[UNK]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 1,
+ "content": "[PAD]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 2,
+ "content": "[CLS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 3,
+ "content": "[SEP]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 4,
+ "content": "[MASK]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ }
+ ],
+ "normalizer": {
+ "type": "BertNormalizer",
+ "clean_text": true,
+ "handle_chinese_chars": true,
+ "strip_accents": null,
+ "lowercase": true
+ },
+ "pre_tokenizer": {
+ "type": "WhitespaceSplit"
+ },
+ "post_processor": {
+ "type": "TemplateProcessing",
+ "single": [
+ {
+ "SpecialToken": {
+ "id": "[CLS]",
+ "type_id": 0
+ }
+ },
+ {
+ "Sequence": {
+ "id": "A",
+ "type_id": 0
+ }
+ },
+ {
+ "SpecialToken": {
+ "id": "[SEP]",
+ "type_id": 0
+ }
+ }
+ ],
+ "pair": [
+ {
+ "SpecialToken": {
+ "id": "[CLS]",
+ "type_id": 0
+ }
+ },
+ {
+ "Sequence": {
+ "id": "A",
+ "type_id": 0
+ }
+ },
+ {
+ "SpecialToken": {
+ "id": "[SEP]",
+ "type_id": 0
+ }
+ },
+ {
+ "Sequence": {
+ "id": "B",
+ "type_id": 1
+ }
+ },
+ {
+ "SpecialToken": {
+ "id": "[SEP]",
+ "type_id": 1
+ }
+ }
+ ],
+ "special_tokens": {
+ "[CLS]": {
+ "id": "[CLS]",
+ "ids": [
+ 2
+ ],
+ "tokens": [
+ "[CLS]"
+ ]
+ },
+ "[SEP]": {
+ "id": "[SEP]",
+ "ids": [
+ 3
+ ],
+ "tokens": [
+ "[SEP]"
+ ]
+ }
+ }
+ },
+ "decoder": {
+ "type": "WordPiece",
+ "prefix": "##",
+ "cleanup": true
+ },
+ "model": {
+ "type": "WordLevel",
+ "vocab": {
+ "[UNK]": 0,
+ "[PAD]": 1,
+ "[CLS]": 2,
+ "[SEP]": 3,
+ "[MASK]": 4,
+ "hr=n": 5,
+ "rr=n": 6,
+ "sbp=n": 7,
+ "spo2=n": 8,
+ "temp=n": 9,
+ "fio2=n": 10,
+ "map=n": 11,
+ "dbp=n": 12,
+ "hr=h": 13,
+ "hr=vh": 14,
+ "map=h": 15,
+ "dbp=h": 16,
+ "sbp=h": 17,
+ "hr=l": 18,
+ "map=vh": 19,
+ "dbp=vh": 20,
+ "temp=h": 21,
+ "sbp=vh": 22,
+ "spo2=vh": 23,
+ "spo2=n>n": 24,
+ "spo2=l": 25,
+ "rr=vh": 26,
+ "of=n": 27,
+ "hr=n>n": 28,
+ "temp=vh": 29,
+ "rr=n>n": 30,
+ "dbp=vl": 31,
+ "fio2=h": 32,
+ "fio2=vh": 33,
+ "rr=h": 34,
+ "map=vl": 35,
+ "dbp=l": 36,
+ "sbp=vl": 37,
+ "spo2=n>l": 38,
+ "map=l": 39,
+ "spo2=l>n": 40,
+ "hr=vl": 41,
+ "map=n>n": 42,
+ "sbp=l": 43,
+ "sbp=n>n": 44,
+ "dbp=n>n": 45,
+ "plt=n": 46,
+ "hr=n>l": 47,
+ "hr=l>n": 48,
+ "cr=n": 49,
+ "wbc=n": 50,
+ "rr=n>h": 51,
+ "bun=n": 52,
+ "rr=h>n": 53,
+ "hr=h>n": 54,
+ "hr=n>h": 55,
+ "spo2=vl": 56,
+ "of=h": 57,
+ "spo2=h>n": 58,
+ "spo2=n>h": 59,
+ "hr=l>l": 60,
+ "spo2=h": 61,
+ "rr=h>h": 62,
+ "map=l>n": 63,
+ "map=n>l": 64,
+ "spo2=l>l": 65,
+ "sbp=h>n": 66,
+ "map=h>n": 67,
+ "dbp=n>l": 68,
+ "dbp=l>n": 69,
+ "spo2=vh>vh": 70,
+ "sbp=l>n": 71,
+ "hr=h>h": 72,
+ "dbp=h>n": 73,
+ "sbp=n>l": 74,
+ "of=vh": 75,
+ "map=n>h": 76,
+ "sbp=n>h": 77,
+ "dbp=n>h": 78,
+ "spo2=n>vh": 79,
+ "spo2=vh>n": 80,
+ "spo2=h>vh": 81,
+ "spo2=vh>h": 82,
+ "spo2=vl>l": 83,
+ "rr=l>n": 84,
+ "spo2=l>vl": 85,
+ "spo2=h>h": 86,
+ "hr=vl>l": 87,
+ "hr=l>vl": 88,
+ "rr=n>l": 89,
+ "of=l": 90,
+ "spo2=vl>n": 91,
+ "temp=l": 92,
+ "spo2=n>vl": 93,
+ "rr=h>vh": 94,
+ "rr=l": 95,
+ "rr=vh>h": 96,
+ "hr=vl>vl": 97,
+ "dbp=l>l": 98,
+ "hr=vh>h": 99,
+ "map=l>l": 100,
+ "sbp=vh>h": 101,
+ "plt=l": 102,
+ "map=vh>h": 103,
+ "hr=h>vh": 104,
+ "bun=h": 105,
+ "rr=vl>n": 106,
+ "wbc=h": 107,
+ "dbp=vh>h": 108,
+ "sbp=h>h": 109,
+ "sbp=h>vh": 110,
+ "map=h>h": 111,
+ "sbp=l>l": 112,
+ "map=h>vh": 113,
+ "dbp=vl>l": 114,
+ "plt=h": 115,
+ "dbp=vh>n": 116,
+ "bun=l": 117,
+ "dbp=l>vl": 118,
+ "dbp=h>h": 119,
+ "dbp=h>vh": 120,
+ "spo2=vl>vl": 121,
+ "wbc=l": 122,
+ "map=vl>l": 123,
+ "rr=vl": 124,
+ "map=vh>n": 125,
+ "cr=h": 126,
+ "sbp=vh>n": 127,
+ "rr=n>vl": 128,
+ "rr=vh>vh": 129,
+ "map=l>vl": 130,
+ "sbp=vl>l": 131,
+ "dbp=n>vh": 132,
+ "bun=vh": 133,
+ "rr=l>l": 134,
+ "wbc=vh": 135,
+ "hr=vl>n": 136,
+ "sbp=l>vl": 137,
+ "rr=vl>l": 138,
+ "sbp=n>vh": 139,
+ "lt=n": 140,
+ "map=n>vh": 141,
+ "map=vl>n": 142,
+ "hr=vh>vh": 143,
+ "map=n>vl": 144,
+ "hr=n>vl": 145,
+ "plt=vl": 146,
+ "rr=vl>vl": 147,
+ "temp=vl": 148,
+ "br=n": 149,
+ "dbp=vl>n": 150,
+ "dbp=n>vl": 151,
+ "dbp=vl>vl": 152,
+ "cr=vh": 153,
+ "sbp=vl>n": 154,
+ "rr=l>vl": 155,
+ "sbp=vh>vh": 156,
+ "sbp=n>vl": 157,
+ "plt=vh": 158,
+ "bun=vl": 159,
+ "wbc=vl": 160,
+ "dbp=vh>vh": 161,
+ "map=vh>vh": 162,
+ "rr=vh>n": 163,
+ "rr=n>vh": 164,
+ "map=vl>vl": 165,
+ "cr=l": 166,
+ "sbp=vl>vl": 167,
+ "fio2=n>h": 168,
+ "cr=vl": 169,
+ "temp=n>h": 170,
+ "temp=l>n": 171,
+ "hr=n>vh": 172,
+ "hr=vh>n": 173,
+ "temp=h>n": 174,
+ "spo2=vh>l": 175,
+ "spo2=h>l": 176,
+ "spo2=l>vh": 177,
+ "spo2=l>h": 178,
+ "spo2=vl>vh": 179,
+ "temp=h>vh": 180,
+ "spo2=vh>vl": 181,
+ "lt=h": 182,
+ "map=h>l": 183,
+ "temp=n>l": 184,
+ "spo2=vl>h": 185,
+ "dbp=h>l": 186,
+ "sbp=h>l": 187,
+ "rr=h>l": 188,
+ "rr=l>h": 189,
+ "temp=vh>h": 190,
+ "temp=vl>l": 191,
+ "map=l>h": 192,
+ "br=h": 193,
+ "bd=n": 194,
+ "spo2=h>vl": 195,
+ "lt=l": 196,
+ "hr=l>h": 197,
+ "dbp=l>h": 198,
+ "rr=vl>h": 199,
+ "sbp=l>h": 200,
+ "temp=n>n": 201,
+ "lt=vh": 202,
+ "dbp=vh>l": 203,
+ "rr=h>vl": 204,
+ "map=vh>l": 205,
+ "temp=l>vl": 206,
+ "temp=h>h": 207,
+ "hr=h>l": 208,
+ "br=vh": 209,
+ "rr=vh>vl": 210,
+ "hr=vl>h": 211,
+ "rr=vl>vh": 212,
+ "crp=n": 213,
+ "lt=vl": 214,
+ "sbp=vh>l": 215,
+ "br=l": 216,
+ "fio2=h>n": 217,
+ "br=vl": 218,
+ "hr=h>vl": 219,
+ "temp=vl>n": 220,
+ "map=h>vl": 221,
+ "dbp=l>vh": 222,
+ "dbp=h>vl": 223,
+ "map=l>vh": 224,
+ "temp=l>l": 225,
+ "of=vl": 226,
+ "map=vh>vl": 227,
+ "hr=vl>vh": 228,
+ "dbp=vh>vl": 229,
+ "dbp=vl>h": 230,
+ "map=vl>h": 231,
+ "hr=l>vh": 232,
+ "sbp=l>vh": 233,
+ "sbp=h>vl": 234,
+ "crp=l": 235,
+ "hr=vh>l": 236,
+ "sr=n": 237,
+ "sbp=vl>h": 238,
+ "hr=vh>vl": 239,
+ "sbp=vh>vl": 240,
+ "temp=n>vh": 241,
+ "bd=l": 242,
+ "dbp=vl>vh": 243,
+ "rr=vh>l": 244,
+ "temp=vh>n": 245,
+ "bd=h": 246,
+ "temp=vh>vh": 247,
+ "map=vl>vh": 248,
+ "fio2=vh>h": 249,
+ "bd=vh": 250,
+ "temp=n>vl": 251,
+ "fio2=n>n": 252,
+ "crp=h": 253,
+ "rr=l>vh": 254,
+ "of=n>l": 255,
+ "of=vh>h": 256,
+ "fio2=h>vh": 257,
+ "temp=l>h": 258,
+ "sbp=vl>vh": 259,
+ "fio2=n>vh": 260,
+ "temp=vl>vl": 261,
+ "of=h>n": 262,
+ "of=h>vh": 263,
+ "bd=vl": 264,
+ "fio2=h>h": 265,
+ "crp=vh": 266,
+ "of=n>h": 267,
+ "fio2=vh>n": 268,
+ "sr=l": 269,
+ "of=l>n": 270,
+ "temp=vl>h": 271,
+ "temp=h>l": 272,
+ "sr=h": 273,
+ "sr=vl": 274,
+ "sr=vh": 275,
+ "of=vh>n": 276,
+ "pct=n": 277,
+ "of=n>vh": 278,
+ "temp=h>vl": 279,
+ "temp=vh>vl": 280,
+ "temp=vh>l": 281,
+ "temp=l>vh": 282,
+ "temp=vl>vh": 283,
+ "fio2=vh>vh": 284,
+ "of=n>n": 285,
+ "fio2=vl": 286,
+ "plt=l>vl": 287,
+ "of=h>h": 288,
+ "of=h>l": 289,
+ "pct=h": 290,
+ "plt=vl>l": 291,
+ "of=l>l": 292,
+ "of=vh>vh": 293,
+ "of=l>vl": 294,
+ "of=vl>n": 295,
+ "pct=vh": 296,
+ "plt=l>n": 297,
+ "plt=n>l": 298,
+ "plt=vl>n": 299,
+ "of=vl>l": 300,
+ "of=l>h": 301,
+ "of=l>vh": 302,
+ "of=n>vl": 303,
+ "bd=l>n": 304,
+ "bun=l>n": 305,
+ "bun=n>h": 306,
+ "cr=n>h": 307,
+ "fio2=n>vl": 308,
+ "fio2=vh>vl": 309,
+ "lt=h>n": 310,
+ "lt=n>h": 311,
+ "bd=vl>l": 312,
+ "br=n>n": 313,
+ "bun=h>vh": 314,
+ "bun=n>l": 315,
+ "cr=h>vh": 316,
+ "cr=n>l": 317,
+ "cr=n>vl": 318,
+ "lt=l>n": 319,
+ "lt=n>l": 320,
+ "lt=n>vl": 321,
+ "lt=vh>h": 322,
+ "lt=vl>l": 323,
+ "of=h>vl": 324,
+ "of=vh>l": 325,
+ "of=vh>vl": 326,
+ "of=vl>h": 327,
+ "plt=h>n": 328,
+ "plt=l>l": 329,
+ "wbc=h>n": 330,
+ "wbc=h>vh": 331,
+ "wbc=l>vl": 332,
+ "wbc=n>h": 333,
+ "wbc=n>l": 334,
+ "wbc=vh>h": 335,
+ "wbc=vl>l": 336,
+ "wbc=vl>vh": 337
+ },
+ "unk_token": "[UNK]"
+ }
+ }
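Taken together, the file defines the full tokenization pipeline: a `BertNormalizer` that lowercases (no accent stripping), a plain `WhitespaceSplit` pre-tokenizer, a `WordLevel` model over the 338-entry vocabulary with `[UNK]` as the fallback, and a BERT-style `TemplateProcessing` post-processor that wraps sequences in `[CLS]`/`[SEP]`. A minimal sketch of using it directly with the `tokenizers` library, on an invented input:

```python
from tokenizers import Tokenizer

# Load the pipeline defined above (local path is an assumption).
tok = Tokenizer.from_file("tokenizer.json")

# Whitespace-split, lowercase, then look each word up in the word-level vocab;
# anything outside the 338 known tokens falls back to [UNK].
enc = tok.encode("HR=h sbp=l spo2=vl made-up-token")
print(enc.tokens)  # expected: ['[CLS]', 'hr=h', 'sbp=l', 'spo2=vl', '[UNK]', '[SEP]']
print(enc.ids)     # expected: [2, 13, 43, 56, 0, 3]
```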
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "cls_token": "[CLS]",
+ "do_lower_case": true,
+ "mask_token": "[MASK]",
+ "name_or_path": "/content/drive/MyDrive/Experiments/tokenizers/mayo-bert-wordlevel-uncased",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "special_tokens_map_file": "/content/drive/MyDrive/Experiments/tokenizers/mayo-bert-wordlevel-uncased/special_tokens_map.json",
+ "strip_accents": null,
+ "tokenize_chinese_chars": true,
+ "tokenizer_class": "BertTokenizer",
+ "unk_token": "[UNK]"
+ }
vocab.json ADDED
@@ -0,0 +1 @@
+ {"[UNK]":0,"[PAD]":1,"[CLS]":2,"[SEP]":3,"[MASK]":4,"hr=n":5,"rr=n":6,"sbp=n":7,"spo2=n":8,"temp=n":9,"fio2=n":10,"map=n":11,"dbp=n":12,"hr=h":13,"hr=vh":14,"map=h":15,"dbp=h":16,"sbp=h":17,"hr=l":18,"map=vh":19,"dbp=vh":20,"temp=h":21,"sbp=vh":22,"spo2=vh":23,"spo2=n>n":24,"spo2=l":25,"rr=vh":26,"of=n":27,"hr=n>n":28,"temp=vh":29,"rr=n>n":30,"dbp=vl":31,"fio2=h":32,"fio2=vh":33,"rr=h":34,"map=vl":35,"dbp=l":36,"sbp=vl":37,"spo2=n>l":38,"map=l":39,"spo2=l>n":40,"hr=vl":41,"map=n>n":42,"sbp=l":43,"sbp=n>n":44,"dbp=n>n":45,"plt=n":46,"hr=n>l":47,"hr=l>n":48,"cr=n":49,"wbc=n":50,"rr=n>h":51,"bun=n":52,"rr=h>n":53,"hr=h>n":54,"hr=n>h":55,"spo2=vl":56,"of=h":57,"spo2=h>n":58,"spo2=n>h":59,"hr=l>l":60,"spo2=h":61,"rr=h>h":62,"map=l>n":63,"map=n>l":64,"spo2=l>l":65,"sbp=h>n":66,"map=h>n":67,"dbp=n>l":68,"dbp=l>n":69,"spo2=vh>vh":70,"sbp=l>n":71,"hr=h>h":72,"dbp=h>n":73,"sbp=n>l":74,"of=vh":75,"map=n>h":76,"sbp=n>h":77,"dbp=n>h":78,"spo2=n>vh":79,"spo2=vh>n":80,"spo2=h>vh":81,"spo2=vh>h":82,"spo2=vl>l":83,"rr=l>n":84,"spo2=l>vl":85,"spo2=h>h":86,"hr=vl>l":87,"hr=l>vl":88,"rr=n>l":89,"of=l":90,"spo2=vl>n":91,"temp=l":92,"spo2=n>vl":93,"rr=h>vh":94,"rr=l":95,"rr=vh>h":96,"hr=vl>vl":97,"dbp=l>l":98,"hr=vh>h":99,"map=l>l":100,"sbp=vh>h":101,"plt=l":102,"map=vh>h":103,"hr=h>vh":104,"bun=h":105,"rr=vl>n":106,"wbc=h":107,"dbp=vh>h":108,"sbp=h>h":109,"sbp=h>vh":110,"map=h>h":111,"sbp=l>l":112,"map=h>vh":113,"dbp=vl>l":114,"plt=h":115,"dbp=vh>n":116,"bun=l":117,"dbp=l>vl":118,"dbp=h>h":119,"dbp=h>vh":120,"spo2=vl>vl":121,"wbc=l":122,"map=vl>l":123,"rr=vl":124,"map=vh>n":125,"cr=h":126,"sbp=vh>n":127,"rr=n>vl":128,"rr=vh>vh":129,"map=l>vl":130,"sbp=vl>l":131,"dbp=n>vh":132,"bun=vh":133,"rr=l>l":134,"wbc=vh":135,"hr=vl>n":136,"sbp=l>vl":137,"rr=vl>l":138,"sbp=n>vh":139,"lt=n":140,"map=n>vh":141,"map=vl>n":142,"hr=vh>vh":143,"map=n>vl":144,"hr=n>vl":145,"plt=vl":146,"rr=vl>vl":147,"temp=vl":148,"br=n":149,"dbp=vl>n":150,"dbp=n>vl":151,"dbp=vl>vl":152,"cr=vh":153,"sbp=vl>n":154,"rr=l>vl":155,"sbp=vh>vh":156,"sbp=n>vl":157,"plt=vh":158,"bun=vl":159,"wbc=vl":160,"dbp=vh>vh":161,"map=vh>vh":162,"rr=vh>n":163,"rr=n>vh":164,"map=vl>vl":165,"cr=l":166,"sbp=vl>vl":167,"fio2=n>h":168,"cr=vl":169,"temp=n>h":170,"temp=l>n":171,"hr=n>vh":172,"hr=vh>n":173,"temp=h>n":174,"spo2=vh>l":175,"spo2=h>l":176,"spo2=l>vh":177,"spo2=l>h":178,"spo2=vl>vh":179,"temp=h>vh":180,"spo2=vh>vl":181,"lt=h":182,"map=h>l":183,"temp=n>l":184,"spo2=vl>h":185,"dbp=h>l":186,"sbp=h>l":187,"rr=h>l":188,"rr=l>h":189,"temp=vh>h":190,"temp=vl>l":191,"map=l>h":192,"br=h":193,"bd=n":194,"spo2=h>vl":195,"lt=l":196,"hr=l>h":197,"dbp=l>h":198,"rr=vl>h":199,"sbp=l>h":200,"temp=n>n":201,"lt=vh":202,"dbp=vh>l":203,"rr=h>vl":204,"map=vh>l":205,"temp=l>vl":206,"temp=h>h":207,"hr=h>l":208,"br=vh":209,"rr=vh>vl":210,"hr=vl>h":211,"rr=vl>vh":212,"crp=n":213,"lt=vl":214,"sbp=vh>l":215,"br=l":216,"fio2=h>n":217,"br=vl":218,"hr=h>vl":219,"temp=vl>n":220,"map=h>vl":221,"dbp=l>vh":222,"dbp=h>vl":223,"map=l>vh":224,"temp=l>l":225,"of=vl":226,"map=vh>vl":227,"hr=vl>vh":228,"dbp=vh>vl":229,"dbp=vl>h":230,"map=vl>h":231,"hr=l>vh":232,"sbp=l>vh":233,"sbp=h>vl":234,"crp=l":235,"hr=vh>l":236,"sr=n":237,"sbp=vl>h":238,"hr=vh>vl":239,"sbp=vh>vl":240,"temp=n>vh":241,"bd=l":242,"dbp=vl>vh":243,"rr=vh>l":244,"temp=vh>n":245,"bd=h":246,"temp=vh>vh":247,"map=vl>vh":248,"fio2=vh>h":249,"bd=vh":250,"temp=n>vl":251,"fio2=n>n":252,"crp=h":253,"rr=l>vh":254,"of=n>l":255,"of=vh>h":256,"fio2=h>vh":257,"temp=l>h":258,"sbp=vl>vh":259,"fio2=n>vh":260,"temp=vl>vl":261,"of=h>n":262,"of=h>vh":263,"bd=vl":264,"fio2=h>h":265,"crp=vh":266,"of=n>h":267,"fio2=vh>n":268,"sr=l":269,"of=l>n":270,"temp=vl>h":271,"temp=h>l":272,"sr=h":273,"sr=vl":274,"sr=vh":275,"of=vh>n":276,"pct=n":277,"of=n>vh":278,"temp=h>vl":279,"temp=vh>vl":280,"temp=vh>l":281,"temp=l>vh":282,"temp=vl>vh":283,"fio2=vh>vh":284,"of=n>n":285,"fio2=vl":286,"plt=l>vl":287,"of=h>h":288,"of=h>l":289,"pct=h":290,"plt=vl>l":291,"of=l>l":292,"of=vh>vh":293,"of=l>vl":294,"of=vl>n":295,"pct=vh":296,"plt=l>n":297,"plt=n>l":298,"plt=vl>n":299,"of=vl>l":300,"of=l>h":301,"of=l>vh":302,"of=n>vl":303,"bd=l>n":304,"bun=l>n":305,"bun=n>h":306,"cr=n>h":307,"fio2=n>vl":308,"fio2=vh>vl":309,"lt=h>n":310,"lt=n>h":311,"bd=vl>l":312,"br=n>n":313,"bun=h>vh":314,"bun=n>l":315,"cr=h>vh":316,"cr=n>l":317,"cr=n>vl":318,"lt=l>n":319,"lt=n>l":320,"lt=n>vl":321,"lt=vh>h":322,"lt=vl>l":323,"of=h>vl":324,"of=vh>l":325,"of=vh>vl":326,"of=vl>h":327,"plt=h>n":328,"plt=l>l":329,"wbc=h>n":330,"wbc=h>vh":331,"wbc=l>vl":332,"wbc=n>h":333,"wbc=n>l":334,"wbc=vh>h":335,"wbc=vl>l":336,"wbc=vl>vh":337}
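The vocabulary appears to follow two patterns: static `variable=level` tokens and `variable=from>to` transition tokens, with levels drawn from {vl, l, n, h, vh}. Reading the prefixes as clinical variables (e.g. `hr` heart rate, `sbp` systolic blood pressure, `spo2` oxygen saturation) is an assumption on our part; the repository does not document the encoding. A small sketch that parses tokens purely structurally under that assumption:

```python
# Hypothetical helper: split a vocabulary token such as "hr=n>h" into its
# variable name and level(s). Only the string structure is taken from
# vocab.json; the clinical reading of the abbreviations is an assumption.
def parse_token(token: str) -> dict:
    if "=" not in token:            # special tokens such as [CLS] or [MASK]
        return {"special": token}
    variable, levels = token.split("=", 1)
    if ">" in levels:               # transition token, e.g. "n>h"
        start, end = levels.split(">", 1)
        return {"variable": variable, "from": start, "to": end}
    return {"variable": variable, "level": levels}

print(parse_token("hr=n"))      # {'variable': 'hr', 'level': 'n'}
print(parse_token("spo2=n>l"))  # {'variable': 'spo2', 'from': 'n', 'to': 'l'}
print(parse_token("[MASK]"))    # {'special': '[MASK]'}
```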