{ "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "finetuning_task": "ner", "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-AGE", "1": "B-DATE", "2": "B-EMAIL", "3": "B-HOSP", "4": "B-ID", "5": "B-LOC", "6": "B-OTHERPHI", "7": "B-PATIENT", "8": "B-PATORG", "9": "B-PHONE", "10": "B-STAFF", "11": "I-AGE", "12": "I-DATE", "13": "I-EMAIL", "14": "I-HOSP", "15": "I-ID", "16": "I-LOC", "17": "I-OTHERPHI", "18": "I-PATIENT", "19": "I-PATORG", "20": "I-PHONE", "21": "I-STAFF", "22": "L-AGE", "23": "L-DATE", "24": "L-EMAIL", "25": "L-HOSP", "26": "L-ID", "27": "L-LOC", "28": "L-OTHERPHI", "29": "L-PATIENT", "30": "L-PATORG", "31": "L-PHONE", "32": "L-STAFF", "33": "O", "34": "U-AGE", "35": "U-DATE", "36": "U-EMAIL", "37": "U-HOSP", "38": "U-ID", "39": "U-LOC", "40": "U-OTHERPHI", "41": "U-PATIENT", "42": "U-PATORG", "43": "U-PHONE", "44": "U-STAFF" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-AGE": 0, "B-DATE": 1, "B-EMAIL": 2, "B-HOSP": 3, "B-ID": 4, "B-LOC": 5, "B-OTHERPHI": 6, "B-PATIENT": 7, "B-PATORG": 8, "B-PHONE": 9, "B-STAFF": 10, "I-AGE": 11, "I-DATE": 12, "I-EMAIL": 13, "I-HOSP": 14, "I-ID": 15, "I-LOC": 16, "I-OTHERPHI": 17, "I-PATIENT": 18, "I-PATORG": 19, "I-PHONE": 20, "I-STAFF": 21, "L-AGE": 22, "L-DATE": 23, "L-EMAIL": 24, "L-HOSP": 25, "L-ID": 26, "L-LOC": 27, "L-OTHERPHI": 28, "L-PATIENT": 29, "L-PATORG": 30, "L-PHONE": 31, "L-STAFF": 32, "O": 33, "U-AGE": 34, "U-DATE": 35, "U-EMAIL": 36, "U-HOSP": 37, "U-ID": 38, "U-LOC": 39, "U-OTHERPHI": 40, "U-PATIENT": 41, "U-PATORG": 42, "U-PHONE": 43, "U-STAFF": 44 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "transformers_version": "4.6.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 28996 }