{
  "_name_or_path": "dmis-lab/biobert-v1.1",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-DATE",
    "2": "I-DATE",
    "3": "B-PATIENT",
    "4": "I-PATIENT",
    "5": "B-MEDICALRECORD",
    "6": "I-MEDICALRECORD",
    "7": "B-HOSPITAL",
    "8": "I-HOSPITAL",
    "9": "B-STREET",
    "10": "I-STREET",
    "11": "B-CITY",
    "12": "I-CITY",
    "13": "B-STATE",
    "14": "I-STATE",
    "15": "B-ZIP",
    "16": "I-ZIP",
    "17": "B-PHONE",
    "18": "I-PHONE",
    "19": "B-DOCTOR",
    "20": "I-DOCTOR",
    "21": "B-AGE",
    "22": "I-AGE",
    "23": "B-IDNUM",
    "24": "I-IDNUM",
    "25": "B-USERNAME",
    "26": "I-USERNAME",
    "27": "B-PROFESSION",
    "28": "I-PROFESSION",
    "29": "B-ORGANIZATION",
    "30": "I-ORGANIZATION",
    "31": "B-COUNTRY",
    "32": "I-COUNTRY",
    "33": "B-LOCATION-OTHER",
    "34": "I-LOCATION-OTHER",
    "35": "B-EMAIL",
    "36": "I-EMAIL",
    "37": "B-BIOID",
    "38": "I-BIOID",
    "39": "B-DEVICE",
    "40": "I-DEVICE",
    "41": "B-URL",
    "42": "I-URL",
    "43": "B-HEALTHPLAN",
    "44": "I-HEALTHPLAN"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-AGE": 21,
    "B-BIOID": 37,
    "B-CITY": 11,
    "B-COUNTRY": 31,
    "B-DATE": 1,
    "B-DEVICE": 39,
    "B-DOCTOR": 19,
    "B-EMAIL": 35,
    "B-HEALTHPLAN": 43,
    "B-HOSPITAL": 7,
    "B-IDNUM": 23,
    "B-LOCATION-OTHER": 33,
    "B-MEDICALRECORD": 5,
    "B-ORGANIZATION": 29,
    "B-PATIENT": 3,
    "B-PHONE": 17,
    "B-PROFESSION": 27,
    "B-STATE": 13,
    "B-STREET": 9,
    "B-URL": 41,
    "B-USERNAME": 25,
    "B-ZIP": 15,
    "I-AGE": 22,
    "I-BIOID": 38,
    "I-CITY": 12,
    "I-COUNTRY": 32,
    "I-DATE": 2,
    "I-DEVICE": 40,
    "I-DOCTOR": 20,
    "I-EMAIL": 36,
    "I-HEALTHPLAN": 44,
    "I-HOSPITAL": 8,
    "I-IDNUM": 24,
    "I-LOCATION-OTHER": 34,
    "I-MEDICALRECORD": 6,
    "I-ORGANIZATION": 30,
    "I-PATIENT": 4,
    "I-PHONE": 18,
    "I-PROFESSION": 28,
    "I-STATE": 14,
    "I-STREET": 10,
    "I-URL": 42,
    "I-USERNAME": 26,
    "I-ZIP": 16,
    "O": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.26.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 28996
}