{ "architectures": [ "CustomBertNER" ], "attention_probs_dropout_prob": 0.1, "directionality": "bidi", "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "O", "1": "B-ADD", "2": "I-ADD", "3": "B-DCD", "4": "I-DCD", "5": "B-SGN", "6": "I-SGN", "7": "B-DTN", "8": "I-DTN", "9": "B-ICD", "10": "I-ICD", "11": "B-OCD", "12": "I-OCD", "13": "B-IBD", "14": "I-IBD", "15": "B-OBD", "16": "I-OBD", "17": "B-IND", "18": "I-IND", "19": "B-OND", "20": "I-OND", "21": "B-OPC", "22": "I-OPC", "23": "B-EMC", "24": "I-EMC", "25": "B-EMDE", "26": "I-EMDE", "27": "B-EMDS", "28": "I-EMDS", "29": "B-OPDE", "30": "I-OPDE", "31": "B-OPDS", "32": "I-OPDS", "33": "B-RTDE", "34": "I-RTDE", "35": "B-RTDS", "36": "I-RTDS", "37": "B-SGDE", "38": "I-SGDE", "39": "B-SGDS", "40": "I-SGDS", "41": "B-CTC", "42": "I-CTC", "43": "B-CTDE", "44": "I-CTDE", "45": "B-CTDS", "46": "I-CTDS", "47": "B-RTC", "48": "I-RTC", "49": "B-DIN", "50": "I-DIN", "51": "B-CTD", "52": "I-CTD", "53": "B-DPN", "54": "I-DPN", "55": "B-EMD", "56": "I-EMD", "57": "B-OPD", "58": "I-OPD", "59": "B-RTD", "60": "I-RTD", "61": "B-SGC", "62": "I-SGC", "63": "B-SGD", "64": "I-SGD" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-ADD": 1, "B-CTC": 41, "B-CTD": 51, "B-CTDE": 43, "B-CTDS": 45, "B-DCD": 3, "B-DIN": 49, "B-DPN": 53, "B-DTN": 7, "B-EMC": 23, "B-EMD": 55, "B-EMDE": 25, "B-EMDS": 27, "B-IBD": 13, "B-ICD": 9, "B-IND": 17, "B-OBD": 15, "B-OCD": 11, "B-OND": 19, "B-OPC": 21, "B-OPD": 57, "B-OPDE": 29, "B-OPDS": 31, "B-RTC": 47, "B-RTD": 59, "B-RTDE": 33, "B-RTDS": 35, "B-SGC": 61, "B-SGD": 63, "B-SGDE": 37, "B-SGDS": 39, "B-SGN": 5, "I-ADD": 2, "I-CTC": 42, "I-CTD": 52, "I-CTDE": 44, "I-CTDS": 46, "I-DCD": 4, "I-DIN": 50, "I-DPN": 54, "I-DTN": 8, "I-EMC": 24, "I-EMD": 56, "I-EMDE": 26, "I-EMDS": 28, "I-IBD": 14, "I-ICD": 10, "I-IND": 18, "I-OBD": 16, "I-OCD": 12, "I-OND": 20, "I-OPC": 22, "I-OPD": 58, "I-OPDE": 30, "I-OPDS": 32, "I-RTC": 48, "I-RTD": 60, "I-RTDE": 34, "I-RTDS": 36, "I-SGC": 62, "I-SGD": 64, "I-SGDE": 38, "I-SGDS": 40, "I-SGN": 6, "O": 0 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "type_vocab_size": 2, "vocab_size": 21136 }