{ "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": null, "directionality": "bidi", "do_sample": false, "eos_token_ids": null, "finetuning_task": null, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "B-CONT", "1": "B-CTRY", "10": "B-ST", "11": "B-OTHR", "12": "I-CONT", "13": "I-CTRY", "14": "I-STAT", "15": "I-CNTY", "16": "I-CITY", "17": "I-DIST", "18": "I-NBHD", "19": "I-ISL", "2": "B-STAT", "20": "I-NPOI", "21": "I-HPOI", "22": "I-ST", "23": "I-OTHR", "24": "L-CONT", "25": "L-CTRY", "26": "L-STAT", "27": "L-CNTY", "28": "L-CITY", "29": "L-DIST", "3": "B-CNTY", "30": "L-NBHD", "31": "L-ISL", "32": "L-NPOI", "33": "L-HPOI", "34": "L-ST", "35": "L-OTHR", "36": "U-CONT", "37": "U-CTRY", "38": "U-STAT", "39": "U-CNTY", "4": "B-CITY", "40": "U-CITY", "41": "U-DIST", "42": "U-NBHD", "43": "U-ISL", "44": "U-NPOI", "45": "U-HPOI", "46": "U-ST", "47": "U-OTHR", "48": "O", "5": "B-DIST", "6": "B-NBHD", "7": "B-ISL", "8": "B-NPOI", "9": "B-HPOI" }, "initializer_range": 0.02, "intermediate_size": 4096, "is_decoder": false, "label2id": { "B-CITY": 4, "B-CNTY": 3, "B-CONT": 0, "B-CTRY": 1, "B-DIST": 5, "B-HPOI": 9, "B-ISL": 7, "B-NBHD": 6, "B-NPOI": 8, "B-OTHR": 11, "B-ST": 10, "B-STAT": 2, "I-CITY": 16, "I-CNTY": 15, "I-CONT": 12, "I-CTRY": 13, "I-DIST": 17, "I-HPOI": 21, "I-ISL": 19, "I-NBHD": 18, "I-NPOI": 20, "I-OTHR": 23, "I-ST": 22, "I-STAT": 14, "L-CITY": 28, "L-CNTY": 27, "L-CONT": 24, "L-CTRY": 25, "L-DIST": 29, "L-HPOI": 33, "L-ISL": 31, "L-NBHD": 30, "L-NPOI": 32, "L-OTHR": 35, "L-ST": 34, "L-STAT": 26, "O": 48, "U-CITY": 40, "U-CNTY": 39, "U-CONT": 36, "U-CTRY": 37, "U-DIST": 41, "U-HPOI": 45, "U-ISL": 43, "U-NBHD": 42, "U-NPOI": 44, "U-OTHR": 47, "U-ST": 46, "U-STAT": 38 }, "layer_norm_eps": 1e-12, "length_penalty": 1.0, "max_length": 20, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 16, "num_beams": 1, "num_hidden_layers": 24, "num_labels": 49, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_past": true, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "pruned_heads": {}, "repetition_penalty": 1.0, "temperature": 1.0, "top_k": 50, "top_p": 1.0, "torchscript": false, "type_vocab_size": 2, "use_bfloat16": false, "vocab_size": 28996 }