{
  "_name_or_path": "FacebookAI/xlm-roberta-base",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B_ORG",
    "2": "B_PER",
    "3": "B_LOC",
    "4": "B_MEA",
    "5": "I_DTM",
    "6": "I_ORG",
    "7": "E_ORG",
    "8": "I_PER",
    "9": "B_TTL",
    "10": "E_PER",
    "11": "B_DES",
    "12": "E_LOC",
    "13": "B_DTM",
    "14": "B_NUM",
    "15": "I_MEA",
    "16": "E_DTM",
    "17": "E_MEA",
    "18": "I_LOC",
    "19": "I_DES",
    "20": "E_DES",
    "21": "I_NUM",
    "22": "E_NUM",
    "23": "B_TRM",
    "24": "B_BRN",
    "25": "I_TRM",
    "26": "E_TRM",
    "27": "I_TTL",
    "28": "I_BRN",
    "29": "E_BRN",
    "30": "E_TTL",
    "31": "B_NAME"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B_BRN": 24,
    "B_DES": 11,
    "B_DTM": 13,
    "B_LOC": 3,
    "B_MEA": 4,
    "B_NAME": 31,
    "B_NUM": 14,
    "B_ORG": 1,
    "B_PER": 2,
    "B_TRM": 23,
    "B_TTL": 9,
    "E_BRN": 29,
    "E_DES": 20,
    "E_DTM": 16,
    "E_LOC": 12,
    "E_MEA": 17,
    "E_NUM": 22,
    "E_ORG": 7,
    "E_PER": 10,
    "E_TRM": 26,
    "E_TTL": 30,
    "I_BRN": 28,
    "I_DES": 19,
    "I_DTM": 5,
    "I_LOC": 18,
    "I_MEA": 15,
    "I_NUM": 21,
    "I_ORG": 6,
    "I_PER": 8,
    "I_TRM": 25,
    "I_TTL": 27,
    "O": 0
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}