{
  "_name_or_path": "mbruton/spa_enpt_XLM-R",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "r0:arg0",
    "2": "r0:arg1",
    "3": "r0:arg2",
    "4": "r0:root",
    "5": "r10:arg0",
    "6": "r10:arg1",
    "7": "r10:root",
    "8": "r11:arg0",
    "9": "r11:root",
    "10": "r12:arg1",
    "11": "r12:root",
    "12": "r13:arg1",
    "13": "r13:root",
    "14": "r1:arg0",
    "15": "r1:arg1",
    "16": "r1:arg2",
    "17": "r1:root",
    "18": "r2:arg0",
    "19": "r2:arg1",
    "20": "r2:arg2",
    "21": "r2:root",
    "22": "r3:arg0",
    "23": "r3:arg1",
    "24": "r3:arg2",
    "25": "r3:root",
    "26": "r4:arg0",
    "27": "r4:arg1",
    "28": "r4:arg2",
    "29": "r4:root",
    "30": "r5:arg0",
    "31": "r5:arg1",
    "32": "r5:arg2",
    "33": "r5:root",
    "34": "r6:arg0",
    "35": "r6:arg1",
    "36": "r6:arg2",
    "37": "r6:root",
    "38": "r7:arg0",
    "39": "r7:arg1",
    "40": "r7:arg2",
    "41": "r7:root",
    "42": "r8:arg0",
    "43": "r8:arg1",
    "44": "r8:arg2",
    "45": "r8:root",
    "46": "r9:arg0",
    "47": "r9:arg1",
    "48": "r9:arg2",
    "49": "r9:root"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "O": 0,
    "r0:arg0": 1,
    "r0:arg1": 2,
    "r0:arg2": 3,
    "r0:root": 4,
    "r10:arg0": 5,
    "r10:arg1": 6,
    "r10:root": 7,
    "r11:arg0": 8,
    "r11:root": 9,
    "r12:arg1": 10,
    "r12:root": 11,
    "r13:arg1": 12,
    "r13:root": 13,
    "r1:arg0": 14,
    "r1:arg1": 15,
    "r1:arg2": 16,
    "r1:root": 17,
    "r2:arg0": 18,
    "r2:arg1": 19,
    "r2:arg2": 20,
    "r2:root": 21,
    "r3:arg0": 22,
    "r3:arg1": 23,
    "r3:arg2": 24,
    "r3:root": 25,
    "r4:arg0": 26,
    "r4:arg1": 27,
    "r4:arg2": 28,
    "r4:root": 29,
    "r5:arg0": 30,
    "r5:arg1": 31,
    "r5:arg2": 32,
    "r5:root": 33,
    "r6:arg0": 34,
    "r6:arg1": 35,
    "r6:arg2": 36,
    "r6:root": 37,
    "r7:arg0": 38,
    "r7:arg1": 39,
    "r7:arg2": 40,
    "r7:root": 41,
    "r8:arg0": 42,
    "r8:arg1": 43,
    "r8:arg2": 44,
    "r8:root": 45,
    "r9:arg0": 46,
    "r9:arg1": 47,
    "r9:arg2": 48,
    "r9:root": 49
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.27.4",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 250002
}
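For reference, a minimal usage sketch of loading this checkpoint through the Hugging Face transformers auto classes (the model name is taken from "_name_or_path" above; the example sentence is illustrative only, and network access to the Hub is assumed):

```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_name = "mbruton/spa_enpt_XLM-R"  # from "_name_or_path" in the config
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(model_name)

# The classifier head assigns each token one of the 50 labels declared in
# "id2label": either "O" or a role tag such as "r0:arg0" or "r0:root".
tagger = pipeline("token-classification", model=model, tokenizer=tokenizer)
print(tagger("María compró un libro para su hermano."))  # illustrative input
```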