{
  "_name_or_path": "xlm-roberta-base",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "AB",
    "1": "AUX",
    "2": "CC",
    "3": "CD",
    "4": "DBL",
    "5": "DT",
    "6": "ETC",
    "7": "IN",
    "8": "JJ",
    "9": "KAN",
    "10": "M",
    "11": "NN",
    "12": "PA",
    "13": "PN",
    "14": "PRO",
    "15": "QT",
    "16": "RB",
    "17": "RPN",
    "18": "SYM",
    "19": "UH",
    "20": "VB",
    "21": "VB_JJ",
    "22": "VCOM"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "AB": 0,
    "AUX": 1,
    "CC": 2,
    "CD": 3,
    "DBL": 4,
    "DT": 5,
    "ETC": 6,
    "IN": 7,
    "JJ": 8,
    "KAN": 9,
    "M": 10,
    "NN": 11,
    "PA": 12,
    "PN": 13,
    "PRO": 14,
    "QT": 15,
    "RB": 16,
    "RPN": 17,
    "SYM": 18,
    "UH": 19,
    "VB": 20,
    "VB_JJ": 21,
    "VCOM": 22
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
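
This config describes an XLM-RoBERTa base encoder with a 23-way token-classification head (the label set looks like a part-of-speech tagset). A minimal sketch of loading and using a checkpoint that ships with this config via the Hugging Face transformers API, assuming the weights and tokenizer files sit next to this config.json in a local directory or Hub repo here called `./xlmr-pos-tagger` (a hypothetical path, not named in the config itself):

```python
# Minimal sketch: load the checkpoint that ships with this config and tag one sentence.
# Assumption: "./xlmr-pos-tagger" is a placeholder for the actual local directory or Hub repo id.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_dir = "./xlmr-pos-tagger"  # hypothetical path; replace with the real checkpoint location

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)  # id2label/label2id are read from this config

# aggregation_strategy="simple" merges sub-word pieces back into whole words before reporting a tag
tagger = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

for item in tagger("A short example sentence."):
    print(item["word"], item["entity_group"], round(item["score"], 3))
```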