{
  "_name_or_path": "FacebookAI/roberta-base",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-1",
    "2": "I-1",
    "3": "B-2",
    "4": "I-2",
    "5": "B-3",
    "6": "I-3",
    "7": "B-4",
    "8": "I-4",
    "9": "B-5",
    "10": "I-5",
    "11": "B-6",
    "12": "I-6",
    "13": "B-7",
    "14": "I-7",
    "15": "B-8",
    "16": "I-8",
    "17": "B-9",
    "18": "I-9",
    "19": "B-10",
    "20": "I-10",
    "21": "B-11",
    "22": "I-11",
    "23": "B-12",
    "24": "I-12",
    "25": "B-13",
    "26": "I-13",
    "27": "B-14",
    "28": "I-14",
    "29": "B-15",
    "30": "I-15",
    "31": "B-16",
    "32": "I-16",
    "33": "B-17",
    "34": "I-17",
    "35": "B-18",
    "36": "I-18",
    "37": "B-19",
    "38": "I-19",
    "39": "B-20",
    "40": "I-20",
    "41": "B-21",
    "42": "I-21",
    "43": "B-22",
    "44": "I-22",
    "45": "B-23",
    "46": "I-23",
    "47": "B-24",
    "48": "I-24",
    "49": "B-25",
    "50": "I-25",
    "51": "B-26",
    "52": "I-26",
    "53": "B-27",
    "54": "I-27",
    "55": "B-28",
    "56": "I-28",
    "57": "B-29",
    "58": "I-29"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-1": 1,
    "B-10": 19,
    "B-11": 21,
    "B-12": 23,
    "B-13": 25,
    "B-14": 27,
    "B-15": 29,
    "B-16": 31,
    "B-17": 33,
    "B-18": 35,
    "B-19": 37,
    "B-2": 3,
    "B-20": 39,
    "B-21": 41,
    "B-22": 43,
    "B-23": 45,
    "B-24": 47,
    "B-25": 49,
    "B-26": 51,
    "B-27": 53,
    "B-28": 55,
    "B-29": 57,
    "B-3": 5,
    "B-4": 7,
    "B-5": 9,
    "B-6": 11,
    "B-7": 13,
    "B-8": 15,
    "B-9": 17,
    "I-1": 2,
    "I-10": 20,
    "I-11": 22,
    "I-12": 24,
    "I-13": 26,
    "I-14": 28,
    "I-15": 30,
    "I-16": 32,
    "I-17": 34,
    "I-18": 36,
    "I-19": 38,
    "I-2": 4,
    "I-20": 40,
    "I-21": 42,
    "I-22": 44,
    "I-23": 46,
    "I-24": 48,
    "I-25": 50,
    "I-26": 52,
    "I-27": 54,
    "I-28": 56,
    "I-29": 58,
    "I-3": 6,
    "I-4": 8,
    "I-5": 10,
    "I-6": 12,
    "I-7": 14,
    "I-8": 16,
    "I-9": 18,
    "O": 0
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.39.3",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
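For context, this is the `config.json` of a `RobertaForTokenClassification` checkpoint fine-tuned from `FacebookAI/roberta-base` with a BIO tagging scheme: the `O` tag plus `B-`/`I-` pairs for entity classes 1 through 29, 59 labels in total. Below is a minimal sketch of how a checkpoint carrying this config could be loaded for inference with the Hugging Face `transformers` library; the `model_dir` path is a placeholder assumption, not something the config specifies.

```python
# Minimal sketch: loading a token-classification checkpoint that ships this
# config. "path/to/checkpoint" is a hypothetical placeholder for the local
# directory (or Hub repo id) holding config.json and the model weights.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "path/to/checkpoint"  # assumption: not specified by the config

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# 59 labels: "O" plus B-/I- tags for classes 1-29, per id2label above.
assert model.config.num_labels == 59

# aggregation_strategy="simple" merges consecutive B-/I- token tags
# back into whole entity spans in the pipeline output.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)
print(ner("An example sentence to tag."))
```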