{
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 2,
  "classifier_dropout": null,
  "eos_token_id": 3,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 256,
  "id2label": {
    "0": "SCONJ",
    "1": "B-VERB",
    "2": "I-SCONJ",
    "3": "VERB",
    "4": "B-AUX",
    "5": "PART",
    "6": "I-ADJ",
    "7": "I-ADP",
    "8": "B-NOUN",
    "9": "PRON",
    "10": "B-ADP",
    "11": "NOUN",
    "12": "NUM",
    "13": "PUNCT",
    "14": "B-X",
    "15": "CCONJ",
    "16": "ADV",
    "17": "B-PRON",
    "18": "I-PRON",
    "19": "I-NOUN",
    "20": "B-ADV",
    "21": "AUX",
    "22": "ADP",
    "23": "I-NUM",
    "24": "B-DET",
    "25": "I-X",
    "26": "B-PART",
    "27": "I-DET",
    "28": "B-INTJ",
    "29": "INTJ",
    "30": "B-NUM",
    "31": "I-VERB",
    "32": "I-ADV",
    "33": "I-AUX",
    "34": "B-PROPN",
    "35": "B-SYM",
    "36": "I-PART",
    "37": "I-INTJ",
    "38": "X",
    "39": "I-PROPN",
    "40": "B-ADJ",
    "41": "DET",
    "42": "SYM",
    "43": "ADJ",
    "44": "B-SCONJ",
    "45": "B-CCONJ",
    "46": "PROPN",
    "47": "I-CCONJ",
    "48": "I-SYM"
  },
  "initializer_range": 0.02,
  "intermediate_size": 768,
  "label2id": {
    "ADJ": 43,
    "ADP": 22,
    "ADV": 16,
    "AUX": 21,
    "B-ADJ": 40,
    "B-ADP": 10,
    "B-ADV": 20,
    "B-AUX": 4,
    "B-CCONJ": 45,
    "B-DET": 24,
    "B-INTJ": 28,
    "B-NOUN": 8,
    "B-NUM": 30,
    "B-PART": 26,
    "B-PRON": 17,
    "B-PROPN": 34,
    "B-SCONJ": 44,
    "B-SYM": 35,
    "B-VERB": 1,
    "B-X": 14,
    "CCONJ": 15,
    "DET": 41,
    "I-ADJ": 6,
    "I-ADP": 7,
    "I-ADV": 32,
    "I-AUX": 33,
    "I-CCONJ": 47,
    "I-DET": 27,
    "I-INTJ": 37,
    "I-NOUN": 19,
    "I-NUM": 23,
    "I-PART": 36,
    "I-PRON": 18,
    "I-PROPN": 39,
    "I-SCONJ": 2,
    "I-SYM": 48,
    "I-VERB": 31,
    "I-X": 25,
    "INTJ": 29,
    "NOUN": 11,
    "NUM": 12,
    "PART": 5,
    "PRON": 9,
    "PROPN": 46,
    "PUNCT": 13,
    "SCONJ": 0,
    "SYM": 42,
    "VERB": 3,
    "X": 38
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 128,
  "model_type": "roberta",
  "num_attention_heads": 4,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "tokenizer_class": "RemBertTokenizerFast",
  "torch_dtype": "float32",
  "transformers_version": "4.11.3",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 250315
}