{
  "_name_or_path": "w11wo/indonesian-roberta-base-posp-tagger",
  "architectures": [
"RobertaForMaskedLM" | |
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "B-PPO",
    "1": "B-KUA",
    "2": "B-ADV",
    "3": "B-PRN",
    "4": "B-VBI",
    "5": "B-PAR",
    "6": "B-VBP",
    "7": "B-NNP",
    "8": "B-UNS",
    "9": "B-VBT",
    "10": "B-VBL",
    "11": "B-NNO",
    "12": "B-ADJ",
    "13": "B-PRR",
    "14": "B-PRK",
    "15": "B-CCN",
    "16": "B-$$$",
    "17": "B-ADK",
    "18": "B-ART",
    "19": "B-CSN",
    "20": "B-NUM",
    "21": "B-SYM",
    "22": "B-INT",
    "23": "B-NEG",
    "24": "B-PRI",
    "25": "B-VBE"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-$$$": 16,
    "B-ADJ": 12,
    "B-ADK": 17,
    "B-ADV": 2,
    "B-ART": 18,
    "B-CCN": 15,
    "B-CSN": 19,
    "B-INT": 22,
    "B-KUA": 1,
    "B-NEG": 23,
    "B-NNO": 11,
    "B-NNP": 7,
    "B-NUM": 20,
    "B-PAR": 5,
    "B-PPO": 0,
    "B-PRI": 24,
    "B-PRK": 14,
    "B-PRN": 3,
    "B-PRR": 13,
    "B-SYM": 21,
    "B-UNS": 8,
    "B-VBE": 25,
    "B-VBI": 4,
    "B-VBL": 10,
    "B-VBP": 6,
    "B-VBT": 9
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.35.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
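
For reference, a minimal usage sketch of how this configuration is typically consumed for Indonesian POS tagging. It assumes the transformers library (>= 4.35) is installed and the checkpoint is reachable on the Hugging Face Hub; the example sentence is purely illustrative.

# Minimal sketch, assuming transformers >= 4.35 and Hub access.
from transformers import AutoConfig, pipeline

# Inspect the label mapping defined in this config.json.
config = AutoConfig.from_pretrained("w11wo/indonesian-roberta-base-posp-tagger")
print(config.id2label)  # {0: "B-PPO", 1: "B-KUA", ..., 25: "B-VBE"}

# Run POS tagging with the token-classification pipeline.
tagger = pipeline(
    "token-classification",
    model="w11wo/indonesian-roberta-base-posp-tagger",
    aggregation_strategy="simple",  # merge word pieces back into whole words
)
print(tagger("Budi sedang membaca buku di perpustakaan."))  # illustrative input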