{ "_name_or_path": "StevenLimcorn/indonesian-roberta-base-bapos-tagger", "architectures": [ "RobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "eos_token_id": 2, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-PR", "1": "B-CD", "2": "I-PR", "3": "B-SYM", "4": "B-JJ", "5": "B-DT", "6": "I-UH", "7": "I-NND", "8": "B-SC", "9": "I-WH", "10": "I-IN", "11": "I-NNP", "12": "I-VB", "13": "B-IN", "14": "B-NND", "15": "I-CD", "16": "I-JJ", "17": "I-X", "18": "B-OD", "19": "B-RP", "20": "B-RB", "21": "B-NNP", "22": "I-RB", "23": "I-Z", "24": "B-CC", "25": "B-NEG", "26": "B-VB", "27": "B-NN", "28": "B-MD", "29": "B-UH", "30": "I-NN", "31": "B-PRP", "32": "I-SC", "33": "B-Z", "34": "I-PRP", "35": "I-OD", "36": "I-SYM", "37": "B-WH", "38": "B-FW", "39": "I-CC", "40": "B-X" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-CC": 24, "B-CD": 1, "B-DT": 5, "B-FW": 38, "B-IN": 13, "B-JJ": 4, "B-MD": 28, "B-NEG": 25, "B-NN": 27, "B-NND": 14, "B-NNP": 21, "B-OD": 18, "B-PR": 0, "B-PRP": 31, "B-RB": 20, "B-RP": 19, "B-SC": 8, "B-SYM": 3, "B-UH": 29, "B-VB": 26, "B-WH": 37, "B-X": 40, "B-Z": 33, "I-CC": 39, "I-CD": 15, "I-IN": 10, "I-JJ": 16, "I-NN": 30, "I-NND": 7, "I-NNP": 11, "I-OD": 35, "I-PR": 2, "I-PRP": 34, "I-RB": 22, "I-SC": 32, "I-SYM": 36, "I-UH": 6, "I-VB": 12, "I-WH": 9, "I-X": 17, "I-Z": 23 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.8.2", "type_vocab_size": 1, "use_cache": true, "vocab_size": 50265 }