{ "_name_or_path": "xlm-roberta-base", "architectures": [ "XLMRobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "NN", "1": "JJ", "2": "PSP", "3": "PRR", "4": "PU", "5": "VBF", "6": "Q", "7": "AUXT", "8": "CD", "9": "CC", "10": "NNP", "11": "VBI", "12": "AUXM", "13": "VALA", "14": "PDM", "15": "AUXA", "16": "NEG", "17": "PRF", "18": "PRP", "19": "OD", "20": "SCK", "21": "PPR", "22": "AUXP", "23": "PRD", "24": "SCP", "25": "SC", "26": "RB", "27": "INJ", "28": "PRE", "29": "PRS", "30": "FR", "31": "SYM", "32": "QM", "33": "PRT", "34": "FF", "35": "AUXT ", "36": "PSP ", "37": "VBF ", "38": "", "39": "PU\u2019" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "": 38, "AUXA": 15, "AUXM": 12, "AUXP": 22, "AUXT": 7, "AUXT ": 35, "CC": 9, "CD": 8, "FF": 34, "FR": 30, "INJ": 27, "JJ": 1, "NEG": 16, "NN": 0, "NNP": 10, "OD": 19, "PDM": 14, "PPR": 21, "PRD": 23, "PRE": 28, "PRF": 17, "PRP": 18, "PRR": 3, "PRS": 29, "PRT": 33, "PSP": 2, "PSP ": 36, "PU": 4, "PU\u2019": 39, "Q": 6, "QM": 32, "RB": 26, "SC": 25, "SCK": 20, "SCP": 24, "SYM": 31, "VALA": 13, "VBF": 5, "VBF ": 37, "VBI": 11 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.28.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }