{ "_name_or_path": "xlm-roberta-base", "architectures": ["XLMRobertaForSequenceClassification"], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "ar", "1": "eu", "10": "nl", "11": "en", "12": "eo", "13": "et", "14": "fr", "15": "fy", "16": "ka", "17": "de", "18": "el", "19": "cnh", "2": "br", "20": "id", "21": "ia", "22": "it", "23": "ja", "24": "kab", "25": "rw", "26": "ky", "27": "lv", "28": "mt", "29": "mn", "3": "ca", "30": "fa", "31": "pl", "32": "pt", "33": "ro", "34": "rm", "35": "ru", "36": "sah", "37": "sl", "38": "es", "39": "sv", "4": "zh-CN", "40": "ta", "41": "tt", "42": "tr", "43": "uk", "44": "cy", "5": "zh-HK", "6": "zh-TW", "7": "cv", "8": "cs", "9": "dv" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "ar": "0", "eu": "1", "br": "2", "ca": "3", "zh-CN": "4", "zh-HK": "5", "zh-TW": "6", "cv": "7", "cs": "8", "dv": "9", "nl": "10", "en": "11", "eo": "12", "et": "13", "fr": "14", "fy": "15", "ka": "16", "de": "17", "el": "18", "cnh": "19", "id": "20", "ia": "21", "it": "22", "ja": "23", "kab": "24", "rw": "25", "ky": "26", "lv": "27", "mt": "28", "mn": "29", "fa": "30", "pl": "31", "pt": "32", "ro": "33", "rm": "34", "ru": "35", "sah": "36", "sl": "37", "es": "38", "sv": "39", "ta": "40", "tt": "41", "tr": "42", "uk": "43", "cy": "44" }, "layer_norm_eps": 1e-5, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.12.5", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }