{ "_name_or_path": "AutoTrain", "_num_labels": 50, "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "Question1", "1": "Question10", "2": "Question11", "3": "Question12", "4": "Question13", "5": "Question14", "6": "Question15", "7": "Question16", "8": "Question17", "9": "Question18", "10": "Question19", "11": "Question2", "12": "Question20", "13": "Question21", "14": "Question22", "15": "Question23", "16": "Question24", "17": "Question25", "18": "Question26", "19": "Question27", "20": "Question28", "21": "Question29", "22": "Question3", "23": "Question30", "24": "Question31", "25": "Question32", "26": "Question33", "27": "Question34", "28": "Question35", "29": "Question36", "30": "Question37", "31": "Question38", "32": "Question39", "33": "Question4", "34": "Question40", "35": "Question41", "36": "Question42", "37": "Question43", "38": "Question44", "39": "Question45", "40": "Question46", "41": "Question47", "42": "Question49", "43": "Question5", "44": "Question50", "45": "Question6", "46": "Question7", "47": "Question8", "48": "Question9", "49": "question48" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "Question1": 0, "Question10": 1, "Question11": 2, "Question12": 3, "Question13": 4, "Question14": 5, "Question15": 6, "Question16": 7, "Question17": 8, "Question18": 9, "Question19": 10, "Question2": 11, "Question20": 12, "Question21": 13, "Question22": 14, "Question23": 15, "Question24": 16, "Question25": 17, "Question26": 18, "Question27": 19, "Question28": 20, "Question29": 21, "Question3": 22, "Question30": 23, "Question31": 24, "Question32": 25, "Question33": 26, "Question34": 27, "Question35": 28, "Question36": 29, "Question37": 30, "Question38": 31, "Question39": 32, "Question4": 33, "Question40": 34, "Question41": 35, "Question42": 36, "Question43": 37, "Question44": 38, "Question45": 39, "Question46": 40, "Question47": 41, "Question49": 42, "Question5": 43, "Question50": 44, "Question6": 45, "Question7": 46, "Question8": 47, "Question9": 48, "question48": 49 }, "layer_norm_eps": 1e-05, "max_length": 64, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "padding": "max_length", "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.25.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }