{ "_name_or_path": "cardiffnlp/twitter-xlm-roberta-base", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "❤", "1": "😍", "2": "😂", "3": "💕", "4": "🔥", "5": "😊", "6": "😎", "7": "✨", "8": "💙", "9": "😘", "10": "📷", "11": "🇺🇸", "12": "☀", "13": "💜", "14": "😉", "15": "💯", "16": "😁", "17": "🎄", "18": "📸", "19": "😜" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "❤": 0, "😍": 1, "😂": 2, "💕": 3, "🔥": 4, "😊": 5, "😎": 6, "✨": 7, "💙": 8, "😘": 9, "📷": 10, "🇺🇸": 11, "☀": 12, "💜": 13, "😉": 14, "💯": 15, "😁": 16, "🎄": 17, "📸": 18, "😜": 19 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.25.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }