{ "_name_or_path": "AlekseyDorkin/xlm-roberta-en-ru-emoji", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "\u2764", "1": "\ud83d\ude0d", "10": "\ud83d\udcf7", "11": "\ud83c\uddfa\ud83c\uddf8", "12": "\u2600", "13": "\ud83d\udc9c", "14": "\ud83d\ude09", "15": "\ud83d\udcaf", "16": "\ud83d\ude01", "17": "\ud83c\udf84", "18": "\ud83d\udcf8", "19": "\ud83d\ude1c", "2": "\ud83d\ude02", "3": "\ud83d\udc95", "4": "\ud83d\udd25", "5": "\ud83d\ude0a", "6": "\ud83d\ude0e", "7": "\u2728", "8": "\ud83d\udc99", "9": "\ud83d\ude18" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "\u2600": "12", "\u2728": "7", "\u2764": "0", "\ud83c\uddfa\ud83c\uddf8": "11", "\ud83c\udf84": "17", "\ud83d\udc95": "3", "\ud83d\udc99": "8", "\ud83d\udc9c": "13", "\ud83d\udcaf": "15", "\ud83d\udcf7": "10", "\ud83d\udcf8": "18", "\ud83d\udd25": "4", "\ud83d\ude01": "16", "\ud83d\ude02": "2", "\ud83d\ude09": "14", "\ud83d\ude0a": "5", "\ud83d\ude0d": "1", "\ud83d\ude0e": "6", "\ud83d\ude18": "9", "\ud83d\ude1c": "19" }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.12.3", "type_vocab_size": 1, "use_cache": true, "vocab_size": 35054 }