{ "_name_or_path": "bergum/xtremedistil-l6-h384-go-emotion", "architectures": [ "BertForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 384, "id2label": { "0": "admiration \ud83d\udc4f", "1": "amusement \ud83d\ude02", "2": "anger \ud83d\ude21", "3": "annoyance \ud83d\ude12", "4": "approval \ud83d\udc4d", "5": "caring \ud83e\udd17", "6": "confusion \ud83d\ude15", "7": "curiosity \ud83e\udd14", "8": "desire \ud83d\ude0d", "9": "disappointment \ud83d\ude1e", "10": "disapproval \ud83d\udc4e", "11": "disgust \ud83e\udd2e", "12": "embarrassment \ud83d\ude33", "13": "excitement \ud83e\udd29", "14": "fear \ud83d\ude28", "15": "gratitude \ud83d\ude4f", "16": "grief \ud83d\ude22", "17": "joy \ud83d\ude03", "18": "love \u2764\ufe0f", "19": "nervousness \ud83d\ude2c", "20": "optimism \ud83e\udd1e", "21": "pride \ud83d\ude0c", "22": "realization \ud83d\udca1", "23": "relief \ud83d\ude05", "24": "remorse \ud83d\ude1e", "25": "sadness \ud83d\ude1e", "26": "surprise \ud83d\ude32", "27": "neutral \ud83d\ude10" }, "initializer_range": 0.02, "intermediate_size": 1536, "label2id": { "admiration \ud83d\udc4f": 0, "amusement \ud83d\ude02": 1, "anger \ud83d\ude21": 2, "annoyance \ud83d\ude12": 3, "approval \ud83d\udc4d": 4, "caring \ud83e\udd17": 5, "confusion \ud83d\ude15": 6, "curiosity \ud83e\udd14": 7, "desire \ud83d\ude0d": 8, "disappointment \ud83d\ude1e": 9, "disapproval \ud83d\udc4e": 10, "disgust \ud83e\udd2e": 11, "embarrassment \ud83d\ude33": 12, "excitement \ud83e\udd29": 13, "fear \ud83d\ude28": 14, "gratitude \ud83d\ude4f": 15, "grief \ud83d\ude22": 16, "joy \ud83d\ude03": 17, "love \u2764\ufe0f": 18, "nervousness \ud83d\ude2c": 19, "neutral \ud83d\ude10": 27, "optimism \ud83e\udd1e": 20, "pride \ud83d\ude0c": 21, "realization \ud83d\udca1": 22, "relief \ud83d\ude05": 23, "remorse \ud83d\ude1e": 24, "sadness \ud83d\ude1e": 25, "surprise \ud83d\ude32": 26 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 6, "pad_token_id": 0, "position_embedding_type": "absolute", "problem_type": "multi_label_classification", "torch_dtype": "float32", "transformers_version": "4.44.2", "type_vocab_size": 2, "use_cache": true, "vocab_size": 30522 }