{
"_name_or_path": "cmarkea/distilcamembert-base",
"architectures": [
"CamembertForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": ":face_with_tears_of_joy:",
"1": ":loudly_crying_face:",
"2": ":rolling_on_the_floor_laughing:",
"3": ":folded_hands:",
"4": ":grinning_face_with_sweat:",
"5": ":winking_face:",
"6": ":skull:",
"7": ":red_heart:",
"8": ":thinking_face:",
"9": ":raising_hands:",
"10": ":beaming_face_with_smiling_eyes:",
"11": ":smiling_face_with_tear:",
"12": ":smiling_face_with_heart-eyes:",
"13": ":smiling_face_with_hearts:",
"14": ":clown_face:",
"15": ":thumbs_up:",
"16": ":face_blowing_a_kiss:",
"17": ":eyes:",
"18": ":face_with_rolling_eyes:",
"19": ":smiling_face_with_smiling_eyes:",
"20": ":enraged_face:",
"21": ":cat_with_tears_of_joy:",
"22": ":fire:",
"23": ":pensive_face:",
"24": ":face_holding_back_tears:"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
":beaming_face_with_smiling_eyes:": 10,
":cat_with_tears_of_joy:": 21,
":clown_face:": 14,
":enraged_face:": 20,
":eyes:": 17,
":face_blowing_a_kiss:": 16,
":face_holding_back_tears:": 24,
":face_with_rolling_eyes:": 18,
":face_with_tears_of_joy:": 0,
":fire:": 22,
":folded_hands:": 3,
":grinning_face_with_sweat:": 4,
":loudly_crying_face:": 1,
":pensive_face:": 23,
":raising_hands:": 9,
":red_heart:": 7,
":rolling_on_the_floor_laughing:": 2,
":skull:": 6,
":smiling_face_with_heart-eyes:": 12,
":smiling_face_with_hearts:": 13,
":smiling_face_with_smiling_eyes:": 19,
":smiling_face_with_tear:": 11,
":thinking_face:": 8,
":thumbs_up:": 15,
":winking_face:": 5
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "camembert",
"num_attention_heads": 12,
"num_hidden_layers": 6,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.25.1",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 32005
}