{ "_name_or_path": "waboucay/french-camembert-postag-model-finetuned-perceo", "architectures": [ "CamembertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 5, "classifier_dropout": null, "eos_token_id": 6, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "ABR", "1": "ADJ", "2": "ADV", "3": "DET:art", "4": "DET:def", "5": "DET:dem", "6": "DET:ind", "7": "DET:int", "8": "DET:par", "9": "DET:pos", "10": "DET:pre", "11": "EPE", "12": "ETR", "13": "FNO", "14": "INT", "15": "KON", "16": "MLT", "17": "NAM", "18": "NAM:sig", "19": "NOM", "20": "NOM:sig", "21": "NUM", "22": "PRO", "23": "PRO:dem", "24": "PRO:ind", "25": "PRO:per", "26": "PRO:pos", "27": "PRO:rel", "28": "PRP", "29": "PRP:det", "30": "PRT:int", "31": "PUN", "32": "PUN:cit", "33": "SENT", "34": "SYM", "35": "TRC", "36": "VER", "37": "VER:cond", "38": "VER:futu", "39": "VER:impe", "40": "VER:impf", "41": "VER:infi", "42": "VER:pper", "43": "VER:ppre", "44": "VER:pres", "45": "VER:simp", "46": "VER:subi", "47": "VER:subp", "48": "VER:trc" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "ABR": 0, "ADJ": 1, "ADV": 2, "DET:art": 3, "DET:def": 4, "DET:dem": 5, "DET:ind": 6, "DET:int": 7, "DET:par": 8, "DET:pos": 9, "DET:pre": 10, "EPE": 11, "ETR": 12, "FNO": 13, "INT": 14, "KON": 15, "MLT": 16, "NAM": 17, "NAM:sig": 18, "NOM": 19, "NOM:sig": 20, "NUM": 21, "PRO": 22, "PRO:dem": 23, "PRO:ind": 24, "PRO:per": 25, "PRO:pos": 26, "PRO:rel": 27, "PRP": 28, "PRP:det": 29, "PRT:int": 30, "PUN": 31, "PUN:cit": 32, "SENT": 33, "SYM": 34, "TRC": 35, "VER": 36, "VER:cond": 37, "VER:futu": 38, "VER:impe": 39, "VER:impf": 40, "VER:infi": 41, "VER:pper": 42, "VER:ppre": 43, "VER:pres": 44, "VER:simp": 45, "VER:subi": 46, "VER:subp": 47, "VER:trc": 48 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "camembert", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.12.5", "type_vocab_size": 1, "use_cache": true, "vocab_size": 32005 }