VulBERTa-MLP-MVD / config.json
{
  "_name_or_path": "VulBERTa-MLP-MVD",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "non-vulnerable",
    "1": "CWE-404",
    "2": "CWE-476",
    "3": "CWE-119",
    "4": "CWE-706",
    "5": "CWE-670",
    "6": "CWE-673",
    "7": "CWE-119, CWE-666, CWE-573",
    "8": "CWE-573",
    "9": "CWE-668",
    "10": "CWE-400, CWE-665, CWE-020",
    "11": "CWE-662",
    "12": "CWE-400",
    "13": "CWE-665",
    "14": "CWE-020",
    "15": "CWE-074",
    "16": "CWE-362",
    "17": "CWE-191",
    "18": "CWE-190",
    "19": "CWE-610",
    "20": "CWE-704",
    "21": "CWE-170",
    "22": "CWE-676",
    "23": "CWE-187",
    "24": "CWE-138",
    "25": "CWE-369",
    "26": "CWE-662, CWE-573",
    "27": "CWE-834",
    "28": "CWE-400, CWE-665",
    "29": "CWE-400, CWE-404",
    "30": "CWE-221",
    "31": "CWE-754",
    "32": "CWE-311",
    "33": "CWE-404, CWE-668",
    "34": "CWE-506",
    "35": "CWE-758",
    "36": "CWE-666",
    "37": "CWE-467",
    "38": "CWE-327",
    "39": "CWE-666, CWE-573",
    "40": "CWE-469"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "CWE-020": 14,
    "CWE-074": 15,
    "CWE-119": 3,
    "CWE-119, CWE-666, CWE-573": 7,
    "CWE-138": 24,
    "CWE-170": 21,
    "CWE-187": 23,
    "CWE-190": 18,
    "CWE-191": 17,
    "CWE-221": 30,
    "CWE-311": 32,
    "CWE-327": 38,
    "CWE-362": 16,
    "CWE-369": 25,
    "CWE-400": 12,
    "CWE-400, CWE-404": 29,
    "CWE-400, CWE-665": 28,
    "CWE-400, CWE-665, CWE-020": 10,
    "CWE-404": 1,
    "CWE-404, CWE-668": 33,
    "CWE-467": 37,
    "CWE-469": 40,
    "CWE-476": 2,
    "CWE-506": 34,
    "CWE-573": 8,
    "CWE-610": 19,
    "CWE-662": 11,
    "CWE-662, CWE-573": 26,
    "CWE-665": 13,
    "CWE-666": 36,
    "CWE-666, CWE-573": 39,
    "CWE-668": 9,
    "CWE-670": 5,
    "CWE-673": 6,
    "CWE-676": 22,
    "CWE-704": 20,
    "CWE-706": 4,
    "CWE-754": 31,
    "CWE-758": 35,
    "CWE-834": 27,
    "non-vulnerable": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 1026,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.37.0.dev0",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50000
}
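
A minimal inference sketch for the checkpoint this config describes: a RoBERTa sequence classifier with 41 labels (non-vulnerable plus 40 CWE categories). The repo id "claudios/VulBERTa-MLP-MVD" and the presence of a bundled tokenizer are assumptions; point from_pretrained at wherever this config actually lives.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Assumed repo id (claudios is the uploader shown on this page); adjust as needed.
repo_id = "claudios/VulBERTa-MLP-MVD"

tokenizer = AutoTokenizer.from_pretrained(repo_id)  # assumes a tokenizer ships with the model
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# Classify a C snippet; strcpy into a fixed-size buffer is a classic CWE-119 pattern.
code = 'void f(char *s) { char buf[8]; strcpy(buf, s); }'
inputs = tokenizer(code, return_tensors="pt", truncation=True)

with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])  # one of the 41 labels defined in this config

Note that with max_position_embeddings of 1026 and RoBERTa-style position offsets, inputs are effectively capped at 1024 tokens, so truncation=True matters for long functions.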