{
"_name_or_path": "bertin-project/bertin-base-gaussian-exp-512seqlen",
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"eos_token_id": 2,
"finetuning_task": "pos",
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "AO",
"1": "AQ",
"2": "CC",
"3": "CS",
"4": "DA",
"5": "DE",
"6": "DD",
"7": "DI",
"8": "DN",
"9": "DP",
"10": "DT",
"11": "Faa",
"12": "Fat",
"13": "Fc",
"14": "Fd",
"15": "Fe",
"16": "Fg",
"17": "Fh",
"18": "Fia",
"19": "Fit",
"20": "Fp",
"21": "Fpa",
"22": "Fpt",
"23": "Fs",
"24": "Ft",
"25": "Fx",
"26": "Fz",
"27": "I",
"28": "NC",
"29": "NP",
"30": "P0",
"31": "PD",
"32": "PI",
"33": "PN",
"34": "PP",
"35": "PR",
"36": "PT",
"37": "PX",
"38": "RG",
"39": "RN",
"40": "SP",
"41": "VAI",
"42": "VAM",
"43": "VAN",
"44": "VAP",
"45": "VAS",
"46": "VMG",
"47": "VMI",
"48": "VMM",
"49": "VMN",
"50": "VMP",
"51": "VMS",
"52": "VSG",
"53": "VSI",
"54": "VSM",
"55": "VSN",
"56": "VSP",
"57": "VSS",
"58": "Y",
"59": "Z"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"AO": 0,
"AQ": 1,
"CC": 2,
"CS": 3,
"DA": 4,
"DE": 5,
"DD": 6,
"DI": 7,
"DN": 8,
"DP": 9,
"DT": 10,
"Faa": 11,
"Fat": 12,
"Fc": 13,
"Fd": 14,
"Fe": 15,
"Fg": 16,
"Fh": 17,
"Fia": 18,
"Fit": 19,
"Fp": 20,
"Fpa": 21,
"Fpt": 22,
"Fs": 23,
"Ft": 24,
"Fx": 25,
"Fz": 26,
"I": 27,
"NC": 28,
"NP": 29,
"P0": 30,
"PD": 31,
"PI": 32,
"PN": 33,
"PP": 34,
"PR": 35,
"PT": 36,
"PX": 37,
"RG": 38,
"RN": 39,
"SP": 40,
"VAI": 41,
"VAM": 42,
"VAN": 43,
"VAP": 44,
"VAS": 45,
"VMG": 46,
"VMI": 47,
"VMM": 48,
"VMN": 49,
"VMP": 50,
"VMS": 51,
"VSG": 52,
"VSI": 53,
"VSM": 54,
"VSN": 55,
"VSP": 56,
"VSS": 57,
"Y": 58,
"Z": 59
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.9.0.dev0",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}