Training in progress epoch 0 (commit 48a1bdf)
{
  "_name_or_path": "PlanTL-GOB-ES/roberta-large-bne",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "i",
    "1": "Zp",
    "2": "rg",
    "3": "vmm",
    "4": "Fs",
    "5": "vsi",
    "6": "dd",
    "7": "vms",
    "8": "Fh",
    "9": "Fe",
    "10": "rn",
    "11": "aq",
    "12": "vag",
    "13": "Z",
    "14": "W",
    "15": "p0",
    "16": "cc",
    "17": "da",
    "18": "di",
    "19": "X",
    "20": "vsn",
    "21": "np",
    "22": "dn",
    "23": "Fg",
    "24": "Fat",
    "25": "Fx",
    "26": "I",
    "27": "vap",
    "28": "Fd",
    "29": "van",
    "30": "vmg",
    "31": "vsm",
    "32": "Fc",
    "33": "sp",
    "34": "Fit",
    "35": "pd",
    "36": "px",
    "37": "nc",
    "38": "vai",
    "39": "de",
    "40": "Fia",
    "41": "pe",
    "42": "dp",
    "43": "vsp",
    "44": "pp",
    "45": "dt",
    "46": "ao",
    "47": "Fp",
    "48": "Fpt",
    "49": "Faa",
    "50": "pn",
    "51": "pr",
    "52": "vss",
    "53": "vam",
    "54": "vmn",
    "55": "Zm",
    "56": "vmp",
    "57": "Y",
    "58": "vsg",
    "59": "vas",
    "60": "pi",
    "61": "vmi",
    "62": "pt",
    "63": "Fz",
    "64": "Fpa",
    "65": "cs"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Faa": 49,
    "Fat": 24,
    "Fc": 32,
    "Fd": 28,
    "Fe": 9,
    "Fg": 23,
    "Fh": 8,
    "Fia": 40,
    "Fit": 34,
    "Fp": 47,
    "Fpa": 64,
    "Fpt": 48,
    "Fs": 4,
    "Fx": 25,
    "Fz": 63,
    "I": 26,
    "W": 14,
    "X": 19,
    "Y": 57,
    "Z": 13,
    "Zm": 55,
    "Zp": 1,
    "ao": 46,
    "aq": 11,
    "cc": 16,
    "cs": 65,
    "da": 17,
    "dd": 6,
    "de": 39,
    "di": 18,
    "dn": 22,
    "dp": 42,
    "dt": 45,
    "i": 0,
    "nc": 37,
    "np": 21,
    "p0": 15,
    "pd": 35,
    "pe": 41,
    "pi": 60,
    "pn": 50,
    "pp": 44,
    "pr": 51,
    "pt": 62,
    "px": 36,
    "rg": 2,
    "rn": 10,
    "sp": 33,
    "vag": 12,
    "vai": 38,
    "vam": 53,
    "van": 29,
    "vap": 27,
    "vas": 59,
    "vmg": 30,
    "vmi": 61,
    "vmm": 3,
    "vmn": 54,
    "vmp": 56,
    "vms": 7,
    "vsg": 58,
    "vsi": 5,
    "vsm": 31,
    "vsn": 20,
    "vsp": 43,
    "vss": 52
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50262
}
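
For reference, the minimal sketch below shows how a config like this one is typically consumed with the transformers Auto classes: the "architectures" field selects RobertaForTokenClassification, and "id2label" maps each predicted class index back to its tag (the tags appear to be EAGLES-style part-of-speech labels for Spanish). The repository id used here is a placeholder assumption, not the actual repo name.

# Minimal sketch, assuming a hypothetical repo id; substitute the real one.
from transformers import AutoTokenizer, AutoModelForTokenClassification
import torch

repo_id = "hugo-albert/roberta-large-bne-pos"  # placeholder, not the actual repo name

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

text = "El gato duerme en la casa."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# id2label from config.json maps each predicted index back to its tag.
pred_ids = logits.argmax(dim=-1)[0]
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, pred_ids):
    print(token, model.config.id2label[pred.item()])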