{
"_name_or_path": "xlm-roberta-base",
"architectures": [
"XLMRobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "bulgarian",
"1": "polish",
"2": "pushto",
"3": "kyrgyz",
"4": "malay",
"5": "kazak",
"6": "hungarian",
"7": "armenian",
"8": "kashmiri",
"9": "romanian",
"10": "tajik",
"11": "dutch",
"12": "english",
"13": "finnish",
"14": "welsh",
"15": "lithuanian",
"16": "portoguese",
"17": "belarusian",
"18": "maltese",
"19": "dhivehi",
"20": "malayan",
"21": "german",
"22": "sinhala",
"23": "azerbaijani",
"24": "uzbek",
"25": "lao",
"26": "greek",
"27": "nepali",
"28": "slovak",
"29": "french",
"30": "italian",
"31": "albanian",
"32": "label",
"33": "mongolian",
"34": "estonian",
"35": "swahili",
"36": "burmese",
"37": "slovene",
"38": "faroese",
"39": "turkmen",
"40": "danish",
"41": "maori",
"42": "georgian",
"43": "spanish",
"44": "swedish",
"45": "latvian",
"46": "czech"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"albanian": 31,
"armenian": 7,
"azerbaijani": 23,
"belarusian": 17,
"bulgarian": 0,
"burmese": 36,
"czech": 46,
"danish": 40,
"dhivehi": 19,
"dutch": 11,
"english": 12,
"estonian": 34,
"faroese": 38,
"finnish": 13,
"french": 29,
"georgian": 42,
"german": 21,
"greek": 26,
"hungarian": 6,
"italian": 30,
"kashmiri": 8,
"kazak": 5,
"kyrgyz": 3,
"label": 32,
"lao": 25,
"latvian": 45,
"lithuanian": 15,
"malay": 4,
"malayan": 20,
"maltese": 18,
"maori": 41,
"mongolian": 33,
"nepali": 27,
"polish": 1,
"portoguese": 16,
"pushto": 2,
"romanian": 9,
"sinhala": 22,
"slovak": 28,
"slovene": 37,
"spanish": 43,
"swahili": 35,
"swedish": 44,
"tajik": 10,
"turkmen": 39,
"uzbek": 24,
"welsh": 14
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "xlm-roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.28.0",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 250002
}