{
"_name_or_path": "xlm-roberta-base",
"architectures": [
"XLMRobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "Arabic",
"1": "Basque",
"10": "Dutch",
"11": "English",
"12": "Esperanto",
"13": "Estonian",
"14": "French",
"15": "Frisian",
"16": "Georgian",
"17": "German",
"18": "Greek",
"19": "Hakha_Chin",
"2": "Breton",
"20": "Indonesian",
"21": "Interlingua",
"22": "Italian",
"23": "Japanese",
"24": "Kabyle",
"25": "Kinyarwanda",
"26": "Kyrgyz",
"27": "Latvian",
"28": "Maltese",
"29": "Mongolian",
"3": "Catalan",
"30": "Persian",
"31": "Polish",
"32": "Portuguese",
"33": "Romanian",
"34": "Romansh_Sursilvan",
"35": "Russian",
"36": "Sakha",
"37": "Slovenian",
"38": "Spanish",
"39": "Swedish",
"4": "Chinese_China",
"40": "Tamil",
"41": "Tatar",
"42": "Turkish",
"43": "Ukrainian",
"44": "Welsh",
"5": "Chinese_Hongkong",
"6": "Chinese_Taiwan",
"7": "Chuvash",
"8": "Czech",
"9": "Dhivehi"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"Arabic": "0",
"Basque": "1",
"Breton": "2",
"Catalan": "3",
"Chinese_China": "4",
"Chinese_Hongkong": "5",
"Chinese_Taiwan": "6",
"Chuvash": "7",
"Czech": "8",
"Dhivehi": "9",
"Dutch": "10",
"English": "11",
"Esperanto": "12",
"Estonian": "13",
"French": "14",
"Frisian": "15",
"Georgian": "16",
"German": "17",
"Greek": "18",
"Hakha_Chin": "19",
"Indonesian": "20",
"Interlingua": "21",
"Italian": "22",
"Japanese": "23",
"Kabyle": "24",
"Kinyarwanda": "25",
"Kyrgyz": "26",
"Latvian": "27",
"Maltese": "28",
"Mongolian": "29",
"Persian": "30",
"Polish": "31",
"Portuguese": "32",
"Romanian": "33",
"Romansh_Sursilvan": "34",
"Russian": "35",
"Sakha": "36",
"Slovenian": "37",
"Spanish": "38",
"Swedish": "39",
"Tamil": "40",
"Tatar": "41",
"Turkish": "42",
"Ukrainian": "43",
"Welsh": "44"
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "xlm-roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.12.5",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 250002
}