{
"_name_or_path": "gary109/orchid219_ft_vit-large-patch16-224-in21k",
"architectures": [
"ViTForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"encoder_stride": 16,
"finetuning_task": "image-classification",
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 1024,
"id2label": {
"0": "n0000",
"1": "n0001",
"2": "n0002",
"3": "n0003",
"4": "n0004",
"5": "n0005",
"6": "n0006",
"7": "n0007",
"8": "n0008",
"9": "n0009",
"10": "n0010",
"11": "n0011",
"12": "n0012",
"13": "n0013",
"14": "n0014",
"15": "n0015",
"16": "n0016",
"17": "n0017",
"18": "n0018",
"19": "n0019",
"20": "n0020",
"21": "n0021",
"22": "n0022",
"23": "n0023",
"24": "n0024",
"25": "n0025",
"26": "n0026",
"27": "n0027",
"28": "n0028",
"29": "n0029",
"30": "n0030",
"31": "n0031",
"32": "n0032",
"33": "n0033",
"34": "n0034",
"35": "n0035",
"36": "n0036",
"37": "n0037",
"38": "n0038",
"39": "n0039",
"40": "n0040",
"41": "n0041",
"42": "n0042",
"43": "n0043",
"44": "n0044",
"45": "n0045",
"46": "n0046",
"47": "n0047",
"48": "n0048",
"49": "n0049",
"50": "n0050",
"51": "n0051"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"n0000": 0,
"n0001": 1,
"n0002": 2,
"n0003": 3,
"n0004": 4,
"n0005": 5,
"n0006": 6,
"n0007": 7,
"n0008": 8,
"n0009": 9,
"n0010": 10,
"n0011": 11,
"n0012": 12,
"n0013": 13,
"n0014": 14,
"n0015": 15,
"n0016": 16,
"n0017": 17,
"n0018": 18,
"n0019": 19,
"n0020": 20,
"n0021": 21,
"n0022": 22,
"n0023": 23,
"n0024": 24,
"n0025": 25,
"n0026": 26,
"n0027": 27,
"n0028": 28,
"n0029": 29,
"n0030": 30,
"n0031": 31,
"n0032": 32,
"n0033": 33,
"n0034": 34,
"n0035": 35,
"n0036": 36,
"n0037": 37,
"n0038": 38,
"n0039": 39,
"n0040": 40,
"n0041": 41,
"n0042": 42,
"n0043": 43,
"n0044": 44,
"n0045": 45,
"n0046": 46,
"n0047": 47,
"n0048": 48,
"n0049": 49,
"n0050": 50,
"n0051": 51
},
"layer_norm_eps": 1e-12,
"model_type": "vit",
"num_attention_heads": 16,
"num_channels": 3,
"num_hidden_layers": 24,
"patch_size": 16,
"problem_type": "single_label_classification",
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.19.2"
}