{
  "_name_or_path": "google/vit-base-patch16-224",
  "architectures": [
    "ViTForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "encoder_stride": 16,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 768,
  "id2label": {
    "0": "R12160103",
    "1": "R12998153",
    "2": "R12413073-648.0413.3.007",
    "3": "R12949153",
    "4": "R12413584-648.0413.3.158",
    "5": "R12163118",
    "6": "R12160303",
    "7": "R12160213",
    "8": "R12638173-541.0638.3.017",
    "9": "R12413613-648.0413.3.061",
    "10": "R12999253",
    "11": "R12413343-648.0413.3.034",
    "12": "R12638163-541.0638.3.016",
    "13": "R12064253",
    "14": "R12413243-648.0413.3.024",
    "15": "R12413573",
    "16": "R12393633-114.0393.3.063",
    "17": "R12391103-114.0391.3.010",
    "18": "R12413314-648.0413.3.131",
    "19": "R12416393-557.0416.3.039",
    "20": "R12391153-114.0391.3.015",
    "21": "R12413803-648.0413.3.080",
    "22": "R12391633-114.0391.3.063",
    "23": "R12413193-648.0413.3.019",
    "24": "R12413313-648.0413.3.031",
    "25": "R12164153",
    "26": "R12162153"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "R12064253": 13,
    "R12160103": 0,
    "R12160213": 7,
    "R12160303": 6,
    "R12162153": 26,
    "R12163118": 5,
    "R12164153": 25,
    "R12391103-114.0391.3.010": 17,
    "R12391153-114.0391.3.015": 20,
    "R12391633-114.0391.3.063": 22,
    "R12393633-114.0393.3.063": 16,
    "R12413073-648.0413.3.007": 2,
    "R12413193-648.0413.3.019": 23,
    "R12413243-648.0413.3.024": 14,
    "R12413313-648.0413.3.031": 24,
    "R12413314-648.0413.3.131": 18,
    "R12413343-648.0413.3.034": 11,
    "R12413573": 15,
    "R12413584-648.0413.3.158": 4,
    "R12413613-648.0413.3.061": 9,
    "R12413803-648.0413.3.080": 21,
    "R12416393-557.0416.3.039": 19,
    "R12638163-541.0638.3.016": 12,
    "R12638173-541.0638.3.017": 8,
    "R12949153": 3,
    "R12998153": 1,
    "R12999253": 10
  },
  "layer_norm_eps": 1e-12,
  "model_type": "vit",
  "num_attention_heads": 12,
  "num_channels": 3,
  "num_hidden_layers": 12,
  "patch_size": 16,
  "problem_type": "single_label_classification",
  "qkv_bias": true,
  "torch_dtype": "float32",
  "transformers_version": "4.41.2"
}