{
"_name_or_path": "AutoTrain",
"architectures": [
"BeitForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"auxiliary_channels": 256,
"auxiliary_concat_input": false,
"auxiliary_loss_weight": 0.4,
"auxiliary_num_convs": 1,
"drop_path_rate": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "HORNBLENDE OLIVINE BEARING PIROXENE",
"1": "Serpentinized Lherzolite",
"10": "jotunite",
"11": "lujavrite",
"12": "luxulianite",
"13": "nepheline pegmatite",
"14": "nepheline syenite",
"15": "norite",
"16": "olivine gabbro",
"17": "olivine hornblendite",
"18": "ortopiroxenite",
"19": "pulaskite",
"2": "allanite",
"20": "quartz gabbro",
"21": "sienite",
"22": "sienite sodalite",
"23": "sienogranito",
"3": "antipertite",
"4": "calcite",
"5": "cancrinite pegmatite",
"6": "cumberlandite",
"7": "granite peralkaline",
"8": "granitic pegmatite",
"9": "hornblende -olivine bearing piroxenite"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"HORNBLENDE OLIVINE BEARING PIROXENE": "0",
"Serpentinized Lherzolite": "1",
"allanite": "2",
"antipertite": "3",
"calcite": "4",
"cancrinite pegmatite": "5",
"cumberlandite": "6",
"granite peralkaline": "7",
"granitic pegmatite": "8",
"hornblende -olivine bearing piroxenite": "9",
"jotunite": "10",
"lujavrite": "11",
"luxulianite": "12",
"nepheline pegmatite": "13",
"nepheline syenite": "14",
"norite": "15",
"olivine gabbro": "16",
"olivine hornblendite": "17",
"ortopiroxenite": "18",
"pulaskite": "19",
"quartz gabbro": "20",
"sienite": "21",
"sienite sodalite": "22",
"sienogranito": "23"
},
"layer_norm_eps": 1e-12,
"layer_scale_init_value": 0.1,
"max_length": 128,
"model_type": "beit",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"out_indices": [
3,
5,
7,
11
],
"padding": "max_length",
"patch_size": 16,
"pool_scales": [
1,
2,
3,
6
],
"problem_type": "single_label_classification",
"semantic_loss_ignore_index": 255,
"torch_dtype": "float32",
"transformers_version": "4.25.1",
"use_absolute_position_embeddings": false,
"use_auxiliary_head": true,
"use_mask_token": false,
"use_mean_pooling": true,
"use_relative_position_bias": true,
"use_shared_relative_position_bias": false,
"vocab_size": 8192
}
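
Since this config declares a BeitForImageClassification head with problem_type "single_label_classification" over 24 rock-type labels, the checkpoint can be sanity-checked by running a single image through it. Below is a minimal inference sketch, assuming the checkpoint is reachable at a placeholder path (not this repository's real id), a recent transformers release (>= 4.26 for AutoImageProcessor), and a local RGB image file; it is an illustration, not example code shipped with this repo.

# Minimal inference sketch for this config. Assumptions: "path/to/autotrain-checkpoint"
# and "rock_sample.jpg" are placeholders, and transformers >= 4.26 is installed.
import torch
from PIL import Image
from transformers import AutoImageProcessor, BeitForImageClassification

checkpoint = "path/to/autotrain-checkpoint"  # placeholder, not the real repo id
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = BeitForImageClassification.from_pretrained(checkpoint)
model.eval()

image = Image.open("rock_sample.jpg").convert("RGB")  # placeholder file name
inputs = processor(images=image, return_tensors="pt")  # resizes/normalizes to image_size=224
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 24): one score per label

pred = logits.argmax(-1).item()
print(model.config.id2label[pred])  # e.g. "norite"

Because problem_type is single_label_classification, a plain argmax over the logits is the right decoding. Note also that the segmentation-oriented keys above (auxiliary_*, out_indices, pool_scales, semantic_loss_ignore_index) are shared BeitConfig defaults that the classification head does not use, and "max_length"/"padding" appear to be text-pipeline leftovers from AutoTrain that an image model ignores.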