{ "_name_or_path": "microsoft/swinv2-base-patch4-window12-192-22k", "architectures": [ "Swinv2ForImageClassification" ], "attention_probs_dropout_prob": 0.0, "depths": [ 2, 2, 18, 2 ], "drop_path_rate": 0.1, "embed_dim": 128, "encoder_stride": 32, "hidden_act": "gelu", "hidden_dropout_prob": 0.0, "hidden_size": 1024, "id2label": { "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9, "10": 10, "11": 11, "12": 12, "13": 13, "14": 14, "15": 15, "16": 16, "17": 17, "18": 18, "19": 19, "20": 20, "21": 21, "22": 22, "23": 23, "24": 24, "25": 25, "26": 26, "27": 27, "28": 28, "29": 29 }, "image_size": 192, "initializer_range": 0.02, "label2id": { "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9, "10": 10, "11": 11, "12": 12, "13": 13, "14": 14, "15": 15, "16": 16, "17": 17, "18": 18, "19": 19, "20": 20, "21": 21, "22": 22, "23": 23, "24": 24, "25": 25, "26": 26, "27": 27, "28": 28, "29": 29 }, "layer_norm_eps": 1e-05, "mlp_ratio": 4.0, "model_type": "swinv2", "num_channels": 3, "num_heads": [ 4, 8, 16, 32 ], "num_layers": 4, "out_features": [ "stage4" ], "out_indices": [ 4 ], "patch_size": 4, "path_norm": true, "pretrained_window_sizes": [ 0, 0, 0, 0 ], "problem_type": "single_label_classification", "qkv_bias": true, "stage_names": [ "stem", "stage1", "stage2", "stage3", "stage4" ], "torch_dtype": "float32", "transformers_version": "4.39.3", "use_absolute_embeddings": false, "window_size": 12 }