mrsarthakgupta committed on
Commit
2e6bfcf
1 Parent(s): 52378e8

Update config.json

Files changed (1)
  1. config.json (+10 -3)
config.json CHANGED
@@ -1,15 +1,23 @@
 {
   "architectures": [
-    "CLIPModel"
+    "ViTForImageClassification"
   ],
   "attention_probs_dropout_prob": 0.0,
   "encoder_stride": 16,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 768,
+  "id2label": {
+    "0": "real",
+    "1": "fake"
+  },
   "image_size": 224,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "real": 0,
+    "fake": 1
+  },
   "layer_norm_eps": 1e-12,
   "model_type": "vit",
   "num_attention_heads": 12,
@@ -17,6 +25,5 @@
   "num_hidden_layers": 12,
   "patch_size": 16,
   "qkv_bias": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.40.2"
+  "transformers_version": "4.29.2"
 }
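
With this change the config declares a ViT image classifier ("model_type": "vit", architecture ViTForImageClassification) and adds the id2label/label2id maps, so the checkpoint can be loaded through transformers' image-classification auto classes and its logits mapped straight to the "real"/"fake" labels. A minimal usage sketch follows; "user/model-repo" and "example.jpg" are placeholders, not names taken from this commit:

# Sketch: loading a checkpoint that uses this config via transformers.
# "user/model-repo" is a hypothetical repository id; substitute the real one.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

repo_id = "user/model-repo"  # placeholder, not from the commit
processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModelForImageClassification.from_pretrained(repo_id)

image = Image.open("example.jpg").convert("RGB")  # placeholder input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# id2label from config.json maps the argmax index to "real" or "fake"
# (transformers converts the JSON string keys "0"/"1" to ints on load).
pred = logits.argmax(-1).item()
print(model.config.id2label[pred])

Note that before this commit the same repository declared "CLIPModel" in "architectures" while the rest of the config was ViT-shaped, which the auto classes would have resolved to the wrong model class; the edit makes the declared architecture consistent with the config.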