hungdang1610 committed on
Commit c8a90da
1 Parent(s): 1751e99

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+images/male.jpg filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,8 +1,12 @@
 ---
 license: apache-2.0
+---
 tags:
 - image-classification
 - pytorch
+metrics:
+- accuracy
+
 model-index:
 - name: gender-classification
   results:
@@ -10,9 +14,9 @@ model-index:
       name: Image Classification
       type: image-classification
     metrics:
-    - name: Accuracy
-      type: accuracy
-      value: 0.970833
+    - name: Accuracy
+      type: accuracy
+      value: 0.970833
 ---
 
 # gender-classification
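
Not part of the commit: a minimal sketch, using PyYAML, that parses the README front matter roughly the way the Hub does (the YAML between the first pair of `---` delimiters). It assumes the updated README.md is in the current directory.

```python
# Minimal sketch: read the model-card metadata from the README front matter.
import yaml  # PyYAML

with open("README.md", encoding="utf-8") as f:
    text = f.read()

# Metadata is the YAML between the first two `---` delimiters; the extra
# `---` added after the license line in this commit closes the block early,
# so tags/metrics/model-index fall outside of it.
_, front_matter, body = text.split("---", 2)
metadata = yaml.safe_load(front_matter) or {}

print(metadata.get("license"))      # apache-2.0
print(metadata.get("model-index"))  # None here; would otherwise hold the 0.970833 accuracy entry
```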
config.json CHANGED
@@ -3,21 +3,22 @@
   "architectures": [
     "ViTForImageClassification"
   ],
-  "attention_probs_dropout_prob": 0.0,
+  "attention_probs_dropout_prob": 0.2,
   "encoder_stride": 16,
   "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.0,
+  "hidden_dropout_prob": 0.2,
   "hidden_size": 768,
   "id2label": {
-    "0": "female",
-    "1": "male"
+    "0": "woman",
+    "1": "man"
   },
   "image_size": 224,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "interpolate_pos_encoding": true,
   "label2id": {
-    "female": "0",
-    "male": "1"
+    "man": 1,
+    "woman": 0
   },
   "layer_norm_eps": 1e-12,
   "model_type": "vit",
@@ -25,7 +26,6 @@
   "num_channels": 3,
   "num_hidden_layers": 12,
   "patch_size": 16,
-  "problem_type": "single_label_classification",
   "qkv_bias": true,
   "torch_dtype": "float32",
   "transformers_version": "4.41.0"
images/female.jpg ADDED
images/male.jpg ADDED

Git LFS Details

  • SHA256: fc3ea83d38784dd3fc3019a4cac0cc0923bd15bcd3b76abab1972d9b7045d436
  • Pointer size: 132 Bytes
  • Size of remote file: 1.14 MB
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c6770eb87edee47c6bcbdd34bb7055d75c18cff6ef0401920b34b8b51fab59b5
+oid sha256:00bfbb052d73246f05a25800a8b915d90468c59b3eb117880804c5a43ba324c3
 size 343223968
preprocessor_config.json CHANGED
@@ -16,7 +16,7 @@
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
   "size": {
-    "height": 224,
-    "width": 224
+    "height": 384,
+    "width": 384
   }
-}
+}
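
Not part of the upload itself: a minimal end-to-end sketch showing how the new 384×384 preprocessing size and the interpolate_pos_encoding flag fit together at inference time. It assumes the files from this commit sit in the current directory and that torch and Pillow are available; images/male.jpg is the sample image added above.

```python
# Minimal sketch: classify an image with the updated 384x384 preprocessing.
# The ViT position embeddings were learned for 224x224 inputs, so
# interpolate_pos_encoding=True is passed at forward time to let the model
# accept the larger images produced by the new preprocessor_config.json.
import torch
from PIL import Image
from transformers import AutoImageProcessor, ViTForImageClassification

model_dir = "."  # folder uploaded in this commit
processor = AutoImageProcessor.from_pretrained(model_dir)
model = ViTForImageClassification.from_pretrained(model_dir)
model.eval()

image = Image.open("images/male.jpg").convert("RGB")   # sample image added above
inputs = processor(images=image, return_tensors="pt")  # resized to 384x384

with torch.no_grad():
    logits = model(**inputs, interpolate_pos_encoding=True).logits

print(model.config.id2label[logits.argmax(-1).item()])  # 'woman' or 'man'
```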