SmilingWolf committed
Commit 296b77d
1 Parent(s): 056e4c5

Add model files

Files changed (7):
  1. README.md +1 -0
  2. config.json +39 -0
  3. model.msgpack +3 -0
  4. model.onnx +3 -0
  5. model.safetensors +3 -0
  6. selected_tags.csv +0 -0
  7. sw_jax_cv_config.json +13 -0
README.md CHANGED
@@ -1,3 +1,4 @@
  ---
  license: apache-2.0
+ library_name: timm
  ---
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "architecture": "vit_base_patch16_224",
+   "num_classes": 10861,
+   "num_features": 768,
+   "global_pool": "avg",
+   "model_args": {
+     "img_size": 448,
+     "class_token": false,
+     "global_pool": "avg",
+     "fc_norm": false,
+     "act_layer": "gelu_tanh"
+   },
+   "pretrained_cfg": {
+     "custom_load": false,
+     "input_size": [
+       3,
+       448,
+       448
+     ],
+     "fixed_input_size": false,
+     "interpolation": "bicubic",
+     "crop_pct": 1.0,
+     "crop_mode": "center",
+     "mean": [
+       0.5,
+       0.5,
+       0.5
+     ],
+     "std": [
+       0.5,
+       0.5,
+       0.5
+     ],
+     "num_classes": 10861,
+     "pool_size": null,
+     "first_conv": null,
+     "classifier": null
+   }
+ }
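The config above follows timm's pretrained-config layout, so one way to sanity-check it is to build the architecture locally. A minimal sketch, assuming a recent timm version that accepts these keyword arguments (the weights themselves would still have to come from model.safetensors below):

```python
import timm

# Build the architecture described by config.json; pretrained=False because
# this sketch does not assume how or where the weights are hosted.
model = timm.create_model(
    "vit_base_patch16_224",
    pretrained=False,
    num_classes=10861,
    img_size=448,
    class_token=False,
    global_pool="avg",
    fc_norm=False,
    act_layer="gelu_tanh",
)
model.eval()
```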
model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b06edd4477d3a5407afbc41a2e0d36773bc00e549eda5d19ad0395a0d23618bd
+ size 378408630
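model.msgpack appears to be the Flax/msgpack checkpoint for the JAX variant described by sw_jax_cv_config.json further down. A minimal inspection sketch, assuming it deserializes to a plain parameter pytree:

```python
import flax.serialization

# Restore the msgpack-encoded pytree and look at its top-level structure.
with open("model.msgpack", "rb") as f:
    params = flax.serialization.msgpack_restore(f.read())

print(list(params.keys()))
```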
model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c21152f1febcd04867c4dbc7bddf781db701f7c0e14fa3b787656477a16514c
+ size 378536310
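For the ONNX export, a hedged inference sketch with onnxruntime. The input name, layout, and exact preprocessing are not documented in this commit, so the code reads the graph's input shape rather than assuming one; treating the outputs as per-tag sigmoid scores (multi-label tagging via selected_tags.csv) is likewise an assumption.

```python
import numpy as np
import onnxruntime as ort

session = ort.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])
inp = session.get_inputs()[0]
print(inp.name, inp.shape)  # check the expected layout before feeding real images

# Dummy batch with any dynamic dimensions replaced by 1; real preprocessing
# (resize to 448x448, normalization, channel order) must match the export.
shape = [d if isinstance(d, int) else 1 for d in inp.shape]
x = np.zeros(shape, dtype=np.float32)

outputs = session.run(None, {inp.name: x})
probs = 1.0 / (1.0 + np.exp(-outputs[0]))  # assumption: per-tag sigmoid scores
```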
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:021ace976c1e9f944ea51d591c3f6a5722125ae8bd21c51caec3fd1275587ed5
+ size 378417260
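model.safetensors holds the weights in safetensors format and can be paired with the timm architecture built in the sketch after config.json. A minimal sketch; strict=False is used only because this does not verify that the key names match exactly:

```python
from safetensors.torch import load_file

# Load the checkpoint into the `model` created in the earlier timm sketch.
state_dict = load_file("model.safetensors")
missing, unexpected = model.load_state_dict(state_dict, strict=False)
print("missing:", missing, "unexpected:", unexpected)
```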
selected_tags.csv ADDED
The diff for this file is too large to render. See raw diff
 
sw_jax_cv_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "model_name": "vit_base",
+   "model_args": {
+     "patch_size": 16,
+     "num_classes": 10861,
+     "num_layers": 12,
+     "embed_dim": 768,
+     "mlp_dim": 3072,
+     "num_heads": 12,
+     "drop_path_rate": 0.1,
+     "layer_norm_eps": 1e-05
+   }
+ }
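As a sanity check, the hyperparameters above can be turned into a back-of-envelope parameter count. Assuming a standard ViT layout (no class token, 448x448 input at patch size 16, learned position embeddings), the total lands around 94.6M parameters, i.e. roughly 378 MB in float32, which matches the checkpoint sizes in this commit:

```python
embed_dim, layers, mlp_dim, num_classes = 768, 12, 3072, 10861
patch, img = 16, 448

tokens = (img // patch) ** 2                              # 784 patch tokens, no class token
patch_embed = 3 * patch * patch * embed_dim + embed_dim   # patch projection + bias
pos_embed = tokens * embed_dim                            # learned position embeddings
per_block = (
    4 * embed_dim * embed_dim + 4 * embed_dim             # qkv + attention projection (+ biases)
    + 2 * embed_dim * mlp_dim + mlp_dim + embed_dim       # MLP fc1/fc2 (+ biases)
    + 4 * embed_dim                                       # two layer norms
)
head = embed_dim * num_classes + num_classes              # classifier + bias
total = patch_embed + pos_embed + layers * per_block + 2 * embed_dim + head

print(f"{total / 1e6:.1f}M params, ~{total * 4 / 1e6:.0f} MB in float32")
# -> 94.6M params, ~378 MB in float32
```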