GooKSL committed on
Commit
f2604a6
1 Parent(s): c73891f

Upload model

Browse files
Files changed (2) hide show
  1. config.json +24 -1
  2. flax_model.msgpack +3 -0
config.json CHANGED
@@ -1,4 +1,11 @@
1
  {
 
 
 
 
 
 
 
2
  "architectures": [
3
  "Dinov2Model"
4
  ],
@@ -18,7 +25,23 @@
18
  "num_hidden_layers": 12,
19
  "patch_size": 14,
20
  "qkv_bias": true,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  "torch_dtype": "float32",
22
- "transformers_version": "4.31.0.dev0",
23
  "use_swiglu_ffn": false
24
  }
 
1
  {
2
+ "_out_features": [
3
+ "stage12"
4
+ ],
5
+ "_out_indices": [
6
+ 12
7
+ ],
8
+ "apply_layernorm": true,
9
  "architectures": [
10
  "Dinov2Model"
11
  ],
 
25
  "num_hidden_layers": 12,
26
  "patch_size": 14,
27
  "qkv_bias": true,
28
+ "reshape_hidden_states": true,
29
+ "stage_names": [
30
+ "stem",
31
+ "stage1",
32
+ "stage2",
33
+ "stage3",
34
+ "stage4",
35
+ "stage5",
36
+ "stage6",
37
+ "stage7",
38
+ "stage8",
39
+ "stage9",
40
+ "stage10",
41
+ "stage11",
42
+ "stage12"
43
+ ],
44
  "torch_dtype": "float32",
45
+ "transformers_version": "4.34.0.dev0",
46
  "use_swiglu_ffn": false
47
  }
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ad9df1f63b246ed5eecea86eecc24cf816d161e5167564597a3d7d81b3888b0
3
+ size 346326646