liuhaotian committed on
Commit
ae40d1f
1 Parent(s): 23a0e2e

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -4
config.json CHANGED
@@ -8,8 +8,6 @@
8
  "freeze_mm_mlp_adapter": false,
9
  "hidden_act": "silu",
10
  "hidden_size": 5120,
11
- "image_aspect_ratio": "square",
12
- "image_grid_pinpoints": null,
13
  "initializer_range": 0.02,
14
  "intermediate_size": 13824,
15
  "max_position_embeddings": 2048,
@@ -29,8 +27,7 @@
29
  "torch_dtype": "float16",
30
  "transformers_version": "4.31.0.dev0",
31
  "tune_mm_mlp_adapter": true,
32
- "tune_mm_vision_resampler": false,
33
- "use_cache": false,
34
  "use_mm_proj": true,
35
  "vocab_size": 32000
36
  }
 
8
  "freeze_mm_mlp_adapter": false,
9
  "hidden_act": "silu",
10
  "hidden_size": 5120,
 
 
11
  "initializer_range": 0.02,
12
  "intermediate_size": 13824,
13
  "max_position_embeddings": 2048,
 
27
  "torch_dtype": "float16",
28
  "transformers_version": "4.31.0.dev0",
29
  "tune_mm_mlp_adapter": true,
30
+ "use_cache": true,
 
31
  "use_mm_proj": true,
32
  "vocab_size": 32000
33
  }