liuhaotian committed on
Commit
4f8d627
1 Parent(s): 22a8271

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -4
config.json CHANGED
@@ -9,12 +9,10 @@
9
  "hidden_act": "silu",
10
  "hidden_size": 4096,
11
  "image_aspect_ratio": "square",
12
- "image_grid_pinpoints": null,
13
  "initializer_range": 0.02,
14
  "intermediate_size": 11008,
15
  "max_position_embeddings": 2048,
16
  "mm_hidden_size": 1024,
17
- "mm_resampler_type": null,
18
  "mm_use_im_patch_token": false,
19
  "mm_use_im_start_end": false,
20
  "mm_vision_select_feature": "patch",
@@ -29,8 +27,7 @@
29
  "torch_dtype": "float16",
30
  "transformers_version": "4.31.0.dev0",
31
  "tune_mm_mlp_adapter": true,
32
- "tune_mm_vision_resampler": false,
33
- "use_cache": false,
34
  "use_mm_proj": true,
35
  "vocab_size": 32000
36
  }
 
9
  "hidden_act": "silu",
10
  "hidden_size": 4096,
11
  "image_aspect_ratio": "square",
 
12
  "initializer_range": 0.02,
13
  "intermediate_size": 11008,
14
  "max_position_embeddings": 2048,
15
  "mm_hidden_size": 1024,
 
16
  "mm_use_im_patch_token": false,
17
  "mm_use_im_start_end": false,
18
  "mm_vision_select_feature": "patch",
 
27
  "torch_dtype": "float16",
28
  "transformers_version": "4.31.0.dev0",
29
  "tune_mm_mlp_adapter": true,
30
+ "use_cache": true,
 
31
  "use_mm_proj": true,
32
  "vocab_size": 32000
33
  }