prince-canuma committed
Commit 0e25a82 · verified · 1 Parent(s): 5070b10

Update config.json

Files changed (1)
  1. config.json +42 -1
config.json CHANGED
@@ -21,7 +21,48 @@
   "mm_hidden_size": 1152,
   "mm_projector_lr": null,
   "mm_projector_type": "mlp2x_gelu",
- "mm_vision_tower": "google/siglip-so400m-patch14-384",
+ "vision_config": {
+   "return_dict": true,
+   "tie_word_embeddings": true,
+   "max_length": 20,
+   "min_length": 0,
+   "do_sample": false,
+   "early_stopping": false,
+   "num_beams": 1,
+   "num_beam_groups": 1,
+   "diversity_penalty": 0.0,
+   "temperature": 1.0,
+   "model_type": "siglip_vision_model",
+   "hidden_size": 1152,
+   "intermediate_size": 4304,
+   "num_hidden_layers": 27,
+   "num_attention_heads": 16,
+   "num_channels": 3,
+   "patch_size": 14,
+   "image_size": 384,
+   "attention_dropout": 0.0,
+   "layer_norm_eps": 1e-06,
+   "hidden_act": "gelu_pytorch_tanh"
+ },
+ "text_config": {
+   "vocab_size": 151936,
+   "max_position_embeddings": 32768,
+   "hidden_size": 1024,
+   "intermediate_size": 2816,
+   "num_hidden_layers": 24,
+   "num_attention_heads": 16,
+   "use_sliding_window": false,
+   "sliding_window": "None",
+   "max_window_layers": 21,
+   "num_key_value_heads": 16,
+   "hidden_act": "silu",
+   "initializer_range": 0.02,
+   "rms_norm_eps": 1e-06,
+   "use_cache": false,
+   "rope_theta": 1000000.0,
+   "attention_dropout": 0.0,
+   "model_type": "qwen2"
+ },
   "model_type": "llava-qwen2",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,