Xenova (HF staff) committed on
Commit 695e2e9
Parent: 52d8259

Add `num_image_tokens` to config.json (#2)


- Add `num_image_tokens` to config.json (90a106b61ea22da6c9a3385dafac608b7fe32d48)
- Upload config.json (0cb888ef0b542e9743ec1283ac5a097de71339e5)

Files changed (1):
  1. config.json (+3, -0)
config.json CHANGED
@@ -40,6 +40,7 @@
     "model_type": "qwen2",
     "num_attention_heads": 16,
     "num_hidden_layers": 24,
+    "num_image_tokens": 729,
     "num_key_value_heads": 16,
     "rms_norm_eps": 1e-06,
     "rope_theta": 1000000.0,
@@ -58,10 +59,12 @@
     "model_type": "siglip_vision_model",
     "num_attention_heads": 16,
     "num_hidden_layers": 27,
+    "num_image_tokens": 729,
     "patch_size": 14
   },
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
+  "num_image_tokens": 729,
   "num_key_value_heads": 16,
   "rms_norm_eps": 1e-06,
   "rope_theta": 1000000.0,