Update config.json
config.json CHANGED (+4 -4)
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "TinyLLaVA-1.5B",
   "architectures": [
-    "
+    "TinyLlavaLlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -21,8 +21,8 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "
-  "model_type": "
+  "mm_vision_tower": "bczhou/TinyLLaVA-1.5B-SigLIP",
+  "model_type": "tiny_llava",
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
   "num_key_value_heads": 4,
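As a quick sanity check, a minimal sketch (not part of the commit; the local file path and the expected values are simply read off the diff above) that loads the updated config.json and prints the fields this commit touches:

```python
import json

# Inspect the fields changed by this commit.
# "config.json" is assumed to be the downloaded copy of this repo's file;
# adjust the path to wherever the repository was cloned.
with open("config.json") as f:
    cfg = json.load(f)

print(cfg["_name_or_path"])    # expected: "TinyLLaVA-1.5B"
print(cfg["architectures"])    # expected: ["TinyLlavaLlamaForCausalLM"]
print(cfg["mm_vision_tower"])  # expected: "bczhou/TinyLLaVA-1.5B-SigLIP"
print(cfg["model_type"])       # expected: "tiny_llava"
```

Since "tiny_llava" is a custom model_type rather than one built into transformers, loading the model itself presumably requires trust_remote_code=True, as is typical for repositories that ship their own modeling code.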