bczhou committed
Commit 268d98c
1 Parent(s): c669e68

Update config.json

Files changed (1):
  config.json +4 -4
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "./checkpoints/tiny-llava-tinyllama-sharegpt4v-unlock-vit-from-12-siglip-TinyLlama-1.1B-Chat-v1.0-siglip-so400m-patch14-384-pretrain",
+  "_name_or_path": "TinyLLaVA-1.5B",
   "architectures": [
-    "LlavaLlamaForCausalLM"
+    "TinyLlavaLlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -21,8 +21,8 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "./checkpoints/tiny-llava-tinyllama-sharegpt4v-unlock-vit-from-12-siglip-TinyLlama-1.1B-Chat-v1.0-siglip-so400m-patch14-384-pretrain/vision_tower",
-  "model_type": "tinyllava",
+  "mm_vision_tower": "bczhou/TinyLLaVA-1.5B-SigLIP",
+  "model_type": "tiny_llava",
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
   "num_key_value_heads": 4,