jeiku committed on
Commit 46a4426
Parent: 47ef05d

Update config.json

Files changed (1)
  config.json +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "jeiku/llavamistral1.6configedit",
   "architectures": [
-    "MistralForCausalLM"
+    "LLavaMistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
@@ -49,7 +49,7 @@
   "mm_vision_select_layer": -2,
   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
   "mm_vision_tower_lr": 2e-06,
-  "model_type": "mistral",
+  "model_type": "llava_mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,