stas committed
Commit af110e6
1 Parent(s): 51ef8da

Update config.json

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -4,7 +4,7 @@
   "alpha_type": "vector",
   "alphas_initializer_range": 0.0,
   "architectures": [
-    "VLlamaForCausalLM"
+    "IdeficsForCausalLM"
   ],
   "bos_token_id": 1,
   "cross_layer_activation_function": "swiglu",
@@ -23,7 +23,7 @@
   "intermediate_size": 11008,
   "max_new_tokens": 100,
   "max_position_embeddings": 128,
-  "model_type": "vllama",
+  "model_type": "idefics",
   "num_attention_heads": 4,
   "num_hidden_layers": 2,
   "pad_token_id": 0,