stas committed on
Commit
1c0bf47
1 Parent(s): 280387a
Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -5,7 +5,7 @@
   "alpha_type": "float",
   "alphas_initializer_range": 0.0,
   "architectures": [
-    "VLlamaForCausalLM"
+    "IdeficsForCausalLM"
   ],
   "bos_token_id": 1,
   "cross_layer_activation_function": "swiglu",
@@ -23,7 +23,7 @@
   "intermediate_size": 11008,
   "max_position_embeddings": 2048,
   "max_sequence_length": 2048,
-  "model_type": "vllama",
+  "model_type": "idefics",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "pad_token_id": 0,
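For context, a minimal sketch of how the renamed fields are read back through transformers' AutoConfig. This assumes the "idefics" model_type is registered in the installed transformers version and that the script runs from a local checkout of this repo containing the updated config.json; the path and expected outputs are illustrative, not part of the commit.

# Sketch: verify the renamed config fields with AutoConfig.
# Assumes a local checkout of this repo (config.json in the current directory)
# and a transformers version that has the "idefics" model_type registered.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")
print(config.model_type)     # expected: "idefics"
print(config.architectures)  # expected: ["IdeficsForCausalLM"]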