VictorSanh committed
Commit 0979cbf
1 Parent(s): aca8e0b

Update config.json

Files changed (1):
  1. config.json (+6, -4)
config.json CHANGED
@@ -5,7 +5,7 @@
   "alpha_type": "float",
   "alphas_initializer_range": 0.0,
   "architectures": [
-    "VLlamaForCausalLM"
+    "IdeficsForCausalLM"
   ],
   "bos_token_id": 1,
   "cross_layer_activation_function": "swiglu",
@@ -22,7 +22,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 22016,
   "max_sequence_length": 2048,
-  "model_type": "vllama",
+  "model_type": "idefics",
   "num_attention_heads": 64,
   "num_hidden_layers": 80,
   "pad_token_id": 0,
@@ -40,7 +40,9 @@
   "use_resampler": true,
   "vision_embed_dim": 1280,
   "vision_image_size": 224,
-  "vision_model_name": "laion/CLIP-ViT-H-14-laion2B-s32B-b79K",
-  "vision_model_params": "{\"id2label\":{}, \"label2id\":{}}",
+  "vision_intermediate_size": 5120,
+  "vision_patch_size": 14,
+  "vision_num_attention_heads": 16,
+  "vision_num_hidden_layers": 32,
   "vocab_size": 32000
 }
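
For context, the renamed model_type is the string transformers uses to dispatch to a config class when loading a checkpoint. Below is a minimal Python sketch that reads a local copy of this config.json and checks the fields renamed and added in this commit; the file path is a placeholder for a local checkout of this repository, and the AutoConfig call assumes a transformers release that ships IDEFICS support rather than the exact code state at the time of this commit.

import json
from transformers import AutoConfig

# Placeholder path: a local checkout of this repository.
with open("config.json") as f:
    raw = json.load(f)

# The two fields renamed in this commit.
assert raw["architectures"] == ["IdeficsForCausalLM"]
assert raw["model_type"] == "idefics"

# The flat vision_* hyperparameters added in this commit.
vision = {k: v for k, v in raw.items() if k.startswith("vision_")}
print(vision)

# AutoConfig dispatches on model_type ("idefics" -> the IDEFICS config class),
# assuming a transformers version that includes IDEFICS support.
config = AutoConfig.from_pretrained(".")  # directory containing config.json
print(type(config).__name__, config.num_hidden_layers)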