VictorSanh committed on
Commit
ceb649f
1 Parent(s): 5f31d1d

Update config.json

Files changed (1): config.json (+6, -4)
config.json CHANGED
@@ -5,7 +5,7 @@
   "alpha_type": "float",
   "alphas_initializer_range": 0.0,
   "architectures": [
-    "VLlamaForCausalLM"
+    "IdeficsForCausalLM"
   ],
   "bos_token_id": 1,
   "cross_layer_activation_function": "swiglu",
@@ -23,7 +23,7 @@
   "intermediate_size": 11008,
   "max_position_embeddings": 2048,
   "max_sequence_length": 2048,
-  "model_type": "vllama",
+  "model_type": "idefics",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "pad_token_id": 0,
@@ -41,7 +41,9 @@
   "use_resampler": true,
   "vision_embed_dim": 1280,
   "vision_image_size": 224,
-  "vision_model_name": "laion/CLIP-ViT-H-14-laion2B-s32B-b79K",
-  "vision_model_params": "{\"id2label\":{}, \"label2id\":{}}",
+  "vision_intermediate_size": 5120,
+  "vision_patch_size": 14,
+  "vision_num_attention_heads": 16,
+  "vision_num_hidden_layers": 32,
   "vocab_size": 32000
 }
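
For a quick sanity check of the renamed fields, the updated config can be inspected with a short Python snippet. This is a minimal sketch, not part of the commit; the local "config.json" path and the asserted keys are assumptions taken from the diff above.

import json

# Illustrative sanity check (assumed local copy of the updated config.json).
with open("config.json") as f:
    cfg = json.load(f)

# The commit renames the architecture and model_type from the internal
# "vllama" naming to the "idefics" naming.
assert cfg["model_type"] == "idefics"
assert cfg["architectures"] == ["IdeficsForCausalLM"]

# It also replaces the external CLIP checkpoint reference with explicit
# vision-tower hyperparameters.
for key in ("vision_intermediate_size", "vision_patch_size",
            "vision_num_attention_heads", "vision_num_hidden_layers"):
    assert key in cfg, f"missing {key}"
assert "vision_model_name" not in cfg
assert "vision_model_params" not in cfg

print("config.json uses the idefics naming and inlined vision parameters")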