Commit dc84d32 by m-ric
Parent: 4e29d59

Update config.json

Files changed (1):
  config.json (+10 -4)
config.json CHANGED
@@ -2,10 +2,14 @@
   "architectures": [
     "AriaForConditionalGeneration"
   ],
+  "auto_map": {
+    "AutoConfig": "modeling_aria.AriaConfig",
+    "AutoModelForCausalLM": "modeling_aria.AriaForConditionalGeneration"
+  },
   "ignore_index": -100,
-  "image_token_index": 32000,
+  "image_token_index": 9,
   "model_type": "aria",
-  "pad_token_id": 32001,
+  "pad_token_id": 2,
   "projector_patch_to_query_dict": {
     "1225": 128,
     "4900": 256
@@ -23,14 +27,16 @@
     "num_hidden_layers": 28,
     "num_key_value_heads": 20,
     "rope_theta": 5000000,
-    "vocab_size": 100416
+    "vocab_size": 100352
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.46.0.dev0",
+  "transformers_version": "4.46.0",
   "vision_config": {
+    "hidden_size": 1152,
     "image_size": 980,
     "intermediate_size": 4304,
     "model_type": "idefics3",
+    "num_attention_heads": 16,
     "num_hidden_layers": 27,
     "patch_size": 14
   }
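
The new "auto_map" block wires the Hub's trust_remote_code mechanism to the custom classes in the repo's modeling_aria.py, so the stock Auto classes can load this checkpoint even on a transformers release without native Aria support. A minimal loading sketch; the repo id below is a placeholder, substitute the repository this commit lives in:

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "m-ric/Aria"  # placeholder repo id, not confirmed by this commit

# auto_map tells AutoConfig / AutoModelForCausalLM to import AriaConfig and
# AriaForConditionalGeneration from the repo's modeling_aria.py at load time,
# which is why trust_remote_code=True is required.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="bfloat16",  # matches the checkpoint's torch_dtype
)

# Sanity-check the token ids changed in this commit:
assert config.image_token_index == 9
assert config.pad_token_id == 2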