{
  "_name_or_path": "emu2",
  "architectures": [
    "EmuForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_emu.EmuConfig",
    "AutoModelForCausalLM": "modeling_emu.EmuForCausalLM"
  },
  "bos_token_id": 1,
  "d_model": 1792,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 6656,
  "initializer_range": 0.02,
  "intermediate_size": 17920,
  "max_position_embeddings": 2048,
  "model_version": "chat",
  "num_attention_heads": 52,
  "num_hidden_layers": 60,
  "num_key_value_heads": 52,
  "pad_token_id": 32000,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.31.0",
  "use_cache": true,
  "vision_config": {
    "drop_path_rate": 0,
    "eva_model_name": "eva-clip-E-14-plus",
    "head_width": 112,
    "image_size": 448,
    "intermediate_size": 15360,
    "layer_norm_eps": 1e-06,
    "layers": 64,
    "mlp_ratio": 8.571428571428571,
    "n_query": 256,
    "patch_size": 14,
    "postnorm": true,
    "qkv_bias": true,
    "v_query": 64,
    "width": 1792,
    "xattn": false
  },
  "vocab_size": 32274
}
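
The "auto_map" entries above point the Auto classes at the repo's custom configuration_emu.EmuConfig and modeling_emu.EmuForCausalLM code, so loading this config requires trust_remote_code=True. A minimal sketch follows; the repo id "BAAI/Emu2-Chat" is an assumption based on "_name_or_path": "emu2" and "model_version": "chat", so substitute the actual checkpoint location.

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Assumed repo id; replace with the actual Emu2 checkpoint location.
repo_id = "BAAI/Emu2-Chat"

# trust_remote_code=True lets AutoConfig resolve configuration_emu.EmuConfig
# via the auto_map entry instead of a built-in Transformers config class.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.hidden_size)        # 6656
print(config.num_hidden_layers)  # 60

# Same for the model class (modeling_emu.EmuForCausalLM); torch_dtype="auto"
# keeps the dtype declared in the checkpoint rather than forcing float32.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="auto",
)
```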