Tongjilibo committed on
Commit
08601ed
1 Parent(s): 98dfc1e

Add llama3-8b

Meta-Llama-3-8B-Instruct/bert4torch_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+     "model": "llama",
+     "hidden_size": 4096,
+     "intermediate_size": 14336,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "hidden_act": "silu",
+     "vocab_size": 128256,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "layer_norm_eps": 1e-05,
+     "rope_rank": "updown",
+     "rope_theta": 500000.0,
+     "num_key_value_heads": 8,
+     "max_position_embeddings": 8192,
+     "torch_dtype": "bfloat16",
+     "tie_word_embeddings": false,
+     "attention_probs_dropout_prob": 0.0
+ }
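For reference, a minimal sketch of how the numbers in this config fit together for Llama-3's grouped-query attention. The per-head dimension and the query-to-KV grouping are derived values, not stored in the file; the config path is the one added in this commit.

```python
import json

# Load the config file added above (path relative to the repo root).
with open("Meta-Llama-3-8B-Instruct/bert4torch_config.json") as f:
    cfg = json.load(f)

# Derived attention layout: 4096 / 32 = 128 dims per head,
# and 32 / 8 = 4 query heads sharing each key/value head (GQA).
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
kv_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]

print(f"head_dim={head_dim}, query heads per KV head={kv_groups}, rope_theta={cfg['rope_theta']}")
```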
Meta-Llama-3-8B/bert4torch_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+     "model": "llama",
+     "hidden_size": 4096,
+     "intermediate_size": 14336,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "hidden_act": "silu",
+     "vocab_size": 128256,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "layer_norm_eps": 1e-05,
+     "rope_rank": "updown",
+     "rope_theta": 500000.0,
+     "num_key_value_heads": 8,
+     "max_position_embeddings": 8192,
+     "torch_dtype": "bfloat16",
+     "tie_word_embeddings": false,
+     "attention_probs_dropout_prob": 0.0
+ }
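A rough usage sketch for either of the two configs added here, assuming bert4torch's `build_transformer_model` entry point and that converted Llama-3 weights sit next to the config; the checkpoint file name is an assumption, not something this commit provides.

```python
from bert4torch.models import build_transformer_model

# Config file added in this commit; weight path is assumed for illustration.
config_path = "Meta-Llama-3-8B/bert4torch_config.json"
checkpoint_path = "Meta-Llama-3-8B/pytorch_model.bin"  # hypothetical weight file

# Build the llama-architecture model from the config and load the weights.
model = build_transformer_model(
    config_path=config_path,
    checkpoint_path=checkpoint_path,
)
model.eval()
```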