royam0820 committed
Commit 90b25da · 1 Parent(s): d6af4c6

Create config.json


adding the config.json file

Files changed (1)
  1. config.json +26 -0
config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "_name_or_path": "meta-llama/Llama-2-7b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 0.00001,
+   "rope_scaling": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.29.2",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
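
For reference, a config.json like the one added here is what the transformers library reads when instantiating a model. The snippet below is a minimal sketch of loading it; it assumes transformers is installed and that the file sits in a hypothetical local directory ./llama-2-7b-hf (the path is illustrative, not part of this commit).

    # Minimal sketch: load the config.json above with Hugging Face transformers.
    # Assumes `pip install transformers`; ./llama-2-7b-hf is a hypothetical
    # local directory containing the config.json from this commit.
    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("./llama-2-7b-hf")

    # Fields from the JSON file are exposed as attributes on the config object.
    print(config.model_type)         # "llama"
    print(config.hidden_size)        # 4096
    print(config.num_hidden_layers)  # 32

Passing the repo id "meta-llama/Llama-2-7b-hf" instead of a local path would fetch the config from the Hub, subject to the model's gated-access terms.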