Neko-Institute-of-Science committed
Commit 96d183f
1 Parent(s): be5b888

Sync Config files

Files changed (3)
  1. config.json +6 -6
  2. generation_config.json +1 -1
  3. tokenizer.json +0 -0
config.json CHANGED
@@ -5,18 +5,18 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
-  "hidden_size": 6656,
+  "hidden_size": 8192,
   "initializer_range": 0.02,
-  "intermediate_size": 17920,
+  "intermediate_size": 22016,
   "max_position_embeddings": 2048,
   "model_type": "llama",
-  "num_attention_heads": 52,
-  "num_hidden_layers": 60,
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
   "pad_token_id": 0,
-  "rms_norm_eps": 1e-06,
+  "rms_norm_eps": 1e-05,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.0.dev0",
+  "transformers_version": "4.28.1",
   "use_cache": true,
   "vocab_size": 32000
 }
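Note: the replaced values (hidden_size 6656, intermediate_size 17920, 52 heads, 60 layers) are the published LLaMA-30B shape, while the new values (8192, 22016, 64 heads, 80 layers) match LLaMA-65B; rms_norm_eps also differs between the two sizes (1e-06 vs. 1e-05). As a minimal sanity-check sketch (assuming a transformers release with LLaMA support, i.e. 4.28+), the post-commit shape can be rebuilt locally:

from transformers import LlamaConfig

# Rebuild the post-commit config.json values (LLaMA-65B shape).
config = LlamaConfig(
    hidden_size=8192,
    intermediate_size=22016,
    num_attention_heads=64,
    num_hidden_layers=80,
    max_position_embeddings=2048,
    rms_norm_eps=1e-05,
    vocab_size=32000,
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
)

# The per-head dimension stays 128 across both shapes:
# 8192 / 64 == 6656 / 52 == 128.
assert config.hidden_size % config.num_attention_heads == 0
print(config.hidden_size // config.num_attention_heads)  # 128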
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.28.0.dev0"
+  "transformers_version": "4.28.1"
 }
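The only change here is the recorded transformers_version string. For reference, a short sketch (assuming a recent transformers release where GenerationConfig is available) of how that field gets populated:

from transformers import GenerationConfig

# Token ids mirror generation_config.json; transformers_version defaults to
# the installed library version and is written out by save_pretrained().
gen_config = GenerationConfig(bos_token_id=1, eos_token_id=2, pad_token_id=0)
print(gen_config.transformers_version)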
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff