Tongjilibo committed
Commit 4db87e1
1 Parent(s): c561284

Add internlm2 and 2.5

internlm2-1_8b/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 2048,
+     "initializer_range": 0.02,
+     "intermediate_size": 8192,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "num_key_value_heads": 8,
+     "layer_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "torch_dtype": "float16",
+     "vocab_size": 92544,
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }
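
For reference, these bert4torch_config.json files are what bert4torch's model builder reads. A minimal loading sketch, assuming the weights sit next to the config under the conventional pytorch_model.bin name (the checkpoint filename and the list-of-tensors forward call are assumptions, not part of this commit):

    import torch
    from bert4torch.models import build_transformer_model

    config_path = "internlm2-1_8b/bert4torch_config.json"  # file added above
    checkpoint_path = "internlm2-1_8b/pytorch_model.bin"   # assumed weight filename

    # "model": "internlm2" selects the architecture; "skip_init": true skips
    # random init since the weights are loaded from the checkpoint anyway.
    model = build_transformer_model(config_path=config_path,
                                    checkpoint_path=checkpoint_path)
    model.eval()

    with torch.no_grad():
        token_ids = torch.tensor([[1, 100, 200]])  # bos_token_id is 1
        hidden = model([token_ids])  # token ids only: segment_vocab_size is 0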
internlm2-7b/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 92544,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }
internlm2-chat-1_8b/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 2048,
+     "initializer_range": 0.02,
+     "intermediate_size": 8192,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "num_key_value_heads": 8,
+     "layer_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "torch_dtype": "float16",
+     "vocab_size": 92544,
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }
internlm2-chat-7b/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 92544,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }
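
The generation_config block in each file carries decoding defaults: skip special tokens when detokenizing, cap output at the context length, and stop on either end id. Token 2 is eos_token_id; 92542 is presumably the chat template's <|im_end|> token. A sketch of a greedy loop honoring multiple stop ids (model_step is a hypothetical callable returning next-token logits):

    import torch

    END_IDS = {2, 92542}   # "end_id": [2, 92542]
    MAX_LENGTH = 32768     # "max_length"

    def greedy_decode(model_step, token_ids):
        # Extend token_ids until a stop id is produced or the length cap is hit.
        while len(token_ids) < MAX_LENGTH:
            logits = model_step(torch.tensor([token_ids]))  # shape [1, vocab_size]
            next_id = int(logits[0].argmax())
            if next_id in END_IDS:
                break
            token_ids.append(next_id)
        return token_ids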
internlm2_5-7b-chat-1m/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 262144,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.5
+     },
+     "rope_theta": 50000000,
+     "tie_word_embeddings": false,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 92544,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 262144}
+ }
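
The 1M-context variant differs from the 32K configs in three places: max_position_embeddings grows to 262144, rope_theta to 5e7, and the dynamic scaling factor to 2.5. "type": "dynamic" is the NTK-aware scheme that rescales the RoPE base once the sequence outgrows the trained window; a sketch of the usual formula (bert4torch's internals may differ in detail):

    import torch

    def dynamic_ntk_base(base, factor, seq_len, max_pos, head_dim):
        # Past max_pos, grow the RoPE base so low frequencies still cover seq_len.
        if seq_len <= max_pos:
            return base
        scale = (factor * seq_len / max_pos) - (factor - 1)
        return base * scale ** (head_dim / (head_dim - 2))

    # This config: rope_theta=5e7, factor=2.5, max_position_embeddings=262144,
    # head_dim = hidden_size / num_attention_heads = 4096 / 32 = 128.
    base = dynamic_ntk_base(50_000_000, 2.5, seq_len=400_000,
                            max_pos=262_144, head_dim=128)
    inv_freq = 1.0 / (base ** (torch.arange(0, 128, 2).float() / 128))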
internlm2_5-7b-chat/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 92544,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }
internlm2_5-7b/bert4torch_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+     "model": "internlm2",
+     "bias": false,
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "pad_token_id": 2,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 32768,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 32,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "type": "dynamic",
+         "factor": 2.0
+     },
+     "rope_theta": 1000000,
+     "tie_word_embeddings": false,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 92544,
+     "segment_vocab_size": 0,
+     "skip_init": true,
+     "rope_rank": "updown",
+     "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
+ }