Tongjilibo committed on
Commit
f4b7619
1 Parent(s): 12edfac

去除internlm2中skip_special_tokens

Browse files
internlm2-1_8b/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "rope_rank": "updown",
25
  "torch_dtype": "float16",
26
  "vocab_size": 92544,
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "rope_rank": "updown",
25
  "torch_dtype": "float16",
26
  "vocab_size": 92544,
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }
internlm2-7b/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }
internlm2-chat-1_8b/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "rope_rank": "updown",
25
  "torch_dtype": "float16",
26
  "vocab_size": 92544,
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "rope_rank": "updown",
25
  "torch_dtype": "float16",
26
  "vocab_size": 92544,
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }
internlm2-chat-7b/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }
internlm2_5-7b-chat-1m/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 262144}
28
  }
 
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 262144}
28
  }
internlm2_5-7b-chat/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }
internlm2_5-7b/bert4torch_config.json CHANGED
@@ -24,5 +24,5 @@
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
- "generation_config": {"tokenizer_config": {"skip_special_tokens": true}, "end_id": [2, 92542], "max_length": 32768}
28
  }
 
24
  "segment_vocab_size": 0,
25
  "skip_init": true,
26
  "rope_rank": "updown",
27
+ "generation_config": {"end_id": [2, 92542], "max_length": 32768}
28
  }