mostafatarek4 committed
Commit becbc2a
1 parent: 33e09be

Update config.json

Files changed (1)
config.json +1 -1
config.json CHANGED

```diff
@@ -5,7 +5,7 @@
   "intermediate_size": 11008,
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
-  "vocab_size": 32000,
+  "vocab_size": 128000,
   "max_position_embeddings": 2048,
   "rope_scaling": {
     "type": "dynamic",
```