GradientGuru committed on
Commit
f9d4d8d
1 Parent(s): 5310008

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -1
config.json CHANGED
@@ -24,5 +24,6 @@
24
  "torch_dtype": "bfloat16",
25
  "transformers_version": "4.29.2",
26
  "use_cache": true,
27
- "vocab_size": 125696
 
28
  }
 
24
  "torch_dtype": "bfloat16",
25
  "transformers_version": "4.29.2",
26
  "use_cache": true,
27
+ "vocab_size": 125696,
28
+ "z_loss_weight": 0
29
  }