muhtasham commited on
Commit
d523e22
1 Parent(s): f399777

Update config.json

Browse files

Added "tie_word_embeddings": false; this is needed to return LM_HEAD layers in MLX.

Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -22,6 +22,7 @@
22
  "residual_dropout": 0.0,
23
  "rope_theta": 999999.4420358813,
24
  "sliding_window": 4096,
 
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.39.0.dev0",
27
  "use_bias": true,
 
22
  "residual_dropout": 0.0,
23
  "rope_theta": 999999.4420358813,
24
  "sliding_window": 4096,
25
+ "tie_word_embeddings": false,
26
  "torch_dtype": "bfloat16",
27
  "transformers_version": "4.39.0.dev0",
28
  "use_bias": true,