muhtasham committed on
Commit
cd79bfb
1 Parent(s): fdb7db8

Update config.json


Added tie_word_embeddings so MLX does not return a separate LM head

Files changed (1)
  1. config.json +1 -0
config.json CHANGED
@@ -24,6 +24,7 @@
   "scale_attention_softmax_in_fp32": true,
   "scale_attn_weights": true,
   "sliding_window": 4096,
+  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.0.dev0",
  "use_bias": true,