Remove `torch_dtype` from config

#13
opened by hlky (HF staff)
Files changed (1)
  1. text_encoder/config.json +0 -1
text_encoder/config.json CHANGED

```diff
@@ -40,7 +40,6 @@
   "rmsnorm": true,
   "seq_length": 32768,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
   "transformers_version": "4.41.2",
   "use_cache": true,
   "vocab_size": 65024
```