w601sxs committed
Commit 44d97c7 (verified) · Parent: 4446381

Upload GPTNeoXForCausalLM

Files changed (2)
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -22,7 +22,7 @@
   "rotary_emb_base": 10000,
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float32",
   "transformers_version": "4.37.2",
   "use_cache": true,
   "use_parallel_residual": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2c3918114d8b0889fd527ead5ed62df6deb07edacc6d4ea428230178529580fd
-size 2023586384
+oid sha256:e7534b730f8b3d282d23392fa1e5d6a8b4204325ff99b78b1aa4ff9b8bd87548
+size 4047149576
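
The new file is almost exactly twice the old size (4,047,149,576 vs 2,023,586,384 bytes), consistent with the same roughly 1.0B parameters stored at 4 bytes each (float32) instead of 2 (bfloat16). A quick sanity check of that arithmetic, using only the sizes from the pointer files above:

# Implied parameter counts from the two pointer sizes (small safetensors
# header overhead is ignored, hence the slight mismatch between the two).
old_size, new_size = 2_023_586_384, 4_047_149_576
print(old_size / 2)  # ~1.01e9 params at 2 bytes each (bfloat16)
print(new_size / 4)  # ~1.01e9 params at 4 bytes each (float32)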