alon-albalak committed on
Commit
29281bd
1 Parent(s): 28c2d0d

Upload model

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "_name_or_path": "/local/home/alon_albalak/gpt-neox/outputs/1B_seqlen2048_ods_smoothed_mean_mixed_minibatches_original_weights_init_05smoothing_seed42/global_step100000/hf_model",
3
  "architectures": [
4
- "GPTNeoXForCausalLM"
5
  ],
6
  "bos_token_id": 0,
7
  "eos_token_id": 0,
@@ -17,7 +17,7 @@
17
  "rotary_emb_base": 10000,
18
  "rotary_pct": 0.25,
19
  "tie_word_embeddings": false,
20
- "torch_dtype": "float16",
21
  "transformers_version": "4.24.0",
22
  "use_cache": true,
23
  "use_parallel_residual": true,
 
1
  {
2
  "_name_or_path": "/local/home/alon_albalak/gpt-neox/outputs/1B_seqlen2048_ods_smoothed_mean_mixed_minibatches_original_weights_init_05smoothing_seed42/global_step100000/hf_model",
3
  "architectures": [
4
+ "GPTNeoXModel"
5
  ],
6
  "bos_token_id": 0,
7
  "eos_token_id": 0,
 
17
  "rotary_emb_base": 10000,
18
  "rotary_pct": 0.25,
19
  "tie_word_embeddings": false,
20
+ "torch_dtype": "float32",
21
  "transformers_version": "4.24.0",
22
  "use_cache": true,
23
  "use_parallel_residual": true,