chuanli-lambda committed on
Commit
b0611d4
1 Parent(s): dce47ba

Upload GPTNeoXForCausalLM

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/home/ubuntu/ckpts/ft-synthetic-instruct-gptj-pairwise-pythia2.8b-8000",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
@@ -18,7 +18,7 @@
18
  "rotary_pct": 0.25,
19
  "tie_word_embeddings": false,
20
  "torch_dtype": "float32",
21
- "transformers_version": "4.26.1",
22
  "use_cache": true,
23
  "use_parallel_residual": true,
24
  "vocab_size": 50278
 
1
  {
2
+ "_name_or_path": "/home/ubuntu/llm/outputs/ft-synthetic-instruct-gptj-pairwise-pythia2.8b-deepspeed/resume/checkpoint-6000",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
 
18
  "rotary_pct": 0.25,
19
  "tie_word_embeddings": false,
20
  "torch_dtype": "float32",
21
+ "transformers_version": "4.25.1",
22
  "use_cache": true,
23
  "use_parallel_residual": true,
24
  "vocab_size": 50278
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:228e45ed145a638a8e724a5030099fa6db86c044137ff149995a0beffd15d805
3
  size 9977142462
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:099f9cb64691a8671f8d65795b2c9c3a07d4981397b7ae0a0ebdd5d4e732e704
3
  size 9977142462
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7e6bcbe2b9b0de9864711f9e398327c65e10c8139ee630a7190ce9b9db6e2aea
3
  size 1257547205
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e8668831277dbed461f2e9c583f725a06bc3a0874f6d8814d30708c99ff8378
3
  size 1257547205