rahulbaburajan committed on
Commit 25b563f
1 parent: e8fbe27

Upload GPTNeoForCausalLM

Files changed (2)
  1. config.json +3 -3
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "EleutherAI/gpt-neo-125M",
+  "_name_or_path": "../saved_models/gptNEO_author_VJ_epochs15_lr5e-05/checkpoint-25050",
   "activation_function": "gelu_new",
   "architectures": [
     "GPTNeoForCausalLM"
@@ -47,8 +47,8 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.21.0.dev0",
+  "transformers_version": "4.24.0",
   "use_cache": true,
-  "vocab_size": 50262,
+  "vocab_size": 50259,
   "window_size": 256
 }
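
The config changes reflect a fine-tuned checkpoint replacing the stock EleutherAI/gpt-neo-125M settings: a local checkpoint path in "_name_or_path", a newer transformers version (4.24.0), and a vocab_size of 50259, which implies extra special tokens on top of GPT-Neo's default vocabulary. Below is a minimal sketch of loading the uploaded files and confirming these values; the repository id is a placeholder, since the commit page does not show the full repo name.

# Minimal sketch (not from this repo's docs): load the uploaded
# GPTNeoForCausalLM and confirm the values changed in config.json.
# The repository id is a placeholder -- replace it with the actual repo.
from transformers import AutoConfig, GPTNeoForCausalLM

repo_id = "rahulbaburajan/<repo-name>"  # placeholder

# Checking the config alone avoids downloading the ~550 MB weight file.
config = AutoConfig.from_pretrained(repo_id)
print(config.vocab_size)            # expected: 50259, per the new config.json
print(config.transformers_version)  # expected: "4.24.0"

# Loading the full model pulls pytorch_model.bin (the LFS blob updated below)
# and should expose an input embedding matrix matching the new vocab size.
model = GPTNeoForCausalLM.from_pretrained(repo_id)
assert model.get_input_embeddings().weight.shape[0] == config.vocab_size
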
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4a2f2b8f68847831003bc086095207293276f4b4e5d6a187ff52de6e0d094f20
-size 551200465
+oid sha256:fa28bcf8d05aad6e61820b2a4beb73cc7b50ebd0c74d1aeace3d8442d3a71e7e
+size 551190381
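
pytorch_model.bin is tracked with Git LFS, so the commit only rewrites the pointer file: a new sha256 oid and a slightly smaller blob (551,190,381 bytes, down from 551,200,465). A small sketch, assuming the weight file has already been downloaded locally, of checking that blob against the pointer's size and oid:

# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
# Assumes the blob has already been fetched (e.g. via huggingface_hub or git lfs pull).
import hashlib
import os

EXPECTED_OID = "fa28bcf8d05aad6e61820b2a4beb73cc7b50ebd0c74d1aeace3d8442d3a71e7e"
EXPECTED_SIZE = 551190381

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file in 1 MiB chunks to avoid loading ~550 MB into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the pointer"
assert sha256_of(path) == EXPECTED_OID, "sha256 does not match the pointer oid"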