rahulbaburajan committed on
Commit 148e7fb
1 Parent(s): 308a7e9

Upload GPTNeoForCausalLM

Files changed (2)
  1. config.json +3 -3
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "EleutherAI/gpt-neo-125M",
+  "_name_or_path": "../saved_models/gptNEO_author_RB_epochs15_lr5e-05/checkpoint-24105",
   "activation_function": "gelu_new",
   "architectures": [
     "GPTNeoForCausalLM"
@@ -47,8 +47,8 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.21.0.dev0",
+  "transformers_version": "4.24.0",
   "use_cache": true,
-  "vocab_size": 50262,
+  "vocab_size": 50259,
   "window_size": 256
 }
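
The config changes above record a fine-tuning run: "_name_or_path" now points at a local training checkpoint rather than the EleutherAI/gpt-neo-125M base model, "transformers_version" is bumped to 4.24.0, and "vocab_size" drops from 50262 to 50259 (GPT-Neo's base tokenizer has 50257 tokens, so the extra rows are presumably added special tokens). A minimal sketch of loading the uploaded weights and checking them against the new config follows; the repo id is an assumption, since this page does not show the Hub repo name:

from transformers import GPTNeoForCausalLM

repo_id = "rahulbaburajan/gpt-neo-author-rb"  # hypothetical repo id; not shown on this page
model = GPTNeoForCausalLM.from_pretrained(repo_id)

# The updated config declares vocab_size 50259, so the token embedding matrix
# should have 50259 rows (the base gpt-neo-125M tokenizer has 50257 tokens).
print(model.config.vocab_size)                    # expected: 50259
print(model.get_input_embeddings().weight.shape)  # expected: (50259, hidden_size)
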
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e397ccef5f0abc6ca14585855cf89f67fce36a7bc1f9c46b11f87e31dcdb6eab
-size 551200465
+oid sha256:111e4204b7fecbed573021129e3fbca0a7b2a0e300e93c81dee8bcd7984ba50b
+size 551190381
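
pytorch_model.bin is stored through Git LFS, so the diff above only updates the pointer file: the new blob's SHA-256 and byte size. A minimal sketch of verifying a downloaded pytorch_model.bin against this pointer (the local file path is an assumption):

import hashlib
import os

path = "pytorch_model.bin"  # assumed local download location
expected_oid = "111e4204b7fecbed573021129e3fbca0a7b2a0e300e93c81dee8bcd7984ba50b"
expected_size = 551190381

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("pytorch_model.bin matches the LFS pointer in this commit")
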