deepparag committed
Commit 7c6a6de
1 Parent(s): c437ef4
Files changed (2)
  1. config.json +26 -6
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,11 +1,23 @@
 {
-  "_name_or_path": "output-small",
+  "_name_or_path": "EleutherAI/gpt-neo-1.3B",
   "activation_function": "gelu_new",
   "architectures": [
     "GPTNeoForCausalLM"
   ],
   "attention_dropout": 0,
   "attention_layers": [
+    "global",
+    "local",
+    "global",
+    "local",
+    "global",
+    "local",
+    "global",
+    "local",
+    "global",
+    "local",
+    "global",
+    "local",
     "global",
     "local",
     "global",
@@ -25,29 +37,37 @@
         "global",
         "local"
       ],
-      6
+      12
     ]
   ],
   "bos_token_id": 50256,
   "embed_dropout": 0,
   "eos_token_id": 50256,
   "gradient_checkpointing": false,
-  "hidden_size": 768,
+  "hidden_size": 2048,
   "initializer_range": 0.02,
   "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "gpt_neo",
-  "num_heads": 12,
-  "num_layers": 12,
+  "num_heads": 16,
+  "num_layers": 24,
   "resid_dropout": 0,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
+  "task_specific_params": {
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 50,
+      "temperature": 0.9
+    }
+  },
+  "tokenizer_class": "GPT2Tokenizer",
   "torch_dtype": "float32",
-  "transformers_version": "4.14.1",
+  "transformers_version": "4.15.0",
   "use_cache": true,
   "vocab_size": 50257,
   "window_size": 256
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:68b95c233fb1a420c15566869f3d06ba4fbb681b2e78fb855c72caaaaaa4ccaf
-size 551192465
+oid sha256:9e9ad5cb419a610e647b85d5aa0163ebf909f61a11fb21170e0d5fddd52859c9
+size 5363100545
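The LFS pointer swap grows the weight blob from roughly 551 MB to about 5.4 GB, which is consistent with ~1.3B float32 parameters at 4 bytes each. A minimal sketch (file path assumed for illustration) for checking that a downloaded pytorch_model.bin matches the new pointer's oid and size:

import hashlib
import os

EXPECTED_OID = "9e9ad5cb419a610e647b85d5aa0163ebf909f61a11fb21170e0d5fddd52859c9"
EXPECTED_SIZE = 5363100545

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so the ~5.4 GB file never sits in memory whole.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
assert sha256.hexdigest() == EXPECTED_OID, "oid mismatch"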