tombenj committed on
Commit
bdc08ec
1 Parent(s): d93bbc5

Add config scaffolding

Browse files
Files changed (5) hide show
  1. config.json +32 -0
  2. generation_config.json +6 -0
  3. merges.txt +0 -0
  4. tokenizer.json +0 -0
  5. vocab.json +0 -0
config.json ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu_new",
3
+ "architectures": [
4
+ "GPT2LMHeadModel"
5
+ ],
6
+ "attn_pdrop": 0.1,
7
+ "bos_token_id": 50256,
8
+ "embd_pdrop": 0.1,
9
+ "eos_token_id": 50256,
10
+ "initializer_range": 0.02,
11
+ "layer_norm_epsilon": 1e-05,
12
+ "model_type": "gpt2",
13
+ "n_ctx": 1024,
14
+ "n_embd": 1600,
15
+ "n_head": 25,
16
+ "n_layer": 48,
17
+ "n_positions": 1024,
18
+ "output_past": true,
19
+ "resid_pdrop": 0.1,
20
+ "summary_activation": null,
21
+ "summary_first_dropout": 0.1,
22
+ "summary_proj_to_labels": true,
23
+ "summary_type": "cls_index",
24
+ "summary_use_proj": true,
25
+ "task_specific_params": {
26
+ "text-generation": {
27
+ "do_sample": true,
28
+ "max_length": 50
29
+ }
30
+ },
31
+ "vocab_size": 50257
32
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 50256,
4
+ "eos_token_id": 50256,
5
+ "transformers_version": "4.27.0.dev0"
6
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
vocab.json ADDED
The diff for this file is too large to render. See raw diff