GiGi committed on
Commit af35b32
1 Parent(s): bf9d050

new model gpt-2-medium

Files changed (4):
  1. added_tokens.json +1 -1
  2. config.json +11 -6
  3. pytorch_model.bin +2 -2
  4. tokenizer_config.json +1 -1
added_tokens.json CHANGED
@@ -1 +1 @@
- {"<|EOS|>": 50258, "<|PAD|>": 50260, "<|SEP|>": 50261, "<|BOS|>": 50257, "<|UNK|>": 50259}
+ {"<|BOS|>": 50257, "<|SEP|>": 50261, "<|PAD|>": 50260, "<|EOS|>": 50258, "<|UNK|>": 50259}
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "gpt2",
+  "_name_or_path": "gpt2-medium",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -8,18 +8,22 @@
   "bos_token_id": 50257,
   "embd_pdrop": 0.1,
   "eos_token_id": 50258,
-  "gradient_checkpointing": false,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
   "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
+  "n_embd": 1024,
+  "n_head": 16,
   "n_inner": null,
-  "n_layer": 12,
+  "n_layer": 24,
   "n_positions": 1024,
+  "n_special": 0,
   "pad_token_id": 50260,
+  "predict_special_tokens": true,
+  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
   "sep_token_id": 50261,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
@@ -32,7 +36,8 @@
       "max_length": 50
     }
   },
-  "transformers_version": "4.5.1",
+  "torch_dtype": "float32",
+  "transformers_version": "4.13.0.dev0",
   "use_cache": true,
   "vocab_size": 50262
 }
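
The substantive change is the jump from the base gpt2 architecture (n_embd 768, n_head 12, n_layer 12, roughly 124M parameters) to gpt2-medium (n_embd 1024, n_head 16, n_layer 24, roughly 355M parameters). The other new keys (`n_special`, `predict_special_tokens`, `reorder_and_upcast_attn`, `scale_attn_by_inverse_layer_idx`, `scale_attn_weights`, `torch_dtype`) are defaults serialized by the newer transformers version (4.13.0.dev0 vs 4.5.1), not behavioral changes, and `gradient_checkpointing` stopped being serialized into configs in that same version range. A hedged sketch of how a checkpoint ends up with this config, in particular `vocab_size: 50262` (50257 base + 5 added tokens); the exact script is not part of this commit:

```python
from transformers import GPT2LMHeadModel

model = GPT2LMHeadModel.from_pretrained("gpt2-medium")

# Grow the (tied) input/output embeddings to cover the 5 added special
# tokens; this also sets config.vocab_size = 50262.
model.resize_token_embeddings(50262)

# Record the custom special-token ids in the config
# (values from added_tokens.json above):
model.config.bos_token_id = 50257
model.config.eos_token_id = 50258
model.config.pad_token_id = 50260
model.config.sep_token_id = 50261

model.save_pretrained("out/")  # writes config.json and pytorch_model.bin
```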
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:660de74540a53ac7032a003526b675624e137b7e78bacbeefa14a7312b371ef4
-size 510423675
+oid sha256:9ef249e0689873c1c19be0b64fc2df7ec0c02edc0397207fcab07027d8f82033
+size 1444601791
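
The new LFS pointer size is consistent with gpt2-medium in float32: the old size (510,423,675 bytes, about 0.51 GB) matches base gpt2's ~124M parameters at 4 bytes each, and the new one (1,444,601,791 bytes, about 1.44 GB) matches ~355M parameters plus serialization overhead. A rough parameter count from the config fields above (my arithmetic, not part of the commit):

```python
def gpt2_params(n_layer=24, n_embd=1024, vocab_size=50262, n_positions=1024):
    """Approximate GPT-2 parameter count from the config.json fields."""
    wte = vocab_size * n_embd       # token embeddings (tied with the LM head)
    wpe = n_positions * n_embd      # position embeddings
    # Per block: attention (c_attn d*3d + c_proj d*d) and MLP
    # (c_fc d*4d + c_proj 4d*d) weights = 12*d^2, plus linear biases
    # and two LayerNorms ~= 13*d.
    block = 12 * n_embd**2 + 13 * n_embd
    ln_f = 2 * n_embd               # final LayerNorm
    return wte + wpe + n_layer * block + ln_f

print(gpt2_params())            # 354,828,288
print(gpt2_params() * 4 / 1e9)  # ~1.42 GB in float32
```

The remaining ~25 MB gap to the stored file size is plausibly the per-layer causal-mask buffers (1024×1024 each, across 24 layers) that transformers versions of this era saved into the state dict, plus pickle overhead.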
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "gpt2", "tokenizer_class": "GPT2Tokenizer"}
+ {"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "gpt2-medium", "tokenizer_class": "GPT2Tokenizer"}