erikycd committed
Commit
d74c0dd
1 Parent(s): 9c1bf4f
added_tokens.json ADDED
@@ -0,0 +1,8 @@
+{
+  "<bot>:": 50261,
+  "<endofstring>": 50260,
+  "<startofstring>": 50259,
+  "<|endoftext|>": 50257,
+  "<|pad|>": 50258,
+  "<|startoftext|>": 50256
+}
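
The eight lines above register six chat-formatting tokens with IDs 50256–50261 on top of the base vocabulary. A minimal sketch to verify the mapping from a local checkout of this repo (the `"./"` path is a placeholder for wherever the files live):

```python
from transformers import GPT2Tokenizer

# "./" is a placeholder for a local clone of this repo.
tokenizer = GPT2Tokenizer.from_pretrained("./")

for token in ["<|startoftext|>", "<|endoftext|>", "<|pad|>",
              "<startofstring>", "<endofstring>", "<bot>:"]:
    print(token, tokenizer.convert_tokens_to_ids(token))
# Expected per added_tokens.json: 50256, 50257, 50258, 50259, 50260, 50261
```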
config.json CHANGED
@@ -1,13 +1,14 @@
 {
-  "_name_or_path": "microsoft/DialoGPT-small",
+  "_name_or_path": "C:/Users/erikycd/Documents/Python offline/GPT2_esp/model_flaxcom_gpt2",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
   ],
-  "attn_pdrop": 0.1,
+  "attn_pdrop": 0.0,
   "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
+  "embd_pdrop": 0.0,
   "eos_token_id": 50256,
+  "gradient_checkpointing": false,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
@@ -18,7 +19,7 @@
   "n_layer": 12,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
+  "resid_pdrop": 0.0,
   "scale_attn_by_inverse_layer_idx": false,
   "scale_attn_weights": true,
   "summary_activation": null,
@@ -27,12 +28,13 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "task_specific_params": {
-    "conversational": {
-      "max_length": 30
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 50
     }
   },
   "torch_dtype": "float32",
   "transformers_version": "4.20.1",
   "use_cache": true,
-  "vocab_size": 50257
+  "vocab_size": 50262
 }
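
Four things change here: `_name_or_path` now points at a local export, apparently of a flax-community Spanish GPT-2 judging by the folder names `GPT2_esp` and `model_flaxcom_gpt2`; all three dropout probabilities drop from 0.1 to 0.0; `task_specific_params` switches from `conversational` (`max_length` 30) to `text-generation` with sampling enabled; and `vocab_size` grows from 50257 to 50262 to cover the tokens in `added_tokens.json`. A minimal generation sketch mirroring the new `text-generation` defaults; the prompt format is an assumption inferred from the token names, not documented in this commit:

```python
from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("./")   # "./" = local clone (placeholder)
model = GPT2LMHeadModel.from_pretrained("./")

# The checkpoint already has 50262 embedding rows, so no resize is needed.
assert model.config.vocab_size == 50262

prompt = "<startofstring> Hola <bot>:"            # assumed training format
inputs = tokenizer(prompt, return_tensors="pt")
output = model.generate(**inputs, do_sample=True, max_length=50,
                        pad_token_id=tokenizer.pad_token_id)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```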
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:52b315a2e10263aefe51a50a1fab882e1865de2b175ef83bfad34018622ae4ad
-size 510396521
+oid sha256:b0b3a48552dd486b496cd7b010a23565165e253783e9f91073ffa9475ed1af92
+size 510411881
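
Only the LFS pointer changes here, but the 15,360-byte size increase is exactly what the config change predicts: five new vocabulary rows times `n_embd` = 768 floats times 4 bytes each, stored once because GPT-2 ties its input and output embeddings.

```python
# Sanity check on the checkpoint growth (pure arithmetic, no dependencies).
new_tokens = 50262 - 50257      # vocab_size delta from config.json
bytes_per_row = 768 * 4         # n_embd float32 values per embedding row
assert new_tokens * bytes_per_row == 510411881 - 510396521   # 15,360 bytes
```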
special_tokens_map.json CHANGED
@@ -1,23 +1,6 @@
 {
-  "bos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<startofstring>",
+  "eos_token": "<endofstring>",
+  "pad_token": "<pad>",
+  "unk_token": "<|endoftext|>"
 }
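
The map collapses the verbose `AddedToken` entries into plain strings. Worth noting while reading the diff: `pad_token` is `<pad>` here but `<|pad|>` in `added_tokens.json` and `tokenizer_config.json`, and the bos/eos pair also differs between this file (`<startofstring>`/`<endofstring>`) and `tokenizer_config.json` (`<|startoftext|>`/`<|endoftext|>`). A quick way to see which values a given `transformers` version actually resolves:

```python
from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("./")   # "./" = local clone (placeholder)
print("bos:", tokenizer.bos_token, "eos:", tokenizer.eos_token)
print("pad:", tokenizer.pad_token, "unk:", tokenizer.unk_token)
print("pad id:", tokenizer.pad_token_id)  # may fall back to the unk id if "<pad>" is not in the vocab
```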
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1,34 +1,10 @@
 {
-  "add_bos_token": false,
   "add_prefix_space": false,
-  "bos_token": {
-    "__type": "AddedToken",
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "__type": "AddedToken",
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "errors": "replace",
-  "model_max_length": 1024,
-  "name_or_path": "microsoft/DialoGPT-small",
-  "pad_token": null,
+  "bos_token": "<|startoftext|>",
+  "eos_token": "<|endoftext|>",
+  "name_or_path": "C:/Users/erikycd/Documents/Python offline/GPT2_esp/model_flaxcom_gpt2",
+  "pad_token": "<|pad|>",
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": {
-    "__type": "AddedToken",
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  }
+  "unk_token": "<|endoftext|>"
 }
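
Same simplification as in `special_tokens_map.json`: the `AddedToken` dicts become plain strings, `add_bos_token`, `errors`, and `model_max_length` are dropped, and `name_or_path` now records the local Windows path the model was exported from. A round-trip sketch showing the new tokens surviving encode/decode; the example sentence is made up:

```python
from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("./")   # "./" = local clone (placeholder)

text = "<startofstring> ¿Cómo estás? <bot>: Muy bien, gracias. <endofstring>"
ids = tokenizer.encode(text)
print(ids[0], ids[-1])        # 50259 and 50260, the chat delimiter ids
print(tokenizer.decode(ids))  # round-trips the tagged string (modulo whitespace)
```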
vocab.json CHANGED
The diff for this file is too large to render. See raw diff