minosu committed on
Commit c8cdf3f
1 Parent(s): 38bf654

Upload 7 files

added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[PAD]": 49152
+ }
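added_tokens.json appends a single extra token, [PAD], at id 49152, i.e. one past the end of the 49152-token base vocabulary declared in tokenizer_config.json below. A minimal sketch of verifying this with transformers, assuming the uploaded files have been downloaded to a local directory (the "./checkpoint" path is a placeholder, not part of this repo):

```python
from transformers import AutoTokenizer

# "./checkpoint" is a hypothetical local path holding these uploaded files.
tok = AutoTokenizer.from_pretrained("./checkpoint")

# [PAD] was appended after the 49152-token base vocabulary,
# so it should resolve to the next free id, 49152.
print(tok.convert_tokens_to_ids("[PAD]"))  # expected: 49152
print(tok.pad_token)                       # expected: [PAD]
```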
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "additional_special_tokens": [
+     "<|endoftext|>",
+     "<fim_prefix>",
+     "<fim_middle>",
+     "<fim_suffix>",
+     "<fim_pad>",
+     "<filename>",
+     "<gh_stars>",
+     "<issue_start>",
+     "<issue_comment>",
+     "<issue_closed>",
+     "<jupyter_start>",
+     "<jupyter_text>",
+     "<jupyter_code>",
+     "<jupyter_output>",
+     "<empty_output>",
+     "<commit_before>",
+     "<commit_msg>",
+     "<commit_after>",
+     "<reponame>"
+   ],
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "[PAD]",
+   "unk_token": "<|endoftext|>"
+ }
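The <fim_*> sentinels and repository-metadata tokens (<filename>, <reponame>, <gh_stars>, and the issue/Jupyter/commit markers) match the SantaCoder/StarCoder special-token set. A hedged sketch of how FIM tokens of this kind are conventionally used to build a fill-in-the-middle prompt; the prefix/suffix strings and the checkpoint path are illustrative only:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

# FIM convention: the model is shown the prefix and suffix,
# then generates the missing middle after <fim_middle>.
prefix = "def add(a, b):\n    result = "
suffix = "\n    return result\n"
prompt = f"<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>"

input_ids = tok(prompt, return_tensors="pt").input_ids
```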
tokenizer_config.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "additional_special_tokens": [
+     "<|endoftext|>",
+     "<fim_prefix>",
+     "<fim_middle>",
+     "<fim_suffix>",
+     "<fim_pad>",
+     "<filename>",
+     "<gh_stars>",
+     "<issue_start>",
+     "<issue_comment>",
+     "<issue_closed>",
+     "<jupyter_start>",
+     "<jupyter_text>",
+     "<jupyter_code>",
+     "<jupyter_output>",
+     "<empty_output>",
+     "<commit_before>",
+     "<commit_msg>",
+     "<commit_after>",
+     "<reponame>"
+   ],
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": true,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "errors": "replace",
+   "model_max_length": 512,
+   "pad_token": null,
+   "padding_side": "right",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "vocab_size": 49152
+ }
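Note that "pad_token" is null here while special_tokens_map.json above sets it to "[PAD]"; when the two files are loaded together, the special-tokens map supplies the value, so batch padding still works. A small sketch under that assumption (path and sample strings are illustrative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

# special_tokens_map.json supplies pad_token="[PAD]", so padding works
# even though tokenizer_config.json stores "pad_token": null.
batch = tok(
    ["def f():", "print('hello, world')"],
    padding=True,      # pads on the right, per "padding_side"
    truncation=True,   # caps sequences at model_max_length (512)
    return_tensors="pt",
)
print(batch.input_ids.shape)
```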
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
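trainer_state.json is the standard transformers Trainer state file; it is plain JSON, so it can be inspected directly even when the diff is too large to render here. A minimal sketch, assuming the usual "global_step" and "log_history" fields the Trainer writes:

```python
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# The Trainer appends one dict of metrics per logging step to "log_history".
print(state["global_step"])
print(state["log_history"][-1])  # the most recent logged metrics
```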
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75c93ccc6866dcde59068e14369a29a5392ffccbd45dcfbc21e0d5a3a9bd152d
+ size 4091
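training_args.bin is a Git LFS pointer to a pickled transformers.TrainingArguments object (4,091 bytes). A hedged sketch of inspecting it after fetching the LFS object; unpickling executes arbitrary code, so only do this with checkpoints you trust, and recent PyTorch versions require weights_only=False for non-tensor payloads:

```python
import torch

# Unpickles the saved transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate)
print(args.per_device_train_batch_size)
```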
vocab.json ADDED
The diff for this file is too large to render. See raw diff
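vocab.json and merges.txt are the two halves of the byte-level BPE definition used by GPT2Tokenizer: the vocabulary maps token strings to ids, and merges.txt lists the learned merge rules in priority order. A quick consistency check, assuming the files are local:

```python
import json

with open("vocab.json") as f:
    vocab = json.load(f)

# Should match "vocab_size": 49152 in tokenizer_config.json;
# [PAD] lives in added_tokens.json, outside this base vocabulary.
print(len(vocab))
```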