Enkhai committed on
Commit 4ccb150
1 Parent(s): 0a76fff

Delete epoch_2_most_recent

epoch_2_most_recent/added_tokens.json DELETED
@@ -1,145 +0,0 @@
- {
- "<|extratoken_100|>": 50356,
- "<|extratoken_101|>": 50357,
- "<|extratoken_102|>": 50358,
- "<|extratoken_103|>": 50359,
- "<|extratoken_104|>": 50360,
- "<|extratoken_105|>": 50361,
- "<|extratoken_106|>": 50362,
- "<|extratoken_107|>": 50363,
- "<|extratoken_108|>": 50364,
- "<|extratoken_109|>": 50365,
- "<|extratoken_10|>": 50266,
- "<|extratoken_110|>": 50366,
- "<|extratoken_111|>": 50367,
- "<|extratoken_112|>": 50368,
- "<|extratoken_113|>": 50369,
- "<|extratoken_114|>": 50370,
- "<|extratoken_115|>": 50371,
- "<|extratoken_116|>": 50372,
- "<|extratoken_117|>": 50373,
- "<|extratoken_118|>": 50374,
- "<|extratoken_119|>": 50375,
- "<|extratoken_11|>": 50267,
- "<|extratoken_120|>": 50376,
- "<|extratoken_121|>": 50377,
- "<|extratoken_122|>": 50378,
- "<|extratoken_123|>": 50379,
- "<|extratoken_124|>": 50380,
- "<|extratoken_125|>": 50381,
- "<|extratoken_126|>": 50382,
- "<|extratoken_127|>": 50383,
- "<|extratoken_128|>": 50384,
- "<|extratoken_129|>": 50385,
- "<|extratoken_12|>": 50268,
- "<|extratoken_130|>": 50386,
- "<|extratoken_131|>": 50387,
- "<|extratoken_132|>": 50388,
- "<|extratoken_133|>": 50389,
- "<|extratoken_134|>": 50390,
- "<|extratoken_135|>": 50391,
- "<|extratoken_136|>": 50392,
- "<|extratoken_137|>": 50393,
- "<|extratoken_138|>": 50394,
- "<|extratoken_139|>": 50395,
- "<|extratoken_13|>": 50269,
- "<|extratoken_140|>": 50396,
- "<|extratoken_141|>": 50397,
- "<|extratoken_142|>": 50398,
- "<|extratoken_143|>": 50399,
- "<|extratoken_14|>": 50270,
- "<|extratoken_15|>": 50271,
- "<|extratoken_16|>": 50272,
- "<|extratoken_17|>": 50273,
- "<|extratoken_18|>": 50274,
- "<|extratoken_19|>": 50275,
- "<|extratoken_1|>": 50257,
- "<|extratoken_20|>": 50276,
- "<|extratoken_21|>": 50277,
- "<|extratoken_22|>": 50278,
- "<|extratoken_23|>": 50279,
- "<|extratoken_24|>": 50280,
- "<|extratoken_25|>": 50281,
- "<|extratoken_26|>": 50282,
- "<|extratoken_27|>": 50283,
- "<|extratoken_28|>": 50284,
- "<|extratoken_29|>": 50285,
- "<|extratoken_2|>": 50258,
- "<|extratoken_30|>": 50286,
- "<|extratoken_31|>": 50287,
- "<|extratoken_32|>": 50288,
- "<|extratoken_33|>": 50289,
- "<|extratoken_34|>": 50290,
- "<|extratoken_35|>": 50291,
- "<|extratoken_36|>": 50292,
- "<|extratoken_37|>": 50293,
- "<|extratoken_38|>": 50294,
- "<|extratoken_39|>": 50295,
- "<|extratoken_3|>": 50259,
- "<|extratoken_40|>": 50296,
- "<|extratoken_41|>": 50297,
- "<|extratoken_42|>": 50298,
- "<|extratoken_43|>": 50299,
- "<|extratoken_44|>": 50300,
- "<|extratoken_45|>": 50301,
- "<|extratoken_46|>": 50302,
- "<|extratoken_47|>": 50303,
- "<|extratoken_48|>": 50304,
- "<|extratoken_49|>": 50305,
- "<|extratoken_4|>": 50260,
- "<|extratoken_50|>": 50306,
- "<|extratoken_51|>": 50307,
- "<|extratoken_52|>": 50308,
- "<|extratoken_53|>": 50309,
- "<|extratoken_54|>": 50310,
- "<|extratoken_55|>": 50311,
- "<|extratoken_56|>": 50312,
- "<|extratoken_57|>": 50313,
- "<|extratoken_58|>": 50314,
- "<|extratoken_59|>": 50315,
- "<|extratoken_5|>": 50261,
- "<|extratoken_60|>": 50316,
- "<|extratoken_61|>": 50317,
- "<|extratoken_62|>": 50318,
- "<|extratoken_63|>": 50319,
- "<|extratoken_64|>": 50320,
- "<|extratoken_65|>": 50321,
- "<|extratoken_66|>": 50322,
- "<|extratoken_67|>": 50323,
- "<|extratoken_68|>": 50324,
- "<|extratoken_69|>": 50325,
- "<|extratoken_6|>": 50262,
- "<|extratoken_70|>": 50326,
- "<|extratoken_71|>": 50327,
- "<|extratoken_72|>": 50328,
- "<|extratoken_73|>": 50329,
- "<|extratoken_74|>": 50330,
- "<|extratoken_75|>": 50331,
- "<|extratoken_76|>": 50332,
- "<|extratoken_77|>": 50333,
- "<|extratoken_78|>": 50334,
- "<|extratoken_79|>": 50335,
- "<|extratoken_7|>": 50263,
- "<|extratoken_80|>": 50336,
- "<|extratoken_81|>": 50337,
- "<|extratoken_82|>": 50338,
- "<|extratoken_83|>": 50339,
- "<|extratoken_84|>": 50340,
- "<|extratoken_85|>": 50341,
- "<|extratoken_86|>": 50342,
- "<|extratoken_87|>": 50343,
- "<|extratoken_88|>": 50344,
- "<|extratoken_89|>": 50345,
- "<|extratoken_8|>": 50264,
- "<|extratoken_90|>": 50346,
- "<|extratoken_91|>": 50347,
- "<|extratoken_92|>": 50348,
- "<|extratoken_93|>": 50349,
- "<|extratoken_94|>": 50350,
- "<|extratoken_95|>": 50351,
- "<|extratoken_96|>": 50352,
- "<|extratoken_97|>": 50353,
- "<|extratoken_98|>": 50354,
- "<|extratoken_99|>": 50355,
- "<|extratoken_9|>": 50265
- }
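
Note: the deleted added_tokens.json held the 143 placeholder tokens <|extratoken_1|> through <|extratoken_143|> (ids 50257-50399) that pad the 50,257-token GPT-2 vocabulary up to GPT-J's 50,400-slot embedding matrix. As a minimal sketch (the output file name and formatting here are illustrative, not part of this repo), an equivalent mapping can be regenerated like this:

```python
# Minimal sketch: regenerate the mapping stored in the deleted added_tokens.json.
# GPT-J pads the 50257-token GPT-2 vocabulary to a 50400-slot embedding table
# with placeholder tokens <|extratoken_1|> ... <|extratoken_143|>.
import json

GPT2_VOCAB_SIZE = 50257  # last regular id is 50256 (<|endoftext|>)

added_tokens = {
    f"<|extratoken_{i}|>": GPT2_VOCAB_SIZE - 1 + i  # 50257, 50258, ..., 50399
    for i in range(1, 144)
}

# sort_keys=True reproduces the lexicographic key order seen in the diff above
with open("added_tokens.json", "w") as f:
    json.dump(added_tokens, f, indent=2, sort_keys=True)
```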
epoch_2_most_recent/config.json DELETED
@@ -1,43 +0,0 @@
- {
- "_name_or_path": "hivemind/gpt-j-6B-8bit",
- "activation_function": "gelu_new",
- "architectures": [
- "GPTJForCausalLM"
- ],
- "attn_pdrop": 0.0,
- "bos_token_id": 50256,
- "eight_bit": true,
- "embd_pdrop": 0.0,
- "eos_token_id": 50256,
- "gradient_checkpointing": false,
- "initializer_range": 0.02,
- "layer_norm_epsilon": 1e-05,
- "model_type": "gptj",
- "n_embd": 4096,
- "n_head": 16,
- "n_inner": null,
- "n_layer": 28,
- "n_positions": 2048,
- "resid_pdrop": 0.0,
- "rotary": true,
- "rotary_dim": 64,
- "scale_attn_weights": true,
- "summary_activation": null,
- "summary_first_dropout": 0.1,
- "summary_proj_to_labels": true,
- "summary_type": "cls_index",
- "summary_use_proj": true,
- "task_specific_params": {
- "text-generation": {
- "do_sample": true,
- "max_length": 50,
- "temperature": 1.0
- }
- },
- "tie_word_embeddings": false,
- "tokenizer_class": "GPT2Tokenizer",
- "torch_dtype": "float32",
- "transformers_version": "4.20.1",
- "use_cache": true,
- "vocab_size": 50400
- }
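
Note: this config describes a GPT-J-6B checkpoint derived from hivemind/gpt-j-6B-8bit: 28 layers, 16 heads, 4096-dim hidden states, 2048 positions with rotary embeddings (rotary_dim 64), and a 50,400-entry vocabulary. As a minimal sketch, assuming the deleted files were restored into a local epoch_2_most_recent/ directory (a hypothetical path), the config could be inspected with transformers:

```python
# Minimal sketch: load the (now deleted) config with Hugging Face transformers
# and read back a few of the fields shown in the diff above.
from transformers import AutoConfig

# "epoch_2_most_recent" is a hypothetical local copy of the deleted folder.
config = AutoConfig.from_pretrained("epoch_2_most_recent")

print(config.model_type)                              # gptj
print(config.n_layer, config.n_head, config.n_embd)   # 28 16 4096
print(config.n_positions, config.rotary_dim)          # 2048 64
print(config.vocab_size)                              # 50400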
epoch_2_most_recent/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
epoch_2_most_recent/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8efc50339d959d9186094f63d67ac7669181dcd010d0206f4eaec06b656be6e0
- size 6316424352
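
Note: pytorch_model.bin was tracked with Git LFS, so the repository only stored this small pointer file: the LFS spec version, the SHA-256 object id, and the blob size (6,316,424,352 bytes, roughly 6.3 GB). As a minimal sketch (local file path assumed), a downloaded copy could be checked against the pointer like this:

```python
# Minimal sketch: verify a locally downloaded pytorch_model.bin against the
# SHA-256 and size recorded in the Git LFS pointer shown above.
import hashlib
from pathlib import Path

EXPECTED_OID = "8efc50339d959d9186094f63d67ac7669181dcd010d0206f4eaec06b656be6e0"
EXPECTED_SIZE = 6316424352  # bytes

path = Path("pytorch_model.bin")  # hypothetical local copy
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "hash mismatch"
print("pytorch_model.bin matches the LFS pointer")
```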
epoch_2_most_recent/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
- {
- "bos_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": "<|endoftext|>",
- "unk_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- }
- }
epoch_2_most_recent/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
epoch_2_most_recent/tokenizer_config.json DELETED
@@ -1,34 +0,0 @@
- {
- "add_bos_token": false,
- "add_prefix_space": false,
- "bos_token": {
- "__type": "AddedToken",
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "__type": "AddedToken",
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "errors": "replace",
- "model_max_length": 2048,
- "name_or_path": "EleutherAI/gpt-j-6B",
- "pad_token": null,
- "special_tokens_map_file": null,
- "tokenizer_class": "GPT2Tokenizer",
- "unk_token": {
- "__type": "AddedToken",
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- }
- }
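
Note: together with special_tokens_map.json and added_tokens.json above, this describes a GPT-2-style tokenizer inherited from EleutherAI/gpt-j-6B: <|endoftext|> serves as bos/eos/unk (and is mapped to pad in the special-tokens map), model_max_length is 2048, and the 143 extra tokens bring the total to 50,400 entries. A minimal sketch of loading it, again assuming a restored local epoch_2_most_recent/ directory:

```python
# Minimal sketch: load the tokenizer described by the deleted files and
# confirm the special tokens and the padded vocabulary size.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("epoch_2_most_recent")  # hypothetical local path

print(tok.bos_token, tok.eos_token, tok.unk_token)  # <|endoftext|> for all three
print(tok.model_max_length)                         # 2048
print(len(tok))                                     # 50257 + 143 added tokens = 50400
```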
epoch_2_most_recent/vocab.json DELETED
The diff for this file is too large to render. See raw diff