ludziej committed
Commit 1c83c91
1 Parent(s): 3f26068

End of training

README.md CHANGED
@@ -13,7 +13,7 @@ should probably proofread and complete it, then remove this comment. -->
  
  This model is a fine-tuned version of [](https://huggingface.co/) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.0408
+ - Loss: 1.0813
  
  ## Model description
  
@@ -32,7 +32,7 @@ More information needed
  ### Training hyperparameters
  
  The following hyperparameters were used during training:
- - learning_rate: 0.001
+ - learning_rate: 0.0001
  - train_batch_size: 512
  - eval_batch_size: 512
  - seed: 42
@@ -44,56 +44,56 @@ The following hyperparameters were used during training:
  
  | Training Loss | Epoch | Step | Validation Loss |
  |:-------------:|:-----:|:----:|:---------------:|
- | 3.1232 | 1.0 | 5 | 2.3752 |
- | 2.1776 | 2.0 | 10 | 1.8879 |
- | 1.7349 | 3.0 | 15 | 1.4779 |
- | 1.3722 | 4.0 | 20 | 1.2288 |
- | 1.1464 | 5.0 | 25 | 1.0354 |
- | 0.995 | 6.0 | 30 | 0.9079 |
- | 0.8838 | 7.0 | 35 | 0.8040 |
- | 0.7827 | 8.0 | 40 | 0.7075 |
- | 0.7079 | 9.0 | 45 | 0.6453 |
- | 0.6523 | 10.0 | 50 | 0.5931 |
- | 0.6042 | 11.0 | 55 | 0.5485 |
- | 0.5548 | 12.0 | 60 | 0.5170 |
- | 0.5339 | 13.0 | 65 | 0.5695 |
- | 0.5492 | 14.0 | 70 | 0.4823 |
- | 0.4951 | 15.0 | 75 | 0.4626 |
- | 0.464 | 16.0 | 80 | 0.4308 |
- | 0.4377 | 17.0 | 85 | 0.3924 |
- | 0.4059 | 18.0 | 90 | 0.3690 |
- | 0.3782 | 19.0 | 95 | 0.3322 |
- | 0.3458 | 20.0 | 100 | 0.3135 |
- | 0.3307 | 21.0 | 105 | 0.2936 |
- | 0.2999 | 22.0 | 110 | 0.2577 |
- | 0.2739 | 23.0 | 115 | 0.2444 |
- | 0.2461 | 24.0 | 120 | 0.2236 |
- | 0.2264 | 25.0 | 125 | 0.1957 |
- | 0.2046 | 26.0 | 130 | 0.1637 |
- | 0.1819 | 27.0 | 135 | 0.1415 |
- | 0.16 | 28.0 | 140 | 0.1238 |
- | 0.1454 | 29.0 | 145 | 0.1092 |
- | 0.1297 | 30.0 | 150 | 0.0997 |
- | 0.1188 | 31.0 | 155 | 0.0876 |
- | 0.1105 | 32.0 | 160 | 0.0897 |
- | 0.1033 | 33.0 | 165 | 0.0779 |
- | 0.0941 | 34.0 | 170 | 0.0702 |
- | 0.0897 | 35.0 | 175 | 0.0656 |
- | 0.085 | 36.0 | 180 | 0.0634 |
- | 0.0796 | 37.0 | 185 | 0.0596 |
- | 0.0768 | 38.0 | 190 | 0.0560 |
- | 0.0728 | 39.0 | 195 | 0.0556 |
- | 0.0702 | 40.0 | 200 | 0.0516 |
- | 0.0662 | 41.0 | 205 | 0.0493 |
- | 0.063 | 42.0 | 210 | 0.0472 |
- | 0.0613 | 43.0 | 215 | 0.0459 |
- | 0.0617 | 44.0 | 220 | 0.0449 |
- | 0.0579 | 45.0 | 225 | 0.0425 |
- | 0.0556 | 46.0 | 230 | 0.0428 |
- | 0.0555 | 47.0 | 235 | 0.0413 |
- | 0.0536 | 48.0 | 240 | 0.0409 |
- | 0.053 | 49.0 | 245 | 0.0409 |
- | 0.0532 | 50.0 | 250 | 0.0408 |
+ | 3.8078 | 1.0 | 6 | 3.6115 |
+ | 3.5402 | 2.0 | 12 | 3.4403 |
+ | 3.3905 | 3.0 | 18 | 3.3023 |
+ | 3.2601 | 4.0 | 24 | 3.1757 |
+ | 3.1298 | 5.0 | 30 | 3.0465 |
+ | 2.9919 | 6.0 | 36 | 2.9159 |
+ | 2.8647 | 7.0 | 42 | 2.7868 |
+ | 2.7503 | 8.0 | 48 | 2.6616 |
+ | 2.6207 | 9.0 | 54 | 2.5386 |
+ | 2.4973 | 10.0 | 60 | 2.4256 |
+ | 2.3944 | 11.0 | 66 | 2.3203 |
+ | 2.2924 | 12.0 | 72 | 2.2263 |
+ | 2.2061 | 13.0 | 78 | 2.1487 |
+ | 2.117 | 14.0 | 84 | 2.0624 |
+ | 2.044 | 15.0 | 90 | 1.9910 |
+ | 1.9718 | 16.0 | 96 | 1.9239 |
+ | 1.9093 | 17.0 | 102 | 1.8786 |
+ | 1.8542 | 18.0 | 108 | 1.8129 |
+ | 1.8085 | 19.0 | 114 | 1.7692 |
+ | 1.7653 | 20.0 | 120 | 1.7316 |
+ | 1.7103 | 21.0 | 126 | 1.6790 |
+ | 1.6757 | 22.0 | 132 | 1.6199 |
+ | 1.6089 | 23.0 | 138 | 1.5592 |
+ | 1.5391 | 24.0 | 144 | 1.5067 |
+ | 1.4987 | 25.0 | 150 | 1.4640 |
+ | 1.4535 | 26.0 | 156 | 1.4296 |
+ | 1.4285 | 27.0 | 162 | 1.3858 |
+ | 1.3828 | 28.0 | 168 | 1.3493 |
+ | 1.3468 | 29.0 | 174 | 1.3184 |
+ | 1.3265 | 30.0 | 180 | 1.2910 |
+ | 1.2953 | 31.0 | 186 | 1.2636 |
+ | 1.2804 | 32.0 | 192 | 1.2402 |
+ | 1.2522 | 33.0 | 198 | 1.2223 |
+ | 1.2375 | 34.0 | 204 | 1.2094 |
+ | 1.219 | 35.0 | 210 | 1.1914 |
+ | 1.2133 | 36.0 | 216 | 1.1762 |
+ | 1.19 | 37.0 | 222 | 1.1606 |
+ | 1.1839 | 38.0 | 228 | 1.1532 |
+ | 1.1737 | 39.0 | 234 | 1.1380 |
+ | 1.1635 | 40.0 | 240 | 1.1267 |
+ | 1.1496 | 41.0 | 246 | 1.1219 |
+ | 1.1514 | 42.0 | 252 | 1.1104 |
+ | 1.1285 | 43.0 | 258 | 1.1056 |
+ | 1.1367 | 44.0 | 264 | 1.0976 |
+ | 1.1232 | 45.0 | 270 | 1.0949 |
+ | 1.1185 | 46.0 | 276 | 1.0896 |
+ | 1.1155 | 47.0 | 282 | 1.0836 |
+ | 1.1053 | 48.0 | 288 | 1.0834 |
+ | 1.1071 | 49.0 | 294 | 1.0823 |
+ | 1.1132 | 50.0 | 300 | 1.0813 |
  
  
  ### Framework versions
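
The hyperparameter list above maps directly onto the arguments of the Hugging Face `Trainer`, which generates README files like this one. The sketch below is a hypothetical reconstruction rather than the author's actual script: the model, datasets, and output path are placeholders, and the epoch count (50) is inferred from the loss table.

```python
# Hypothetical reconstruction of the logged hyperparameters, not the author's script.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="out",                 # placeholder output directory
    learning_rate=1e-4,               # learning_rate: 0.0001
    per_device_train_batch_size=512,  # train_batch_size: 512
    per_device_eval_batch_size=512,   # eval_batch_size: 512
    seed=42,                          # seed: 42
    num_train_epochs=50,              # inferred from the table (epoch 50.0)
    evaluation_strategy="epoch",      # the table reports validation loss once per epoch
    logging_strategy="epoch",
)

# trainer = Trainer(model=model, args=training_args,
#                   train_dataset=train_ds, eval_dataset=eval_ds)
# trainer.train()
```
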
config.json CHANGED
@@ -78,7 +78,7 @@
  "typical_p": 1.0,
  "use_bfloat16": false,
  "use_cache": true,
- "vocab_size": 105
+ "vocab_size": 52
  },
  "decoder_start_token_id": 2,
  "encoder": {
@@ -157,7 +157,7 @@
  "typical_p": 1.0,
  "use_bfloat16": false,
  "use_cache": true,
- "vocab_size": 105
+ "vocab_size": 52
  },
  "eos_token_id": 0,
  "is_encoder_decoder": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:745100c3aba7304b571ebece470b3972643f144bcb1fa9d4fc321a438645ffa9
- size 31314308
+ oid sha256:da4f203a50465bbaf1babcff0cb9459252480321b46aa69609402a6d6f466c22
+ size 31205552
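
Only the Git LFS pointer changes here: `oid` is the SHA-256 of the actual weights file and `size` is its byte count. A small check against a locally downloaded copy (local path assumed):

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer above.
import hashlib

expected_oid = "da4f203a50465bbaf1babcff0cb9459252480321b46aa69609402a6d6f466c22"
expected_size = 31205552

with open("model.safetensors", "rb") as f:  # assumed local path
    data = f.read()

assert len(data) == expected_size, "size mismatch with the LFS pointer"
assert hashlib.sha256(data).hexdigest() == expected_oid, "oid mismatch with the LFS pointer"
print("model.safetensors matches the pointer recorded in this commit")
```
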
runs/Feb28_17-27-44_3897ec21fae5/events.out.tfevents.1709141265.3897ec21fae5.75144.2 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:64e2a55b6c903519c748e5da9e880db2dd59ab83da49914d3b8a2e29f735125e
- size 23252
+ oid sha256:17ad020588818cc291fc63c1b1f289c36fadd200cf2b3ec0b9377ba7ee029b94
+ size 30026
runs/Feb28_17-51-03_3897ec21fae5/events.out.tfevents.1709142663.3897ec21fae5.81816.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25b8f65db3c899d839e9f3b2588506eab7eba12daa111d59e39fd50fce8e1d22
+ size 21928
runs/Feb28_18-13-19_3897ec21fae5/events.out.tfevents.1709144000.3897ec21fae5.81816.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b89e06fd11471306f0f3d62cc64f3f1eaa12eceeff2086ed0623e5f2fc6e7d82
+ size 29928
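
The `runs/.../events.out.tfevents.*` files are TensorBoard event logs written during training. To inspect the logged scalars without the TensorBoard UI, something like the sketch below works; the scalar tag `eval/loss` is the tag the `Trainer` normally uses and is an assumption here.

```python
# Sketch: reading scalars from one of the event files added in this commit.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/Feb28_18-13-19_3897ec21fae5/events.out.tfevents.1709144000.3897ec21fae5.81816.1"
acc = EventAccumulator(path)
acc.Reload()

print(acc.Tags()["scalars"])            # all scalar tags present in the log
for event in acc.Scalars("eval/loss"):  # assumed tag name used by the Trainer
    print(event.step, event.value)
```
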
tokenizer.json CHANGED
@@ -101,198 +101,91 @@
  "[CLS]": 2,
  "[PAD]": 3,
  "+": 4,
- "0": 5,
- "1": 6,
- "2": 7,
- "3": 8,
- "4": 9,
- "5": 10,
- "6": 11,
- "7": 12,
- "8": 13,
- "9": 14,
- "50": 15,
- "93": 16,
- "11": 17,
- "60": 18,
- "19": 19,
- "21": 20,
- "33": 21,
- "36": 22,
- "66": 23,
- "88": 24,
- "12": 25,
- "17": 26,
- "81": 27,
- "90": 28,
+ "-": 5,
+ "0": 6,
+ "1": 7,
+ "2": 8,
+ "3": 9,
+ "4": 10,
+ "5": 11,
+ "6": 12,
+ "7": 13,
+ "8": 14,
+ "9": 15,
+ "10": 16,
+ "99": 17,
+ "98": 18,
+ "11": 19,
+ "97": 20,
+ "12": 21,
+ "96": 22,
+ "13": 23,
+ "95": 24,
+ "14": 25,
+ "15": 26,
+ "94": 27,
+ "93": 28,
  "16": 29,
- "39": 30,
- "55": 31,
- "65": 32,
- "79": 33,
- "87": 34,
- "95": 35,
- "10": 36,
- "18": 37,
- "25": 38,
- "32": 39,
- "37": 40,
- "53": 41,
- "54": 42,
- "57": 43,
- "59": 44,
- "71": 45,
- "72": 46,
- "75": 47,
- "85": 48,
- "89": 49,
- "91": 50,
- "22": 51,
- "24": 52,
- "28": 53,
- "35": 54,
- "46": 55,
- "64": 56,
- "69": 57,
- "78": 58,
- "83": 59,
- "84": 60,
- "92": 61,
- "94": 62,
- "26": 63,
- "27": 64,
- "29": 65,
- "30": 66,
- "47": 67,
- "49": 68,
- "51": 69,
- "58": 70,
- "68": 71,
- "73": 72,
- "96": 73,
- "13": 74,
- "20": 75,
- "23": 76,
- "40": 77,
- "61": 78,
- "70": 79,
- "82": 80,
- "38": 81,
- "74": 82,
- "80": 83,
- "98": 84,
- "14": 85,
- "41": 86,
- "45": 87,
- "52": 88,
- "62": 89,
- "63": 90,
- "77": 91,
- "86": 92,
- "31": 93,
- "34": 94,
- "42": 95,
- "43": 96,
- "48": 97,
- "76": 98,
- "99": 99,
- "15": 100,
- "56": 101,
- "97": 102,
- "44": 103,
- "67": 104
+ "17": 30,
+ "92": 31,
+ "18": 32,
+ "91": 33,
+ "90": 34,
+ "19": 35,
+ "20": 36,
+ "89": 37,
+ "21": 38,
+ "88": 39,
+ "87": 40,
+ "22": 41,
+ "23": 42,
+ "86": 43,
+ "85": 44,
+ "24": 45,
+ "25": 46,
+ "84": 47,
+ "83": 48,
+ "26": 49,
+ "27": 50,
+ "82": 51
  },
  "merges": [
- "5 0",
- "9 3",
+ "1 0",
+ "9 9",
+ "9 8",
  "1 1",
- "6 0",
- "1 9",
- "2 1",
- "3 3",
- "3 6",
- "6 6",
- "8 8",
+ "9 7",
  "1 2",
- "1 7",
- "8 1",
- "9 0",
- "1 6",
- "3 9",
- "5 5",
- "6 5",
- "7 9",
- "8 7",
+ "9 6",
+ "1 3",
  "9 5",
- "1 0",
+ "1 4",
+ "1 5",
+ "9 4",
+ "9 3",
+ "1 6",
+ "1 7",
+ "9 2",
  "1 8",
- "2 5",
- "3 2",
- "3 7",
- "5 3",
- "5 4",
- "5 7",
- "5 9",
- "7 1",
- "7 2",
- "7 5",
- "8 5",
- "8 9",
  "9 1",
+ "9 0",
+ "1 9",
+ "2 0",
+ "8 9",
+ "2 1",
+ "8 8",
+ "8 7",
  "2 2",
+ "2 3",
+ "8 6",
+ "8 5",
  "2 4",
- "2 8",
- "3 5",
- "4 6",
- "6 4",
- "6 9",
- "7 8",
- "8 3",
+ "2 5",
  "8 4",
- "9 2",
- "9 4",
+ "8 3",
  "2 6",
  "2 7",
- "2 9",
- "3 0",
- "4 7",
- "4 9",
- "5 1",
- "5 8",
- "6 8",
- "7 3",
- "9 6",
- "1 3",
- "2 0",
- "2 3",
- "4 0",
- "6 1",
- "7 0",
- "8 2",
- "3 8",
- "7 4",
- "8 0",
- "9 8",
- "1 4",
- "4 1",
- "4 5",
- "5 2",
- "6 2",
- "6 3",
- "7 7",
- "8 6",
- "3 1",
- "3 4",
- "4 2",
- "4 3",
- "4 8",
- "7 6",
- "9 9",
- "1 5",
- "5 6",
- "9 7",
- "4 4",
- "6 7"
+ "8 2"
  ]
  }
  }
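
The rewritten vocabulary now holds 52 entries (the special tokens, `+`, the newly added `-`, the single digits, and a smaller set of two-digit tokens), matching the `vocab_size: 52` change in config.json; each merge such as `"1 0"` combines the characters `1` and `0` into the token `10`. A quick way to poke at the updated tokenizer, with an illustrative input string:

```python
# Sketch: loading the updated tokenizer.json with the `tokenizers` library.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")  # assumed local path to this repo's file
print(tok.get_vocab_size())  # expected to be 52 after this commit

enc = tok.encode("12+34-5")  # illustrative input only
print(enc.tokens)  # two-digit numbers covered by a merge (e.g. "12") should appear as single tokens
print(enc.ids)
```
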
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1d2815be01cb68f3bfcac2870192039600e01a9bffcea46d39dfbe9c9424bf80
+ oid sha256:d67f255dd0132ad0e26dff40d45f3b9dccbb9a1d04a4e4d270f7a3e6fa02c0ec
  size 4920
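
`training_args.bin` is the serialized `TrainingArguments` object that the `Trainer` writes into the output directory; only its hash changes here, consistent with the hyperparameter changes above. A sketch for inspecting it, assuming a local copy (recent PyTorch versions need `weights_only=False` to unpickle arbitrary objects):

```python
# Sketch: inspecting the serialized training arguments from this commit.
import torch

args = torch.load("training_args.bin", weights_only=False)  # assumed local path
print(args.learning_rate)     # expected 0.0001 after this commit
print(args.num_train_epochs)
```
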