End of training
- README.md +204 -79
- model.safetensors +1 -1
- runs/Jul02_12-58-19_viridian/events.out.tfevents.1719925223.viridian.3874171.1 +3 -0
- runs/Jul02_13-00-44_viridian/events.out.tfevents.1719925248.viridian.3874171.2 +3 -0
- runs/Jul02_13-11-32_viridian/events.out.tfevents.1719925895.viridian.3874171.3 +3 -0
- runs/Jul02_13-11-56_viridian/events.out.tfevents.1719925926.viridian.3874171.4 +3 -0
- special_tokens_map.json +22 -3
- tokenizer_config.json +1 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -1,8 +1,8 @@
 ---
 license: apache-2.0
+base_model: distilgpt2
 tags:
 - generated_from_trainer
-base_model: distilgpt2
 model-index:
 - name: StatementOfWork_Generator_Omega_BS_1024_2
   results: []
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [distilgpt2](https://huggingface.co/distilgpt2) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
+- Loss: 0.8335
 
 ## Model description
 
@@ -36,91 +36,216 @@ More information needed
 The following hyperparameters were used during training:
 - learning_rate: 2e-05
 - train_batch_size: 10
-- eval_batch_size:
+- eval_batch_size: 10
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 200
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| No log | 1.0 | 10 |
-| No log | 2.0 | 20 |
-| No log | 3.0 | 30 |
-| No log | 4.0 | 40 |
-| No log | 5.0 | 50 |
-| No log | 6.0 | 60 |
-| No log | 7.0 | 70 |
-| No log | 8.0 | 80 |
-| No log | 9.0 | 90 |
-| No log | 10.0 | 100 |
-| No log | 11.0 | 110 |
-| No log | 12.0 | 120 |
-| No log | 13.0 | 130 | 1.
-| No log | 14.0 | 140 | 1.
-| No log | 15.0 | 150 | 1.
-| No log | 16.0 | 160 | 1.
-| No log | 17.0 | 170 | 1.
-| No log | 18.0 | 180 | 1.
-| No log | 19.0 | 190 | 1.
-| No log | 20.0 | 200 | 1.
-| No log | 21.0 | 210 | 1.
-| No log | 22.0 | 220 | 1.
-| No log | 23.0 | 230 | 1.
-| No log | 24.0 | 240 | 1.
-| No log | 25.0 | 250 | 1.
-| No log | 26.0 | 260 | 1.
-| No log | 27.0 | 270 | 1.
-| No log | 28.0 | 280 | 1.
-| No log | 29.0 | 290 | 1.
-| No log | 30.0 | 300 | 1.
-| No log | 31.0 | 310 | 1.
-| No log | 32.0 | 320 | 1.
-| No log | 33.0 | 330 | 1.
-| No log | 34.0 | 340 | 1.
-| No log | 35.0 | 350 | 1.
-| No log | 36.0 | 360 | 1.
-| No log | 37.0 | 370 | 1.
-| No log | 38.0 | 380 | 1.
-| No log | 39.0 | 390 | 1.
-| No log | 40.0 | 400 | 1.
-| No log | 41.0 | 410 | 1.
-| No log | 42.0 | 420 | 1.
-| No log | 43.0 | 430 | 1.
-| No log | 44.0 | 440 | 1.
-| No log | 45.0 | 450 | 1.
-| No log | 46.0 | 460 | 1.
-| No log | 47.0 | 470 | 1.
-| No log | 48.0 | 480 | 1.
-| No log | 49.0 | 490 | 1.
-[26 further removed rows whose values did not survive extraction]
+| No log | 1.0 | 10 | 1.3902 |
+| No log | 2.0 | 20 | 1.3797 |
+| No log | 3.0 | 30 | 1.3675 |
+| No log | 4.0 | 40 | 1.3547 |
+| No log | 5.0 | 50 | 1.3462 |
+| No log | 6.0 | 60 | 1.3354 |
+| No log | 7.0 | 70 | 1.3314 |
+| No log | 8.0 | 80 | 1.3174 |
+| No log | 9.0 | 90 | 1.3136 |
+| No log | 10.0 | 100 | 1.3040 |
+| No log | 11.0 | 110 | 1.2999 |
+| No log | 12.0 | 120 | 1.2922 |
+| No log | 13.0 | 130 | 1.2854 |
+| No log | 14.0 | 140 | 1.2752 |
+| No log | 15.0 | 150 | 1.2663 |
+| No log | 16.0 | 160 | 1.2614 |
+| No log | 17.0 | 170 | 1.2536 |
+| No log | 18.0 | 180 | 1.2507 |
+| No log | 19.0 | 190 | 1.2478 |
+| No log | 20.0 | 200 | 1.2334 |
+| No log | 21.0 | 210 | 1.2300 |
+| No log | 22.0 | 220 | 1.2176 |
+| No log | 23.0 | 230 | 1.2153 |
+| No log | 24.0 | 240 | 1.2056 |
+| No log | 25.0 | 250 | 1.1983 |
+| No log | 26.0 | 260 | 1.1945 |
+| No log | 27.0 | 270 | 1.1925 |
+| No log | 28.0 | 280 | 1.1881 |
+| No log | 29.0 | 290 | 1.1831 |
+| No log | 30.0 | 300 | 1.1697 |
+| No log | 31.0 | 310 | 1.1683 |
+| No log | 32.0 | 320 | 1.1649 |
+| No log | 33.0 | 330 | 1.1566 |
+| No log | 34.0 | 340 | 1.1539 |
+| No log | 35.0 | 350 | 1.1463 |
+| No log | 36.0 | 360 | 1.1456 |
+| No log | 37.0 | 370 | 1.1396 |
+| No log | 38.0 | 380 | 1.1334 |
+| No log | 39.0 | 390 | 1.1263 |
+| No log | 40.0 | 400 | 1.1244 |
+| No log | 41.0 | 410 | 1.1112 |
+| No log | 42.0 | 420 | 1.1180 |
+| No log | 43.0 | 430 | 1.1072 |
+| No log | 44.0 | 440 | 1.1055 |
+| No log | 45.0 | 450 | 1.0992 |
+| No log | 46.0 | 460 | 1.0953 |
+| No log | 47.0 | 470 | 1.0917 |
+| No log | 48.0 | 480 | 1.0831 |
+| No log | 49.0 | 490 | 1.0786 |
+| 0.866 | 50.0 | 500 | 1.0810 |
+| 0.866 | 51.0 | 510 | 1.0724 |
+| 0.866 | 52.0 | 520 | 1.0696 |
+| 0.866 | 53.0 | 530 | 1.0657 |
+| 0.866 | 54.0 | 540 | 1.0618 |
+| 0.866 | 55.0 | 550 | 1.0534 |
+| 0.866 | 56.0 | 560 | 1.0492 |
+| 0.866 | 57.0 | 570 | 1.0519 |
+| 0.866 | 58.0 | 580 | 1.0433 |
+| 0.866 | 59.0 | 590 | 1.0362 |
+| 0.866 | 60.0 | 600 | 1.0369 |
+| 0.866 | 61.0 | 610 | 1.0322 |
+| 0.866 | 62.0 | 620 | 1.0263 |
+| 0.866 | 63.0 | 630 | 1.0237 |
+| 0.866 | 64.0 | 640 | 1.0205 |
+| 0.866 | 65.0 | 650 | 1.0173 |
+| 0.866 | 66.0 | 660 | 1.0158 |
+| 0.866 | 67.0 | 670 | 1.0051 |
+| 0.866 | 68.0 | 680 | 1.0100 |
+| 0.866 | 69.0 | 690 | 1.0045 |
+| 0.866 | 70.0 | 700 | 0.9997 |
+| 0.866 | 71.0 | 710 | 0.9946 |
+| 0.866 | 72.0 | 720 | 0.9924 |
+| 0.866 | 73.0 | 730 | 0.9882 |
+| 0.866 | 74.0 | 740 | 0.9867 |
+| 0.866 | 75.0 | 750 | 0.9819 |
+| 0.866 | 76.0 | 760 | 0.9802 |
+| 0.866 | 77.0 | 770 | 0.9769 |
+| 0.866 | 78.0 | 780 | 0.9756 |
+| 0.866 | 79.0 | 790 | 0.9708 |
+| 0.866 | 80.0 | 800 | 0.9636 |
+| 0.866 | 81.0 | 810 | 0.9636 |
+| 0.866 | 82.0 | 820 | 0.9627 |
+| 0.866 | 83.0 | 830 | 0.9566 |
+| 0.866 | 84.0 | 840 | 0.9556 |
+| 0.866 | 85.0 | 850 | 0.9554 |
+| 0.866 | 86.0 | 860 | 0.9531 |
+| 0.866 | 87.0 | 870 | 0.9481 |
+| 0.866 | 88.0 | 880 | 0.9408 |
+| 0.866 | 89.0 | 890 | 0.9429 |
+| 0.866 | 90.0 | 900 | 0.9396 |
+| 0.866 | 91.0 | 910 | 0.9363 |
+| 0.866 | 92.0 | 920 | 0.9359 |
+| 0.866 | 93.0 | 930 | 0.9295 |
+| 0.866 | 94.0 | 940 | 0.9287 |
+| 0.866 | 95.0 | 950 | 0.9236 |
+| 0.866 | 96.0 | 960 | 0.9227 |
+| 0.866 | 97.0 | 970 | 0.9247 |
+| 0.866 | 98.0 | 980 | 0.9198 |
+| 0.866 | 99.0 | 990 | 0.9174 |
+| 0.4896 | 100.0 | 1000 | 0.9162 |
+| 0.4896 | 101.0 | 1010 | 0.9155 |
+| 0.4896 | 102.0 | 1020 | 0.9153 |
+| 0.4896 | 103.0 | 1030 | 0.9093 |
+| 0.4896 | 104.0 | 1040 | 0.9073 |
+| 0.4896 | 105.0 | 1050 | 0.9049 |
+| 0.4896 | 106.0 | 1060 | 0.9009 |
+| 0.4896 | 107.0 | 1070 | 0.9019 |
+| 0.4896 | 108.0 | 1080 | 0.8991 |
+| 0.4896 | 109.0 | 1090 | 0.8962 |
+| 0.4896 | 110.0 | 1100 | 0.8956 |
+| 0.4896 | 111.0 | 1110 | 0.8933 |
+| 0.4896 | 112.0 | 1120 | 0.8945 |
+| 0.4896 | 113.0 | 1130 | 0.8909 |
+| 0.4896 | 114.0 | 1140 | 0.8895 |
+| 0.4896 | 115.0 | 1150 | 0.8892 |
+| 0.4896 | 116.0 | 1160 | 0.8849 |
+| 0.4896 | 117.0 | 1170 | 0.8827 |
+| 0.4896 | 118.0 | 1180 | 0.8820 |
+| 0.4896 | 119.0 | 1190 | 0.8814 |
+| 0.4896 | 120.0 | 1200 | 0.8812 |
+| 0.4896 | 121.0 | 1210 | 0.8789 |
+| 0.4896 | 122.0 | 1220 | 0.8792 |
+| 0.4896 | 123.0 | 1230 | 0.8762 |
+| 0.4896 | 124.0 | 1240 | 0.8782 |
+| 0.4896 | 125.0 | 1250 | 0.8723 |
+| 0.4896 | 126.0 | 1260 | 0.8717 |
+| 0.4896 | 127.0 | 1270 | 0.8693 |
+| 0.4896 | 128.0 | 1280 | 0.8714 |
+| 0.4896 | 129.0 | 1290 | 0.8697 |
+| 0.4896 | 130.0 | 1300 | 0.8679 |
+| 0.4896 | 131.0 | 1310 | 0.8656 |
+| 0.4896 | 132.0 | 1320 | 0.8655 |
+| 0.4896 | 133.0 | 1330 | 0.8638 |
+| 0.4896 | 134.0 | 1340 | 0.8646 |
+| 0.4896 | 135.0 | 1350 | 0.8622 |
+| 0.4896 | 136.0 | 1360 | 0.8635 |
+| 0.4896 | 137.0 | 1370 | 0.8587 |
+| 0.4896 | 138.0 | 1380 | 0.8591 |
+| 0.4896 | 139.0 | 1390 | 0.8609 |
+| 0.4896 | 140.0 | 1400 | 0.8597 |
+| 0.4896 | 141.0 | 1410 | 0.8570 |
+| 0.4896 | 142.0 | 1420 | 0.8588 |
+| 0.4896 | 143.0 | 1430 | 0.8552 |
+| 0.4896 | 144.0 | 1440 | 0.8572 |
+| 0.4896 | 145.0 | 1450 | 0.8528 |
+| 0.4896 | 146.0 | 1460 | 0.8526 |
+| 0.4896 | 147.0 | 1470 | 0.8534 |
+| 0.4896 | 148.0 | 1480 | 0.8514 |
+| 0.4896 | 149.0 | 1490 | 0.8522 |
+| 0.3365 | 150.0 | 1500 | 0.8528 |
+| 0.3365 | 151.0 | 1510 | 0.8513 |
+| 0.3365 | 152.0 | 1520 | 0.8493 |
+| 0.3365 | 153.0 | 1530 | 0.8491 |
+| 0.3365 | 154.0 | 1540 | 0.8498 |
+| 0.3365 | 155.0 | 1550 | 0.8479 |
+| 0.3365 | 156.0 | 1560 | 0.8478 |
+| 0.3365 | 157.0 | 1570 | 0.8466 |
+| 0.3365 | 158.0 | 1580 | 0.8438 |
+| 0.3365 | 159.0 | 1590 | 0.8444 |
+| 0.3365 | 160.0 | 1600 | 0.8442 |
+| 0.3365 | 161.0 | 1610 | 0.8436 |
+| 0.3365 | 162.0 | 1620 | 0.8434 |
+| 0.3365 | 163.0 | 1630 | 0.8407 |
+| 0.3365 | 164.0 | 1640 | 0.8417 |
+| 0.3365 | 165.0 | 1650 | 0.8426 |
+| 0.3365 | 166.0 | 1660 | 0.8406 |
+| 0.3365 | 167.0 | 1670 | 0.8400 |
+| 0.3365 | 168.0 | 1680 | 0.8405 |
+| 0.3365 | 169.0 | 1690 | 0.8400 |
+| 0.3365 | 170.0 | 1700 | 0.8386 |
+| 0.3365 | 171.0 | 1710 | 0.8396 |
+| 0.3365 | 172.0 | 1720 | 0.8384 |
+| 0.3365 | 173.0 | 1730 | 0.8370 |
+| 0.3365 | 174.0 | 1740 | 0.8374 |
+| 0.3365 | 175.0 | 1750 | 0.8375 |
+| 0.3365 | 176.0 | 1760 | 0.8381 |
+| 0.3365 | 177.0 | 1770 | 0.8377 |
+| 0.3365 | 178.0 | 1780 | 0.8370 |
+| 0.3365 | 179.0 | 1790 | 0.8355 |
+| 0.3365 | 180.0 | 1800 | 0.8354 |
+| 0.3365 | 181.0 | 1810 | 0.8363 |
+| 0.3365 | 182.0 | 1820 | 0.8351 |
+| 0.3365 | 183.0 | 1830 | 0.8348 |
+| 0.3365 | 184.0 | 1840 | 0.8352 |
+| 0.3365 | 185.0 | 1850 | 0.8357 |
+| 0.3365 | 186.0 | 1860 | 0.8356 |
+| 0.3365 | 187.0 | 1870 | 0.8351 |
+| 0.3365 | 188.0 | 1880 | 0.8344 |
+| 0.3365 | 189.0 | 1890 | 0.8343 |
+| 0.3365 | 190.0 | 1900 | 0.8340 |
+| 0.3365 | 191.0 | 1910 | 0.8336 |
+| 0.3365 | 192.0 | 1920 | 0.8332 |
+| 0.3365 | 193.0 | 1930 | 0.8329 |
+| 0.3365 | 194.0 | 1940 | 0.8328 |
+| 0.3365 | 195.0 | 1950 | 0.8331 |
+| 0.3365 | 196.0 | 1960 | 0.8333 |
+| 0.3365 | 197.0 | 1970 | 0.8334 |
+| 0.3365 | 198.0 | 1980 | 0.8335 |
+| 0.3365 | 199.0 | 1990 | 0.8335 |
+| 0.2854 | 200.0 | 2000 | 0.8335 |
 
 
 ### Framework versions
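The hyperparameters above map one-to-one onto `transformers.TrainingArguments`. The sketch below shows a plausible Trainer setup under those settings; the dataset, prompt text, and output directory are placeholders, since the commit contains no training script or data. Two details the table implies: with a batch size of 10 and 10 optimizer steps per epoch, the training set holds roughly 100 examples, and the "No log" entries follow from Trainer's default `logging_steps=500`, so a fresh training loss is recorded only at steps 500, 1000, 1500, and 2000 (epochs 50, 100, 150, 200) and carried forward in between.

```python
from datasets import Dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
tokenizer.pad_token = tokenizer.eos_token  # GPT-2 has no pad token; see the tokenizer diffs below
model = AutoModelForCausalLM.from_pretrained("distilgpt2")

# Placeholder corpus: the real statement-of-work dataset is not in this commit.
# 110 examples split 100/10 reproduces the 10 optimizer steps per epoch above.
corpus = Dataset.from_dict({"text": ["Statement of Work: example text."] * 110})
corpus = corpus.map(
    lambda batch: tokenizer(batch["text"], truncation=True, max_length=1024),
    batched=True,
    remove_columns=["text"],
)
splits = corpus.train_test_split(test_size=10, seed=42)

args = TrainingArguments(
    output_dir="StatementOfWork_Generator_Omega_BS_1024_2",
    learning_rate=2e-5,
    per_device_train_batch_size=10,
    per_device_eval_batch_size=10,
    num_train_epochs=200,
    lr_scheduler_type="linear",    # the Adam betas/epsilon in the card are the defaults
    seed=42,
    evaluation_strategy="epoch",   # one validation-loss row per epoch
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=splits["train"],
    eval_dataset=splits["test"],
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()
```

Newer transformers releases spell `evaluation_strategy` as `eval_strategy`; otherwise the arguments are standard.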
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2fa4e5e16e72e2785ebebe41e1c1a0ffe4a2f5933d730b0c06a13e064d6c4f3c
 size 327657928
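The `model.safetensors` entry only swaps the git-lfs pointer; the ~328 MB of distilgpt2-sized weights live in LFS storage and are fetched on load. A sketch of using the result, assuming a repo id of the form `<owner>/StatementOfWork_Generator_Omega_BS_1024_2` (the owner is not visible on this page) and a hypothetical prompt style:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "<owner>/StatementOfWork_Generator_Omega_BS_1024_2"  # hypothetical owner
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)  # resolves the LFS pointer above

inputs = tokenizer("Statement of Work:", return_tensors="pt")  # assumed prompt style
output = model.generate(**inputs, max_new_tokens=100, pad_token_id=tokenizer.pad_token_id)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```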
runs/Jul02_12-58-19_viridian/events.out.tfevents.1719925223.viridian.3874171.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4268b33138c1d5e2224d12bbec1634d391ccf581f2ff1184207bb61c32c42696
+size 5079
runs/Jul02_13-00-44_viridian/events.out.tfevents.1719925248.viridian.3874171.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b67407e80186271d0bbac14784c00fc0326712ca361e23e200d03559fb6dd9a
+size 5079
runs/Jul02_13-11-32_viridian/events.out.tfevents.1719925895.viridian.3874171.3
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:763e56b5ee91f6b4926ed842a95371becd992a389d30e8dec9e06c69a4066782
+size 5079
runs/Jul02_13-11-56_viridian/events.out.tfevents.1719925926.viridian.3874171.4
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:05f9c889e1ecda74d81be4e69e5a10a484b5acb5ed589120f310594c0e5cec1a
+size 60415
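Four TensorBoard event files are added. The three 5 kB files, written within minutes of each other, look like aborted starts; the 60 kB file presumably holds the full 200-epoch run. They can be browsed with `tensorboard --logdir runs`, or read programmatically. The sketch below uses TensorBoard's `EventAccumulator`; the scalar tag `eval/loss` is the usual Trainer naming but is an assumption here, so check it against `ea.Tags()`:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/Jul02_13-11-56_viridian/events.out.tfevents.1719925926.viridian.3874171.4"
ea = EventAccumulator(path)
ea.Reload()                            # parse the event file
print(ea.Tags()["scalars"])            # list the scalar series actually present
for event in ea.Scalars("eval/loss"):  # assumed Trainer tag name
    print(event.step, event.value)
```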
special_tokens_map.json
CHANGED
@@ -1,5 +1,24 @@
 {
-  "bos_token":
-
-
+  "bos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|endoftext|>",
+  "unk_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
 }
tokenizer_config.json
CHANGED
@@ -14,6 +14,7 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
   "model_max_length": 1024,
+  "pad_token": "<|endoftext|>",
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "<|endoftext|>"
 }
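Both tokenizer-side changes record the same fix: GPT-2 tokenizers define no padding token, but batched training needs one, so `<|endoftext|>` is reused as `pad_token`; the expanded `special_tokens_map.json` is just the serialized `AddedToken` form of the same special tokens. A minimal sketch of the step that produces these two files on save:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
print(tokenizer.pad_token)                 # None: GPT-2 ships without a pad token
tokenizer.pad_token = tokenizer.eos_token  # reuse <|endoftext|> (id 50256)

# save_pretrained() rewrites special_tokens_map.json and tokenizer_config.json
# with the "pad_token": "<|endoftext|>" entries shown in this commit.
tokenizer.save_pretrained("StatementOfWork_Generator_Omega_BS_1024_2")
```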
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c8ad5c3f5dbda24f21458ae661a230c50b9cd10ffc77f2521dac8e57a948e5ed
 size 5048
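`training_args.bin` is a pickled `TrainingArguments` object written by `Trainer`, not a tensor file, so it can be inspected directly. A sketch, noting that unpickling needs a reasonably compatible transformers version and that newer torch requires an explicit `weights_only=False` for non-tensor payloads:

```python
import torch

# Unpickles the transformers TrainingArguments saved alongside the model.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```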