End of training
Browse files
- README.md +270 -247
- adapter_config.json +5 -5
- adapter_model.safetensors +1 -1
- runs/Jun08_00-20-24_86a22f0778fe/events.out.tfevents.1717806035.86a22f0778fe.6252.0 +3 -0
- tokenizer.json +1 -0
- training_args.bin +2 -2
README.md
CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.

## Model description
@@ -48,253 +48,276 @@ The following hyperparameters were used during training:
### Training results

| Training Loss | Epoch
(previous per-step result rows removed; values truncated in this view)

### Framework versions

- PEFT 0.
- Transformers 4.
- Pytorch 2.
- Datasets 2.
- Tokenizers 0.
This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2977

## Model description
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:-----:|:---------------:|
| 0.2658 | 1.5177 | 8000 | 0.2979 |
| 0.2628 | 1.5272 | 8050 | 0.2971 |
| 0.2535 | 1.5367 | 8100 | 0.2988 |
| 0.2583 | 1.5462 | 8150 | 0.2980 |
| 0.253 | 1.5557 | 8200 | 0.2963 |
| 0.2835 | 1.5652 | 8250 | 0.2969 |
| 0.2759 | 1.5747 | 8300 | 0.2954 |
| 0.2581 | 1.5841 | 8350 | 0.2952 |
| 0.2639 | 1.5936 | 8400 | 0.2950 |
| 0.2406 | 1.6031 | 8450 | 0.2939 |
| 0.2566 | 1.6126 | 8500 | 0.2939 |
| 0.2607 | 1.6221 | 8550 | 0.2935 |
| 0.2561 | 1.6316 | 8600 | 0.2940 |
| 0.2752 | 1.6411 | 8650 | 0.2932 |
| 0.2523 | 1.6505 | 8700 | 0.2927 |
| 0.2454 | 1.6600 | 8750 | 0.2942 |
| 0.2673 | 1.6695 | 8800 | 0.2930 |
| 0.299 | 1.6790 | 8850 | 0.2935 |
| 0.2681 | 1.6885 | 8900 | 0.2929 |
| 0.2849 | 1.6980 | 8950 | 0.2911 |
| 0.2213 | 1.7075 | 9000 | 0.2926 |
| 0.2592 | 1.7169 | 9050 | 0.2919 |
| 0.2917 | 1.7264 | 9100 | 0.2927 |
| 0.2545 | 1.7359 | 9150 | 0.2911 |
| 0.2702 | 1.7454 | 9200 | 0.2909 |
| 0.2398 | 1.7549 | 9250 | 0.2895 |
| 0.2724 | 1.7644 | 9300 | 0.2900 |
| 0.2556 | 1.7739 | 9350 | 0.2916 |
| 0.2714 | 1.7833 | 9400 | 0.2894 |
| 0.2682 | 1.7928 | 9450 | 0.2899 |
| 0.2492 | 1.8023 | 9500 | 0.2897 |
| 0.262 | 1.8118 | 9550 | 0.2893 |
| 0.2628 | 1.8213 | 9600 | 0.2884 |
| 0.2228 | 1.8308 | 9650 | 0.2890 |
| 0.2335 | 1.8403 | 9700 | 0.2886 |
| 0.2337 | 1.8497 | 9750 | 0.2883 |
| 0.2839 | 1.8592 | 9800 | 0.2872 |
| 0.2649 | 1.8687 | 9850 | 0.2876 |
| 0.2508 | 1.8782 | 9900 | 0.2865 |
| 0.2509 | 1.8877 | 9950 | 0.2852 |
| 0.2965 | 1.8972 | 10000 | 0.2866 |
| 0.2419 | 1.9067 | 10050 | 0.2852 |
| 0.2654 | 1.9161 | 10100 | 0.2854 |
| 0.2621 | 1.9256 | 10150 | 0.2847 |
| 0.2748 | 1.9351 | 10200 | 0.2848 |
| 0.2533 | 1.9446 | 10250 | 0.2841 |
| 0.2514 | 1.9541 | 10300 | 0.2833 |
| 0.2609 | 1.9636 | 10350 | 0.2834 |
| 0.232 | 1.9731 | 10400 | 0.2836 |
| 0.2546 | 1.9825 | 10450 | 0.2841 |
| 0.2431 | 1.9920 | 10500 | 0.2852 |
| 0.2078 | 2.0015 | 10550 | 0.2853 |
| 0.1902 | 2.0110 | 10600 | 0.2942 |
| 0.1936 | 2.0205 | 10650 | 0.2920 |
| 0.1793 | 2.0300 | 10700 | 0.2927 |
| 0.1892 | 2.0395 | 10750 | 0.2928 |
| 0.1818 | 2.0489 | 10800 | 0.2938 |
| 0.1916 | 2.0584 | 10850 | 0.2929 |
| 0.1706 | 2.0679 | 10900 | 0.2934 |
| 0.1937 | 2.0774 | 10950 | 0.2909 |
| 0.1743 | 2.0869 | 11000 | 0.2952 |
| 0.1812 | 2.0964 | 11050 | 0.2952 |
| 0.1705 | 2.1059 | 11100 | 0.2950 |
| 0.1774 | 2.1153 | 11150 | 0.2959 |
| 0.2058 | 2.1248 | 11200 | 0.2935 |
| 0.2053 | 2.1343 | 11250 | 0.2950 |
| 0.2094 | 2.1438 | 11300 | 0.2930 |
| 0.2172 | 2.1533 | 11350 | 0.2947 |
| 0.1747 | 2.1628 | 11400 | 0.2916 |
| 0.201 | 2.1723 | 11450 | 0.2926 |
| 0.1867 | 2.1817 | 11500 | 0.2931 |
| 0.1892 | 2.1912 | 11550 | 0.2917 |
| 0.173 | 2.2007 | 11600 | 0.2939 |
| 0.1785 | 2.2102 | 11650 | 0.2930 |
| 0.2122 | 2.2197 | 11700 | 0.2894 |
| 0.1915 | 2.2292 | 11750 | 0.2914 |
| 0.2049 | 2.2387 | 11800 | 0.2916 |
| 0.1831 | 2.2482 | 11850 | 0.2911 |
| 0.1927 | 2.2576 | 11900 | 0.2917 |
| 0.2014 | 2.2671 | 11950 | 0.2921 |
| 0.1847 | 2.2766 | 12000 | 0.2903 |
| 0.1736 | 2.2861 | 12050 | 0.2959 |
| 0.1885 | 2.2956 | 12100 | 0.2919 |
| 0.1918 | 2.3051 | 12150 | 0.2924 |
| 0.1919 | 2.3146 | 12200 | 0.2895 |
| 0.1888 | 2.3240 | 12250 | 0.2872 |
| 0.1614 | 2.3335 | 12300 | 0.2906 |
| 0.1932 | 2.3430 | 12350 | 0.2889 |
| 0.1882 | 2.3525 | 12400 | 0.2897 |
| 0.1863 | 2.3620 | 12450 | 0.2889 |
| 0.21 | 2.3715 | 12500 | 0.2892 |
| 0.1869 | 2.3810 | 12550 | 0.2897 |
| 0.1757 | 2.3904 | 12600 | 0.2891 |
| 0.1698 | 2.3999 | 12650 | 0.2916 |
| 0.1699 | 2.4094 | 12700 | 0.2888 |
| 0.1908 | 2.4189 | 12750 | 0.2890 |
| 0.2096 | 2.4284 | 12800 | 0.2861 |
| 0.1957 | 2.4379 | 12850 | 0.2907 |
| 0.1971 | 2.4474 | 12900 | 0.2892 |
| 0.187 | 2.4568 | 12950 | 0.2861 |
| 0.1911 | 2.4663 | 13000 | 0.2888 |
| 0.1683 | 2.4758 | 13050 | 0.2871 |
| 0.1783 | 2.4853 | 13100 | 0.2883 |
| 0.1737 | 2.4948 | 13150 | 0.2892 |
| 0.1886 | 2.5043 | 13200 | 0.2878 |
| 0.1744 | 2.5138 | 13250 | 0.2867 |
| 0.1948 | 2.5232 | 13300 | 0.2878 |
| 0.1762 | 2.5327 | 13350 | 0.2878 |
| 0.1772 | 2.5422 | 13400 | 0.2876 |
| 0.1982 | 2.5517 | 13450 | 0.2877 |
| 0.182 | 2.5612 | 13500 | 0.2892 |
| 0.1925 | 2.5707 | 13550 | 0.2866 |
| 0.1834 | 2.5802 | 13600 | 0.2878 |
| 0.167 | 2.5896 | 13650 | 0.2865 |
| 0.2087 | 2.5991 | 13700 | 0.2858 |
| 0.1948 | 2.6086 | 13750 | 0.2864 |
| 0.1709 | 2.6181 | 13800 | 0.2863 |
| 0.1948 | 2.6276 | 13850 | 0.2851 |
| 0.1901 | 2.6371 | 13900 | 0.2862 |
| 0.1817 | 2.6466 | 13950 | 0.2870 |
| 0.1712 | 2.6560 | 14000 | 0.2845 |
| 0.1749 | 2.6655 | 14050 | 0.2856 |
| 0.1956 | 2.6750 | 14100 | 0.2864 |
| 0.1938 | 2.6845 | 14150 | 0.2836 |
| 0.1767 | 2.6940 | 14200 | 0.2840 |
| 0.1885 | 2.7035 | 14250 | 0.2844 |
| 0.1648 | 2.7130 | 14300 | 0.2830 |
| 0.1903 | 2.7224 | 14350 | 0.2835 |
| 0.1901 | 2.7319 | 14400 | 0.2833 |
| 0.1635 | 2.7414 | 14450 | 0.2857 |
| 0.1856 | 2.7509 | 14500 | 0.2830 |
| 0.1921 | 2.7604 | 14550 | 0.2839 |
| 0.1823 | 2.7699 | 14600 | 0.2831 |
| 0.1872 | 2.7794 | 14650 | 0.2823 |
| 0.1585 | 2.7888 | 14700 | 0.2836 |
| 0.1743 | 2.7983 | 14750 | 0.2839 |
| 0.1702 | 2.8078 | 14800 | 0.2827 |
| 0.1923 | 2.8173 | 14850 | 0.2829 |
| 0.1739 | 2.8268 | 14900 | 0.2819 |
| 0.1751 | 2.8363 | 14950 | 0.2810 |
| 0.1945 | 2.8458 | 15000 | 0.2817 |
| 0.181 | 2.8552 | 15050 | 0.2816 |
| 0.1718 | 2.8647 | 15100 | 0.2812 |
| 0.1811 | 2.8742 | 15150 | 0.2807 |
| 0.1917 | 2.8837 | 15200 | 0.2818 |
| 0.1776 | 2.8932 | 15250 | 0.2806 |
| 0.1656 | 2.9027 | 15300 | 0.2811 |
| 0.1794 | 2.9122 | 15350 | 0.2812 |
| 0.1969 | 2.9216 | 15400 | 0.2799 |
| 0.1741 | 2.9311 | 15450 | 0.2803 |
| 0.1654 | 2.9406 | 15500 | 0.2817 |
| 0.1682 | 2.9501 | 15550 | 0.2806 |
| 0.175 | 2.9596 | 15600 | 0.2797 |
| 0.1597 | 2.9691 | 15650 | 0.2813 |
| 0.1723 | 2.9786 | 15700 | 0.2821 |
| 0.1957 | 2.9880 | 15750 | 0.2786 |
| 0.1689 | 2.9975 | 15800 | 0.2800 |
| 0.1795 | 3.0070 | 15850 | 0.2937 |
| 0.1294 | 3.0165 | 15900 | 0.2980 |
| 0.1169 | 3.0260 | 15950 | 0.3020 |
| 0.1194 | 3.0355 | 16000 | 0.3021 |
| 0.1164 | 3.0450 | 16050 | 0.3001 |
| 0.1276 | 3.0544 | 16100 | 0.3033 |
| 0.1265 | 3.0639 | 16150 | 0.2986 |
| 0.1239 | 3.0734 | 16200 | 0.3006 |
| 0.1129 | 3.0829 | 16250 | 0.3029 |
| 0.1294 | 3.0924 | 16300 | 0.2992 |
| 0.1198 | 3.1019 | 16350 | 0.3027 |
| 0.1168 | 3.1114 | 16400 | 0.3022 |
| 0.1302 | 3.1208 | 16450 | 0.2996 |
| 0.1287 | 3.1303 | 16500 | 0.3020 |
| 0.1181 | 3.1398 | 16550 | 0.3001 |
| 0.1261 | 3.1493 | 16600 | 0.3019 |
| 0.1297 | 3.1588 | 16650 | 0.3037 |
| 0.1212 | 3.1683 | 16700 | 0.3047 |
| 0.1234 | 3.1778 | 16750 | 0.3033 |
| 0.1317 | 3.1873 | 16800 | 0.3023 |
| 0.1425 | 3.1967 | 16850 | 0.2999 |
| 0.1187 | 3.2062 | 16900 | 0.3039 |
| 0.1313 | 3.2157 | 16950 | 0.3002 |
| 0.1133 | 3.2252 | 17000 | 0.3022 |
| 0.1161 | 3.2347 | 17050 | 0.3017 |
| 0.1354 | 3.2442 | 17100 | 0.3024 |
| 0.1204 | 3.2537 | 17150 | 0.2998 |
| 0.1354 | 3.2631 | 17200 | 0.3012 |
| 0.1263 | 3.2726 | 17250 | 0.3001 |
| 0.1281 | 3.2821 | 17300 | 0.3015 |
| 0.123 | 3.2916 | 17350 | 0.3020 |
| 0.1384 | 3.3011 | 17400 | 0.3009 |
| 0.1257 | 3.3106 | 17450 | 0.3014 |
| 0.1125 | 3.3201 | 17500 | 0.2992 |
| 0.1218 | 3.3295 | 17550 | 0.2988 |
| 0.1295 | 3.3390 | 17600 | 0.2997 |
| 0.1224 | 3.3485 | 17650 | 0.3006 |
| 0.1289 | 3.3580 | 17700 | 0.3011 |
| 0.1369 | 3.3675 | 17750 | 0.3005 |
| 0.1285 | 3.3770 | 17800 | 0.3002 |
| 0.1247 | 3.3865 | 17850 | 0.2997 |
| 0.1421 | 3.3959 | 17900 | 0.2988 |
| 0.1246 | 3.4054 | 17950 | 0.2980 |
| 0.1345 | 3.4149 | 18000 | 0.2988 |
| 0.1345 | 3.4244 | 18050 | 0.2980 |
| 0.1251 | 3.4339 | 18100 | 0.2992 |
| 0.1194 | 3.4434 | 18150 | 0.2998 |
| 0.1163 | 3.4529 | 18200 | 0.2979 |
| 0.1254 | 3.4623 | 18250 | 0.2993 |
| 0.1299 | 3.4718 | 18300 | 0.3003 |
| 0.1162 | 3.4813 | 18350 | 0.2997 |
| 0.1259 | 3.4908 | 18400 | 0.2983 |
| 0.1304 | 3.5003 | 18450 | 0.2976 |
| 0.1328 | 3.5098 | 18500 | 0.2964 |
| 0.1186 | 3.5193 | 18550 | 0.2981 |
| 0.114 | 3.5287 | 18600 | 0.2990 |
| 0.1287 | 3.5382 | 18650 | 0.2986 |
| 0.1185 | 3.5477 | 18700 | 0.2995 |
| 0.1201 | 3.5572 | 18750 | 0.2980 |
| 0.1173 | 3.5667 | 18800 | 0.2994 |
| 0.1361 | 3.5762 | 18850 | 0.2980 |
| 0.1163 | 3.5857 | 18900 | 0.2991 |
| 0.1223 | 3.5951 | 18950 | 0.2982 |
| 0.1289 | 3.6046 | 19000 | 0.2983 |
| 0.129 | 3.6141 | 19050 | 0.2966 |
| 0.1348 | 3.6236 | 19100 | 0.2979 |
| 0.1211 | 3.6331 | 19150 | 0.2988 |
| 0.1276 | 3.6426 | 19200 | 0.2974 |
| 0.1151 | 3.6521 | 19250 | 0.2987 |
| 0.1304 | 3.6615 | 19300 | 0.2970 |
| 0.1138 | 3.6710 | 19350 | 0.2978 |
| 0.1202 | 3.6805 | 19400 | 0.2987 |
| 0.1378 | 3.6900 | 19450 | 0.2975 |
| 0.1244 | 3.6995 | 19500 | 0.2972 |
| 0.13 | 3.7090 | 19550 | 0.2978 |
| 0.1162 | 3.7185 | 19600 | 0.2982 |
| 0.1301 | 3.7279 | 19650 | 0.2970 |
| 0.1186 | 3.7374 | 19700 | 0.2977 |
| 0.1158 | 3.7469 | 19750 | 0.2976 |
| 0.1181 | 3.7564 | 19800 | 0.2975 |
| 0.1186 | 3.7659 | 19850 | 0.2980 |
| 0.121 | 3.7754 | 19900 | 0.2974 |
| 0.1355 | 3.7849 | 19950 | 0.2970 |
| 0.1222 | 3.7943 | 20000 | 0.2961 |
| 0.1307 | 3.8038 | 20050 | 0.2964 |
| 0.1223 | 3.8133 | 20100 | 0.2957 |
| 0.1097 | 3.8228 | 20150 | 0.2961 |
| 0.1215 | 3.8323 | 20200 | 0.2965 |
| 0.1208 | 3.8418 | 20250 | 0.2973 |
| 0.1199 | 3.8513 | 20300 | 0.2973 |
| 0.1249 | 3.8607 | 20350 | 0.2966 |
| 0.1143 | 3.8702 | 20400 | 0.2972 |
| 0.1316 | 3.8797 | 20450 | 0.2971 |
| 0.1202 | 3.8892 | 20500 | 0.2973 |
| 0.1364 | 3.8987 | 20550 | 0.2972 |
| 0.1193 | 3.9082 | 20600 | 0.2975 |
| 0.1246 | 3.9177 | 20650 | 0.2971 |
| 0.1214 | 3.9271 | 20700 | 0.2971 |
| 0.1108 | 3.9366 | 20750 | 0.2976 |
| 0.1224 | 3.9461 | 20800 | 0.2975 |
| 0.1202 | 3.9556 | 20850 | 0.2975 |
| 0.1135 | 3.9651 | 20900 | 0.2975 |
| 0.1183 | 3.9746 | 20950 | 0.2977 |
| 0.1105 | 3.9841 | 21000 | 0.2977 |
| 0.1307 | 3.9935 | 21050 | 0.2977 |

### Framework versions

- PEFT 0.11.1
- Transformers 4.41.2
- Pytorch 2.3.0+cu121
- Datasets 2.19.2
- Tokenizers 0.19.1
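For context (not part of this commit): a minimal sketch of loading a LoRA adapter like this one on top of the base CodeLlama model with PEFT. The adapter repo id, prompt, and generation settings below are placeholders, and the library versions assumed are the ones listed under Framework versions.

```python
# Minimal sketch: attach the LoRA adapter to the base model and generate.
# "your-username/your-adapter-repo" is a placeholder; the adapter's actual
# repo id is not part of this diff.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "codellama/CodeLlama-7b-Instruct-hf"
adapter_id = "your-username/your-adapter-repo"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(
    base_id, torch_dtype=torch.float16, device_map="auto"
)
model = PeftModel.from_pretrained(base_model, adapter_id)

prompt = "[INST] Write a Python function that reverses a string. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```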
adapter_config.json
CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "
    "q_proj",
    "k_proj",
    "gate_proj",
    "o_proj",
    "
    "
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "down_proj",
    "up_proj",
    "q_proj",
    "o_proj",
    "gate_proj",
    "k_proj",
    "v_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
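For reference (not from the commit), the updated target_modules list corresponds to a PEFT LoraConfig along these lines; the rank, alpha, and dropout values are illustrative assumptions, since only the module list, task type, and use_dora appear in this file.

```python
# Sketch of a LoraConfig matching the target_modules above.
# r, lora_alpha, and lora_dropout are assumed values, not taken from this diff.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,               # assumed
    lora_alpha=32,      # assumed
    lora_dropout=0.05,  # assumed
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)
```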
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:
size 2332095256
version https://git-lfs.github.com/spec/v1
oid sha256:528fbfbe7312fa3393d8748b97ec398101a12fd6ba31983e69474101ee30b402
size 2332095256
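The changed binary files are stored as Git LFS pointers (version, oid, size). One way to sanity-check a downloaded copy is to compare its SHA-256 and byte size against the pointer, as in this sketch; the local path is a placeholder.

```python
# Verify a downloaded LFS object against the pointer's oid and size.
import hashlib
import os

path = "adapter_model.safetensors"  # placeholder for the locally downloaded file
expected_oid = "528fbfbe7312fa3393d8748b97ec398101a12fd6ba31983e69474101ee30b402"
expected_size = 2332095256

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha256.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer oid and size match")
```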
runs/Jun08_00-20-24_86a22f0778fe/events.out.tfevents.1717806035.86a22f0778fe.6252.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:45374f78ba60cb73a67fb558640169ee5fedb6ae4ac4af409e31f674fe5eedab
size 166746
tokenizer.json
CHANGED
@@ -184,6 +184,7 @@
  "end_of_word_suffix": null,
  "fuse_unk": true,
  "byte_fallback": true,
  "vocab": {
    "<unk>": 0,
    "<s>": 1,
  "end_of_word_suffix": null,
  "fuse_unk": true,
  "byte_fallback": true,
  "ignore_merges": false,
  "vocab": {
    "<unk>": 0,
    "<s>": 1,
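Not part of the commit itself: the added ignore_merges flag sits alongside fuse_unk and byte_fallback in the tokenizer's BPE model section, so a downloaded tokenizer.json can be checked for it directly; the path below is a placeholder.

```python
# Read BPE model flags from a downloaded tokenizer.json (placeholder path).
import json

with open("tokenizer.json") as f:
    tok = json.load(f)

bpe = tok["model"]  # section containing fuse_unk, byte_fallback, ignore_merges
print(bpe.get("fuse_unk"), bpe.get("byte_fallback"), bpe.get("ignore_merges"))
```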
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:
size
version https://git-lfs.github.com/spec/v1
oid sha256:c77f732211ab66d0a55442a5422becdc12617adcc3273b3f9ac56d8af53e9d86
size 5368