End of training

Browse files
- README.md +160 -156
- adapter_config.json +5 -5
- adapter_model.safetensors +1 -1
- runs/Aug25_12-47-45_8e0006e4f5fe/events.out.tfevents.1724590078.8e0006e4f5fe.554.0 +3 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2953
## Model description

@@ -50,165 +50,169 @@ The following hyperparameters were used during training:

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:-----:|:---------------:|
| 0.1975 | 2.4942 | 13000 | 0.2857 |
| 0.1754 | 2.5038 | 13050 | 0.2851 |
| 0.1716 | 2.5134 | 13100 | 0.2843 |
| 0.1896 | 2.5230 | 13150 | 0.2843 |
| 0.172 | 2.5326 | 13200 | 0.2849 |
| 0.1967 | 2.5422 | 13250 | 0.2805 |
| 0.1778 | 2.5518 | 13300 | 0.2820 |
| 0.2003 | 2.5614 | 13350 | 0.2818 |
| 0.1669 | 2.5710 | 13400 | 0.2818 |
| 0.1713 | 2.5806 | 13450 | 0.2819 |
| 0.1813 | 2.5902 | 13500 | 0.2826 |
| 0.2075 | 2.5998 | 13550 | 0.2810 |
| 0.1683 | 2.6094 | 13600 | 0.2830 |
| 0.1985 | 2.6190 | 13650 | 0.2819 |
| 0.1759 | 2.6285 | 13700 | 0.2840 |
| 0.1854 | 2.6381 | 13750 | 0.2821 |
| 0.1922 | 2.6477 | 13800 | 0.2824 |
| 0.1707 | 2.6573 | 13850 | 0.2828 |
| 0.1759 | 2.6669 | 13900 | 0.2811 |
| 0.1747 | 2.6765 | 13950 | 0.2814 |
| 0.1965 | 2.6861 | 14000 | 0.2837 |
| 0.1917 | 2.6957 | 14050 | 0.2821 |
| 0.186 | 2.7053 | 14100 | 0.2832 |
| 0.1878 | 2.7149 | 14150 | 0.2823 |
| 0.1906 | 2.7245 | 14200 | 0.2792 |
| 0.1551 | 2.7341 | 14250 | 0.2793 |
| 0.2018 | 2.7437 | 14300 | 0.2779 |
| 0.1939 | 2.7533 | 14350 | 0.2799 |
| 0.1659 | 2.7629 | 14400 | 0.2785 |
| 0.1845 | 2.7724 | 14450 | 0.2783 |
| 0.1727 | 2.7820 | 14500 | 0.2780 |
| 0.1793 | 2.7916 | 14550 | 0.2787 |
| 0.1753 | 2.8012 | 14600 | 0.2780 |
| 0.1857 | 2.8108 | 14650 | 0.2781 |
| 0.1759 | 2.8204 | 14700 | 0.2776 |
| 0.1925 | 2.8300 | 14750 | 0.2769 |
| 0.1666 | 2.8396 | 14800 | 0.2780 |
| 0.2118 | 2.8492 | 14850 | 0.2762 |
| 0.169 | 2.8588 | 14900 | 0.2779 |
| 0.1604 | 2.8684 | 14950 | 0.2765 |
| 0.1746 | 2.8780 | 15000 | 0.2788 |
| 0.1795 | 2.8876 | 15050 | 0.2771 |
| 0.1661 | 2.8972 | 15100 | 0.2799 |
| 0.1893 | 2.9068 | 15150 | 0.2781 |
| 0.1665 | 2.9163 | 15200 | 0.2763 |
| 0.1781 | 2.9259 | 15250 | 0.2768 |
| 0.1634 | 2.9355 | 15300 | 0.2774 |
| 0.1713 | 2.9451 | 15350 | 0.2774 |
| 0.1676 | 2.9547 | 15400 | 0.2774 |
| 0.1927 | 2.9643 | 15450 | 0.2758 |
| 0.1667 | 2.9739 | 15500 | 0.2753 |
| 0.1791 | 2.9835 | 15550 | 0.2755 |
| 0.1817 | 2.9931 | 15600 | 0.2751 |
| 0.1526 | 3.0027 | 15650 | 0.2775 |
| 0.128 | 3.0123 | 15700 | 0.2942 |
| 0.1235 | 3.0219 | 15750 | 0.2959 |
| 0.1385 | 3.0315 | 15800 | 0.2932 |
| 0.1259 | 3.0411 | 15850 | 0.2946 |
| 0.1148 | 3.0507 | 15900 | 0.2958 |
| 0.121 | 3.0602 | 15950 | 0.2957 |
| 0.1137 | 3.0698 | 16000 | 0.2982 |
| 0.1181 | 3.0794 | 16050 | 0.2982 |
| 0.1185 | 3.0890 | 16100 | 0.2949 |
| 0.1254 | 3.0986 | 16150 | 0.2965 |
| 0.1261 | 3.1082 | 16200 | 0.2965 |
| 0.1202 | 3.1178 | 16250 | 0.3008 |
| 0.1151 | 3.1274 | 16300 | 0.2980 |
| 0.1284 | 3.1370 | 16350 | 0.2976 |
| 0.1353 | 3.1466 | 16400 | 0.2961 |
| 0.1149 | 3.1562 | 16450 | 0.2995 |
| 0.1254 | 3.1658 | 16500 | 0.3002 |
| 0.1318 | 3.1754 | 16550 | 0.2975 |
| 0.1289 | 3.1850 | 16600 | 0.2955 |
| 0.1286 | 3.1946 | 16650 | 0.3016 |
| 0.1104 | 3.2041 | 16700 | 0.2981 |
| 0.1087 | 3.2137 | 16750 | 0.2984 |
| 0.1325 | 3.2233 | 16800 | 0.2953 |
| 0.113 | 3.2329 | 16850 | 0.2972 |
| 0.1335 | 3.2425 | 16900 | 0.2984 |
| 0.1189 | 3.2521 | 16950 | 0.2981 |
| 0.1177 | 3.2617 | 17000 | 0.3003 |
| 0.1065 | 3.2713 | 17050 | 0.2980 |
| 0.1262 | 3.2809 | 17100 | 0.2970 |
| 0.1248 | 3.2905 | 17150 | 0.2972 |
| 0.1229 | 3.3001 | 17200 | 0.2961 |
| 0.1339 | 3.3097 | 17250 | 0.2953 |
| 0.1216 | 3.3193 | 17300 | 0.2984 |
| 0.1239 | 3.3289 | 17350 | 0.2974 |
| 0.1182 | 3.3384 | 17400 | 0.2956 |
| 0.1237 | 3.3480 | 17450 | 0.2967 |
| 0.1208 | 3.3576 | 17500 | 0.2973 |
| 0.1378 | 3.3672 | 17550 | 0.2994 |
| 0.1199 | 3.3768 | 17600 | 0.2975 |
| 0.1295 | 3.3864 | 17650 | 0.2990 |
| 0.127 | 3.3960 | 17700 | 0.2985 |
| 0.129 | 3.4056 | 17750 | 0.3004 |
| 0.1227 | 3.4152 | 17800 | 0.2993 |
| 0.1188 | 3.4248 | 17850 | 0.2994 |
| 0.1243 | 3.4344 | 17900 | 0.2992 |
| 0.1246 | 3.4440 | 17950 | 0.2973 |
| 0.1252 | 3.4536 | 18000 | 0.2989 |
| 0.128 | 3.4632 | 18050 | 0.2976 |
| 0.1359 | 3.4728 | 18100 | 0.2966 |
| 0.1194 | 3.4823 | 18150 | 0.2959 |
| 0.1213 | 3.4919 | 18200 | 0.2964 |
| 0.1161 | 3.5015 | 18250 | 0.2965 |
| 0.1209 | 3.5111 | 18300 | 0.2974 |
| 0.1255 | 3.5207 | 18350 | 0.2955 |
| 0.1319 | 3.5303 | 18400 | 0.2965 |
| 0.118 | 3.5399 | 18450 | 0.2990 |
| 0.1317 | 3.5495 | 18500 | 0.2964 |
| 0.1302 | 3.5591 | 18550 | 0.2956 |
| 0.1289 | 3.5687 | 18600 | 0.2968 |
| 0.1363 | 3.5783 | 18650 | 0.2959 |
| 0.1301 | 3.5879 | 18700 | 0.2954 |
| 0.123 | 3.5975 | 18750 | 0.2969 |
| 0.1189 | 3.6071 | 18800 | 0.2969 |
| 0.1151 | 3.6167 | 18850 | 0.2951 |
| 0.1296 | 3.6262 | 18900 | 0.2963 |
| 0.1264 | 3.6358 | 18950 | 0.2958 |
| 0.1151 | 3.6454 | 19000 | 0.2967 |
| 0.1193 | 3.6550 | 19050 | 0.2974 |
| 0.13 | 3.6646 | 19100 | 0.2962 |
| 0.1324 | 3.6742 | 19150 | 0.2956 |
| 0.1126 | 3.6838 | 19200 | 0.2975 |
| 0.1223 | 3.6934 | 19250 | 0.2965 |
| 0.1257 | 3.7030 | 19300 | 0.2970 |
| 0.1236 | 3.7126 | 19350 | 0.2976 |
| 0.1255 | 3.7222 | 19400 | 0.2970 |
| 0.1174 | 3.7318 | 19450 | 0.2974 |
| 0.1256 | 3.7414 | 19500 | 0.2966 |
| 0.1131 | 3.7510 | 19550 | 0.2971 |
| 0.1296 | 3.7606 | 19600 | 0.2961 |
| 0.1291 | 3.7701 | 19650 | 0.2947 |
| 0.1232 | 3.7797 | 19700 | 0.2951 |
| 0.1257 | 3.7893 | 19750 | 0.2958 |
| 0.1123 | 3.7989 | 19800 | 0.2954 |
| 0.1234 | 3.8085 | 19850 | 0.2952 |
| 0.1224 | 3.8181 | 19900 | 0.2951 |
| 0.1125 | 3.8277 | 19950 | 0.2960 |
| 0.1349 | 3.8373 | 20000 | 0.2951 |
| 0.1274 | 3.8469 | 20050 | 0.2950 |
| 0.1169 | 3.8565 | 20100 | 0.2948 |
| 0.1202 | 3.8661 | 20150 | 0.2953 |
| 0.1239 | 3.8757 | 20200 | 0.2960 |
| 0.1237 | 3.8853 | 20250 | 0.2956 |
| 0.1126 | 3.8949 | 20300 | 0.2957 |
| 0.121 | 3.9045 | 20350 | 0.2956 |
| 0.1319 | 3.9140 | 20400 | 0.2953 |
| 0.1274 | 3.9236 | 20450 | 0.2952 |
| 0.116 | 3.9332 | 20500 | 0.2951 |
| 0.1189 | 3.9428 | 20550 | 0.2953 |
| 0.1224 | 3.9524 | 20600 | 0.2956 |
| 0.1133 | 3.9620 | 20650 | 0.2958 |
| 0.1216 | 3.9716 | 20700 | 0.2956 |
| 0.1229 | 3.9812 | 20750 | 0.2954 |
| 0.1228 | 3.9908 | 20800 | 0.2953 |
### Framework versions
- PEFT 0.12.0
- Transformers 4.44.2
- Pytorch 2.3.1+cu121
- Datasets 2.21.0
- Tokenizers 0.19.1
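
As a usage reference, a minimal loading sketch (not part of the autogenerated card): this checkpoint is a PEFT/LoRA adapter for codellama/CodeLlama-7b-Instruct-hf, so it is used by attaching the adapter weights to the base model. The adapter repo id below is a placeholder for this repository, and the prompt format is only illustrative.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_ID = "codellama/CodeLlama-7b-Instruct-hf"
ADAPTER_ID = "your-username/your-codellama-adapter"  # placeholder: replace with this adapter's repo id

# Load the frozen base model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained(BASE_ID)
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_ID,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Attach the LoRA adapter (adapter_config.json + adapter_model.safetensors).
model = PeftModel.from_pretrained(base_model, ADAPTER_ID)
model.eval()

# Minimal generation example with an instruct-style prompt.
prompt = "[INST] Write a Python function that reverses a string. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```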
adapter_config.json
CHANGED
@@ -20,13 +20,13 @@
    "rank_pattern": {},
    "revision": null,
    "target_modules": [
        "q_proj",
        "v_proj",
        "up_proj",
        "o_proj",
        "k_proj",
        "down_proj",
        "gate_proj"
    ],
    "task_type": "CAUSAL_LM",
    "use_dora": false,
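For orientation only (not part of the commit), the updated target_modules list above corresponds to a PEFT LoraConfig along the lines of the sketch below; r, lora_alpha, and lora_dropout are illustrative placeholders, since those fields are not shown in this hunk of adapter_config.json.

```python
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,               # placeholder rank (not shown in this hunk)
    lora_alpha=32,      # placeholder scaling (not shown in this hunk)
    lora_dropout=0.05,  # placeholder dropout (not shown in this hunk)
    target_modules=[
        "q_proj", "v_proj", "up_proj", "o_proj",
        "k_proj", "down_proj", "gate_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)
```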
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e60e7dad8f622e1f7b8f479a1503a619945de512d8cdaa0355818d6d2dd3f074
size 2332095256
runs/Aug25_12-47-45_8e0006e4f5fe/events.out.tfevents.1724590078.8e0006e4f5fe.554.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:688d654805ceb2e98167efe02e2db59e3b5316545db32a344d2e9be15e9d3063
size 137324
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:456334575237d727227a0b4fc8a586c5cd0f18ae5d588a52aa6f356fc5d53410
size 5496