{
  "best_metric": 0.011418873444199562,
  "best_model_checkpoint": "beit-banana-diseases/checkpoint-264",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 264,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 21.393463134765625,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 1.9713,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.38655662536621,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 1.9772,
      "step": 2
    },
    {
      "epoch": 0.03,
      "grad_norm": 21.854021072387695,
      "learning_rate": 5.555555555555556e-06,
      "loss": 1.7078,
      "step": 3
    },
    {
      "epoch": 0.05,
      "grad_norm": 24.193124771118164,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 2.1819,
      "step": 4
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.156389236450195,
      "learning_rate": 9.259259259259259e-06,
      "loss": 1.7843,
      "step": 5
    },
    {
      "epoch": 0.07,
      "grad_norm": 21.425861358642578,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 1.5904,
      "step": 6
    },
    {
      "epoch": 0.08,
      "grad_norm": 23.77149200439453,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 1.8766,
      "step": 7
    },
    {
      "epoch": 0.09,
      "grad_norm": 21.238325119018555,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 1.6849,
      "step": 8
    },
    {
      "epoch": 0.1,
      "grad_norm": 21.050870895385742,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.9811,
      "step": 9
    },
    {
      "epoch": 0.11,
      "grad_norm": 21.381715774536133,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 1.7029,
      "step": 10
    },
    {
      "epoch": 0.12,
      "grad_norm": 23.12903594970703,
      "learning_rate": 2.037037037037037e-05,
      "loss": 1.5096,
      "step": 11
    },
    {
      "epoch": 0.14,
      "grad_norm": 21.264657974243164,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 1.748,
      "step": 12
    },
    {
      "epoch": 0.15,
      "grad_norm": 22.664464950561523,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 1.5399,
      "step": 13
    },
    {
      "epoch": 0.16,
      "grad_norm": 23.9726619720459,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 1.6543,
      "step": 14
    },
    {
      "epoch": 0.17,
      "grad_norm": 25.409692764282227,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.4427,
      "step": 15
    },
    {
      "epoch": 0.18,
      "grad_norm": 19.801799774169922,
      "learning_rate": 2.962962962962963e-05,
      "loss": 1.415,
      "step": 16
    },
    {
      "epoch": 0.19,
      "grad_norm": 18.942399978637695,
      "learning_rate": 3.148148148148148e-05,
      "loss": 1.3031,
      "step": 17
    },
    {
      "epoch": 0.2,
      "grad_norm": 17.01711654663086,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.382,
      "step": 18
    },
    {
      "epoch": 0.22,
      "grad_norm": 25.648284912109375,
      "learning_rate": 3.518518518518519e-05,
      "loss": 1.5428,
      "step": 19
    },
    {
      "epoch": 0.23,
      "grad_norm": 17.678979873657227,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 1.1447,
      "step": 20
    },
    {
      "epoch": 0.24,
      "grad_norm": 20.149110794067383,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.0376,
      "step": 21
    },
    {
      "epoch": 0.25,
      "grad_norm": 22.123849868774414,
      "learning_rate": 4.074074074074074e-05,
      "loss": 1.037,
      "step": 22
    },
    {
      "epoch": 0.26,
      "grad_norm": 16.500776290893555,
      "learning_rate": 4.259259259259259e-05,
      "loss": 0.7835,
      "step": 23
    },
    {
      "epoch": 0.27,
      "grad_norm": 24.026493072509766,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.1297,
      "step": 24
    },
    {
      "epoch": 0.28,
      "grad_norm": 15.726734161376953,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.872,
      "step": 25
    },
    {
      "epoch": 0.3,
      "grad_norm": 27.0452823638916,
      "learning_rate": 4.814814814814815e-05,
      "loss": 1.4377,
      "step": 26
    },
    {
      "epoch": 0.31,
      "grad_norm": 19.147762298583984,
      "learning_rate": 5e-05,
      "loss": 0.8577,
      "step": 27
    },
    {
      "epoch": 0.32,
      "grad_norm": 18.310501098632812,
      "learning_rate": 4.9789029535864986e-05,
      "loss": 0.7565,
      "step": 28
    },
    {
      "epoch": 0.33,
      "grad_norm": 39.3529052734375,
      "learning_rate": 4.957805907172996e-05,
      "loss": 1.1609,
      "step": 29
    },
    {
      "epoch": 0.34,
      "grad_norm": 18.195653915405273,
      "learning_rate": 4.936708860759494e-05,
      "loss": 0.7501,
      "step": 30
    },
    {
      "epoch": 0.35,
      "grad_norm": 18.973045349121094,
      "learning_rate": 4.9156118143459915e-05,
      "loss": 0.5452,
      "step": 31
    },
    {
      "epoch": 0.36,
      "grad_norm": 34.03499221801758,
      "learning_rate": 4.89451476793249e-05,
      "loss": 0.7635,
      "step": 32
    },
    {
      "epoch": 0.38,
      "grad_norm": 20.804288864135742,
      "learning_rate": 4.8734177215189874e-05,
      "loss": 0.5612,
      "step": 33
    },
    {
      "epoch": 0.39,
      "grad_norm": 17.79737663269043,
      "learning_rate": 4.852320675105486e-05,
      "loss": 0.6733,
      "step": 34
    },
    {
      "epoch": 0.4,
      "grad_norm": 16.90382194519043,
      "learning_rate": 4.8312236286919834e-05,
      "loss": 0.6375,
      "step": 35
    },
    {
      "epoch": 0.41,
      "grad_norm": 10.320564270019531,
      "learning_rate": 4.810126582278481e-05,
      "loss": 0.2782,
      "step": 36
    },
    {
      "epoch": 0.42,
      "grad_norm": 23.366121292114258,
      "learning_rate": 4.789029535864979e-05,
      "loss": 0.5793,
      "step": 37
    },
    {
      "epoch": 0.43,
      "grad_norm": 10.37669563293457,
      "learning_rate": 4.767932489451477e-05,
      "loss": 0.2511,
      "step": 38
    },
    {
      "epoch": 0.44,
      "grad_norm": 12.599074363708496,
      "learning_rate": 4.7468354430379746e-05,
      "loss": 0.3908,
      "step": 39
    },
    {
      "epoch": 0.45,
      "grad_norm": 12.652225494384766,
      "learning_rate": 4.725738396624473e-05,
      "loss": 0.382,
      "step": 40
    },
    {
      "epoch": 0.47,
      "grad_norm": 12.52493953704834,
      "learning_rate": 4.704641350210971e-05,
      "loss": 0.5196,
      "step": 41
    },
    {
      "epoch": 0.48,
      "grad_norm": 24.851703643798828,
      "learning_rate": 4.683544303797468e-05,
      "loss": 0.759,
      "step": 42
    },
    {
      "epoch": 0.49,
      "grad_norm": 20.065210342407227,
      "learning_rate": 4.6624472573839666e-05,
      "loss": 0.4902,
      "step": 43
    },
    {
      "epoch": 0.5,
      "grad_norm": 6.980092525482178,
      "learning_rate": 4.641350210970464e-05,
      "loss": 0.1772,
      "step": 44
    },
    {
      "epoch": 0.51,
      "grad_norm": 10.875155448913574,
      "learning_rate": 4.6202531645569625e-05,
      "loss": 0.2554,
      "step": 45
    },
    {
      "epoch": 0.52,
      "grad_norm": 8.317543029785156,
      "learning_rate": 4.59915611814346e-05,
      "loss": 0.1368,
      "step": 46
    },
    {
      "epoch": 0.53,
      "grad_norm": 15.295212745666504,
      "learning_rate": 4.5780590717299585e-05,
      "loss": 0.539,
      "step": 47
    },
    {
      "epoch": 0.55,
      "grad_norm": 9.764492988586426,
      "learning_rate": 4.556962025316456e-05,
      "loss": 0.3554,
      "step": 48
    },
    {
      "epoch": 0.56,
      "grad_norm": 18.125457763671875,
      "learning_rate": 4.535864978902954e-05,
      "loss": 0.5302,
      "step": 49
    },
    {
      "epoch": 0.57,
      "grad_norm": 7.70007848739624,
      "learning_rate": 4.5147679324894514e-05,
      "loss": 0.2058,
      "step": 50
    },
    {
      "epoch": 0.58,
      "grad_norm": 15.114970207214355,
      "learning_rate": 4.49367088607595e-05,
      "loss": 0.4506,
      "step": 51
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.854482889175415,
      "learning_rate": 4.4725738396624474e-05,
      "loss": 0.0844,
      "step": 52
    },
    {
      "epoch": 0.6,
      "grad_norm": 15.301838874816895,
      "learning_rate": 4.451476793248946e-05,
      "loss": 0.443,
      "step": 53
    },
    {
      "epoch": 0.61,
      "grad_norm": 12.818358421325684,
      "learning_rate": 4.430379746835443e-05,
      "loss": 0.4188,
      "step": 54
    },
    {
      "epoch": 0.62,
      "grad_norm": 20.202260971069336,
      "learning_rate": 4.409282700421941e-05,
      "loss": 0.518,
      "step": 55
    },
    {
      "epoch": 0.64,
      "grad_norm": 10.593685150146484,
      "learning_rate": 4.388185654008439e-05,
      "loss": 0.2015,
      "step": 56
    },
    {
      "epoch": 0.65,
      "grad_norm": 6.216127872467041,
      "learning_rate": 4.367088607594937e-05,
      "loss": 0.1223,
      "step": 57
    },
    {
      "epoch": 0.66,
      "grad_norm": 8.717305183410645,
      "learning_rate": 4.345991561181435e-05,
      "loss": 0.1161,
      "step": 58
    },
    {
      "epoch": 0.67,
      "grad_norm": 5.678544998168945,
      "learning_rate": 4.324894514767933e-05,
      "loss": 0.1481,
      "step": 59
    },
    {
      "epoch": 0.68,
      "grad_norm": 12.164253234863281,
      "learning_rate": 4.3037974683544305e-05,
      "loss": 0.3593,
      "step": 60
    },
    {
      "epoch": 0.69,
      "grad_norm": 9.91303825378418,
      "learning_rate": 4.282700421940928e-05,
      "loss": 0.2332,
      "step": 61
    },
    {
      "epoch": 0.7,
      "grad_norm": 11.6513090133667,
      "learning_rate": 4.2616033755274265e-05,
      "loss": 0.2646,
      "step": 62
    },
    {
      "epoch": 0.72,
      "grad_norm": 10.128318786621094,
      "learning_rate": 4.240506329113924e-05,
      "loss": 0.166,
      "step": 63
    },
    {
      "epoch": 0.73,
      "grad_norm": 7.344198226928711,
      "learning_rate": 4.2194092827004224e-05,
      "loss": 0.1824,
      "step": 64
    },
    {
      "epoch": 0.74,
      "grad_norm": 14.342442512512207,
      "learning_rate": 4.19831223628692e-05,
      "loss": 0.2132,
      "step": 65
    },
    {
      "epoch": 0.75,
      "grad_norm": 9.733878135681152,
      "learning_rate": 4.177215189873418e-05,
      "loss": 0.1968,
      "step": 66
    },
    {
      "epoch": 0.76,
      "grad_norm": 6.932378768920898,
      "learning_rate": 4.1561181434599153e-05,
      "loss": 0.1487,
      "step": 67
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.614574909210205,
      "learning_rate": 4.135021097046414e-05,
      "loss": 0.0385,
      "step": 68
    },
    {
      "epoch": 0.78,
      "grad_norm": 12.623132705688477,
      "learning_rate": 4.113924050632912e-05,
      "loss": 0.1805,
      "step": 69
    },
    {
      "epoch": 0.8,
      "grad_norm": 8.182653427124023,
      "learning_rate": 4.0928270042194096e-05,
      "loss": 0.1422,
      "step": 70
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.2609386444091797,
      "learning_rate": 4.071729957805907e-05,
      "loss": 0.0578,
      "step": 71
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.1661690473556519,
      "learning_rate": 4.050632911392405e-05,
      "loss": 0.0243,
      "step": 72
    },
    {
      "epoch": 0.83,
      "grad_norm": 18.96497917175293,
      "learning_rate": 4.029535864978903e-05,
      "loss": 0.3047,
      "step": 73
    },
    {
      "epoch": 0.84,
      "grad_norm": 16.800785064697266,
      "learning_rate": 4.008438818565401e-05,
      "loss": 0.4561,
      "step": 74
    },
    {
      "epoch": 0.85,
      "grad_norm": 25.772275924682617,
      "learning_rate": 3.987341772151899e-05,
      "loss": 0.5251,
      "step": 75
    },
    {
      "epoch": 0.86,
      "grad_norm": 10.185676574707031,
      "learning_rate": 3.966244725738397e-05,
      "loss": 0.2006,
      "step": 76
    },
    {
      "epoch": 0.88,
      "grad_norm": 6.298079013824463,
      "learning_rate": 3.945147679324895e-05,
      "loss": 0.0788,
      "step": 77
    },
    {
      "epoch": 0.89,
      "grad_norm": 13.123534202575684,
      "learning_rate": 3.924050632911392e-05,
      "loss": 0.2158,
      "step": 78
    },
    {
      "epoch": 0.9,
      "grad_norm": 4.510250568389893,
      "learning_rate": 3.9029535864978904e-05,
      "loss": 0.0525,
      "step": 79
    },
    {
      "epoch": 0.91,
      "grad_norm": 8.665478706359863,
      "learning_rate": 3.881856540084388e-05,
      "loss": 0.1001,
      "step": 80
    },
    {
      "epoch": 0.92,
      "grad_norm": 5.559294700622559,
      "learning_rate": 3.8607594936708864e-05,
      "loss": 0.0602,
      "step": 81
    },
    {
      "epoch": 0.93,
      "grad_norm": 5.936877250671387,
      "learning_rate": 3.839662447257384e-05,
      "loss": 0.0586,
      "step": 82
    },
    {
      "epoch": 0.94,
      "grad_norm": 4.139342784881592,
      "learning_rate": 3.8185654008438823e-05,
      "loss": 0.0709,
      "step": 83
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.722638487815857,
      "learning_rate": 3.79746835443038e-05,
      "loss": 0.0264,
      "step": 84
    },
    {
      "epoch": 0.97,
      "grad_norm": 7.886575222015381,
      "learning_rate": 3.7763713080168776e-05,
      "loss": 0.1474,
      "step": 85
    },
    {
      "epoch": 0.98,
      "grad_norm": 3.067516803741455,
      "learning_rate": 3.755274261603376e-05,
      "loss": 0.0431,
      "step": 86
    },
    {
      "epoch": 0.99,
      "grad_norm": 7.004971027374268,
      "learning_rate": 3.7341772151898736e-05,
      "loss": 0.0518,
      "step": 87
    },
    {
      "epoch": 1.0,
      "grad_norm": 42.861572265625,
      "learning_rate": 3.713080168776372e-05,
      "loss": 0.686,
      "step": 88
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.935064935064935,
      "eval_f1_macro": 0.9315281668222845,
      "eval_f1_micro": 0.935064935064935,
      "eval_f1_weighted": 0.9315281668222845,
      "eval_loss": 0.2894458770751953,
      "eval_precision_macro": 0.9553571428571429,
      "eval_precision_micro": 0.935064935064935,
      "eval_precision_weighted": 0.9553571428571429,
      "eval_recall_macro": 0.935064935064935,
      "eval_recall_micro": 0.935064935064935,
      "eval_recall_weighted": 0.935064935064935,
      "eval_runtime": 23.6193,
      "eval_samples_per_second": 3.26,
      "eval_steps_per_second": 0.212,
      "step": 88
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.910532832145691,
      "learning_rate": 3.6919831223628695e-05,
      "loss": 0.0282,
      "step": 89
    },
    {
      "epoch": 1.02,
      "grad_norm": 2.4399540424346924,
      "learning_rate": 3.670886075949367e-05,
      "loss": 0.0293,
      "step": 90
    },
    {
      "epoch": 1.03,
      "grad_norm": 2.8237359523773193,
      "learning_rate": 3.649789029535865e-05,
      "loss": 0.0184,
      "step": 91
    },
    {
      "epoch": 1.05,
      "grad_norm": 5.545835971832275,
      "learning_rate": 3.628691983122363e-05,
      "loss": 0.0542,
      "step": 92
    },
    {
      "epoch": 1.06,
      "grad_norm": 23.268957138061523,
      "learning_rate": 3.607594936708861e-05,
      "loss": 0.3274,
      "step": 93
    },
    {
      "epoch": 1.07,
      "grad_norm": 38.25579833984375,
      "learning_rate": 3.586497890295359e-05,
      "loss": 0.8174,
      "step": 94
    },
    {
      "epoch": 1.08,
      "grad_norm": 29.19462776184082,
      "learning_rate": 3.565400843881857e-05,
      "loss": 0.4695,
      "step": 95
    },
    {
      "epoch": 1.09,
      "grad_norm": 15.189168930053711,
      "learning_rate": 3.5443037974683544e-05,
      "loss": 0.5005,
      "step": 96
    },
    {
      "epoch": 1.1,
      "grad_norm": 13.557092666625977,
      "learning_rate": 3.523206751054853e-05,
      "loss": 0.1581,
      "step": 97
    },
    {
      "epoch": 1.11,
      "grad_norm": 24.019737243652344,
      "learning_rate": 3.50210970464135e-05,
      "loss": 0.1921,
      "step": 98
    },
    {
      "epoch": 1.12,
      "grad_norm": 8.620636940002441,
      "learning_rate": 3.4810126582278487e-05,
      "loss": 0.1287,
      "step": 99
    },
    {
      "epoch": 1.14,
      "grad_norm": 27.298948287963867,
      "learning_rate": 3.459915611814346e-05,
      "loss": 0.1861,
      "step": 100
    },
    {
      "epoch": 1.15,
      "grad_norm": 2.0457346439361572,
      "learning_rate": 3.438818565400844e-05,
      "loss": 0.0309,
      "step": 101
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.20146967470645905,
      "learning_rate": 3.4177215189873416e-05,
      "loss": 0.0051,
      "step": 102
    },
    {
      "epoch": 1.17,
      "grad_norm": 5.008052825927734,
      "learning_rate": 3.39662447257384e-05,
      "loss": 0.0413,
      "step": 103
    },
    {
      "epoch": 1.18,
      "grad_norm": 7.44640588760376,
      "learning_rate": 3.3755274261603375e-05,
      "loss": 0.0625,
      "step": 104
    },
    {
      "epoch": 1.19,
      "grad_norm": 1.3147083520889282,
      "learning_rate": 3.354430379746836e-05,
      "loss": 0.0202,
      "step": 105
    },
    {
      "epoch": 1.2,
      "grad_norm": 30.254919052124023,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.5385,
      "step": 106
    },
    {
      "epoch": 1.22,
      "grad_norm": 8.866931915283203,
      "learning_rate": 3.312236286919831e-05,
      "loss": 0.0891,
      "step": 107
    },
    {
      "epoch": 1.23,
      "grad_norm": 6.436508655548096,
      "learning_rate": 3.291139240506329e-05,
      "loss": 0.0887,
      "step": 108
    },
    {
      "epoch": 1.24,
      "grad_norm": 25.527908325195312,
      "learning_rate": 3.270042194092827e-05,
      "loss": 0.4717,
      "step": 109
    },
    {
      "epoch": 1.25,
      "grad_norm": 20.949968338012695,
      "learning_rate": 3.248945147679325e-05,
      "loss": 0.2791,
      "step": 110
    },
    {
      "epoch": 1.26,
      "grad_norm": 1.1503649950027466,
      "learning_rate": 3.227848101265823e-05,
      "loss": 0.0226,
      "step": 111
    },
    {
      "epoch": 1.27,
      "grad_norm": 11.437671661376953,
      "learning_rate": 3.2067510548523214e-05,
      "loss": 0.1807,
      "step": 112
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.25332069396972656,
      "learning_rate": 3.185654008438819e-05,
      "loss": 0.0062,
      "step": 113
    },
    {
      "epoch": 1.3,
      "grad_norm": 1.8780378103256226,
      "learning_rate": 3.1645569620253167e-05,
      "loss": 0.0284,
      "step": 114
    },
    {
      "epoch": 1.31,
      "grad_norm": 6.392487049102783,
      "learning_rate": 3.143459915611814e-05,
      "loss": 0.06,
      "step": 115
    },
    {
      "epoch": 1.32,
      "grad_norm": 23.06869888305664,
      "learning_rate": 3.1223628691983126e-05,
      "loss": 0.3186,
      "step": 116
    },
    {
      "epoch": 1.33,
      "grad_norm": 9.919283866882324,
      "learning_rate": 3.10126582278481e-05,
      "loss": 0.0903,
      "step": 117
    },
    {
      "epoch": 1.34,
      "grad_norm": 13.233536720275879,
      "learning_rate": 3.0801687763713086e-05,
      "loss": 0.1328,
      "step": 118
    },
    {
      "epoch": 1.35,
      "grad_norm": 18.54045867919922,
      "learning_rate": 3.059071729957806e-05,
      "loss": 0.2366,
      "step": 119
    },
    {
      "epoch": 1.36,
      "grad_norm": 1.4564893245697021,
      "learning_rate": 3.0379746835443042e-05,
      "loss": 0.0253,
      "step": 120
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.2900470495223999,
      "learning_rate": 3.0168776371308015e-05,
      "loss": 0.0065,
      "step": 121
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.4513213038444519,
      "learning_rate": 2.9957805907172998e-05,
      "loss": 0.0087,
      "step": 122
    },
    {
      "epoch": 1.4,
      "grad_norm": 4.35510778427124,
      "learning_rate": 2.9746835443037974e-05,
      "loss": 0.0477,
      "step": 123
    },
    {
      "epoch": 1.41,
      "grad_norm": 2.07529354095459,
      "learning_rate": 2.9535864978902954e-05,
      "loss": 0.0166,
      "step": 124
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.21565364301204681,
      "learning_rate": 2.9324894514767937e-05,
      "loss": 0.0047,
      "step": 125
    },
    {
      "epoch": 1.43,
      "grad_norm": 1.532738208770752,
      "learning_rate": 2.9113924050632914e-05,
      "loss": 0.0174,
      "step": 126
    },
    {
      "epoch": 1.44,
      "grad_norm": 7.4955949783325195,
      "learning_rate": 2.8902953586497894e-05,
      "loss": 0.1436,
      "step": 127
    },
    {
      "epoch": 1.45,
      "grad_norm": 27.709796905517578,
      "learning_rate": 2.869198312236287e-05,
      "loss": 0.784,
      "step": 128
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.33867835998535156,
      "learning_rate": 2.848101265822785e-05,
      "loss": 0.0058,
      "step": 129
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.512848436832428,
      "learning_rate": 2.8270042194092826e-05,
      "loss": 0.0065,
      "step": 130
    },
    {
      "epoch": 1.49,
      "grad_norm": 18.563438415527344,
      "learning_rate": 2.805907172995781e-05,
      "loss": 0.1981,
      "step": 131
    },
    {
      "epoch": 1.5,
      "grad_norm": 18.82804298400879,
      "learning_rate": 2.7848101265822786e-05,
      "loss": 0.1722,
      "step": 132
    },
    {
      "epoch": 1.51,
      "grad_norm": 4.428062438964844,
      "learning_rate": 2.7637130801687766e-05,
      "loss": 0.038,
      "step": 133
    },
    {
      "epoch": 1.52,
      "grad_norm": 3.624861478805542,
      "learning_rate": 2.7426160337552742e-05,
      "loss": 0.0346,
      "step": 134
    },
    {
      "epoch": 1.53,
      "grad_norm": 6.063285827636719,
      "learning_rate": 2.7215189873417722e-05,
      "loss": 0.0494,
      "step": 135
    },
    {
      "epoch": 1.55,
      "grad_norm": 12.928357124328613,
      "learning_rate": 2.7004219409282698e-05,
      "loss": 0.3157,
      "step": 136
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.6940982341766357,
      "learning_rate": 2.679324894514768e-05,
      "loss": 0.0192,
      "step": 137
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.3945218622684479,
      "learning_rate": 2.6582278481012658e-05,
      "loss": 0.0058,
      "step": 138
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.15542316436767578,
      "learning_rate": 2.6371308016877638e-05,
      "loss": 0.0032,
      "step": 139
    },
    {
      "epoch": 1.59,
      "grad_norm": 0.11174587905406952,
      "learning_rate": 2.616033755274262e-05,
      "loss": 0.0025,
      "step": 140
    },
    {
      "epoch": 1.6,
      "grad_norm": 23.488004684448242,
      "learning_rate": 2.5949367088607597e-05,
      "loss": 0.8641,
      "step": 141
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.5066362023353577,
      "learning_rate": 2.5738396624472577e-05,
      "loss": 0.0083,
      "step": 142
    },
    {
      "epoch": 1.62,
      "grad_norm": 32.906803131103516,
      "learning_rate": 2.5527426160337553e-05,
      "loss": 0.6371,
      "step": 143
    },
    {
      "epoch": 1.64,
      "grad_norm": 11.186979293823242,
      "learning_rate": 2.5316455696202533e-05,
      "loss": 0.1184,
      "step": 144
    },
    {
      "epoch": 1.65,
      "grad_norm": 26.368669509887695,
      "learning_rate": 2.510548523206751e-05,
      "loss": 0.2113,
      "step": 145
    },
    {
      "epoch": 1.66,
      "grad_norm": 2.7630584239959717,
      "learning_rate": 2.4894514767932493e-05,
      "loss": 0.0236,
      "step": 146
    },
    {
      "epoch": 1.67,
      "grad_norm": 26.372888565063477,
      "learning_rate": 2.468354430379747e-05,
      "loss": 0.339,
      "step": 147
    },
    {
      "epoch": 1.68,
      "grad_norm": 7.727011203765869,
      "learning_rate": 2.447257383966245e-05,
      "loss": 0.091,
      "step": 148
    },
    {
      "epoch": 1.69,
      "grad_norm": 7.276378631591797,
      "learning_rate": 2.426160337552743e-05,
      "loss": 0.0345,
      "step": 149
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.4434276819229126,
      "learning_rate": 2.4050632911392405e-05,
      "loss": 0.0191,
      "step": 150
    },
    {
      "epoch": 1.72,
      "grad_norm": 1.5443305969238281,
      "learning_rate": 2.3839662447257385e-05,
      "loss": 0.0139,
      "step": 151
    },
    {
      "epoch": 1.73,
      "grad_norm": 7.824341297149658,
      "learning_rate": 2.3628691983122365e-05,
      "loss": 0.1015,
      "step": 152
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.20900146663188934,
      "learning_rate": 2.341772151898734e-05,
      "loss": 0.0033,
      "step": 153
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.32141363620758057,
      "learning_rate": 2.320675105485232e-05,
      "loss": 0.0059,
      "step": 154
    },
    {
      "epoch": 1.76,
      "grad_norm": 21.58867073059082,
      "learning_rate": 2.29957805907173e-05,
      "loss": 0.5102,
      "step": 155
    },
    {
      "epoch": 1.77,
      "grad_norm": 1.1995337009429932,
      "learning_rate": 2.278481012658228e-05,
      "loss": 0.0088,
      "step": 156
    },
    {
      "epoch": 1.78,
      "grad_norm": 8.670762062072754,
      "learning_rate": 2.2573839662447257e-05,
      "loss": 0.0449,
      "step": 157
    },
    {
      "epoch": 1.8,
      "grad_norm": 43.51858139038086,
      "learning_rate": 2.2362869198312237e-05,
      "loss": 0.7478,
      "step": 158
    },
    {
      "epoch": 1.81,
      "grad_norm": 21.07337760925293,
      "learning_rate": 2.2151898734177217e-05,
      "loss": 0.4485,
      "step": 159
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.5225343704223633,
      "learning_rate": 2.1940928270042196e-05,
      "loss": 0.0063,
      "step": 160
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.6471118330955505,
      "learning_rate": 2.1729957805907176e-05,
      "loss": 0.0052,
      "step": 161
    },
    {
      "epoch": 1.84,
      "grad_norm": 5.603309154510498,
      "learning_rate": 2.1518987341772153e-05,
      "loss": 0.0676,
      "step": 162
    },
    {
      "epoch": 1.85,
      "grad_norm": 18.191370010375977,
      "learning_rate": 2.1308016877637132e-05,
      "loss": 0.1428,
      "step": 163
    },
    {
      "epoch": 1.86,
      "grad_norm": 15.37728214263916,
      "learning_rate": 2.1097046413502112e-05,
      "loss": 0.4738,
      "step": 164
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.964952290058136,
      "learning_rate": 2.088607594936709e-05,
      "loss": 0.0135,
      "step": 165
    },
    {
      "epoch": 1.89,
      "grad_norm": 20.822654724121094,
      "learning_rate": 2.067510548523207e-05,
      "loss": 0.359,
      "step": 166
    },
    {
      "epoch": 1.9,
      "grad_norm": 15.292733192443848,
      "learning_rate": 2.0464135021097048e-05,
      "loss": 0.3184,
      "step": 167
    },
    {
      "epoch": 1.91,
      "grad_norm": 1.5272413492202759,
      "learning_rate": 2.0253164556962025e-05,
      "loss": 0.0186,
      "step": 168
    },
    {
      "epoch": 1.92,
      "grad_norm": 17.016498565673828,
      "learning_rate": 2.0042194092827004e-05,
      "loss": 0.2154,
      "step": 169
    },
    {
      "epoch": 1.93,
      "grad_norm": 1.4779866933822632,
      "learning_rate": 1.9831223628691984e-05,
      "loss": 0.0165,
      "step": 170
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.8727405071258545,
      "learning_rate": 1.962025316455696e-05,
      "loss": 0.0102,
      "step": 171
    },
    {
      "epoch": 1.95,
      "grad_norm": 13.600626945495605,
      "learning_rate": 1.940928270042194e-05,
      "loss": 0.1604,
      "step": 172
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.77298504114151,
      "learning_rate": 1.919831223628692e-05,
      "loss": 0.0094,
      "step": 173
    },
    {
      "epoch": 1.98,
      "grad_norm": 8.220309257507324,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.0887,
      "step": 174
    },
    {
      "epoch": 1.99,
      "grad_norm": 15.369383811950684,
      "learning_rate": 1.877637130801688e-05,
      "loss": 0.3962,
      "step": 175
    },
    {
      "epoch": 2.0,
      "grad_norm": 7.943267822265625,
      "learning_rate": 1.856540084388186e-05,
      "loss": 0.0342,
      "step": 176
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.961038961038961,
      "eval_f1_macro": 0.9603007518796992,
      "eval_f1_micro": 0.961038961038961,
      "eval_f1_weighted": 0.9603007518796992,
      "eval_loss": 0.05716231092810631,
      "eval_precision_macro": 0.9693877551020408,
      "eval_precision_micro": 0.961038961038961,
      "eval_precision_weighted": 0.9693877551020408,
      "eval_recall_macro": 0.961038961038961,
      "eval_recall_micro": 0.961038961038961,
      "eval_recall_weighted": 0.961038961038961,
      "eval_runtime": 23.4765,
      "eval_samples_per_second": 3.28,
      "eval_steps_per_second": 0.213,
      "step": 176
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.05514498054981232,
      "learning_rate": 1.8354430379746836e-05,
      "loss": 0.0014,
      "step": 177
    },
    {
      "epoch": 2.02,
      "grad_norm": 2.1552014350891113,
      "learning_rate": 1.8143459915611816e-05,
      "loss": 0.0202,
      "step": 178
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.025016020983457565,
      "learning_rate": 1.7932489451476795e-05,
      "loss": 0.0007,
      "step": 179
    },
    {
      "epoch": 2.05,
      "grad_norm": 15.627620697021484,
      "learning_rate": 1.7721518987341772e-05,
      "loss": 0.1231,
      "step": 180
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.46118250489234924,
      "learning_rate": 1.751054852320675e-05,
      "loss": 0.0063,
      "step": 181
    },
    {
      "epoch": 2.07,
      "grad_norm": 15.023947715759277,
      "learning_rate": 1.729957805907173e-05,
      "loss": 0.1147,
      "step": 182
    },
    {
      "epoch": 2.08,
      "grad_norm": 11.93247127532959,
      "learning_rate": 1.7088607594936708e-05,
      "loss": 0.0811,
      "step": 183
    },
    {
      "epoch": 2.09,
      "grad_norm": 2.5019609928131104,
      "learning_rate": 1.6877637130801688e-05,
      "loss": 0.0119,
      "step": 184
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.09159235656261444,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.0014,
      "step": 185
    },
    {
      "epoch": 2.11,
      "grad_norm": 26.242231369018555,
      "learning_rate": 1.6455696202531644e-05,
      "loss": 0.236,
      "step": 186
    },
    {
      "epoch": 2.12,
      "grad_norm": 2.3096747398376465,
      "learning_rate": 1.6244725738396624e-05,
      "loss": 0.0093,
      "step": 187
    },
    {
      "epoch": 2.14,
      "grad_norm": 0.5707495212554932,
      "learning_rate": 1.6033755274261607e-05,
      "loss": 0.0059,
      "step": 188
    },
    {
      "epoch": 2.15,
      "grad_norm": 6.667356014251709,
      "learning_rate": 1.5822784810126583e-05,
      "loss": 0.0563,
      "step": 189
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.10668758302927017,
      "learning_rate": 1.5611814345991563e-05,
      "loss": 0.0023,
      "step": 190
    },
    {
      "epoch": 2.17,
      "grad_norm": 4.116157531738281,
      "learning_rate": 1.5400843881856543e-05,
      "loss": 0.0335,
      "step": 191
    },
    {
      "epoch": 2.18,
      "grad_norm": 26.253101348876953,
      "learning_rate": 1.5189873417721521e-05,
      "loss": 0.1706,
      "step": 192
    },
    {
      "epoch": 2.19,
      "grad_norm": 1.0219850540161133,
      "learning_rate": 1.4978902953586499e-05,
      "loss": 0.0066,
      "step": 193
    },
    {
      "epoch": 2.2,
      "grad_norm": 41.47359848022461,
      "learning_rate": 1.4767932489451477e-05,
      "loss": 0.6927,
      "step": 194
    },
    {
      "epoch": 2.22,
      "grad_norm": 9.33047866821289,
      "learning_rate": 1.4556962025316457e-05,
      "loss": 0.0786,
      "step": 195
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.5574650168418884,
      "learning_rate": 1.4345991561181435e-05,
      "loss": 0.0057,
      "step": 196
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.4510033428668976,
      "learning_rate": 1.4135021097046413e-05,
      "loss": 0.0043,
      "step": 197
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.07699360698461533,
      "learning_rate": 1.3924050632911393e-05,
      "loss": 0.0015,
      "step": 198
    },
    {
      "epoch": 2.26,
      "grad_norm": 12.339035034179688,
      "learning_rate": 1.3713080168776371e-05,
      "loss": 0.1513,
      "step": 199
    },
    {
      "epoch": 2.27,
      "grad_norm": 29.081424713134766,
      "learning_rate": 1.3502109704641349e-05,
      "loss": 0.2272,
      "step": 200
    },
    {
      "epoch": 2.28,
      "grad_norm": 1.5039241313934326,
      "learning_rate": 1.3291139240506329e-05,
      "loss": 0.0113,
      "step": 201
    },
    {
      "epoch": 2.3,
      "grad_norm": 1.7569365501403809,
      "learning_rate": 1.308016877637131e-05,
      "loss": 0.0143,
      "step": 202
    },
    {
      "epoch": 2.31,
      "grad_norm": 4.633851051330566,
      "learning_rate": 1.2869198312236289e-05,
      "loss": 0.0273,
      "step": 203
    },
    {
      "epoch": 2.32,
      "grad_norm": 9.019872665405273,
      "learning_rate": 1.2658227848101267e-05,
      "loss": 0.1055,
      "step": 204
    },
    {
      "epoch": 2.33,
      "grad_norm": 0.14426438510417938,
      "learning_rate": 1.2447257383966246e-05,
      "loss": 0.0023,
      "step": 205
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.1260414719581604,
      "learning_rate": 1.2236286919831224e-05,
      "loss": 0.0023,
      "step": 206
    },
    {
      "epoch": 2.35,
      "grad_norm": 0.14926879107952118,
      "learning_rate": 1.2025316455696203e-05,
      "loss": 0.0018,
      "step": 207
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.3730136752128601,
      "learning_rate": 1.1814345991561182e-05,
      "loss": 0.0058,
      "step": 208
    },
    {
      "epoch": 2.38,
      "grad_norm": 4.101214408874512,
      "learning_rate": 1.160337552742616e-05,
      "loss": 0.0279,
      "step": 209
    },
    {
      "epoch": 2.39,
      "grad_norm": 2.671245574951172,
      "learning_rate": 1.139240506329114e-05,
      "loss": 0.023,
      "step": 210
    },
    {
      "epoch": 2.4,
      "grad_norm": 6.172547817230225,
      "learning_rate": 1.1181434599156118e-05,
      "loss": 0.0333,
      "step": 211
    },
    {
      "epoch": 2.41,
      "grad_norm": 18.09273910522461,
      "learning_rate": 1.0970464135021098e-05,
      "loss": 0.3601,
      "step": 212
    },
    {
      "epoch": 2.42,
      "grad_norm": 9.900489807128906,
      "learning_rate": 1.0759493670886076e-05,
      "loss": 0.5438,
      "step": 213
    },
    {
      "epoch": 2.43,
      "grad_norm": 27.930665969848633,
      "learning_rate": 1.0548523206751056e-05,
      "loss": 0.2069,
      "step": 214
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.493826687335968,
      "learning_rate": 1.0337552742616034e-05,
      "loss": 0.0039,
      "step": 215
    },
    {
      "epoch": 2.45,
      "grad_norm": 0.08800631016492844,
      "learning_rate": 1.0126582278481012e-05,
      "loss": 0.0015,
      "step": 216
    },
    {
      "epoch": 2.47,
      "grad_norm": 1.6704317331314087,
      "learning_rate": 9.915611814345992e-06,
      "loss": 0.0121,
      "step": 217
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.060208290815353394,
      "learning_rate": 9.70464135021097e-06,
      "loss": 0.001,
      "step": 218
    },
    {
      "epoch": 2.49,
      "grad_norm": 0.42390766739845276,
      "learning_rate": 9.49367088607595e-06,
      "loss": 0.0043,
      "step": 219
    },
    {
      "epoch": 2.5,
      "grad_norm": 5.121891021728516,
      "learning_rate": 9.28270042194093e-06,
      "loss": 0.0373,
      "step": 220
    },
    {
      "epoch": 2.51,
      "grad_norm": 0.2980020344257355,
      "learning_rate": 9.071729957805908e-06,
      "loss": 0.0019,
      "step": 221
    },
    {
      "epoch": 2.52,
      "grad_norm": 1.821443796157837,
      "learning_rate": 8.860759493670886e-06,
      "loss": 0.0096,
      "step": 222
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.2338918149471283,
      "learning_rate": 8.649789029535866e-06,
      "loss": 0.0033,
      "step": 223
    },
    {
      "epoch": 2.55,
      "grad_norm": 0.03316354379057884,
      "learning_rate": 8.438818565400844e-06,
      "loss": 0.001,
      "step": 224
    },
    {
      "epoch": 2.56,
      "grad_norm": 44.416473388671875,
      "learning_rate": 8.227848101265822e-06,
      "loss": 0.2124,
      "step": 225
    },
    {
      "epoch": 2.57,
      "grad_norm": 0.2957440912723541,
      "learning_rate": 8.016877637130803e-06,
      "loss": 0.0041,
      "step": 226
    },
    {
      "epoch": 2.58,
      "grad_norm": 18.5585994720459,
      "learning_rate": 7.805907172995782e-06,
      "loss": 0.0596,
      "step": 227
    },
    {
      "epoch": 2.59,
      "grad_norm": 5.535640716552734,
      "learning_rate": 7.5949367088607605e-06,
      "loss": 0.0476,
      "step": 228
    },
    {
      "epoch": 2.6,
      "grad_norm": 38.173789978027344,
      "learning_rate": 7.3839662447257386e-06,
      "loss": 0.6888,
      "step": 229
    },
    {
      "epoch": 2.61,
      "grad_norm": 18.576967239379883,
      "learning_rate": 7.1729957805907175e-06,
      "loss": 0.3636,
      "step": 230
    },
    {
      "epoch": 2.62,
      "grad_norm": 3.596296548843384,
      "learning_rate": 6.9620253164556965e-06,
      "loss": 0.0224,
      "step": 231
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.09587596356868744,
      "learning_rate": 6.7510548523206746e-06,
      "loss": 0.0022,
      "step": 232
    },
    {
      "epoch": 2.65,
      "grad_norm": 0.15251006186008453,
      "learning_rate": 6.540084388185655e-06,
      "loss": 0.0026,
      "step": 233
    },
    {
      "epoch": 2.66,
      "grad_norm": 0.9377198219299316,
      "learning_rate": 6.329113924050633e-06,
      "loss": 0.0081,
      "step": 234
    },
    {
      "epoch": 2.67,
      "grad_norm": 11.142963409423828,
      "learning_rate": 6.118143459915612e-06,
      "loss": 0.1642,
      "step": 235
    },
    {
      "epoch": 2.68,
      "grad_norm": 31.128833770751953,
      "learning_rate": 5.907172995780591e-06,
      "loss": 0.3947,
      "step": 236
    },
    {
      "epoch": 2.69,
      "grad_norm": 0.6137996315956116,
      "learning_rate": 5.69620253164557e-06,
      "loss": 0.0033,
      "step": 237
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.1146240383386612,
      "learning_rate": 5.485232067510549e-06,
      "loss": 0.0018,
      "step": 238
    },
    {
      "epoch": 2.72,
      "grad_norm": 14.48742389678955,
      "learning_rate": 5.274261603375528e-06,
      "loss": 0.2166,
      "step": 239
    },
    {
      "epoch": 2.73,
      "grad_norm": 1.7509044408798218,
      "learning_rate": 5.063291139240506e-06,
      "loss": 0.0174,
      "step": 240
    },
    {
      "epoch": 2.74,
      "grad_norm": 0.0605483315885067,
      "learning_rate": 4.852320675105485e-06,
      "loss": 0.001,
      "step": 241
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.1251741498708725,
      "learning_rate": 4.641350210970465e-06,
      "loss": 0.0018,
      "step": 242
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.15386824309825897,
      "learning_rate": 4.430379746835443e-06,
      "loss": 0.0025,
      "step": 243
    },
    {
      "epoch": 2.77,
      "grad_norm": 6.340598106384277,
      "learning_rate": 4.219409282700422e-06,
      "loss": 0.0342,
      "step": 244
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.18014036118984222,
      "learning_rate": 4.008438818565402e-06,
      "loss": 0.0022,
      "step": 245
    },
    {
      "epoch": 2.8,
      "grad_norm": 17.62770652770996,
      "learning_rate": 3.7974683544303802e-06,
      "loss": 0.1143,
      "step": 246
    },
    {
      "epoch": 2.81,
      "grad_norm": 0.17265360057353973,
      "learning_rate": 3.5864978902953588e-06,
      "loss": 0.0026,
      "step": 247
    },
    {
      "epoch": 2.82,
      "grad_norm": 6.071741580963135,
      "learning_rate": 3.3755274261603373e-06,
      "loss": 0.0373,
      "step": 248
    },
    {
      "epoch": 2.83,
      "grad_norm": 1.1394612789154053,
      "learning_rate": 3.1645569620253167e-06,
      "loss": 0.0098,
      "step": 249
    },
    {
      "epoch": 2.84,
      "grad_norm": 0.09031672030687332,
      "learning_rate": 2.9535864978902956e-06,
      "loss": 0.0017,
      "step": 250
    },
    {
      "epoch": 2.85,
      "grad_norm": 23.36667251586914,
      "learning_rate": 2.7426160337552745e-06,
      "loss": 0.7473,
      "step": 251
    },
    {
      "epoch": 2.86,
      "grad_norm": 0.17166578769683838,
      "learning_rate": 2.531645569620253e-06,
      "loss": 0.0015,
      "step": 252
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.7906249165534973,
      "learning_rate": 2.3206751054852324e-06,
      "loss": 0.0071,
      "step": 253
    },
    {
      "epoch": 2.89,
      "grad_norm": 13.26647663116455,
      "learning_rate": 2.109704641350211e-06,
      "loss": 0.1543,
      "step": 254
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.22246617078781128,
      "learning_rate": 1.8987341772151901e-06,
      "loss": 0.002,
      "step": 255
    },
    {
      "epoch": 2.91,
      "grad_norm": 5.0251784324646,
      "learning_rate": 1.6877637130801686e-06,
      "loss": 0.0597,
      "step": 256
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.6754881143569946,
      "learning_rate": 1.4767932489451478e-06,
      "loss": 0.0062,
      "step": 257
    },
    {
      "epoch": 2.93,
      "grad_norm": 0.03976951539516449,
      "learning_rate": 1.2658227848101265e-06,
      "loss": 0.0009,
      "step": 258
    },
    {
      "epoch": 2.94,
      "grad_norm": 0.01055199932307005,
      "learning_rate": 1.0548523206751055e-06,
      "loss": 0.0003,
      "step": 259
    },
    {
      "epoch": 2.95,
      "grad_norm": 20.83010482788086,
      "learning_rate": 8.438818565400843e-07,
      "loss": 0.371,
      "step": 260
    },
    {
      "epoch": 2.97,
      "grad_norm": 0.4786561131477356,
      "learning_rate": 6.329113924050633e-07,
      "loss": 0.0055,
      "step": 261
    },
    {
      "epoch": 2.98,
      "grad_norm": 11.4153413772583,
      "learning_rate": 4.2194092827004216e-07,
      "loss": 0.0949,
      "step": 262
    },
    {
      "epoch": 2.99,
      "grad_norm": 0.1931201070547104,
      "learning_rate": 2.1097046413502108e-07,
      "loss": 0.0028,
      "step": 263
    },
    {
      "epoch": 3.0,
      "grad_norm": 6.672695636749268,
      "learning_rate": 0.0,
      "loss": 0.0624,
      "step": 264
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 1.0,
      "eval_f1_macro": 1.0,
      "eval_f1_micro": 1.0,
      "eval_f1_weighted": 1.0,
      "eval_loss": 0.011418873444199562,
      "eval_precision_macro": 1.0,
      "eval_precision_micro": 1.0,
      "eval_precision_weighted": 1.0,
      "eval_recall_macro": 1.0,
      "eval_recall_micro": 1.0,
      "eval_recall_weighted": 1.0,
      "eval_runtime": 23.0544,
      "eval_samples_per_second": 3.34,
      "eval_steps_per_second": 0.217,
      "step": 264
    }
  ],
  "logging_steps": 1,
  "max_steps": 264,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.626709167691776e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}