{
"best_metric": 0.8051581382751465,
"best_model_checkpoint": "wav2vec2-vocal-disable-classification/checkpoint-513",
"epoch": 20.0,
"eval_steps": 1,
"global_step": 1140,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.017543859649122806,
"grad_norm": 1.56613290309906,
"learning_rate": 6.000000000000001e-07,
"loss": 1.6056,
"step": 1
},
{
"epoch": 0.03508771929824561,
"grad_norm": 1.8094905614852905,
"learning_rate": 1.2000000000000002e-06,
"loss": 1.6101,
"step": 2
},
{
"epoch": 0.05263157894736842,
"grad_norm": 1.138840913772583,
"learning_rate": 1.8e-06,
"loss": 1.609,
"step": 3
},
{
"epoch": 0.07017543859649122,
"grad_norm": 3.695634603500366,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.6097,
"step": 4
},
{
"epoch": 0.08771929824561403,
"grad_norm": 1.9893696308135986,
"learning_rate": 3e-06,
"loss": 1.6028,
"step": 5
},
{
"epoch": 0.10526315789473684,
"grad_norm": 2.095263957977295,
"learning_rate": 3.6e-06,
"loss": 1.6016,
"step": 6
},
{
"epoch": 0.12280701754385964,
"grad_norm": 1.7659621238708496,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.5994,
"step": 7
},
{
"epoch": 0.14035087719298245,
"grad_norm": 2.041477680206299,
"learning_rate": 4.800000000000001e-06,
"loss": 1.5951,
"step": 8
},
{
"epoch": 0.15789473684210525,
"grad_norm": 1.6106765270233154,
"learning_rate": 5.4e-06,
"loss": 1.5957,
"step": 9
},
{
"epoch": 0.17543859649122806,
"grad_norm": 1.5744407176971436,
"learning_rate": 6e-06,
"loss": 1.5951,
"step": 10
},
{
"epoch": 0.19298245614035087,
"grad_norm": 1.0527958869934082,
"learning_rate": 6.6e-06,
"loss": 1.5994,
"step": 11
},
{
"epoch": 0.21052631578947367,
"grad_norm": 1.5314522981643677,
"learning_rate": 7.2e-06,
"loss": 1.5766,
"step": 12
},
{
"epoch": 0.22807017543859648,
"grad_norm": 1.0843933820724487,
"learning_rate": 7.8e-06,
"loss": 1.5976,
"step": 13
},
{
"epoch": 0.24561403508771928,
"grad_norm": 1.8420531749725342,
"learning_rate": 8.400000000000001e-06,
"loss": 1.5803,
"step": 14
},
{
"epoch": 0.2631578947368421,
"grad_norm": 2.5668458938598633,
"learning_rate": 9e-06,
"loss": 1.5839,
"step": 15
},
{
"epoch": 0.2807017543859649,
"grad_norm": 1.5706558227539062,
"learning_rate": 9.600000000000001e-06,
"loss": 1.5522,
"step": 16
},
{
"epoch": 0.2982456140350877,
"grad_norm": 1.722334384918213,
"learning_rate": 1.02e-05,
"loss": 1.5391,
"step": 17
},
{
"epoch": 0.3157894736842105,
"grad_norm": 1.317153811454773,
"learning_rate": 1.08e-05,
"loss": 1.6246,
"step": 18
},
{
"epoch": 0.3333333333333333,
"grad_norm": 1.3377695083618164,
"learning_rate": 1.1400000000000001e-05,
"loss": 1.6166,
"step": 19
},
{
"epoch": 0.3508771929824561,
"grad_norm": 3.216266632080078,
"learning_rate": 1.2e-05,
"loss": 1.5151,
"step": 20
},
{
"epoch": 0.3684210526315789,
"grad_norm": 1.3238260746002197,
"learning_rate": 1.26e-05,
"loss": 1.5195,
"step": 21
},
{
"epoch": 0.38596491228070173,
"grad_norm": 2.946779251098633,
"learning_rate": 1.32e-05,
"loss": 1.503,
"step": 22
},
{
"epoch": 0.40350877192982454,
"grad_norm": 1.427848219871521,
"learning_rate": 1.3800000000000002e-05,
"loss": 1.5538,
"step": 23
},
{
"epoch": 0.42105263157894735,
"grad_norm": 3.5313634872436523,
"learning_rate": 1.44e-05,
"loss": 1.545,
"step": 24
},
{
"epoch": 0.43859649122807015,
"grad_norm": 1.3384391069412231,
"learning_rate": 1.5e-05,
"loss": 1.5406,
"step": 25
},
{
"epoch": 0.45614035087719296,
"grad_norm": 1.635528802871704,
"learning_rate": 1.56e-05,
"loss": 1.4864,
"step": 26
},
{
"epoch": 0.47368421052631576,
"grad_norm": 1.3669475317001343,
"learning_rate": 1.62e-05,
"loss": 1.5609,
"step": 27
},
{
"epoch": 0.49122807017543857,
"grad_norm": 1.532422661781311,
"learning_rate": 1.6800000000000002e-05,
"loss": 1.4829,
"step": 28
},
{
"epoch": 0.5087719298245614,
"grad_norm": 14.834467887878418,
"learning_rate": 1.74e-05,
"loss": 1.4049,
"step": 29
},
{
"epoch": 0.5263157894736842,
"grad_norm": 1.5839215517044067,
"learning_rate": 1.8e-05,
"loss": 1.4429,
"step": 30
},
{
"epoch": 0.543859649122807,
"grad_norm": 8.556641578674316,
"learning_rate": 1.86e-05,
"loss": 1.5076,
"step": 31
},
{
"epoch": 0.5614035087719298,
"grad_norm": 2.6420958042144775,
"learning_rate": 1.9200000000000003e-05,
"loss": 1.6325,
"step": 32
},
{
"epoch": 0.5789473684210527,
"grad_norm": 1.5453814268112183,
"learning_rate": 1.98e-05,
"loss": 1.5331,
"step": 33
},
{
"epoch": 0.5964912280701754,
"grad_norm": 2.1342880725860596,
"learning_rate": 2.04e-05,
"loss": 1.5563,
"step": 34
},
{
"epoch": 0.6140350877192983,
"grad_norm": 7.298281669616699,
"learning_rate": 2.1e-05,
"loss": 1.3024,
"step": 35
},
{
"epoch": 0.631578947368421,
"grad_norm": 2.423535108566284,
"learning_rate": 2.16e-05,
"loss": 1.5851,
"step": 36
},
{
"epoch": 0.6491228070175439,
"grad_norm": 2.8099899291992188,
"learning_rate": 2.22e-05,
"loss": 1.664,
"step": 37
},
{
"epoch": 0.6666666666666666,
"grad_norm": 3.804816961288452,
"learning_rate": 2.2800000000000002e-05,
"loss": 1.4642,
"step": 38
},
{
"epoch": 0.6842105263157895,
"grad_norm": 5.346778392791748,
"learning_rate": 2.3400000000000003e-05,
"loss": 1.5016,
"step": 39
},
{
"epoch": 0.7017543859649122,
"grad_norm": 5.4332733154296875,
"learning_rate": 2.4e-05,
"loss": 1.4937,
"step": 40
},
{
"epoch": 0.7192982456140351,
"grad_norm": 3.2267627716064453,
"learning_rate": 2.4599999999999998e-05,
"loss": 1.4857,
"step": 41
},
{
"epoch": 0.7368421052631579,
"grad_norm": 9.248668670654297,
"learning_rate": 2.52e-05,
"loss": 1.4574,
"step": 42
},
{
"epoch": 0.7543859649122807,
"grad_norm": 2.983009099960327,
"learning_rate": 2.58e-05,
"loss": 1.4802,
"step": 43
},
{
"epoch": 0.7719298245614035,
"grad_norm": 9.322035789489746,
"learning_rate": 2.64e-05,
"loss": 1.3646,
"step": 44
},
{
"epoch": 0.7894736842105263,
"grad_norm": 3.1577627658843994,
"learning_rate": 2.7000000000000002e-05,
"loss": 1.3842,
"step": 45
},
{
"epoch": 0.8070175438596491,
"grad_norm": 3.0866541862487793,
"learning_rate": 2.7600000000000003e-05,
"loss": 1.478,
"step": 46
},
{
"epoch": 0.8245614035087719,
"grad_norm": 2.434828281402588,
"learning_rate": 2.8199999999999998e-05,
"loss": 1.2678,
"step": 47
},
{
"epoch": 0.8421052631578947,
"grad_norm": 19.617595672607422,
"learning_rate": 2.88e-05,
"loss": 1.5149,
"step": 48
},
{
"epoch": 0.8596491228070176,
"grad_norm": 3.927741527557373,
"learning_rate": 2.94e-05,
"loss": 1.3689,
"step": 49
},
{
"epoch": 0.8771929824561403,
"grad_norm": 4.532490253448486,
"learning_rate": 3e-05,
"loss": 1.5975,
"step": 50
},
{
"epoch": 0.8947368421052632,
"grad_norm": 4.9621500968933105,
"learning_rate": 2.9972477064220187e-05,
"loss": 1.4361,
"step": 51
},
{
"epoch": 0.9122807017543859,
"grad_norm": 6.061293125152588,
"learning_rate": 2.994495412844037e-05,
"loss": 1.4083,
"step": 52
},
{
"epoch": 0.9298245614035088,
"grad_norm": 4.741385459899902,
"learning_rate": 2.991743119266055e-05,
"loss": 1.4497,
"step": 53
},
{
"epoch": 0.9473684210526315,
"grad_norm": 3.2169480323791504,
"learning_rate": 2.9889908256880734e-05,
"loss": 1.4465,
"step": 54
},
{
"epoch": 0.9649122807017544,
"grad_norm": 7.039127349853516,
"learning_rate": 2.986238532110092e-05,
"loss": 1.2431,
"step": 55
},
{
"epoch": 0.9824561403508771,
"grad_norm": 6.163113594055176,
"learning_rate": 2.9834862385321102e-05,
"loss": 1.2167,
"step": 56
},
{
"epoch": 1.0,
"grad_norm": 3.4295806884765625,
"learning_rate": 2.9807339449541285e-05,
"loss": 1.3198,
"step": 57
},
{
"epoch": 1.0,
"eval_accuracy": 0.4482758620689655,
"eval_loss": 1.3703213930130005,
"eval_roc_auc": 0.7240004822764974,
"eval_runtime": 9.1707,
"eval_samples_per_second": 28.46,
"eval_steps_per_second": 1.854,
"step": 57
},
{
"epoch": 1.0175438596491229,
"grad_norm": 2.1605288982391357,
"learning_rate": 2.9779816513761467e-05,
"loss": 1.3161,
"step": 58
},
{
"epoch": 1.0350877192982457,
"grad_norm": 2.469620943069458,
"learning_rate": 2.9752293577981653e-05,
"loss": 1.4468,
"step": 59
},
{
"epoch": 1.0526315789473684,
"grad_norm": 5.3216376304626465,
"learning_rate": 2.9724770642201836e-05,
"loss": 1.3517,
"step": 60
},
{
"epoch": 1.0701754385964912,
"grad_norm": 5.191178798675537,
"learning_rate": 2.9697247706422018e-05,
"loss": 1.3841,
"step": 61
},
{
"epoch": 1.087719298245614,
"grad_norm": 2.3336193561553955,
"learning_rate": 2.9669724770642204e-05,
"loss": 1.3057,
"step": 62
},
{
"epoch": 1.1052631578947367,
"grad_norm": 42.809783935546875,
"learning_rate": 2.9642201834862386e-05,
"loss": 1.2346,
"step": 63
},
{
"epoch": 1.1228070175438596,
"grad_norm": 6.859128952026367,
"learning_rate": 2.9614678899082572e-05,
"loss": 1.4394,
"step": 64
},
{
"epoch": 1.1403508771929824,
"grad_norm": 9.006386756896973,
"learning_rate": 2.958715596330275e-05,
"loss": 1.2954,
"step": 65
},
{
"epoch": 1.1578947368421053,
"grad_norm": 2.543628454208374,
"learning_rate": 2.9559633027522937e-05,
"loss": 1.0557,
"step": 66
},
{
"epoch": 1.1754385964912282,
"grad_norm": 2.736093044281006,
"learning_rate": 2.953211009174312e-05,
"loss": 1.2225,
"step": 67
},
{
"epoch": 1.1929824561403508,
"grad_norm": 4.434782028198242,
"learning_rate": 2.9504587155963306e-05,
"loss": 1.1875,
"step": 68
},
{
"epoch": 1.2105263157894737,
"grad_norm": 8.687559127807617,
"learning_rate": 2.9477064220183485e-05,
"loss": 0.937,
"step": 69
},
{
"epoch": 1.2280701754385965,
"grad_norm": 3.4958977699279785,
"learning_rate": 2.944954128440367e-05,
"loss": 1.2851,
"step": 70
},
{
"epoch": 1.2456140350877192,
"grad_norm": 12.490559577941895,
"learning_rate": 2.9422018348623853e-05,
"loss": 1.5578,
"step": 71
},
{
"epoch": 1.263157894736842,
"grad_norm": 3.466176748275757,
"learning_rate": 2.939449541284404e-05,
"loss": 0.9698,
"step": 72
},
{
"epoch": 1.280701754385965,
"grad_norm": 9.730321884155273,
"learning_rate": 2.936697247706422e-05,
"loss": 1.2044,
"step": 73
},
{
"epoch": 1.2982456140350878,
"grad_norm": 12.908202171325684,
"learning_rate": 2.9339449541284404e-05,
"loss": 1.3778,
"step": 74
},
{
"epoch": 1.3157894736842106,
"grad_norm": 7.409069538116455,
"learning_rate": 2.931192660550459e-05,
"loss": 1.6065,
"step": 75
},
{
"epoch": 1.3333333333333333,
"grad_norm": 12.185145378112793,
"learning_rate": 2.9284403669724772e-05,
"loss": 1.2463,
"step": 76
},
{
"epoch": 1.3508771929824561,
"grad_norm": 16.505674362182617,
"learning_rate": 2.9256880733944955e-05,
"loss": 1.0027,
"step": 77
},
{
"epoch": 1.368421052631579,
"grad_norm": 9.078436851501465,
"learning_rate": 2.9229357798165137e-05,
"loss": 1.2037,
"step": 78
},
{
"epoch": 1.3859649122807016,
"grad_norm": 23.13191032409668,
"learning_rate": 2.9201834862385323e-05,
"loss": 1.0528,
"step": 79
},
{
"epoch": 1.4035087719298245,
"grad_norm": 7.651644229888916,
"learning_rate": 2.9174311926605506e-05,
"loss": 1.0373,
"step": 80
},
{
"epoch": 1.4210526315789473,
"grad_norm": 15.649460792541504,
"learning_rate": 2.9146788990825688e-05,
"loss": 1.3094,
"step": 81
},
{
"epoch": 1.4385964912280702,
"grad_norm": 14.335933685302734,
"learning_rate": 2.9119266055045874e-05,
"loss": 1.229,
"step": 82
},
{
"epoch": 1.456140350877193,
"grad_norm": 13.704193115234375,
"learning_rate": 2.9091743119266056e-05,
"loss": 1.2517,
"step": 83
},
{
"epoch": 1.4736842105263157,
"grad_norm": 6.872323036193848,
"learning_rate": 2.9064220183486242e-05,
"loss": 1.0614,
"step": 84
},
{
"epoch": 1.4912280701754386,
"grad_norm": 58.63323211669922,
"learning_rate": 2.903669724770642e-05,
"loss": 1.2651,
"step": 85
},
{
"epoch": 1.5087719298245614,
"grad_norm": 16.493391036987305,
"learning_rate": 2.9009174311926607e-05,
"loss": 1.0977,
"step": 86
},
{
"epoch": 1.526315789473684,
"grad_norm": 76.30412292480469,
"learning_rate": 2.898165137614679e-05,
"loss": 1.3534,
"step": 87
},
{
"epoch": 1.543859649122807,
"grad_norm": 35.07970428466797,
"learning_rate": 2.8954128440366975e-05,
"loss": 1.1015,
"step": 88
},
{
"epoch": 1.5614035087719298,
"grad_norm": 41.934513092041016,
"learning_rate": 2.8926605504587155e-05,
"loss": 1.252,
"step": 89
},
{
"epoch": 1.5789473684210527,
"grad_norm": 6.429330825805664,
"learning_rate": 2.889908256880734e-05,
"loss": 0.9942,
"step": 90
},
{
"epoch": 1.5964912280701755,
"grad_norm": 4.915737152099609,
"learning_rate": 2.8871559633027523e-05,
"loss": 1.0583,
"step": 91
},
{
"epoch": 1.6140350877192984,
"grad_norm": 5.171088695526123,
"learning_rate": 2.884403669724771e-05,
"loss": 0.9167,
"step": 92
},
{
"epoch": 1.631578947368421,
"grad_norm": 4.840869426727295,
"learning_rate": 2.881651376146789e-05,
"loss": 1.3165,
"step": 93
},
{
"epoch": 1.6491228070175439,
"grad_norm": 8.278257369995117,
"learning_rate": 2.8788990825688074e-05,
"loss": 1.0294,
"step": 94
},
{
"epoch": 1.6666666666666665,
"grad_norm": 7.380605697631836,
"learning_rate": 2.876146788990826e-05,
"loss": 1.16,
"step": 95
},
{
"epoch": 1.6842105263157894,
"grad_norm": 27.351318359375,
"learning_rate": 2.8733944954128442e-05,
"loss": 1.6427,
"step": 96
},
{
"epoch": 1.7017543859649122,
"grad_norm": 5.766673564910889,
"learning_rate": 2.8706422018348625e-05,
"loss": 1.1461,
"step": 97
},
{
"epoch": 1.719298245614035,
"grad_norm": 6.586304664611816,
"learning_rate": 2.8678899082568807e-05,
"loss": 1.1368,
"step": 98
},
{
"epoch": 1.736842105263158,
"grad_norm": 13.888910293579102,
"learning_rate": 2.8651376146788993e-05,
"loss": 0.8426,
"step": 99
},
{
"epoch": 1.7543859649122808,
"grad_norm": 14.992203712463379,
"learning_rate": 2.8623853211009175e-05,
"loss": 1.1494,
"step": 100
},
{
"epoch": 1.7719298245614035,
"grad_norm": 8.557571411132812,
"learning_rate": 2.8596330275229358e-05,
"loss": 1.1022,
"step": 101
},
{
"epoch": 1.7894736842105263,
"grad_norm": 19.743364334106445,
"learning_rate": 2.856880733944954e-05,
"loss": 1.0737,
"step": 102
},
{
"epoch": 1.807017543859649,
"grad_norm": 13.77326488494873,
"learning_rate": 2.8541284403669726e-05,
"loss": 0.8547,
"step": 103
},
{
"epoch": 1.8245614035087718,
"grad_norm": 4.906980514526367,
"learning_rate": 2.8513761467889912e-05,
"loss": 0.8529,
"step": 104
},
{
"epoch": 1.8421052631578947,
"grad_norm": 2.8242599964141846,
"learning_rate": 2.848623853211009e-05,
"loss": 1.2058,
"step": 105
},
{
"epoch": 1.8596491228070176,
"grad_norm": 3.802095890045166,
"learning_rate": 2.8458715596330277e-05,
"loss": 0.9236,
"step": 106
},
{
"epoch": 1.8771929824561404,
"grad_norm": 4.197514057159424,
"learning_rate": 2.843119266055046e-05,
"loss": 0.8856,
"step": 107
},
{
"epoch": 1.8947368421052633,
"grad_norm": 13.207415580749512,
"learning_rate": 2.8403669724770645e-05,
"loss": 0.769,
"step": 108
},
{
"epoch": 1.912280701754386,
"grad_norm": 4.843390941619873,
"learning_rate": 2.8376146788990824e-05,
"loss": 1.1706,
"step": 109
},
{
"epoch": 1.9298245614035088,
"grad_norm": 13.065381050109863,
"learning_rate": 2.834862385321101e-05,
"loss": 1.0655,
"step": 110
},
{
"epoch": 1.9473684210526314,
"grad_norm": 15.791166305541992,
"learning_rate": 2.8321100917431193e-05,
"loss": 1.2378,
"step": 111
},
{
"epoch": 1.9649122807017543,
"grad_norm": 11.551010131835938,
"learning_rate": 2.829357798165138e-05,
"loss": 0.9768,
"step": 112
},
{
"epoch": 1.9824561403508771,
"grad_norm": 14.838546752929688,
"learning_rate": 2.826605504587156e-05,
"loss": 1.0073,
"step": 113
},
{
"epoch": 2.0,
"grad_norm": 2.385023355484009,
"learning_rate": 2.8238532110091744e-05,
"loss": 0.9486,
"step": 114
},
{
"epoch": 2.0,
"eval_accuracy": 0.6091954022988506,
"eval_loss": 1.0561790466308594,
"eval_roc_auc": 0.8297140297597192,
"eval_runtime": 10.2393,
"eval_samples_per_second": 25.49,
"eval_steps_per_second": 1.66,
"step": 114
},
{
"epoch": 2.017543859649123,
"grad_norm": 13.100618362426758,
"learning_rate": 2.821100917431193e-05,
"loss": 0.9394,
"step": 115
},
{
"epoch": 2.0350877192982457,
"grad_norm": 22.331451416015625,
"learning_rate": 2.8183486238532112e-05,
"loss": 1.0603,
"step": 116
},
{
"epoch": 2.0526315789473686,
"grad_norm": 4.956656455993652,
"learning_rate": 2.8155963302752294e-05,
"loss": 0.9566,
"step": 117
},
{
"epoch": 2.0701754385964914,
"grad_norm": 19.40747833251953,
"learning_rate": 2.8128440366972477e-05,
"loss": 1.1504,
"step": 118
},
{
"epoch": 2.087719298245614,
"grad_norm": 13.512128829956055,
"learning_rate": 2.8100917431192663e-05,
"loss": 1.194,
"step": 119
},
{
"epoch": 2.1052631578947367,
"grad_norm": 6.616446495056152,
"learning_rate": 2.8073394495412845e-05,
"loss": 1.0321,
"step": 120
},
{
"epoch": 2.1228070175438596,
"grad_norm": 2.3998453617095947,
"learning_rate": 2.8045871559633028e-05,
"loss": 1.0636,
"step": 121
},
{
"epoch": 2.1403508771929824,
"grad_norm": 3.214811086654663,
"learning_rate": 2.801834862385321e-05,
"loss": 0.7859,
"step": 122
},
{
"epoch": 2.1578947368421053,
"grad_norm": 2.0244710445404053,
"learning_rate": 2.7990825688073396e-05,
"loss": 0.8801,
"step": 123
},
{
"epoch": 2.175438596491228,
"grad_norm": 3.1682400703430176,
"learning_rate": 2.7963302752293582e-05,
"loss": 0.9413,
"step": 124
},
{
"epoch": 2.192982456140351,
"grad_norm": 3.509514570236206,
"learning_rate": 2.793577981651376e-05,
"loss": 0.8083,
"step": 125
},
{
"epoch": 2.2105263157894735,
"grad_norm": 6.175449848175049,
"learning_rate": 2.7908256880733947e-05,
"loss": 1.0884,
"step": 126
},
{
"epoch": 2.2280701754385963,
"grad_norm": 4.469012260437012,
"learning_rate": 2.788073394495413e-05,
"loss": 0.8281,
"step": 127
},
{
"epoch": 2.245614035087719,
"grad_norm": 18.924325942993164,
"learning_rate": 2.7853211009174315e-05,
"loss": 0.9676,
"step": 128
},
{
"epoch": 2.263157894736842,
"grad_norm": 24.372833251953125,
"learning_rate": 2.7825688073394494e-05,
"loss": 1.1955,
"step": 129
},
{
"epoch": 2.280701754385965,
"grad_norm": 1.3634321689605713,
"learning_rate": 2.779816513761468e-05,
"loss": 0.9889,
"step": 130
},
{
"epoch": 2.2982456140350878,
"grad_norm": 17.57159996032715,
"learning_rate": 2.7770642201834863e-05,
"loss": 1.1324,
"step": 131
},
{
"epoch": 2.3157894736842106,
"grad_norm": 16.63871192932129,
"learning_rate": 2.774311926605505e-05,
"loss": 1.1951,
"step": 132
},
{
"epoch": 2.3333333333333335,
"grad_norm": 16.466339111328125,
"learning_rate": 2.7715596330275228e-05,
"loss": 0.9375,
"step": 133
},
{
"epoch": 2.3508771929824563,
"grad_norm": 13.808109283447266,
"learning_rate": 2.7688073394495413e-05,
"loss": 1.0418,
"step": 134
},
{
"epoch": 2.3684210526315788,
"grad_norm": 5.8455729484558105,
"learning_rate": 2.76605504587156e-05,
"loss": 1.0906,
"step": 135
},
{
"epoch": 2.3859649122807016,
"grad_norm": 18.494245529174805,
"learning_rate": 2.7633027522935782e-05,
"loss": 0.9786,
"step": 136
},
{
"epoch": 2.4035087719298245,
"grad_norm": 21.47800636291504,
"learning_rate": 2.7605504587155964e-05,
"loss": 0.8138,
"step": 137
},
{
"epoch": 2.4210526315789473,
"grad_norm": 8.400023460388184,
"learning_rate": 2.7577981651376147e-05,
"loss": 1.027,
"step": 138
},
{
"epoch": 2.43859649122807,
"grad_norm": 7.198583602905273,
"learning_rate": 2.7550458715596333e-05,
"loss": 0.7104,
"step": 139
},
{
"epoch": 2.456140350877193,
"grad_norm": 23.315410614013672,
"learning_rate": 2.7522935779816515e-05,
"loss": 1.2115,
"step": 140
},
{
"epoch": 2.473684210526316,
"grad_norm": 18.734041213989258,
"learning_rate": 2.7495412844036697e-05,
"loss": 0.9739,
"step": 141
},
{
"epoch": 2.4912280701754383,
"grad_norm": 10.015871047973633,
"learning_rate": 2.746788990825688e-05,
"loss": 0.9796,
"step": 142
},
{
"epoch": 2.5087719298245617,
"grad_norm": 11.979524612426758,
"learning_rate": 2.7440366972477066e-05,
"loss": 0.8025,
"step": 143
},
{
"epoch": 2.526315789473684,
"grad_norm": 9.438429832458496,
"learning_rate": 2.741284403669725e-05,
"loss": 0.8838,
"step": 144
},
{
"epoch": 2.543859649122807,
"grad_norm": 43.801063537597656,
"learning_rate": 2.738532110091743e-05,
"loss": 0.9144,
"step": 145
},
{
"epoch": 2.56140350877193,
"grad_norm": 19.495887756347656,
"learning_rate": 2.7357798165137617e-05,
"loss": 0.8573,
"step": 146
},
{
"epoch": 2.5789473684210527,
"grad_norm": 21.922138214111328,
"learning_rate": 2.73302752293578e-05,
"loss": 0.9611,
"step": 147
},
{
"epoch": 2.5964912280701755,
"grad_norm": 16.30131721496582,
"learning_rate": 2.7302752293577985e-05,
"loss": 1.3519,
"step": 148
},
{
"epoch": 2.6140350877192984,
"grad_norm": 43.10526657104492,
"learning_rate": 2.7275229357798164e-05,
"loss": 1.0194,
"step": 149
},
{
"epoch": 2.6315789473684212,
"grad_norm": 18.148771286010742,
"learning_rate": 2.724770642201835e-05,
"loss": 1.0759,
"step": 150
},
{
"epoch": 2.6491228070175437,
"grad_norm": 28.365219116210938,
"learning_rate": 2.7220183486238532e-05,
"loss": 0.9998,
"step": 151
},
{
"epoch": 2.6666666666666665,
"grad_norm": 10.812620162963867,
"learning_rate": 2.7192660550458718e-05,
"loss": 0.9368,
"step": 152
},
{
"epoch": 2.6842105263157894,
"grad_norm": 32.16633605957031,
"learning_rate": 2.7165137614678897e-05,
"loss": 1.0304,
"step": 153
},
{
"epoch": 2.7017543859649122,
"grad_norm": 63.48967742919922,
"learning_rate": 2.7137614678899083e-05,
"loss": 1.0795,
"step": 154
},
{
"epoch": 2.719298245614035,
"grad_norm": 4.874389171600342,
"learning_rate": 2.711009174311927e-05,
"loss": 1.062,
"step": 155
},
{
"epoch": 2.736842105263158,
"grad_norm": 18.235118865966797,
"learning_rate": 2.708256880733945e-05,
"loss": 1.3184,
"step": 156
},
{
"epoch": 2.754385964912281,
"grad_norm": 22.853351593017578,
"learning_rate": 2.7055045871559634e-05,
"loss": 1.0108,
"step": 157
},
{
"epoch": 2.7719298245614032,
"grad_norm": 50.805885314941406,
"learning_rate": 2.7027522935779817e-05,
"loss": 0.914,
"step": 158
},
{
"epoch": 2.7894736842105265,
"grad_norm": 5.718901634216309,
"learning_rate": 2.7000000000000002e-05,
"loss": 0.7322,
"step": 159
},
{
"epoch": 2.807017543859649,
"grad_norm": 60.18952178955078,
"learning_rate": 2.6972477064220185e-05,
"loss": 1.088,
"step": 160
},
{
"epoch": 2.824561403508772,
"grad_norm": 5.959671974182129,
"learning_rate": 2.6944954128440367e-05,
"loss": 0.5275,
"step": 161
},
{
"epoch": 2.8421052631578947,
"grad_norm": 2.963286876678467,
"learning_rate": 2.691743119266055e-05,
"loss": 0.8271,
"step": 162
},
{
"epoch": 2.8596491228070176,
"grad_norm": 18.080677032470703,
"learning_rate": 2.6889908256880736e-05,
"loss": 0.7593,
"step": 163
},
{
"epoch": 2.8771929824561404,
"grad_norm": 3.0679006576538086,
"learning_rate": 2.6862385321100918e-05,
"loss": 0.582,
"step": 164
},
{
"epoch": 2.8947368421052633,
"grad_norm": 27.97750473022461,
"learning_rate": 2.68348623853211e-05,
"loss": 1.0477,
"step": 165
},
{
"epoch": 2.912280701754386,
"grad_norm": 9.784193992614746,
"learning_rate": 2.6807339449541286e-05,
"loss": 0.7083,
"step": 166
},
{
"epoch": 2.9298245614035086,
"grad_norm": 7.2379045486450195,
"learning_rate": 2.677981651376147e-05,
"loss": 0.6767,
"step": 167
},
{
"epoch": 2.9473684210526314,
"grad_norm": 17.974803924560547,
"learning_rate": 2.6752293577981655e-05,
"loss": 1.117,
"step": 168
},
{
"epoch": 2.9649122807017543,
"grad_norm": 8.451967239379883,
"learning_rate": 2.6724770642201834e-05,
"loss": 1.0163,
"step": 169
},
{
"epoch": 2.982456140350877,
"grad_norm": 16.834774017333984,
"learning_rate": 2.669724770642202e-05,
"loss": 0.642,
"step": 170
},
{
"epoch": 3.0,
"grad_norm": 10.743306159973145,
"learning_rate": 2.6669724770642202e-05,
"loss": 0.8485,
"step": 171
},
{
"epoch": 3.0,
"eval_accuracy": 0.5938697318007663,
"eval_loss": 1.042966604232788,
"eval_roc_auc": 0.8370073235789949,
"eval_runtime": 9.8627,
"eval_samples_per_second": 26.463,
"eval_steps_per_second": 1.724,
"step": 171
},
{
"epoch": 3.017543859649123,
"grad_norm": 7.450799942016602,
"learning_rate": 2.6642201834862388e-05,
"loss": 0.7936,
"step": 172
},
{
"epoch": 3.0350877192982457,
"grad_norm": 13.938980102539062,
"learning_rate": 2.6614678899082567e-05,
"loss": 0.9981,
"step": 173
},
{
"epoch": 3.0526315789473686,
"grad_norm": 10.046002388000488,
"learning_rate": 2.6587155963302753e-05,
"loss": 0.9664,
"step": 174
},
{
"epoch": 3.0701754385964914,
"grad_norm": 2.47877836227417,
"learning_rate": 2.6559633027522936e-05,
"loss": 0.4564,
"step": 175
},
{
"epoch": 3.087719298245614,
"grad_norm": 4.682964324951172,
"learning_rate": 2.653211009174312e-05,
"loss": 0.7715,
"step": 176
},
{
"epoch": 3.1052631578947367,
"grad_norm": 31.68276023864746,
"learning_rate": 2.6504587155963304e-05,
"loss": 1.0283,
"step": 177
},
{
"epoch": 3.1228070175438596,
"grad_norm": 6.329732418060303,
"learning_rate": 2.6477064220183486e-05,
"loss": 0.6996,
"step": 178
},
{
"epoch": 3.1403508771929824,
"grad_norm": 14.793739318847656,
"learning_rate": 2.6449541284403672e-05,
"loss": 0.5858,
"step": 179
},
{
"epoch": 3.1578947368421053,
"grad_norm": 123.91106414794922,
"learning_rate": 2.6422018348623855e-05,
"loss": 0.8345,
"step": 180
},
{
"epoch": 3.175438596491228,
"grad_norm": 10.042440414428711,
"learning_rate": 2.6394495412844037e-05,
"loss": 0.8356,
"step": 181
},
{
"epoch": 3.192982456140351,
"grad_norm": 15.220785140991211,
"learning_rate": 2.636697247706422e-05,
"loss": 0.5512,
"step": 182
},
{
"epoch": 3.2105263157894735,
"grad_norm": 9.333229064941406,
"learning_rate": 2.6339449541284406e-05,
"loss": 0.6247,
"step": 183
},
{
"epoch": 3.2280701754385963,
"grad_norm": 13.827474594116211,
"learning_rate": 2.6311926605504588e-05,
"loss": 1.0353,
"step": 184
},
{
"epoch": 3.245614035087719,
"grad_norm": 4.803915500640869,
"learning_rate": 2.628440366972477e-05,
"loss": 0.8724,
"step": 185
},
{
"epoch": 3.263157894736842,
"grad_norm": 9.242655754089355,
"learning_rate": 2.6256880733944953e-05,
"loss": 0.7395,
"step": 186
},
{
"epoch": 3.280701754385965,
"grad_norm": 82.34434509277344,
"learning_rate": 2.622935779816514e-05,
"loss": 0.7797,
"step": 187
},
{
"epoch": 3.2982456140350878,
"grad_norm": 29.72227668762207,
"learning_rate": 2.6201834862385325e-05,
"loss": 0.9626,
"step": 188
},
{
"epoch": 3.3157894736842106,
"grad_norm": 30.35354232788086,
"learning_rate": 2.6174311926605504e-05,
"loss": 1.0387,
"step": 189
},
{
"epoch": 3.3333333333333335,
"grad_norm": 9.3469820022583,
"learning_rate": 2.614678899082569e-05,
"loss": 0.9555,
"step": 190
},
{
"epoch": 3.3508771929824563,
"grad_norm": 10.0740327835083,
"learning_rate": 2.6119266055045872e-05,
"loss": 0.7608,
"step": 191
},
{
"epoch": 3.3684210526315788,
"grad_norm": 16.590267181396484,
"learning_rate": 2.6091743119266058e-05,
"loss": 0.8894,
"step": 192
},
{
"epoch": 3.3859649122807016,
"grad_norm": 6.453067779541016,
"learning_rate": 2.6064220183486237e-05,
"loss": 0.6409,
"step": 193
},
{
"epoch": 3.4035087719298245,
"grad_norm": 18.96304702758789,
"learning_rate": 2.6036697247706423e-05,
"loss": 1.1674,
"step": 194
},
{
"epoch": 3.4210526315789473,
"grad_norm": 4.519999027252197,
"learning_rate": 2.6009174311926605e-05,
"loss": 0.9088,
"step": 195
},
{
"epoch": 3.43859649122807,
"grad_norm": 14.560205459594727,
"learning_rate": 2.598165137614679e-05,
"loss": 1.0394,
"step": 196
},
{
"epoch": 3.456140350877193,
"grad_norm": 9.643004417419434,
"learning_rate": 2.5954128440366974e-05,
"loss": 0.7166,
"step": 197
},
{
"epoch": 3.473684210526316,
"grad_norm": 6.686179161071777,
"learning_rate": 2.5926605504587156e-05,
"loss": 0.9138,
"step": 198
},
{
"epoch": 3.4912280701754383,
"grad_norm": 6.065926551818848,
"learning_rate": 2.5899082568807342e-05,
"loss": 0.5985,
"step": 199
},
{
"epoch": 3.5087719298245617,
"grad_norm": 8.297146797180176,
"learning_rate": 2.5871559633027525e-05,
"loss": 0.8459,
"step": 200
},
{
"epoch": 3.526315789473684,
"grad_norm": 28.6617431640625,
"learning_rate": 2.5844036697247707e-05,
"loss": 0.4553,
"step": 201
},
{
"epoch": 3.543859649122807,
"grad_norm": 8.133190155029297,
"learning_rate": 2.581651376146789e-05,
"loss": 0.8616,
"step": 202
},
{
"epoch": 3.56140350877193,
"grad_norm": 7.848640441894531,
"learning_rate": 2.5788990825688075e-05,
"loss": 1.0041,
"step": 203
},
{
"epoch": 3.5789473684210527,
"grad_norm": 12.316092491149902,
"learning_rate": 2.5761467889908258e-05,
"loss": 0.8715,
"step": 204
},
{
"epoch": 3.5964912280701755,
"grad_norm": 7.93389892578125,
"learning_rate": 2.573394495412844e-05,
"loss": 0.7744,
"step": 205
},
{
"epoch": 3.6140350877192984,
"grad_norm": 9.929866790771484,
"learning_rate": 2.5706422018348623e-05,
"loss": 0.6355,
"step": 206
},
{
"epoch": 3.6315789473684212,
"grad_norm": 11.27926254272461,
"learning_rate": 2.567889908256881e-05,
"loss": 1.0616,
"step": 207
},
{
"epoch": 3.6491228070175437,
"grad_norm": 12.538276672363281,
"learning_rate": 2.5651376146788994e-05,
"loss": 0.8681,
"step": 208
},
{
"epoch": 3.6666666666666665,
"grad_norm": 14.14212703704834,
"learning_rate": 2.5623853211009174e-05,
"loss": 0.8615,
"step": 209
},
{
"epoch": 3.6842105263157894,
"grad_norm": 12.915414810180664,
"learning_rate": 2.559633027522936e-05,
"loss": 0.905,
"step": 210
},
{
"epoch": 3.7017543859649122,
"grad_norm": 6.382973670959473,
"learning_rate": 2.5568807339449542e-05,
"loss": 0.8229,
"step": 211
},
{
"epoch": 3.719298245614035,
"grad_norm": 8.958497047424316,
"learning_rate": 2.5541284403669728e-05,
"loss": 0.8367,
"step": 212
},
{
"epoch": 3.736842105263158,
"grad_norm": 13.200749397277832,
"learning_rate": 2.5513761467889907e-05,
"loss": 0.7738,
"step": 213
},
{
"epoch": 3.754385964912281,
"grad_norm": 25.975767135620117,
"learning_rate": 2.5486238532110093e-05,
"loss": 1.1707,
"step": 214
},
{
"epoch": 3.7719298245614032,
"grad_norm": 46.98295211791992,
"learning_rate": 2.5458715596330275e-05,
"loss": 0.815,
"step": 215
},
{
"epoch": 3.7894736842105265,
"grad_norm": 6.342257499694824,
"learning_rate": 2.543119266055046e-05,
"loss": 0.9045,
"step": 216
},
{
"epoch": 3.807017543859649,
"grad_norm": 11.122331619262695,
"learning_rate": 2.540366972477064e-05,
"loss": 0.4915,
"step": 217
},
{
"epoch": 3.824561403508772,
"grad_norm": 11.39393424987793,
"learning_rate": 2.5376146788990826e-05,
"loss": 0.8584,
"step": 218
},
{
"epoch": 3.8421052631578947,
"grad_norm": 16.47450828552246,
"learning_rate": 2.5348623853211012e-05,
"loss": 0.9062,
"step": 219
},
{
"epoch": 3.8596491228070176,
"grad_norm": 8.9651517868042,
"learning_rate": 2.5321100917431194e-05,
"loss": 0.6332,
"step": 220
},
{
"epoch": 3.8771929824561404,
"grad_norm": 14.864767074584961,
"learning_rate": 2.5293577981651377e-05,
"loss": 0.7327,
"step": 221
},
{
"epoch": 3.8947368421052633,
"grad_norm": 13.695717811584473,
"learning_rate": 2.526605504587156e-05,
"loss": 0.8439,
"step": 222
},
{
"epoch": 3.912280701754386,
"grad_norm": 10.765061378479004,
"learning_rate": 2.5238532110091745e-05,
"loss": 1.0394,
"step": 223
},
{
"epoch": 3.9298245614035086,
"grad_norm": 15.805130958557129,
"learning_rate": 2.5211009174311928e-05,
"loss": 1.3535,
"step": 224
},
{
"epoch": 3.9473684210526314,
"grad_norm": 19.387144088745117,
"learning_rate": 2.518348623853211e-05,
"loss": 0.7274,
"step": 225
},
{
"epoch": 3.9649122807017543,
"grad_norm": 37.748497009277344,
"learning_rate": 2.5155963302752293e-05,
"loss": 0.7626,
"step": 226
},
{
"epoch": 3.982456140350877,
"grad_norm": 16.551725387573242,
"learning_rate": 2.512844036697248e-05,
"loss": 0.9978,
"step": 227
},
{
"epoch": 4.0,
"grad_norm": 25.66643524169922,
"learning_rate": 2.510091743119266e-05,
"loss": 0.7405,
"step": 228
},
{
"epoch": 4.0,
"eval_accuracy": 0.6245210727969349,
"eval_loss": 0.9546777606010437,
"eval_roc_auc": 0.8586184139970431,
"eval_runtime": 9.814,
"eval_samples_per_second": 26.595,
"eval_steps_per_second": 1.732,
"step": 228
},
{
"epoch": 4.017543859649122,
"grad_norm": 4.81406307220459,
"learning_rate": 2.5073394495412843e-05,
"loss": 0.5165,
"step": 229
},
{
"epoch": 4.035087719298246,
"grad_norm": 9.769265174865723,
"learning_rate": 2.504587155963303e-05,
"loss": 0.6222,
"step": 230
},
{
"epoch": 4.052631578947368,
"grad_norm": 8.312880516052246,
"learning_rate": 2.5018348623853212e-05,
"loss": 0.6313,
"step": 231
},
{
"epoch": 4.0701754385964914,
"grad_norm": 5.13502311706543,
"learning_rate": 2.4990825688073398e-05,
"loss": 0.7425,
"step": 232
},
{
"epoch": 4.087719298245614,
"grad_norm": 19.665512084960938,
"learning_rate": 2.4963302752293577e-05,
"loss": 1.0384,
"step": 233
},
{
"epoch": 4.105263157894737,
"grad_norm": 29.155986785888672,
"learning_rate": 2.4935779816513763e-05,
"loss": 1.0976,
"step": 234
},
{
"epoch": 4.12280701754386,
"grad_norm": 11.831360816955566,
"learning_rate": 2.4908256880733945e-05,
"loss": 0.7877,
"step": 235
},
{
"epoch": 4.140350877192983,
"grad_norm": 19.49347686767578,
"learning_rate": 2.488073394495413e-05,
"loss": 0.6279,
"step": 236
},
{
"epoch": 4.157894736842105,
"grad_norm": 6.922645092010498,
"learning_rate": 2.485321100917431e-05,
"loss": 0.7153,
"step": 237
},
{
"epoch": 4.175438596491228,
"grad_norm": 3.6549301147460938,
"learning_rate": 2.4825688073394496e-05,
"loss": 0.873,
"step": 238
},
{
"epoch": 4.192982456140351,
"grad_norm": 10.724910736083984,
"learning_rate": 2.4798165137614682e-05,
"loss": 0.7728,
"step": 239
},
{
"epoch": 4.2105263157894735,
"grad_norm": 3.7120726108551025,
"learning_rate": 2.4770642201834864e-05,
"loss": 0.7132,
"step": 240
},
{
"epoch": 4.228070175438597,
"grad_norm": 29.73471450805664,
"learning_rate": 2.4743119266055047e-05,
"loss": 0.5634,
"step": 241
},
{
"epoch": 4.245614035087719,
"grad_norm": 5.868150234222412,
"learning_rate": 2.471559633027523e-05,
"loss": 0.6967,
"step": 242
},
{
"epoch": 4.2631578947368425,
"grad_norm": 9.828577995300293,
"learning_rate": 2.4688073394495415e-05,
"loss": 0.8814,
"step": 243
},
{
"epoch": 4.280701754385965,
"grad_norm": 19.289398193359375,
"learning_rate": 2.4660550458715597e-05,
"loss": 0.6907,
"step": 244
},
{
"epoch": 4.298245614035087,
"grad_norm": 18.95272445678711,
"learning_rate": 2.463302752293578e-05,
"loss": 0.7413,
"step": 245
},
{
"epoch": 4.315789473684211,
"grad_norm": 9.175381660461426,
"learning_rate": 2.4605504587155962e-05,
"loss": 0.5549,
"step": 246
},
{
"epoch": 4.333333333333333,
"grad_norm": 11.017579078674316,
"learning_rate": 2.4577981651376148e-05,
"loss": 0.5834,
"step": 247
},
{
"epoch": 4.350877192982456,
"grad_norm": 4.641144752502441,
"learning_rate": 2.455045871559633e-05,
"loss": 0.7171,
"step": 248
},
{
"epoch": 4.368421052631579,
"grad_norm": 30.48017120361328,
"learning_rate": 2.4522935779816513e-05,
"loss": 0.5314,
"step": 249
},
{
"epoch": 4.385964912280702,
"grad_norm": 5.151516914367676,
"learning_rate": 2.44954128440367e-05,
"loss": 0.822,
"step": 250
},
{
"epoch": 4.4035087719298245,
"grad_norm": 4.152652263641357,
"learning_rate": 2.446788990825688e-05,
"loss": 0.8574,
"step": 251
},
{
"epoch": 4.421052631578947,
"grad_norm": 26.325111389160156,
"learning_rate": 2.4440366972477067e-05,
"loss": 0.5536,
"step": 252
},
{
"epoch": 4.43859649122807,
"grad_norm": 3.4374260902404785,
"learning_rate": 2.4412844036697247e-05,
"loss": 0.7328,
"step": 253
},
{
"epoch": 4.456140350877193,
"grad_norm": 7.868347644805908,
"learning_rate": 2.4385321100917432e-05,
"loss": 0.8698,
"step": 254
},
{
"epoch": 4.473684210526316,
"grad_norm": 14.990694046020508,
"learning_rate": 2.4357798165137615e-05,
"loss": 0.9928,
"step": 255
},
{
"epoch": 4.491228070175438,
"grad_norm": 55.8467903137207,
"learning_rate": 2.43302752293578e-05,
"loss": 0.8542,
"step": 256
},
{
"epoch": 4.508771929824562,
"grad_norm": 15.516489028930664,
"learning_rate": 2.430275229357798e-05,
"loss": 0.935,
"step": 257
},
{
"epoch": 4.526315789473684,
"grad_norm": 31.769515991210938,
"learning_rate": 2.4275229357798166e-05,
"loss": 0.5695,
"step": 258
},
{
"epoch": 4.543859649122807,
"grad_norm": 6.501729488372803,
"learning_rate": 2.4247706422018348e-05,
"loss": 0.7384,
"step": 259
},
{
"epoch": 4.56140350877193,
"grad_norm": 13.194774627685547,
"learning_rate": 2.4220183486238534e-05,
"loss": 0.7311,
"step": 260
},
{
"epoch": 4.578947368421053,
"grad_norm": 9.523067474365234,
"learning_rate": 2.4192660550458717e-05,
"loss": 0.8254,
"step": 261
},
{
"epoch": 4.5964912280701755,
"grad_norm": 40.60873794555664,
"learning_rate": 2.41651376146789e-05,
"loss": 0.7756,
"step": 262
},
{
"epoch": 4.614035087719298,
"grad_norm": 20.81313133239746,
"learning_rate": 2.4137614678899085e-05,
"loss": 0.7474,
"step": 263
},
{
"epoch": 4.631578947368421,
"grad_norm": 30.596458435058594,
"learning_rate": 2.4110091743119267e-05,
"loss": 0.8261,
"step": 264
},
{
"epoch": 4.649122807017544,
"grad_norm": 36.82753372192383,
"learning_rate": 2.408256880733945e-05,
"loss": 0.7422,
"step": 265
},
{
"epoch": 4.666666666666667,
"grad_norm": 7.798766136169434,
"learning_rate": 2.4055045871559632e-05,
"loss": 0.7433,
"step": 266
},
{
"epoch": 4.684210526315789,
"grad_norm": 10.442023277282715,
"learning_rate": 2.4027522935779818e-05,
"loss": 0.9382,
"step": 267
},
{
"epoch": 4.701754385964913,
"grad_norm": 7.129739761352539,
"learning_rate": 2.4e-05,
"loss": 0.4544,
"step": 268
},
{
"epoch": 4.719298245614035,
"grad_norm": 6.29140567779541,
"learning_rate": 2.3972477064220183e-05,
"loss": 0.5828,
"step": 269
},
{
"epoch": 4.7368421052631575,
"grad_norm": 14.20146369934082,
"learning_rate": 2.3944954128440366e-05,
"loss": 0.7544,
"step": 270
},
{
"epoch": 4.754385964912281,
"grad_norm": 25.83724594116211,
"learning_rate": 2.391743119266055e-05,
"loss": 0.8725,
"step": 271
},
{
"epoch": 4.771929824561403,
"grad_norm": 19.661861419677734,
"learning_rate": 2.3889908256880737e-05,
"loss": 0.9125,
"step": 272
},
{
"epoch": 4.7894736842105265,
"grad_norm": 7.706132888793945,
"learning_rate": 2.3862385321100916e-05,
"loss": 0.4359,
"step": 273
},
{
"epoch": 4.807017543859649,
"grad_norm": 13.709455490112305,
"learning_rate": 2.3834862385321102e-05,
"loss": 0.6747,
"step": 274
},
{
"epoch": 4.824561403508772,
"grad_norm": 27.501550674438477,
"learning_rate": 2.3807339449541285e-05,
"loss": 0.5231,
"step": 275
},
{
"epoch": 4.842105263157895,
"grad_norm": 3.0259737968444824,
"learning_rate": 2.377981651376147e-05,
"loss": 0.5482,
"step": 276
},
{
"epoch": 4.859649122807017,
"grad_norm": 22.053667068481445,
"learning_rate": 2.375229357798165e-05,
"loss": 0.6844,
"step": 277
},
{
"epoch": 4.87719298245614,
"grad_norm": 21.116025924682617,
"learning_rate": 2.3724770642201836e-05,
"loss": 0.5924,
"step": 278
},
{
"epoch": 4.894736842105263,
"grad_norm": 45.17483901977539,
"learning_rate": 2.3697247706422018e-05,
"loss": 0.9959,
"step": 279
},
{
"epoch": 4.912280701754386,
"grad_norm": 2.0751166343688965,
"learning_rate": 2.3669724770642204e-05,
"loss": 0.4215,
"step": 280
},
{
"epoch": 4.9298245614035086,
"grad_norm": 17.698915481567383,
"learning_rate": 2.3642201834862386e-05,
"loss": 0.8212,
"step": 281
},
{
"epoch": 4.947368421052632,
"grad_norm": 11.810864448547363,
"learning_rate": 2.361467889908257e-05,
"loss": 0.7421,
"step": 282
},
{
"epoch": 4.964912280701754,
"grad_norm": 25.751745223999023,
"learning_rate": 2.3587155963302755e-05,
"loss": 0.8161,
"step": 283
},
{
"epoch": 4.982456140350877,
"grad_norm": 9.799457550048828,
"learning_rate": 2.3559633027522937e-05,
"loss": 0.6857,
"step": 284
},
{
"epoch": 5.0,
"grad_norm": 10.592196464538574,
"learning_rate": 2.353211009174312e-05,
"loss": 0.8668,
"step": 285
},
{
"epoch": 5.0,
"eval_accuracy": 0.7011494252873564,
"eval_loss": 0.8508906364440918,
"eval_roc_auc": 0.8698378210083979,
"eval_runtime": 9.9199,
"eval_samples_per_second": 26.311,
"eval_steps_per_second": 1.714,
"step": 285
},
{
"epoch": 5.017543859649122,
"grad_norm": 29.6278133392334,
"learning_rate": 2.3504587155963302e-05,
"loss": 0.8195,
"step": 286
},
{
"epoch": 5.035087719298246,
"grad_norm": 8.088125228881836,
"learning_rate": 2.3477064220183488e-05,
"loss": 0.4163,
"step": 287
},
{
"epoch": 5.052631578947368,
"grad_norm": 2.1910946369171143,
"learning_rate": 2.344954128440367e-05,
"loss": 0.4273,
"step": 288
},
{
"epoch": 5.0701754385964914,
"grad_norm": 27.934329986572266,
"learning_rate": 2.3422018348623853e-05,
"loss": 0.7804,
"step": 289
},
{
"epoch": 5.087719298245614,
"grad_norm": 28.2197265625,
"learning_rate": 2.3394495412844035e-05,
"loss": 0.5328,
"step": 290
},
{
"epoch": 5.105263157894737,
"grad_norm": 20.080032348632812,
"learning_rate": 2.336697247706422e-05,
"loss": 0.9274,
"step": 291
},
{
"epoch": 5.12280701754386,
"grad_norm": 8.307439804077148,
"learning_rate": 2.3339449541284407e-05,
"loss": 0.7575,
"step": 292
},
{
"epoch": 5.140350877192983,
"grad_norm": 15.012224197387695,
"learning_rate": 2.3311926605504586e-05,
"loss": 0.6881,
"step": 293
},
{
"epoch": 5.157894736842105,
"grad_norm": 19.6754093170166,
"learning_rate": 2.3284403669724772e-05,
"loss": 0.5243,
"step": 294
},
{
"epoch": 5.175438596491228,
"grad_norm": 2.4565722942352295,
"learning_rate": 2.3256880733944955e-05,
"loss": 0.5858,
"step": 295
},
{
"epoch": 5.192982456140351,
"grad_norm": 12.843107223510742,
"learning_rate": 2.322935779816514e-05,
"loss": 0.7095,
"step": 296
},
{
"epoch": 5.2105263157894735,
"grad_norm": 3.9343743324279785,
"learning_rate": 2.320183486238532e-05,
"loss": 0.6592,
"step": 297
},
{
"epoch": 5.228070175438597,
"grad_norm": 21.421518325805664,
"learning_rate": 2.3174311926605505e-05,
"loss": 0.6792,
"step": 298
},
{
"epoch": 5.245614035087719,
"grad_norm": 29.099624633789062,
"learning_rate": 2.3146788990825688e-05,
"loss": 0.8901,
"step": 299
},
{
"epoch": 5.2631578947368425,
"grad_norm": 28.176443099975586,
"learning_rate": 2.3119266055045874e-05,
"loss": 0.677,
"step": 300
},
{
"epoch": 5.280701754385965,
"grad_norm": 49.84172439575195,
"learning_rate": 2.3091743119266053e-05,
"loss": 0.6627,
"step": 301
},
{
"epoch": 5.298245614035087,
"grad_norm": 11.457085609436035,
"learning_rate": 2.306422018348624e-05,
"loss": 0.3993,
"step": 302
},
{
"epoch": 5.315789473684211,
"grad_norm": 187.01239013671875,
"learning_rate": 2.3036697247706425e-05,
"loss": 0.6346,
"step": 303
},
{
"epoch": 5.333333333333333,
"grad_norm": 3.9368183612823486,
"learning_rate": 2.3009174311926607e-05,
"loss": 0.8296,
"step": 304
},
{
"epoch": 5.350877192982456,
"grad_norm": 100.2872085571289,
"learning_rate": 2.298165137614679e-05,
"loss": 0.8781,
"step": 305
},
{
"epoch": 5.368421052631579,
"grad_norm": 24.209882736206055,
"learning_rate": 2.2954128440366972e-05,
"loss": 0.5137,
"step": 306
},
{
"epoch": 5.385964912280702,
"grad_norm": 3.6952366828918457,
"learning_rate": 2.2926605504587158e-05,
"loss": 0.6469,
"step": 307
},
{
"epoch": 5.4035087719298245,
"grad_norm": 29.038558959960938,
"learning_rate": 2.289908256880734e-05,
"loss": 0.8628,
"step": 308
},
{
"epoch": 5.421052631578947,
"grad_norm": 40.4047737121582,
"learning_rate": 2.2871559633027523e-05,
"loss": 1.065,
"step": 309
},
{
"epoch": 5.43859649122807,
"grad_norm": 43.313846588134766,
"learning_rate": 2.2844036697247705e-05,
"loss": 0.5773,
"step": 310
},
{
"epoch": 5.456140350877193,
"grad_norm": 7.541952133178711,
"learning_rate": 2.281651376146789e-05,
"loss": 0.7124,
"step": 311
},
{
"epoch": 5.473684210526316,
"grad_norm": 9.793838500976562,
"learning_rate": 2.2788990825688074e-05,
"loss": 0.5441,
"step": 312
},
{
"epoch": 5.491228070175438,
"grad_norm": 17.21137809753418,
"learning_rate": 2.2761467889908256e-05,
"loss": 0.5927,
"step": 313
},
{
"epoch": 5.508771929824562,
"grad_norm": 23.36025047302246,
"learning_rate": 2.2733944954128442e-05,
"loss": 0.8349,
"step": 314
},
{
"epoch": 5.526315789473684,
"grad_norm": 10.170068740844727,
"learning_rate": 2.2706422018348624e-05,
"loss": 0.7946,
"step": 315
},
{
"epoch": 5.543859649122807,
"grad_norm": 7.996306419372559,
"learning_rate": 2.267889908256881e-05,
"loss": 0.7086,
"step": 316
},
{
"epoch": 5.56140350877193,
"grad_norm": 4.8672027587890625,
"learning_rate": 2.265137614678899e-05,
"loss": 0.4384,
"step": 317
},
{
"epoch": 5.578947368421053,
"grad_norm": 14.543272972106934,
"learning_rate": 2.2623853211009175e-05,
"loss": 0.9153,
"step": 318
},
{
"epoch": 5.5964912280701755,
"grad_norm": 28.248517990112305,
"learning_rate": 2.2596330275229358e-05,
"loss": 0.4361,
"step": 319
},
{
"epoch": 5.614035087719298,
"grad_norm": 10.02695083618164,
"learning_rate": 2.2568807339449544e-05,
"loss": 0.5584,
"step": 320
},
{
"epoch": 5.631578947368421,
"grad_norm": 255.10897827148438,
"learning_rate": 2.2541284403669723e-05,
"loss": 0.812,
"step": 321
},
{
"epoch": 5.649122807017544,
"grad_norm": 29.070514678955078,
"learning_rate": 2.251376146788991e-05,
"loss": 0.7201,
"step": 322
},
{
"epoch": 5.666666666666667,
"grad_norm": 70.94801330566406,
"learning_rate": 2.2486238532110094e-05,
"loss": 0.7983,
"step": 323
},
{
"epoch": 5.684210526315789,
"grad_norm": 9.982966423034668,
"learning_rate": 2.2458715596330277e-05,
"loss": 0.3869,
"step": 324
},
{
"epoch": 5.701754385964913,
"grad_norm": 44.79756546020508,
"learning_rate": 2.243119266055046e-05,
"loss": 0.5092,
"step": 325
},
{
"epoch": 5.719298245614035,
"grad_norm": 14.900089263916016,
"learning_rate": 2.2403669724770642e-05,
"loss": 0.7798,
"step": 326
},
{
"epoch": 5.7368421052631575,
"grad_norm": 5.473045825958252,
"learning_rate": 2.2376146788990828e-05,
"loss": 0.527,
"step": 327
},
{
"epoch": 5.754385964912281,
"grad_norm": 5.292675495147705,
"learning_rate": 2.234862385321101e-05,
"loss": 0.599,
"step": 328
},
{
"epoch": 5.771929824561403,
"grad_norm": 7.409575462341309,
"learning_rate": 2.2321100917431193e-05,
"loss": 0.6566,
"step": 329
},
{
"epoch": 5.7894736842105265,
"grad_norm": 16.337003707885742,
"learning_rate": 2.2293577981651375e-05,
"loss": 0.421,
"step": 330
},
{
"epoch": 5.807017543859649,
"grad_norm": 10.473221778869629,
"learning_rate": 2.226605504587156e-05,
"loss": 0.6791,
"step": 331
},
{
"epoch": 5.824561403508772,
"grad_norm": 24.943050384521484,
"learning_rate": 2.2238532110091743e-05,
"loss": 1.0399,
"step": 332
},
{
"epoch": 5.842105263157895,
"grad_norm": 3.1360652446746826,
"learning_rate": 2.2211009174311926e-05,
"loss": 0.4585,
"step": 333
},
{
"epoch": 5.859649122807017,
"grad_norm": 5.742803573608398,
"learning_rate": 2.2183486238532112e-05,
"loss": 0.7424,
"step": 334
},
{
"epoch": 5.87719298245614,
"grad_norm": 10.02749252319336,
"learning_rate": 2.2155963302752294e-05,
"loss": 0.5505,
"step": 335
},
{
"epoch": 5.894736842105263,
"grad_norm": 11.549918174743652,
"learning_rate": 2.212844036697248e-05,
"loss": 0.7165,
"step": 336
},
{
"epoch": 5.912280701754386,
"grad_norm": 11.704695701599121,
"learning_rate": 2.210091743119266e-05,
"loss": 0.5897,
"step": 337
},
{
"epoch": 5.9298245614035086,
"grad_norm": 8.045721054077148,
"learning_rate": 2.2073394495412845e-05,
"loss": 0.5606,
"step": 338
},
{
"epoch": 5.947368421052632,
"grad_norm": 6.550736904144287,
"learning_rate": 2.2045871559633028e-05,
"loss": 0.4498,
"step": 339
},
{
"epoch": 5.964912280701754,
"grad_norm": 7.8976335525512695,
"learning_rate": 2.2018348623853213e-05,
"loss": 0.6578,
"step": 340
},
{
"epoch": 5.982456140350877,
"grad_norm": 19.857566833496094,
"learning_rate": 2.1990825688073392e-05,
"loss": 0.6241,
"step": 341
},
{
"epoch": 6.0,
"grad_norm": 389.2672119140625,
"learning_rate": 2.196330275229358e-05,
"loss": 0.5834,
"step": 342
},
{
"epoch": 6.0,
"eval_accuracy": 0.6436781609195402,
"eval_loss": 0.9111022353172302,
"eval_roc_auc": 0.8661937516704385,
"eval_runtime": 8.4523,
"eval_samples_per_second": 30.879,
"eval_steps_per_second": 2.011,
"step": 342
},
{
"epoch": 6.017543859649122,
"grad_norm": 6.004997253417969,
"learning_rate": 2.193577981651376e-05,
"loss": 0.4383,
"step": 343
},
{
"epoch": 6.035087719298246,
"grad_norm": 32.85868453979492,
"learning_rate": 2.1908256880733947e-05,
"loss": 0.8127,
"step": 344
},
{
"epoch": 6.052631578947368,
"grad_norm": 22.06620216369629,
"learning_rate": 2.188073394495413e-05,
"loss": 0.5588,
"step": 345
},
{
"epoch": 6.0701754385964914,
"grad_norm": 12.496269226074219,
"learning_rate": 2.185321100917431e-05,
"loss": 0.7202,
"step": 346
},
{
"epoch": 6.087719298245614,
"grad_norm": 39.94806671142578,
"learning_rate": 2.1825688073394497e-05,
"loss": 0.4362,
"step": 347
},
{
"epoch": 6.105263157894737,
"grad_norm": 5.034066677093506,
"learning_rate": 2.179816513761468e-05,
"loss": 0.6235,
"step": 348
},
{
"epoch": 6.12280701754386,
"grad_norm": 98.4699935913086,
"learning_rate": 2.1770642201834862e-05,
"loss": 0.8505,
"step": 349
},
{
"epoch": 6.140350877192983,
"grad_norm": 22.5294132232666,
"learning_rate": 2.1743119266055045e-05,
"loss": 0.6741,
"step": 350
},
{
"epoch": 6.157894736842105,
"grad_norm": 33.31329345703125,
"learning_rate": 2.171559633027523e-05,
"loss": 0.6783,
"step": 351
},
{
"epoch": 6.175438596491228,
"grad_norm": 2.9709410667419434,
"learning_rate": 2.1688073394495413e-05,
"loss": 0.3269,
"step": 352
},
{
"epoch": 6.192982456140351,
"grad_norm": 14.349902153015137,
"learning_rate": 2.1660550458715596e-05,
"loss": 0.5928,
"step": 353
},
{
"epoch": 6.2105263157894735,
"grad_norm": 6.400623321533203,
"learning_rate": 2.163302752293578e-05,
"loss": 0.5708,
"step": 354
},
{
"epoch": 6.228070175438597,
"grad_norm": 40.32659912109375,
"learning_rate": 2.1605504587155964e-05,
"loss": 0.4459,
"step": 355
},
{
"epoch": 6.245614035087719,
"grad_norm": 16.54581069946289,
"learning_rate": 2.157798165137615e-05,
"loss": 1.0475,
"step": 356
},
{
"epoch": 6.2631578947368425,
"grad_norm": 7.048498630523682,
"learning_rate": 2.155045871559633e-05,
"loss": 0.6128,
"step": 357
},
{
"epoch": 6.280701754385965,
"grad_norm": 18.782760620117188,
"learning_rate": 2.1522935779816515e-05,
"loss": 0.6483,
"step": 358
},
{
"epoch": 6.298245614035087,
"grad_norm": 13.411852836608887,
"learning_rate": 2.1495412844036697e-05,
"loss": 0.6957,
"step": 359
},
{
"epoch": 6.315789473684211,
"grad_norm": 3.7218034267425537,
"learning_rate": 2.1467889908256883e-05,
"loss": 0.268,
"step": 360
},
{
"epoch": 6.333333333333333,
"grad_norm": 5.883011817932129,
"learning_rate": 2.1440366972477062e-05,
"loss": 0.5415,
"step": 361
},
{
"epoch": 6.350877192982456,
"grad_norm": 6.700387477874756,
"learning_rate": 2.1412844036697248e-05,
"loss": 0.4707,
"step": 362
},
{
"epoch": 6.368421052631579,
"grad_norm": 12.57919692993164,
"learning_rate": 2.138532110091743e-05,
"loss": 0.5245,
"step": 363
},
{
"epoch": 6.385964912280702,
"grad_norm": 23.14158058166504,
"learning_rate": 2.1357798165137616e-05,
"loss": 0.4275,
"step": 364
},
{
"epoch": 6.4035087719298245,
"grad_norm": 20.404285430908203,
"learning_rate": 2.13302752293578e-05,
"loss": 0.7325,
"step": 365
},
{
"epoch": 6.421052631578947,
"grad_norm": 7.9764580726623535,
"learning_rate": 2.130275229357798e-05,
"loss": 0.469,
"step": 366
},
{
"epoch": 6.43859649122807,
"grad_norm": 11.24958324432373,
"learning_rate": 2.1275229357798167e-05,
"loss": 0.7704,
"step": 367
},
{
"epoch": 6.456140350877193,
"grad_norm": 3.0611751079559326,
"learning_rate": 2.124770642201835e-05,
"loss": 0.3323,
"step": 368
},
{
"epoch": 6.473684210526316,
"grad_norm": 40.933162689208984,
"learning_rate": 2.1220183486238532e-05,
"loss": 0.5066,
"step": 369
},
{
"epoch": 6.491228070175438,
"grad_norm": 9.126625061035156,
"learning_rate": 2.1192660550458715e-05,
"loss": 0.4907,
"step": 370
},
{
"epoch": 6.508771929824562,
"grad_norm": 9.465023040771484,
"learning_rate": 2.11651376146789e-05,
"loss": 0.5455,
"step": 371
},
{
"epoch": 6.526315789473684,
"grad_norm": 24.881996154785156,
"learning_rate": 2.1137614678899083e-05,
"loss": 0.6284,
"step": 372
},
{
"epoch": 6.543859649122807,
"grad_norm": 32.151466369628906,
"learning_rate": 2.1110091743119266e-05,
"loss": 0.4083,
"step": 373
},
{
"epoch": 6.56140350877193,
"grad_norm": 12.968864440917969,
"learning_rate": 2.1082568807339448e-05,
"loss": 0.5154,
"step": 374
},
{
"epoch": 6.578947368421053,
"grad_norm": 64.264892578125,
"learning_rate": 2.1055045871559634e-05,
"loss": 0.2809,
"step": 375
},
{
"epoch": 6.5964912280701755,
"grad_norm": 19.623029708862305,
"learning_rate": 2.102752293577982e-05,
"loss": 0.7864,
"step": 376
},
{
"epoch": 6.614035087719298,
"grad_norm": 7.40114688873291,
"learning_rate": 2.1e-05,
"loss": 0.5327,
"step": 377
},
{
"epoch": 6.631578947368421,
"grad_norm": 15.018710136413574,
"learning_rate": 2.0972477064220185e-05,
"loss": 0.5948,
"step": 378
},
{
"epoch": 6.649122807017544,
"grad_norm": 6.5761847496032715,
"learning_rate": 2.0944954128440367e-05,
"loss": 0.5606,
"step": 379
},
{
"epoch": 6.666666666666667,
"grad_norm": 21.176044464111328,
"learning_rate": 2.0917431192660553e-05,
"loss": 0.8495,
"step": 380
},
{
"epoch": 6.684210526315789,
"grad_norm": 15.794927597045898,
"learning_rate": 2.0889908256880732e-05,
"loss": 0.2922,
"step": 381
},
{
"epoch": 6.701754385964913,
"grad_norm": 21.976320266723633,
"learning_rate": 2.0862385321100918e-05,
"loss": 0.4193,
"step": 382
},
{
"epoch": 6.719298245614035,
"grad_norm": 127.4170150756836,
"learning_rate": 2.08348623853211e-05,
"loss": 0.7539,
"step": 383
},
{
"epoch": 6.7368421052631575,
"grad_norm": 23.519792556762695,
"learning_rate": 2.0807339449541286e-05,
"loss": 0.8324,
"step": 384
},
{
"epoch": 6.754385964912281,
"grad_norm": 34.533729553222656,
"learning_rate": 2.0779816513761465e-05,
"loss": 0.635,
"step": 385
},
{
"epoch": 6.771929824561403,
"grad_norm": 48.43922805786133,
"learning_rate": 2.075229357798165e-05,
"loss": 0.7622,
"step": 386
},
{
"epoch": 6.7894736842105265,
"grad_norm": 9.6075439453125,
"learning_rate": 2.0724770642201837e-05,
"loss": 0.4885,
"step": 387
},
{
"epoch": 6.807017543859649,
"grad_norm": 25.955398559570312,
"learning_rate": 2.069724770642202e-05,
"loss": 0.2685,
"step": 388
},
{
"epoch": 6.824561403508772,
"grad_norm": 17.847700119018555,
"learning_rate": 2.0669724770642202e-05,
"loss": 0.3958,
"step": 389
},
{
"epoch": 6.842105263157895,
"grad_norm": 13.096657752990723,
"learning_rate": 2.0642201834862385e-05,
"loss": 0.9942,
"step": 390
},
{
"epoch": 6.859649122807017,
"grad_norm": 11.541616439819336,
"learning_rate": 2.061467889908257e-05,
"loss": 0.5704,
"step": 391
},
{
"epoch": 6.87719298245614,
"grad_norm": 3.565450668334961,
"learning_rate": 2.0587155963302753e-05,
"loss": 0.6809,
"step": 392
},
{
"epoch": 6.894736842105263,
"grad_norm": 27.333837509155273,
"learning_rate": 2.0559633027522935e-05,
"loss": 0.6584,
"step": 393
},
{
"epoch": 6.912280701754386,
"grad_norm": 15.244169235229492,
"learning_rate": 2.0532110091743118e-05,
"loss": 0.6753,
"step": 394
},
{
"epoch": 6.9298245614035086,
"grad_norm": 18.803529739379883,
"learning_rate": 2.0504587155963304e-05,
"loss": 0.5975,
"step": 395
},
{
"epoch": 6.947368421052632,
"grad_norm": 7.317320346832275,
"learning_rate": 2.047706422018349e-05,
"loss": 0.9889,
"step": 396
},
{
"epoch": 6.964912280701754,
"grad_norm": 19.616382598876953,
"learning_rate": 2.044954128440367e-05,
"loss": 0.6709,
"step": 397
},
{
"epoch": 6.982456140350877,
"grad_norm": 46.652469635009766,
"learning_rate": 2.0422018348623855e-05,
"loss": 0.5268,
"step": 398
},
{
"epoch": 7.0,
"grad_norm": 11.762057304382324,
"learning_rate": 2.0394495412844037e-05,
"loss": 0.9838,
"step": 399
},
{
"epoch": 7.0,
"eval_accuracy": 0.6819923371647509,
"eval_loss": 0.8196430802345276,
"eval_roc_auc": 0.8813437942252584,
"eval_runtime": 9.6385,
"eval_samples_per_second": 27.079,
"eval_steps_per_second": 1.764,
"step": 399
},
{
"epoch": 7.017543859649122,
"grad_norm": 14.474690437316895,
"learning_rate": 2.0366972477064223e-05,
"loss": 0.3408,
"step": 400
},
{
"epoch": 7.035087719298246,
"grad_norm": 19.78782081604004,
"learning_rate": 2.0339449541284402e-05,
"loss": 0.4244,
"step": 401
},
{
"epoch": 7.052631578947368,
"grad_norm": 10.25345230102539,
"learning_rate": 2.0311926605504588e-05,
"loss": 0.6707,
"step": 402
},
{
"epoch": 7.0701754385964914,
"grad_norm": 2.85701060295105,
"learning_rate": 2.028440366972477e-05,
"loss": 0.2397,
"step": 403
},
{
"epoch": 7.087719298245614,
"grad_norm": 6.676547527313232,
"learning_rate": 2.0256880733944956e-05,
"loss": 0.3686,
"step": 404
},
{
"epoch": 7.105263157894737,
"grad_norm": 165.5542755126953,
"learning_rate": 2.0229357798165135e-05,
"loss": 0.5689,
"step": 405
},
{
"epoch": 7.12280701754386,
"grad_norm": 14.226078033447266,
"learning_rate": 2.020183486238532e-05,
"loss": 0.3423,
"step": 406
},
{
"epoch": 7.140350877192983,
"grad_norm": 5.127176761627197,
"learning_rate": 2.0174311926605507e-05,
"loss": 0.6358,
"step": 407
},
{
"epoch": 7.157894736842105,
"grad_norm": 21.368282318115234,
"learning_rate": 2.014678899082569e-05,
"loss": 0.5103,
"step": 408
},
{
"epoch": 7.175438596491228,
"grad_norm": 3.635981321334839,
"learning_rate": 2.0119266055045872e-05,
"loss": 0.3426,
"step": 409
},
{
"epoch": 7.192982456140351,
"grad_norm": 8.354199409484863,
"learning_rate": 2.0091743119266054e-05,
"loss": 0.4433,
"step": 410
},
{
"epoch": 7.2105263157894735,
"grad_norm": 125.07445526123047,
"learning_rate": 2.006422018348624e-05,
"loss": 0.5754,
"step": 411
},
{
"epoch": 7.228070175438597,
"grad_norm": 18.86847496032715,
"learning_rate": 2.0036697247706423e-05,
"loss": 0.6004,
"step": 412
},
{
"epoch": 7.245614035087719,
"grad_norm": 13.290031433105469,
"learning_rate": 2.0009174311926605e-05,
"loss": 0.6924,
"step": 413
},
{
"epoch": 7.2631578947368425,
"grad_norm": 7.521588325500488,
"learning_rate": 1.9981651376146788e-05,
"loss": 0.4925,
"step": 414
},
{
"epoch": 7.280701754385965,
"grad_norm": 14.16098690032959,
"learning_rate": 1.9954128440366974e-05,
"loss": 0.6182,
"step": 415
},
{
"epoch": 7.298245614035087,
"grad_norm": 12.972576141357422,
"learning_rate": 1.9926605504587156e-05,
"loss": 0.4994,
"step": 416
},
{
"epoch": 7.315789473684211,
"grad_norm": 7.475390911102295,
"learning_rate": 1.989908256880734e-05,
"loss": 0.5975,
"step": 417
},
{
"epoch": 7.333333333333333,
"grad_norm": 19.82378387451172,
"learning_rate": 1.9871559633027524e-05,
"loss": 0.4584,
"step": 418
},
{
"epoch": 7.350877192982456,
"grad_norm": 7.333564281463623,
"learning_rate": 1.9844036697247707e-05,
"loss": 0.5651,
"step": 419
},
{
"epoch": 7.368421052631579,
"grad_norm": 12.829773902893066,
"learning_rate": 1.9816513761467893e-05,
"loss": 0.4198,
"step": 420
},
{
"epoch": 7.385964912280702,
"grad_norm": 3.876220464706421,
"learning_rate": 1.9788990825688072e-05,
"loss": 0.2843,
"step": 421
},
{
"epoch": 7.4035087719298245,
"grad_norm": 10.361215591430664,
"learning_rate": 1.9761467889908258e-05,
"loss": 0.4935,
"step": 422
},
{
"epoch": 7.421052631578947,
"grad_norm": 59.4860954284668,
"learning_rate": 1.973394495412844e-05,
"loss": 0.5763,
"step": 423
},
{
"epoch": 7.43859649122807,
"grad_norm": 10.748738288879395,
"learning_rate": 1.9706422018348626e-05,
"loss": 0.7249,
"step": 424
},
{
"epoch": 7.456140350877193,
"grad_norm": 19.900938034057617,
"learning_rate": 1.9678899082568805e-05,
"loss": 0.3448,
"step": 425
},
{
"epoch": 7.473684210526316,
"grad_norm": 9.298110961914062,
"learning_rate": 1.965137614678899e-05,
"loss": 0.424,
"step": 426
},
{
"epoch": 7.491228070175438,
"grad_norm": 6.225700855255127,
"learning_rate": 1.9623853211009173e-05,
"loss": 0.5622,
"step": 427
},
{
"epoch": 7.508771929824562,
"grad_norm": 7.592733860015869,
"learning_rate": 1.959633027522936e-05,
"loss": 0.327,
"step": 428
},
{
"epoch": 7.526315789473684,
"grad_norm": 15.926929473876953,
"learning_rate": 1.9568807339449542e-05,
"loss": 0.5397,
"step": 429
},
{
"epoch": 7.543859649122807,
"grad_norm": 5.76393461227417,
"learning_rate": 1.9541284403669724e-05,
"loss": 0.6615,
"step": 430
},
{
"epoch": 7.56140350877193,
"grad_norm": 9.405485153198242,
"learning_rate": 1.951376146788991e-05,
"loss": 0.5491,
"step": 431
},
{
"epoch": 7.578947368421053,
"grad_norm": 6.3222479820251465,
"learning_rate": 1.9486238532110093e-05,
"loss": 0.5539,
"step": 432
},
{
"epoch": 7.5964912280701755,
"grad_norm": 15.603589057922363,
"learning_rate": 1.9458715596330275e-05,
"loss": 0.352,
"step": 433
},
{
"epoch": 7.614035087719298,
"grad_norm": 54.22720718383789,
"learning_rate": 1.9431192660550458e-05,
"loss": 0.5936,
"step": 434
},
{
"epoch": 7.631578947368421,
"grad_norm": 7.901265621185303,
"learning_rate": 1.9403669724770643e-05,
"loss": 0.6319,
"step": 435
},
{
"epoch": 7.649122807017544,
"grad_norm": 16.199270248413086,
"learning_rate": 1.9376146788990826e-05,
"loss": 0.5468,
"step": 436
},
{
"epoch": 7.666666666666667,
"grad_norm": 84.76512908935547,
"learning_rate": 1.934862385321101e-05,
"loss": 0.5745,
"step": 437
},
{
"epoch": 7.684210526315789,
"grad_norm": 8.624265670776367,
"learning_rate": 1.9321100917431194e-05,
"loss": 0.5141,
"step": 438
},
{
"epoch": 7.701754385964913,
"grad_norm": 4.701018333435059,
"learning_rate": 1.9293577981651377e-05,
"loss": 0.2706,
"step": 439
},
{
"epoch": 7.719298245614035,
"grad_norm": 3.979341745376587,
"learning_rate": 1.9266055045871563e-05,
"loss": 0.4677,
"step": 440
},
{
"epoch": 7.7368421052631575,
"grad_norm": 10.237481117248535,
"learning_rate": 1.923853211009174e-05,
"loss": 0.6246,
"step": 441
},
{
"epoch": 7.754385964912281,
"grad_norm": 16.742145538330078,
"learning_rate": 1.9211009174311927e-05,
"loss": 0.5573,
"step": 442
},
{
"epoch": 7.771929824561403,
"grad_norm": 7.850118637084961,
"learning_rate": 1.918348623853211e-05,
"loss": 0.3705,
"step": 443
},
{
"epoch": 7.7894736842105265,
"grad_norm": 7.423269271850586,
"learning_rate": 1.9155963302752296e-05,
"loss": 0.5847,
"step": 444
},
{
"epoch": 7.807017543859649,
"grad_norm": 3.7411139011383057,
"learning_rate": 1.9128440366972475e-05,
"loss": 0.3861,
"step": 445
},
{
"epoch": 7.824561403508772,
"grad_norm": 131.05946350097656,
"learning_rate": 1.910091743119266e-05,
"loss": 0.5242,
"step": 446
},
{
"epoch": 7.842105263157895,
"grad_norm": 12.686647415161133,
"learning_rate": 1.9073394495412843e-05,
"loss": 0.2422,
"step": 447
},
{
"epoch": 7.859649122807017,
"grad_norm": 7.073160171508789,
"learning_rate": 1.904587155963303e-05,
"loss": 0.2323,
"step": 448
},
{
"epoch": 7.87719298245614,
"grad_norm": 4.771604061126709,
"learning_rate": 1.901834862385321e-05,
"loss": 0.2788,
"step": 449
},
{
"epoch": 7.894736842105263,
"grad_norm": 11.123143196105957,
"learning_rate": 1.8990825688073394e-05,
"loss": 0.3865,
"step": 450
},
{
"epoch": 7.912280701754386,
"grad_norm": 19.361055374145508,
"learning_rate": 1.896330275229358e-05,
"loss": 0.4511,
"step": 451
},
{
"epoch": 7.9298245614035086,
"grad_norm": 8.789932250976562,
"learning_rate": 1.8935779816513762e-05,
"loss": 0.3365,
"step": 452
},
{
"epoch": 7.947368421052632,
"grad_norm": 12.437773704528809,
"learning_rate": 1.8908256880733945e-05,
"loss": 0.3662,
"step": 453
},
{
"epoch": 7.964912280701754,
"grad_norm": 17.096485137939453,
"learning_rate": 1.8880733944954127e-05,
"loss": 0.5647,
"step": 454
},
{
"epoch": 7.982456140350877,
"grad_norm": 20.25951385498047,
"learning_rate": 1.8853211009174313e-05,
"loss": 0.4497,
"step": 455
},
{
"epoch": 8.0,
"grad_norm": 14.128129005432129,
"learning_rate": 1.8825688073394496e-05,
"loss": 0.3699,
"step": 456
},
{
"epoch": 8.0,
"eval_accuracy": 0.6743295019157088,
"eval_loss": 0.9114387035369873,
"eval_roc_auc": 0.8783069858814981,
"eval_runtime": 9.7288,
"eval_samples_per_second": 26.827,
"eval_steps_per_second": 1.747,
"step": 456
},
{
"epoch": 8.017543859649123,
"grad_norm": 14.421273231506348,
"learning_rate": 1.8798165137614678e-05,
"loss": 0.4197,
"step": 457
},
{
"epoch": 8.035087719298245,
"grad_norm": 7.326334476470947,
"learning_rate": 1.877064220183486e-05,
"loss": 0.3367,
"step": 458
},
{
"epoch": 8.052631578947368,
"grad_norm": 9.997421264648438,
"learning_rate": 1.8743119266055047e-05,
"loss": 0.4693,
"step": 459
},
{
"epoch": 8.070175438596491,
"grad_norm": 16.052030563354492,
"learning_rate": 1.8715596330275232e-05,
"loss": 0.3885,
"step": 460
},
{
"epoch": 8.087719298245615,
"grad_norm": 5.081548690795898,
"learning_rate": 1.868807339449541e-05,
"loss": 0.3361,
"step": 461
},
{
"epoch": 8.105263157894736,
"grad_norm": 12.288408279418945,
"learning_rate": 1.8660550458715597e-05,
"loss": 0.4407,
"step": 462
},
{
"epoch": 8.12280701754386,
"grad_norm": 20.765512466430664,
"learning_rate": 1.863302752293578e-05,
"loss": 0.3498,
"step": 463
},
{
"epoch": 8.140350877192983,
"grad_norm": 16.51128578186035,
"learning_rate": 1.8605504587155966e-05,
"loss": 0.5519,
"step": 464
},
{
"epoch": 8.157894736842104,
"grad_norm": 9.807088851928711,
"learning_rate": 1.8577981651376145e-05,
"loss": 0.4753,
"step": 465
},
{
"epoch": 8.175438596491228,
"grad_norm": 27.620849609375,
"learning_rate": 1.855045871559633e-05,
"loss": 0.4292,
"step": 466
},
{
"epoch": 8.192982456140351,
"grad_norm": 3.228712320327759,
"learning_rate": 1.8522935779816513e-05,
"loss": 0.2354,
"step": 467
},
{
"epoch": 8.210526315789474,
"grad_norm": 19.046974182128906,
"learning_rate": 1.84954128440367e-05,
"loss": 0.7982,
"step": 468
},
{
"epoch": 8.228070175438596,
"grad_norm": 35.34560775756836,
"learning_rate": 1.8467889908256878e-05,
"loss": 0.6045,
"step": 469
},
{
"epoch": 8.24561403508772,
"grad_norm": 19.82710838317871,
"learning_rate": 1.8440366972477064e-05,
"loss": 0.2748,
"step": 470
},
{
"epoch": 8.263157894736842,
"grad_norm": 107.26939392089844,
"learning_rate": 1.841284403669725e-05,
"loss": 0.5046,
"step": 471
},
{
"epoch": 8.280701754385966,
"grad_norm": 9.757340431213379,
"learning_rate": 1.8385321100917432e-05,
"loss": 0.2989,
"step": 472
},
{
"epoch": 8.298245614035087,
"grad_norm": 42.89109802246094,
"learning_rate": 1.8357798165137615e-05,
"loss": 0.4882,
"step": 473
},
{
"epoch": 8.31578947368421,
"grad_norm": 27.338111877441406,
"learning_rate": 1.8330275229357797e-05,
"loss": 0.4523,
"step": 474
},
{
"epoch": 8.333333333333334,
"grad_norm": 3.047417163848877,
"learning_rate": 1.8302752293577983e-05,
"loss": 0.2369,
"step": 475
},
{
"epoch": 8.350877192982455,
"grad_norm": 27.468341827392578,
"learning_rate": 1.8275229357798166e-05,
"loss": 0.4376,
"step": 476
},
{
"epoch": 8.368421052631579,
"grad_norm": 4.080908298492432,
"learning_rate": 1.8247706422018348e-05,
"loss": 0.3097,
"step": 477
},
{
"epoch": 8.385964912280702,
"grad_norm": 15.838717460632324,
"learning_rate": 1.822018348623853e-05,
"loss": 0.424,
"step": 478
},
{
"epoch": 8.403508771929825,
"grad_norm": 9.841987609863281,
"learning_rate": 1.8192660550458716e-05,
"loss": 0.5209,
"step": 479
},
{
"epoch": 8.421052631578947,
"grad_norm": 11.688602447509766,
"learning_rate": 1.8165137614678902e-05,
"loss": 0.2871,
"step": 480
},
{
"epoch": 8.43859649122807,
"grad_norm": 42.60127639770508,
"learning_rate": 1.813761467889908e-05,
"loss": 0.7017,
"step": 481
},
{
"epoch": 8.456140350877194,
"grad_norm": 21.921663284301758,
"learning_rate": 1.8110091743119267e-05,
"loss": 0.6333,
"step": 482
},
{
"epoch": 8.473684210526315,
"grad_norm": 14.230140686035156,
"learning_rate": 1.808256880733945e-05,
"loss": 0.4494,
"step": 483
},
{
"epoch": 8.491228070175438,
"grad_norm": 4.411128997802734,
"learning_rate": 1.8055045871559636e-05,
"loss": 0.2,
"step": 484
},
{
"epoch": 8.508771929824562,
"grad_norm": 4.632619857788086,
"learning_rate": 1.8027522935779815e-05,
"loss": 0.3362,
"step": 485
},
{
"epoch": 8.526315789473685,
"grad_norm": 11.009140014648438,
"learning_rate": 1.8e-05,
"loss": 0.2433,
"step": 486
},
{
"epoch": 8.543859649122806,
"grad_norm": 35.23042297363281,
"learning_rate": 1.7972477064220183e-05,
"loss": 0.4174,
"step": 487
},
{
"epoch": 8.56140350877193,
"grad_norm": 6.151736259460449,
"learning_rate": 1.794495412844037e-05,
"loss": 0.3024,
"step": 488
},
{
"epoch": 8.578947368421053,
"grad_norm": 25.69590950012207,
"learning_rate": 1.7917431192660548e-05,
"loss": 0.7038,
"step": 489
},
{
"epoch": 8.596491228070175,
"grad_norm": 20.76173973083496,
"learning_rate": 1.7889908256880734e-05,
"loss": 0.6203,
"step": 490
},
{
"epoch": 8.614035087719298,
"grad_norm": 17.6284122467041,
"learning_rate": 1.786238532110092e-05,
"loss": 0.3738,
"step": 491
},
{
"epoch": 8.631578947368421,
"grad_norm": 6.477032661437988,
"learning_rate": 1.7834862385321102e-05,
"loss": 0.4933,
"step": 492
},
{
"epoch": 8.649122807017545,
"grad_norm": 11.045804023742676,
"learning_rate": 1.7807339449541285e-05,
"loss": 0.4582,
"step": 493
},
{
"epoch": 8.666666666666666,
"grad_norm": 7.45139741897583,
"learning_rate": 1.7779816513761467e-05,
"loss": 0.2789,
"step": 494
},
{
"epoch": 8.68421052631579,
"grad_norm": 6.092661380767822,
"learning_rate": 1.7752293577981653e-05,
"loss": 0.5297,
"step": 495
},
{
"epoch": 8.701754385964913,
"grad_norm": 9.822498321533203,
"learning_rate": 1.7724770642201835e-05,
"loss": 0.4994,
"step": 496
},
{
"epoch": 8.719298245614034,
"grad_norm": 25.944725036621094,
"learning_rate": 1.7697247706422018e-05,
"loss": 0.5637,
"step": 497
},
{
"epoch": 8.736842105263158,
"grad_norm": 7.313636302947998,
"learning_rate": 1.76697247706422e-05,
"loss": 0.3809,
"step": 498
},
{
"epoch": 8.75438596491228,
"grad_norm": 28.630786895751953,
"learning_rate": 1.7642201834862386e-05,
"loss": 0.2553,
"step": 499
},
{
"epoch": 8.771929824561404,
"grad_norm": 13.779499053955078,
"learning_rate": 1.761467889908257e-05,
"loss": 0.6088,
"step": 500
},
{
"epoch": 8.789473684210526,
"grad_norm": 13.866922378540039,
"learning_rate": 1.758715596330275e-05,
"loss": 0.3418,
"step": 501
},
{
"epoch": 8.807017543859649,
"grad_norm": 9.200101852416992,
"learning_rate": 1.7559633027522937e-05,
"loss": 0.5261,
"step": 502
},
{
"epoch": 8.824561403508772,
"grad_norm": 4.6678853034973145,
"learning_rate": 1.753211009174312e-05,
"loss": 0.2657,
"step": 503
},
{
"epoch": 8.842105263157894,
"grad_norm": 13.394104957580566,
"learning_rate": 1.7504587155963305e-05,
"loss": 0.4874,
"step": 504
},
{
"epoch": 8.859649122807017,
"grad_norm": 8.163538932800293,
"learning_rate": 1.7477064220183484e-05,
"loss": 0.4425,
"step": 505
},
{
"epoch": 8.87719298245614,
"grad_norm": 11.24685001373291,
"learning_rate": 1.744954128440367e-05,
"loss": 0.4143,
"step": 506
},
{
"epoch": 8.894736842105264,
"grad_norm": 15.51749324798584,
"learning_rate": 1.7422018348623853e-05,
"loss": 0.2075,
"step": 507
},
{
"epoch": 8.912280701754385,
"grad_norm": 18.03982162475586,
"learning_rate": 1.739449541284404e-05,
"loss": 0.3895,
"step": 508
},
{
"epoch": 8.929824561403509,
"grad_norm": 30.238689422607422,
"learning_rate": 1.7366972477064218e-05,
"loss": 0.502,
"step": 509
},
{
"epoch": 8.947368421052632,
"grad_norm": 17.355592727661133,
"learning_rate": 1.7339449541284404e-05,
"loss": 0.3032,
"step": 510
},
{
"epoch": 8.964912280701755,
"grad_norm": 8.36916732788086,
"learning_rate": 1.7311926605504586e-05,
"loss": 0.4566,
"step": 511
},
{
"epoch": 8.982456140350877,
"grad_norm": 4.937262535095215,
"learning_rate": 1.7284403669724772e-05,
"loss": 0.2615,
"step": 512
},
{
"epoch": 9.0,
"grad_norm": 61.639869689941406,
"learning_rate": 1.7256880733944954e-05,
"loss": 0.6482,
"step": 513
},
{
"epoch": 9.0,
"eval_accuracy": 0.7126436781609196,
"eval_loss": 0.8051581382751465,
"eval_roc_auc": 0.8928142130921077,
"eval_runtime": 9.5241,
"eval_samples_per_second": 27.404,
"eval_steps_per_second": 1.785,
"step": 513
},
{
"epoch": 9.017543859649123,
"grad_norm": 9.234489440917969,
"learning_rate": 1.7229357798165137e-05,
"loss": 0.2302,
"step": 514
},
{
"epoch": 9.035087719298245,
"grad_norm": 9.468729019165039,
"learning_rate": 1.7201834862385323e-05,
"loss": 0.4173,
"step": 515
},
{
"epoch": 9.052631578947368,
"grad_norm": 10.059374809265137,
"learning_rate": 1.7174311926605505e-05,
"loss": 0.232,
"step": 516
},
{
"epoch": 9.070175438596491,
"grad_norm": 35.99481201171875,
"learning_rate": 1.7146788990825688e-05,
"loss": 0.42,
"step": 517
},
{
"epoch": 9.087719298245615,
"grad_norm": 2.6732192039489746,
"learning_rate": 1.711926605504587e-05,
"loss": 0.2202,
"step": 518
},
{
"epoch": 9.105263157894736,
"grad_norm": 26.779964447021484,
"learning_rate": 1.7091743119266056e-05,
"loss": 0.3977,
"step": 519
},
{
"epoch": 9.12280701754386,
"grad_norm": 8.011734962463379,
"learning_rate": 1.706422018348624e-05,
"loss": 0.2616,
"step": 520
},
{
"epoch": 9.140350877192983,
"grad_norm": 9.349709510803223,
"learning_rate": 1.703669724770642e-05,
"loss": 0.1727,
"step": 521
},
{
"epoch": 9.157894736842104,
"grad_norm": 3.374070405960083,
"learning_rate": 1.7009174311926607e-05,
"loss": 0.3442,
"step": 522
},
{
"epoch": 9.175438596491228,
"grad_norm": 7.063957691192627,
"learning_rate": 1.698165137614679e-05,
"loss": 0.3712,
"step": 523
},
{
"epoch": 9.192982456140351,
"grad_norm": 8.70645523071289,
"learning_rate": 1.6954128440366975e-05,
"loss": 0.371,
"step": 524
},
{
"epoch": 9.210526315789474,
"grad_norm": 6.593190670013428,
"learning_rate": 1.6926605504587154e-05,
"loss": 0.4862,
"step": 525
},
{
"epoch": 9.228070175438596,
"grad_norm": 5.061346054077148,
"learning_rate": 1.689908256880734e-05,
"loss": 0.2712,
"step": 526
},
{
"epoch": 9.24561403508772,
"grad_norm": 9.419418334960938,
"learning_rate": 1.6871559633027523e-05,
"loss": 0.2264,
"step": 527
},
{
"epoch": 9.263157894736842,
"grad_norm": 10.859743118286133,
"learning_rate": 1.684403669724771e-05,
"loss": 0.3662,
"step": 528
},
{
"epoch": 9.280701754385966,
"grad_norm": 30.619741439819336,
"learning_rate": 1.6816513761467888e-05,
"loss": 0.3531,
"step": 529
},
{
"epoch": 9.298245614035087,
"grad_norm": 3.2730002403259277,
"learning_rate": 1.6788990825688073e-05,
"loss": 0.4001,
"step": 530
},
{
"epoch": 9.31578947368421,
"grad_norm": 8.583318710327148,
"learning_rate": 1.6761467889908256e-05,
"loss": 0.2039,
"step": 531
},
{
"epoch": 9.333333333333334,
"grad_norm": 19.599987030029297,
"learning_rate": 1.6733944954128442e-05,
"loss": 0.5786,
"step": 532
},
{
"epoch": 9.350877192982455,
"grad_norm": 3.10512638092041,
"learning_rate": 1.6706422018348624e-05,
"loss": 0.2436,
"step": 533
},
{
"epoch": 9.368421052631579,
"grad_norm": 10.77072525024414,
"learning_rate": 1.6678899082568807e-05,
"loss": 0.2794,
"step": 534
},
{
"epoch": 9.385964912280702,
"grad_norm": 14.997283935546875,
"learning_rate": 1.6651376146788993e-05,
"loss": 0.4462,
"step": 535
},
{
"epoch": 9.403508771929825,
"grad_norm": 14.47448444366455,
"learning_rate": 1.6623853211009175e-05,
"loss": 0.3414,
"step": 536
},
{
"epoch": 9.421052631578947,
"grad_norm": 26.79741096496582,
"learning_rate": 1.6596330275229358e-05,
"loss": 0.3425,
"step": 537
},
{
"epoch": 9.43859649122807,
"grad_norm": 16.75027847290039,
"learning_rate": 1.656880733944954e-05,
"loss": 0.6174,
"step": 538
},
{
"epoch": 9.456140350877194,
"grad_norm": 16.61779022216797,
"learning_rate": 1.6541284403669726e-05,
"loss": 0.4949,
"step": 539
},
{
"epoch": 9.473684210526315,
"grad_norm": 14.869440078735352,
"learning_rate": 1.651376146788991e-05,
"loss": 0.337,
"step": 540
},
{
"epoch": 9.491228070175438,
"grad_norm": 9.696203231811523,
"learning_rate": 1.648623853211009e-05,
"loss": 0.3742,
"step": 541
},
{
"epoch": 9.508771929824562,
"grad_norm": 4.401230335235596,
"learning_rate": 1.6458715596330273e-05,
"loss": 0.2271,
"step": 542
},
{
"epoch": 9.526315789473685,
"grad_norm": 5.773757457733154,
"learning_rate": 1.643119266055046e-05,
"loss": 0.3476,
"step": 543
},
{
"epoch": 9.543859649122806,
"grad_norm": 18.14839744567871,
"learning_rate": 1.6403669724770645e-05,
"loss": 0.4136,
"step": 544
},
{
"epoch": 9.56140350877193,
"grad_norm": 12.958759307861328,
"learning_rate": 1.6376146788990824e-05,
"loss": 0.2327,
"step": 545
},
{
"epoch": 9.578947368421053,
"grad_norm": 5.4245734214782715,
"learning_rate": 1.634862385321101e-05,
"loss": 0.2524,
"step": 546
},
{
"epoch": 9.596491228070175,
"grad_norm": 9.484623908996582,
"learning_rate": 1.6321100917431192e-05,
"loss": 0.1879,
"step": 547
},
{
"epoch": 9.614035087719298,
"grad_norm": 5.631322860717773,
"learning_rate": 1.6293577981651378e-05,
"loss": 0.3271,
"step": 548
},
{
"epoch": 9.631578947368421,
"grad_norm": 3.2394251823425293,
"learning_rate": 1.6266055045871557e-05,
"loss": 0.338,
"step": 549
},
{
"epoch": 9.649122807017545,
"grad_norm": 20.733617782592773,
"learning_rate": 1.6238532110091743e-05,
"loss": 0.6739,
"step": 550
},
{
"epoch": 9.666666666666666,
"grad_norm": 5.4612555503845215,
"learning_rate": 1.6211009174311926e-05,
"loss": 0.2177,
"step": 551
},
{
"epoch": 9.68421052631579,
"grad_norm": 13.240394592285156,
"learning_rate": 1.618348623853211e-05,
"loss": 0.5177,
"step": 552
},
{
"epoch": 9.701754385964913,
"grad_norm": 12.377360343933105,
"learning_rate": 1.6155963302752294e-05,
"loss": 0.382,
"step": 553
},
{
"epoch": 9.719298245614034,
"grad_norm": 27.4326114654541,
"learning_rate": 1.6128440366972477e-05,
"loss": 0.2683,
"step": 554
},
{
"epoch": 9.736842105263158,
"grad_norm": 102.49446105957031,
"learning_rate": 1.6100917431192662e-05,
"loss": 0.1924,
"step": 555
},
{
"epoch": 9.75438596491228,
"grad_norm": 28.67510414123535,
"learning_rate": 1.6073394495412845e-05,
"loss": 0.4392,
"step": 556
},
{
"epoch": 9.771929824561404,
"grad_norm": 7.125996112823486,
"learning_rate": 1.6045871559633027e-05,
"loss": 0.4484,
"step": 557
},
{
"epoch": 9.789473684210526,
"grad_norm": 6.322452545166016,
"learning_rate": 1.601834862385321e-05,
"loss": 0.2461,
"step": 558
},
{
"epoch": 9.807017543859649,
"grad_norm": 14.071425437927246,
"learning_rate": 1.5990825688073396e-05,
"loss": 0.3559,
"step": 559
},
{
"epoch": 9.824561403508772,
"grad_norm": 17.456308364868164,
"learning_rate": 1.5963302752293578e-05,
"loss": 0.3976,
"step": 560
},
{
"epoch": 9.842105263157894,
"grad_norm": 12.360490798950195,
"learning_rate": 1.593577981651376e-05,
"loss": 0.6526,
"step": 561
},
{
"epoch": 9.859649122807017,
"grad_norm": 16.147422790527344,
"learning_rate": 1.5908256880733943e-05,
"loss": 0.5249,
"step": 562
},
{
"epoch": 9.87719298245614,
"grad_norm": 3.189755439758301,
"learning_rate": 1.588073394495413e-05,
"loss": 0.216,
"step": 563
},
{
"epoch": 9.894736842105264,
"grad_norm": 3.7975523471832275,
"learning_rate": 1.5853211009174315e-05,
"loss": 0.2947,
"step": 564
},
{
"epoch": 9.912280701754385,
"grad_norm": 9.276900291442871,
"learning_rate": 1.5825688073394494e-05,
"loss": 0.3288,
"step": 565
},
{
"epoch": 9.929824561403509,
"grad_norm": 6.910634994506836,
"learning_rate": 1.579816513761468e-05,
"loss": 0.3269,
"step": 566
},
{
"epoch": 9.947368421052632,
"grad_norm": 15.361982345581055,
"learning_rate": 1.5770642201834862e-05,
"loss": 0.5478,
"step": 567
},
{
"epoch": 9.964912280701755,
"grad_norm": 8.738931655883789,
"learning_rate": 1.5743119266055048e-05,
"loss": 0.3404,
"step": 568
},
{
"epoch": 9.982456140350877,
"grad_norm": 6.036810398101807,
"learning_rate": 1.5715596330275227e-05,
"loss": 0.4008,
"step": 569
},
{
"epoch": 10.0,
"grad_norm": 9.815285682678223,
"learning_rate": 1.5688073394495413e-05,
"loss": 0.2739,
"step": 570
},
{
"epoch": 10.0,
"eval_accuracy": 0.6781609195402298,
"eval_loss": 0.8435059189796448,
"eval_roc_auc": 0.8950196286952365,
"eval_runtime": 10.5896,
"eval_samples_per_second": 24.647,
"eval_steps_per_second": 1.605,
"step": 570
},
{
"epoch": 10.017543859649123,
"grad_norm": 6.685360431671143,
"learning_rate": 1.5660550458715596e-05,
"loss": 0.2748,
"step": 571
},
{
"epoch": 10.035087719298245,
"grad_norm": 5.618095874786377,
"learning_rate": 1.563302752293578e-05,
"loss": 0.2501,
"step": 572
},
{
"epoch": 10.052631578947368,
"grad_norm": 12.535326957702637,
"learning_rate": 1.560550458715596e-05,
"loss": 0.3087,
"step": 573
},
{
"epoch": 10.070175438596491,
"grad_norm": 8.183959007263184,
"learning_rate": 1.5577981651376146e-05,
"loss": 0.2843,
"step": 574
},
{
"epoch": 10.087719298245615,
"grad_norm": 1.980502724647522,
"learning_rate": 1.5550458715596332e-05,
"loss": 0.1799,
"step": 575
},
{
"epoch": 10.105263157894736,
"grad_norm": 13.456244468688965,
"learning_rate": 1.5522935779816515e-05,
"loss": 0.3431,
"step": 576
},
{
"epoch": 10.12280701754386,
"grad_norm": 10.74759578704834,
"learning_rate": 1.5495412844036697e-05,
"loss": 0.1717,
"step": 577
},
{
"epoch": 10.140350877192983,
"grad_norm": 26.72271728515625,
"learning_rate": 1.546788990825688e-05,
"loss": 0.235,
"step": 578
},
{
"epoch": 10.157894736842104,
"grad_norm": 1.6154626607894897,
"learning_rate": 1.5440366972477066e-05,
"loss": 0.1152,
"step": 579
},
{
"epoch": 10.175438596491228,
"grad_norm": 11.566985130310059,
"learning_rate": 1.5412844036697248e-05,
"loss": 0.1322,
"step": 580
},
{
"epoch": 10.192982456140351,
"grad_norm": 5.070148944854736,
"learning_rate": 1.538532110091743e-05,
"loss": 0.2487,
"step": 581
},
{
"epoch": 10.210526315789474,
"grad_norm": 17.369050979614258,
"learning_rate": 1.5357798165137613e-05,
"loss": 0.2958,
"step": 582
},
{
"epoch": 10.228070175438596,
"grad_norm": 8.392364501953125,
"learning_rate": 1.53302752293578e-05,
"loss": 0.3091,
"step": 583
},
{
"epoch": 10.24561403508772,
"grad_norm": 2.859358787536621,
"learning_rate": 1.530275229357798e-05,
"loss": 0.1137,
"step": 584
},
{
"epoch": 10.263157894736842,
"grad_norm": 23.28866195678711,
"learning_rate": 1.5275229357798164e-05,
"loss": 0.2434,
"step": 585
},
{
"epoch": 10.280701754385966,
"grad_norm": 18.40946388244629,
"learning_rate": 1.5247706422018348e-05,
"loss": 0.6074,
"step": 586
},
{
"epoch": 10.298245614035087,
"grad_norm": 12.528573036193848,
"learning_rate": 1.5220183486238532e-05,
"loss": 0.1412,
"step": 587
},
{
"epoch": 10.31578947368421,
"grad_norm": 10.554133415222168,
"learning_rate": 1.5192660550458716e-05,
"loss": 0.4182,
"step": 588
},
{
"epoch": 10.333333333333334,
"grad_norm": 4.216406345367432,
"learning_rate": 1.5165137614678897e-05,
"loss": 0.2399,
"step": 589
},
{
"epoch": 10.350877192982455,
"grad_norm": 9.16854476928711,
"learning_rate": 1.5137614678899083e-05,
"loss": 0.2882,
"step": 590
},
{
"epoch": 10.368421052631579,
"grad_norm": 2.488682746887207,
"learning_rate": 1.5110091743119267e-05,
"loss": 0.0998,
"step": 591
},
{
"epoch": 10.385964912280702,
"grad_norm": 30.423564910888672,
"learning_rate": 1.5082568807339451e-05,
"loss": 0.3046,
"step": 592
},
{
"epoch": 10.403508771929825,
"grad_norm": 20.648027420043945,
"learning_rate": 1.5055045871559632e-05,
"loss": 0.4733,
"step": 593
},
{
"epoch": 10.421052631578947,
"grad_norm": 16.398672103881836,
"learning_rate": 1.5027522935779816e-05,
"loss": 0.2178,
"step": 594
},
{
"epoch": 10.43859649122807,
"grad_norm": 6.614181995391846,
"learning_rate": 1.5e-05,
"loss": 0.2064,
"step": 595
},
{
"epoch": 10.456140350877194,
"grad_norm": 30.626367568969727,
"learning_rate": 1.4972477064220185e-05,
"loss": 0.3677,
"step": 596
},
{
"epoch": 10.473684210526315,
"grad_norm": 3.0452020168304443,
"learning_rate": 1.4944954128440367e-05,
"loss": 0.1095,
"step": 597
},
{
"epoch": 10.491228070175438,
"grad_norm": 13.619409561157227,
"learning_rate": 1.4917431192660551e-05,
"loss": 0.4249,
"step": 598
},
{
"epoch": 10.508771929824562,
"grad_norm": 9.04359245300293,
"learning_rate": 1.4889908256880734e-05,
"loss": 0.2014,
"step": 599
},
{
"epoch": 10.526315789473685,
"grad_norm": 17.97020149230957,
"learning_rate": 1.4862385321100918e-05,
"loss": 0.5679,
"step": 600
},
{
"epoch": 10.543859649122806,
"grad_norm": 7.490416526794434,
"learning_rate": 1.4834862385321102e-05,
"loss": 0.4832,
"step": 601
},
{
"epoch": 10.56140350877193,
"grad_norm": 12.512399673461914,
"learning_rate": 1.4807339449541286e-05,
"loss": 0.3462,
"step": 602
},
{
"epoch": 10.578947368421053,
"grad_norm": 6.70708703994751,
"learning_rate": 1.4779816513761469e-05,
"loss": 0.1513,
"step": 603
},
{
"epoch": 10.596491228070175,
"grad_norm": 8.195464134216309,
"learning_rate": 1.4752293577981653e-05,
"loss": 0.2896,
"step": 604
},
{
"epoch": 10.614035087719298,
"grad_norm": 35.68895721435547,
"learning_rate": 1.4724770642201835e-05,
"loss": 0.2295,
"step": 605
},
{
"epoch": 10.631578947368421,
"grad_norm": 6.922648906707764,
"learning_rate": 1.469724770642202e-05,
"loss": 0.5227,
"step": 606
},
{
"epoch": 10.649122807017545,
"grad_norm": 6.483743190765381,
"learning_rate": 1.4669724770642202e-05,
"loss": 0.22,
"step": 607
},
{
"epoch": 10.666666666666666,
"grad_norm": 5.109847545623779,
"learning_rate": 1.4642201834862386e-05,
"loss": 0.1755,
"step": 608
},
{
"epoch": 10.68421052631579,
"grad_norm": 3.8107926845550537,
"learning_rate": 1.4614678899082569e-05,
"loss": 0.1058,
"step": 609
},
{
"epoch": 10.701754385964913,
"grad_norm": 9.708571434020996,
"learning_rate": 1.4587155963302753e-05,
"loss": 0.3024,
"step": 610
},
{
"epoch": 10.719298245614034,
"grad_norm": 6.205732345581055,
"learning_rate": 1.4559633027522937e-05,
"loss": 0.1044,
"step": 611
},
{
"epoch": 10.736842105263158,
"grad_norm": 9.928633689880371,
"learning_rate": 1.4532110091743121e-05,
"loss": 0.1499,
"step": 612
},
{
"epoch": 10.75438596491228,
"grad_norm": 19.466793060302734,
"learning_rate": 1.4504587155963304e-05,
"loss": 0.5,
"step": 613
},
{
"epoch": 10.771929824561404,
"grad_norm": 11.241743087768555,
"learning_rate": 1.4477064220183488e-05,
"loss": 0.3349,
"step": 614
},
{
"epoch": 10.789473684210526,
"grad_norm": 8.29288101196289,
"learning_rate": 1.444954128440367e-05,
"loss": 0.1464,
"step": 615
},
{
"epoch": 10.807017543859649,
"grad_norm": 31.766202926635742,
"learning_rate": 1.4422018348623854e-05,
"loss": 0.2376,
"step": 616
},
{
"epoch": 10.824561403508772,
"grad_norm": 17.91666030883789,
"learning_rate": 1.4394495412844037e-05,
"loss": 0.3159,
"step": 617
},
{
"epoch": 10.842105263157894,
"grad_norm": 19.936201095581055,
"learning_rate": 1.4366972477064221e-05,
"loss": 0.2585,
"step": 618
},
{
"epoch": 10.859649122807017,
"grad_norm": 72.99993896484375,
"learning_rate": 1.4339449541284404e-05,
"loss": 0.5076,
"step": 619
},
{
"epoch": 10.87719298245614,
"grad_norm": 8.826135635375977,
"learning_rate": 1.4311926605504588e-05,
"loss": 0.2219,
"step": 620
},
{
"epoch": 10.894736842105264,
"grad_norm": 26.077892303466797,
"learning_rate": 1.428440366972477e-05,
"loss": 0.4843,
"step": 621
},
{
"epoch": 10.912280701754385,
"grad_norm": 20.25221061706543,
"learning_rate": 1.4256880733944956e-05,
"loss": 0.5897,
"step": 622
},
{
"epoch": 10.929824561403509,
"grad_norm": 31.701025009155273,
"learning_rate": 1.4229357798165138e-05,
"loss": 0.3267,
"step": 623
},
{
"epoch": 10.947368421052632,
"grad_norm": 8.264379501342773,
"learning_rate": 1.4201834862385323e-05,
"loss": 0.2787,
"step": 624
},
{
"epoch": 10.964912280701755,
"grad_norm": 48.150184631347656,
"learning_rate": 1.4174311926605505e-05,
"loss": 0.4824,
"step": 625
},
{
"epoch": 10.982456140350877,
"grad_norm": 10.574882507324219,
"learning_rate": 1.414678899082569e-05,
"loss": 0.3676,
"step": 626
},
{
"epoch": 11.0,
"grad_norm": 24.72840118408203,
"learning_rate": 1.4119266055045872e-05,
"loss": 0.581,
"step": 627
},
{
"epoch": 11.0,
"eval_accuracy": 0.7432950191570882,
"eval_loss": 0.8437221646308899,
"eval_roc_auc": 0.9043893455592809,
"eval_runtime": 9.7269,
"eval_samples_per_second": 26.833,
"eval_steps_per_second": 1.748,
"step": 627
},
{
"epoch": 11.017543859649123,
"grad_norm": 6.984513759613037,
"learning_rate": 1.4091743119266056e-05,
"loss": 0.2926,
"step": 628
},
{
"epoch": 11.035087719298245,
"grad_norm": 31.709365844726562,
"learning_rate": 1.4064220183486238e-05,
"loss": 0.1972,
"step": 629
},
{
"epoch": 11.052631578947368,
"grad_norm": 4.533780574798584,
"learning_rate": 1.4036697247706423e-05,
"loss": 0.1809,
"step": 630
},
{
"epoch": 11.070175438596491,
"grad_norm": 8.72680377960205,
"learning_rate": 1.4009174311926605e-05,
"loss": 0.2892,
"step": 631
},
{
"epoch": 11.087719298245615,
"grad_norm": 1.0965291261672974,
"learning_rate": 1.3981651376146791e-05,
"loss": 0.1061,
"step": 632
},
{
"epoch": 11.105263157894736,
"grad_norm": 7.810534477233887,
"learning_rate": 1.3954128440366973e-05,
"loss": 0.2574,
"step": 633
},
{
"epoch": 11.12280701754386,
"grad_norm": 5.813023567199707,
"learning_rate": 1.3926605504587158e-05,
"loss": 0.1923,
"step": 634
},
{
"epoch": 11.140350877192983,
"grad_norm": 27.594276428222656,
"learning_rate": 1.389908256880734e-05,
"loss": 0.3515,
"step": 635
},
{
"epoch": 11.157894736842104,
"grad_norm": 11.405938148498535,
"learning_rate": 1.3871559633027524e-05,
"loss": 0.2625,
"step": 636
},
{
"epoch": 11.175438596491228,
"grad_norm": 4.616410732269287,
"learning_rate": 1.3844036697247707e-05,
"loss": 0.1946,
"step": 637
},
{
"epoch": 11.192982456140351,
"grad_norm": 5.800034046173096,
"learning_rate": 1.3816513761467891e-05,
"loss": 0.2544,
"step": 638
},
{
"epoch": 11.210526315789474,
"grad_norm": 9.26241397857666,
"learning_rate": 1.3788990825688073e-05,
"loss": 0.416,
"step": 639
},
{
"epoch": 11.228070175438596,
"grad_norm": 10.779268264770508,
"learning_rate": 1.3761467889908258e-05,
"loss": 0.2791,
"step": 640
},
{
"epoch": 11.24561403508772,
"grad_norm": 55.15119552612305,
"learning_rate": 1.373394495412844e-05,
"loss": 0.3073,
"step": 641
},
{
"epoch": 11.263157894736842,
"grad_norm": 5.921196460723877,
"learning_rate": 1.3706422018348624e-05,
"loss": 0.1119,
"step": 642
},
{
"epoch": 11.280701754385966,
"grad_norm": 14.839341163635254,
"learning_rate": 1.3678899082568808e-05,
"loss": 0.3157,
"step": 643
},
{
"epoch": 11.298245614035087,
"grad_norm": 6.483922481536865,
"learning_rate": 1.3651376146788992e-05,
"loss": 0.1668,
"step": 644
},
{
"epoch": 11.31578947368421,
"grad_norm": 2.650290012359619,
"learning_rate": 1.3623853211009175e-05,
"loss": 0.1149,
"step": 645
},
{
"epoch": 11.333333333333334,
"grad_norm": 2.6592800617218018,
"learning_rate": 1.3596330275229359e-05,
"loss": 0.0868,
"step": 646
},
{
"epoch": 11.350877192982455,
"grad_norm": 22.124906539916992,
"learning_rate": 1.3568807339449542e-05,
"loss": 0.4327,
"step": 647
},
{
"epoch": 11.368421052631579,
"grad_norm": 20.250707626342773,
"learning_rate": 1.3541284403669726e-05,
"loss": 0.3964,
"step": 648
},
{
"epoch": 11.385964912280702,
"grad_norm": 5.71112585067749,
"learning_rate": 1.3513761467889908e-05,
"loss": 0.1463,
"step": 649
},
{
"epoch": 11.403508771929825,
"grad_norm": 17.264068603515625,
"learning_rate": 1.3486238532110092e-05,
"loss": 0.5867,
"step": 650
},
{
"epoch": 11.421052631578947,
"grad_norm": 21.228487014770508,
"learning_rate": 1.3458715596330275e-05,
"loss": 0.2334,
"step": 651
},
{
"epoch": 11.43859649122807,
"grad_norm": 8.839598655700684,
"learning_rate": 1.3431192660550459e-05,
"loss": 0.2421,
"step": 652
},
{
"epoch": 11.456140350877194,
"grad_norm": 18.84731101989746,
"learning_rate": 1.3403669724770643e-05,
"loss": 0.3786,
"step": 653
},
{
"epoch": 11.473684210526315,
"grad_norm": 32.75419998168945,
"learning_rate": 1.3376146788990827e-05,
"loss": 0.276,
"step": 654
},
{
"epoch": 11.491228070175438,
"grad_norm": 10.089400291442871,
"learning_rate": 1.334862385321101e-05,
"loss": 0.2987,
"step": 655
},
{
"epoch": 11.508771929824562,
"grad_norm": 13.225504875183105,
"learning_rate": 1.3321100917431194e-05,
"loss": 0.1948,
"step": 656
},
{
"epoch": 11.526315789473685,
"grad_norm": 17.269506454467773,
"learning_rate": 1.3293577981651377e-05,
"loss": 0.1486,
"step": 657
},
{
"epoch": 11.543859649122806,
"grad_norm": 5.897825241088867,
"learning_rate": 1.326605504587156e-05,
"loss": 0.2797,
"step": 658
},
{
"epoch": 11.56140350877193,
"grad_norm": 4.05665397644043,
"learning_rate": 1.3238532110091743e-05,
"loss": 0.1859,
"step": 659
},
{
"epoch": 11.578947368421053,
"grad_norm": 12.295641899108887,
"learning_rate": 1.3211009174311927e-05,
"loss": 0.3204,
"step": 660
},
{
"epoch": 11.596491228070175,
"grad_norm": 15.206548690795898,
"learning_rate": 1.318348623853211e-05,
"loss": 0.5305,
"step": 661
},
{
"epoch": 11.614035087719298,
"grad_norm": 19.5164852142334,
"learning_rate": 1.3155963302752294e-05,
"loss": 0.316,
"step": 662
},
{
"epoch": 11.631578947368421,
"grad_norm": 4.328670501708984,
"learning_rate": 1.3128440366972476e-05,
"loss": 0.1426,
"step": 663
},
{
"epoch": 11.649122807017545,
"grad_norm": 19.915075302124023,
"learning_rate": 1.3100917431192662e-05,
"loss": 0.5241,
"step": 664
},
{
"epoch": 11.666666666666666,
"grad_norm": 9.99632740020752,
"learning_rate": 1.3073394495412845e-05,
"loss": 0.3287,
"step": 665
},
{
"epoch": 11.68421052631579,
"grad_norm": 8.295022964477539,
"learning_rate": 1.3045871559633029e-05,
"loss": 0.1386,
"step": 666
},
{
"epoch": 11.701754385964913,
"grad_norm": 11.660796165466309,
"learning_rate": 1.3018348623853211e-05,
"loss": 0.3504,
"step": 667
},
{
"epoch": 11.719298245614034,
"grad_norm": 19.916318893432617,
"learning_rate": 1.2990825688073396e-05,
"loss": 0.4487,
"step": 668
},
{
"epoch": 11.736842105263158,
"grad_norm": 6.137487411499023,
"learning_rate": 1.2963302752293578e-05,
"loss": 0.248,
"step": 669
},
{
"epoch": 11.75438596491228,
"grad_norm": 16.032516479492188,
"learning_rate": 1.2935779816513762e-05,
"loss": 0.5295,
"step": 670
},
{
"epoch": 11.771929824561404,
"grad_norm": 0.6119496822357178,
"learning_rate": 1.2908256880733945e-05,
"loss": 0.0604,
"step": 671
},
{
"epoch": 11.789473684210526,
"grad_norm": 5.562976837158203,
"learning_rate": 1.2880733944954129e-05,
"loss": 0.2119,
"step": 672
},
{
"epoch": 11.807017543859649,
"grad_norm": 4.243768215179443,
"learning_rate": 1.2853211009174311e-05,
"loss": 0.2239,
"step": 673
},
{
"epoch": 11.824561403508772,
"grad_norm": 20.970874786376953,
"learning_rate": 1.2825688073394497e-05,
"loss": 0.3221,
"step": 674
},
{
"epoch": 11.842105263157894,
"grad_norm": 2.212523937225342,
"learning_rate": 1.279816513761468e-05,
"loss": 0.0897,
"step": 675
},
{
"epoch": 11.859649122807017,
"grad_norm": 28.7296199798584,
"learning_rate": 1.2770642201834864e-05,
"loss": 0.362,
"step": 676
},
{
"epoch": 11.87719298245614,
"grad_norm": 13.14705753326416,
"learning_rate": 1.2743119266055046e-05,
"loss": 0.3464,
"step": 677
},
{
"epoch": 11.894736842105264,
"grad_norm": 9.211715698242188,
"learning_rate": 1.271559633027523e-05,
"loss": 0.3259,
"step": 678
},
{
"epoch": 11.912280701754385,
"grad_norm": 6.302637100219727,
"learning_rate": 1.2688073394495413e-05,
"loss": 0.1719,
"step": 679
},
{
"epoch": 11.929824561403509,
"grad_norm": 0.7406455874443054,
"learning_rate": 1.2660550458715597e-05,
"loss": 0.0792,
"step": 680
},
{
"epoch": 11.947368421052632,
"grad_norm": 15.276371002197266,
"learning_rate": 1.263302752293578e-05,
"loss": 0.1284,
"step": 681
},
{
"epoch": 11.964912280701755,
"grad_norm": 113.07421875,
"learning_rate": 1.2605504587155964e-05,
"loss": 0.4433,
"step": 682
},
{
"epoch": 11.982456140350877,
"grad_norm": 4.4429931640625,
"learning_rate": 1.2577981651376146e-05,
"loss": 0.1152,
"step": 683
},
{
"epoch": 12.0,
"grad_norm": 18.602813720703125,
"learning_rate": 1.255045871559633e-05,
"loss": 0.3169,
"step": 684
},
{
"epoch": 12.0,
"eval_accuracy": 0.7049808429118773,
"eval_loss": 0.9107945561408997,
"eval_roc_auc": 0.8936780938450124,
"eval_runtime": 8.7978,
"eval_samples_per_second": 29.666,
"eval_steps_per_second": 1.932,
"step": 684
},
{ |
|
"epoch": 12.017543859649123, |
|
"grad_norm": 0.6636399626731873, |
|
"learning_rate": 1.2522935779816515e-05, |
|
"loss": 0.0605, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 12.035087719298245, |
|
"grad_norm": 18.400907516479492, |
|
"learning_rate": 1.2495412844036699e-05, |
|
"loss": 0.1937, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 12.052631578947368, |
|
"grad_norm": 8.080551147460938, |
|
"learning_rate": 1.2467889908256881e-05, |
|
"loss": 0.3127, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 12.070175438596491, |
|
"grad_norm": 8.157322883605957, |
|
"learning_rate": 1.2440366972477065e-05, |
|
"loss": 0.1947, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 12.087719298245615, |
|
"grad_norm": 6.911866188049316, |
|
"learning_rate": 1.2412844036697248e-05, |
|
"loss": 0.2021, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 12.105263157894736, |
|
"grad_norm": 1.3366920948028564, |
|
"learning_rate": 1.2385321100917432e-05, |
|
"loss": 0.0808, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 12.12280701754386, |
|
"grad_norm": 2.7027173042297363, |
|
"learning_rate": 1.2357798165137615e-05, |
|
"loss": 0.1324, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 12.140350877192983, |
|
"grad_norm": 6.3235697746276855, |
|
"learning_rate": 1.2330275229357799e-05, |
|
"loss": 0.1185, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 12.157894736842104, |
|
"grad_norm": 9.077238082885742, |
|
"learning_rate": 1.2302752293577981e-05, |
|
"loss": 0.1614, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 12.175438596491228, |
|
"grad_norm": 2.126521348953247, |
|
"learning_rate": 1.2275229357798165e-05, |
|
"loss": 0.181, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 12.192982456140351, |
|
"grad_norm": 7.8555097579956055, |
|
"learning_rate": 1.224770642201835e-05, |
|
"loss": 0.1929, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 12.210526315789474, |
|
"grad_norm": 10.955324172973633, |
|
"learning_rate": 1.2220183486238534e-05, |
|
"loss": 0.2502, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 12.228070175438596, |
|
"grad_norm": 24.433555603027344, |
|
"learning_rate": 1.2192660550458716e-05, |
|
"loss": 0.5622, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 12.24561403508772, |
|
"grad_norm": 15.262462615966797, |
|
"learning_rate": 1.21651376146789e-05, |
|
"loss": 0.3855, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 12.263157894736842, |
|
"grad_norm": 4.2611918449401855, |
|
"learning_rate": 1.2137614678899083e-05, |
|
"loss": 0.1054, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 12.280701754385966, |
|
"grad_norm": 3.4223506450653076, |
|
"learning_rate": 1.2110091743119267e-05, |
|
"loss": 0.2222, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 12.298245614035087, |
|
"grad_norm": 5.349019527435303, |
|
"learning_rate": 1.208256880733945e-05, |
|
"loss": 0.2028, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 12.31578947368421, |
|
"grad_norm": 15.387149810791016, |
|
"learning_rate": 1.2055045871559634e-05, |
|
"loss": 0.1211, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 12.333333333333334, |
|
"grad_norm": 1.1758196353912354, |
|
"learning_rate": 1.2027522935779816e-05, |
|
"loss": 0.0588, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 12.350877192982455, |
|
"grad_norm": 7.5386176109313965, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.1079, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 12.368421052631579, |
|
"grad_norm": 3.1408779621124268, |
|
"learning_rate": 1.1972477064220183e-05, |
|
"loss": 0.1088, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 12.385964912280702, |
|
"grad_norm": 2.542567253112793, |
|
"learning_rate": 1.1944954128440369e-05, |
|
"loss": 0.2409, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 12.403508771929825, |
|
"grad_norm": 5.91030216217041, |
|
"learning_rate": 1.1917431192660551e-05, |
|
"loss": 0.1228, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 12.421052631578947, |
|
"grad_norm": 144.98297119140625, |
|
"learning_rate": 1.1889908256880735e-05, |
|
"loss": 0.4235, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 12.43859649122807, |
|
"grad_norm": 0.39158356189727783, |
|
"learning_rate": 1.1862385321100918e-05, |
|
"loss": 0.0468, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 12.456140350877194, |
|
"grad_norm": 16.603181838989258, |
|
"learning_rate": 1.1834862385321102e-05, |
|
"loss": 0.2334, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 12.473684210526315, |
|
"grad_norm": 19.475147247314453, |
|
"learning_rate": 1.1807339449541284e-05, |
|
"loss": 0.3784, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 12.491228070175438, |
|
"grad_norm": 10.970072746276855, |
|
"learning_rate": 1.1779816513761469e-05, |
|
"loss": 0.3142, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 12.508771929824562, |
|
"grad_norm": 591.7606811523438, |
|
"learning_rate": 1.1752293577981651e-05, |
|
"loss": 0.2733, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 12.526315789473685, |
|
"grad_norm": 16.299694061279297, |
|
"learning_rate": 1.1724770642201835e-05, |
|
"loss": 0.3991, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 12.543859649122806, |
|
"grad_norm": 6.645303249359131, |
|
"learning_rate": 1.1697247706422018e-05, |
|
"loss": 0.1417, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 12.56140350877193, |
|
"grad_norm": 11.64770221710205, |
|
"learning_rate": 1.1669724770642204e-05, |
|
"loss": 0.3619, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 12.578947368421053, |
|
"grad_norm": 4.1909685134887695, |
|
"learning_rate": 1.1642201834862386e-05, |
|
"loss": 0.1734, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 12.596491228070175, |
|
"grad_norm": 21.624954223632812, |
|
"learning_rate": 1.161467889908257e-05, |
|
"loss": 0.3642, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 12.614035087719298, |
|
"grad_norm": 14.657827377319336, |
|
"learning_rate": 1.1587155963302753e-05, |
|
"loss": 0.1945, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 12.631578947368421, |
|
"grad_norm": 13.738436698913574, |
|
"learning_rate": 1.1559633027522937e-05, |
|
"loss": 0.179, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 12.649122807017545, |
|
"grad_norm": 14.478985786437988, |
|
"learning_rate": 1.153211009174312e-05, |
|
"loss": 0.3448, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 12.666666666666666, |
|
"grad_norm": 9.730142593383789, |
|
"learning_rate": 1.1504587155963303e-05, |
|
"loss": 0.1897, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 12.68421052631579, |
|
"grad_norm": 163.12696838378906, |
|
"learning_rate": 1.1477064220183486e-05, |
|
"loss": 0.2439, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 12.701754385964913, |
|
"grad_norm": 9.576773643493652, |
|
"learning_rate": 1.144954128440367e-05, |
|
"loss": 0.2353, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 12.719298245614034, |
|
"grad_norm": 10.299495697021484, |
|
"learning_rate": 1.1422018348623853e-05, |
|
"loss": 0.242, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 12.736842105263158, |
|
"grad_norm": 33.32149887084961, |
|
"learning_rate": 1.1394495412844037e-05, |
|
"loss": 0.7456, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 12.75438596491228, |
|
"grad_norm": 6.180983066558838, |
|
"learning_rate": 1.1366972477064221e-05, |
|
"loss": 0.1535, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 12.771929824561404, |
|
"grad_norm": 7.370275020599365, |
|
"learning_rate": 1.1339449541284405e-05, |
|
"loss": 0.1143, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 12.789473684210526, |
|
"grad_norm": 14.886811256408691, |
|
"learning_rate": 1.1311926605504588e-05, |
|
"loss": 0.2212, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 12.807017543859649, |
|
"grad_norm": 14.88480281829834, |
|
"learning_rate": 1.1284403669724772e-05, |
|
"loss": 0.2585, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 12.824561403508772, |
|
"grad_norm": 28.494150161743164, |
|
"learning_rate": 1.1256880733944954e-05, |
|
"loss": 0.3074, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 12.842105263157894, |
|
"grad_norm": 5.734357833862305, |
|
"learning_rate": 1.1229357798165138e-05, |
|
"loss": 0.2147, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 12.859649122807017, |
|
"grad_norm": 8.859356880187988, |
|
"learning_rate": 1.1201834862385321e-05, |
|
"loss": 0.3344, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 12.87719298245614, |
|
"grad_norm": 6.154085159301758, |
|
"learning_rate": 1.1174311926605505e-05, |
|
"loss": 0.1326, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 12.894736842105264, |
|
"grad_norm": 86.77043151855469, |
|
"learning_rate": 1.1146788990825688e-05, |
|
"loss": 0.2653, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 12.912280701754385, |
|
"grad_norm": 2.9679932594299316, |
|
"learning_rate": 1.1119266055045872e-05, |
|
"loss": 0.1537, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 12.929824561403509, |
|
"grad_norm": 6.805034160614014, |
|
"learning_rate": 1.1091743119266056e-05, |
|
"loss": 0.1669, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 12.947368421052632, |
|
"grad_norm": 16.86161231994629, |
|
"learning_rate": 1.106422018348624e-05, |
|
"loss": 0.261, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 12.964912280701755, |
|
"grad_norm": 4.945237636566162, |
|
"learning_rate": 1.1036697247706423e-05, |
|
"loss": 0.2191, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 12.982456140350877, |
|
"grad_norm": 70.42694091796875, |
|
"learning_rate": 1.1009174311926607e-05, |
|
"loss": 0.5588, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 33.98917007446289, |
|
"learning_rate": 1.098165137614679e-05, |
|
"loss": 0.5302, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.7049808429118773, |
|
"eval_loss": 0.9366002678871155, |
|
"eval_roc_auc": 0.8886698826983219, |
|
"eval_runtime": 8.6062, |
|
"eval_samples_per_second": 30.327, |
|
"eval_steps_per_second": 1.975, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 13.017543859649123, |
|
"grad_norm": 16.30794334411621, |
|
"learning_rate": 1.0954128440366973e-05, |
|
"loss": 0.1746, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 13.035087719298245, |
|
"grad_norm": 19.023889541625977, |
|
"learning_rate": 1.0926605504587156e-05, |
|
"loss": 0.3532, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 13.052631578947368, |
|
"grad_norm": 58.92171859741211, |
|
"learning_rate": 1.089908256880734e-05, |
|
"loss": 0.2303, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 13.070175438596491, |
|
"grad_norm": 2.3900561332702637, |
|
"learning_rate": 1.0871559633027522e-05, |
|
"loss": 0.0868, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 13.087719298245615, |
|
"grad_norm": 9.594512939453125, |
|
"learning_rate": 1.0844036697247707e-05, |
|
"loss": 0.2155, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 13.105263157894736, |
|
"grad_norm": 6.165768623352051, |
|
"learning_rate": 1.081651376146789e-05, |
|
"loss": 0.1699, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 13.12280701754386, |
|
"grad_norm": 11.738020896911621, |
|
"learning_rate": 1.0788990825688075e-05, |
|
"loss": 0.3146, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 13.140350877192983, |
|
"grad_norm": 245.3873748779297, |
|
"learning_rate": 1.0761467889908257e-05, |
|
"loss": 0.3095, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 13.157894736842104, |
|
"grad_norm": 6.468425750732422, |
|
"learning_rate": 1.0733944954128442e-05, |
|
"loss": 0.1565, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 13.175438596491228, |
|
"grad_norm": 51.99348449707031, |
|
"learning_rate": 1.0706422018348624e-05, |
|
"loss": 0.5008, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 13.192982456140351, |
|
"grad_norm": 2.463118076324463, |
|
"learning_rate": 1.0678899082568808e-05, |
|
"loss": 0.1401, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 13.210526315789474, |
|
"grad_norm": 10.347915649414062, |
|
"learning_rate": 1.065137614678899e-05, |
|
"loss": 0.2108, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 13.228070175438596, |
|
"grad_norm": 14.886469841003418, |
|
"learning_rate": 1.0623853211009175e-05, |
|
"loss": 0.1522, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 13.24561403508772, |
|
"grad_norm": 4.025850296020508, |
|
"learning_rate": 1.0596330275229357e-05, |
|
"loss": 0.0933, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 13.263157894736842, |
|
"grad_norm": 8.510875701904297, |
|
"learning_rate": 1.0568807339449542e-05, |
|
"loss": 0.4154, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 13.280701754385966, |
|
"grad_norm": 3.559708595275879, |
|
"learning_rate": 1.0541284403669724e-05, |
|
"loss": 0.067, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 13.298245614035087, |
|
"grad_norm": 5.087008476257324, |
|
"learning_rate": 1.051376146788991e-05, |
|
"loss": 0.1601, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 13.31578947368421, |
|
"grad_norm": 8.079976081848145, |
|
"learning_rate": 1.0486238532110092e-05, |
|
"loss": 0.1577, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 13.333333333333334, |
|
"grad_norm": 17.081035614013672, |
|
"learning_rate": 1.0458715596330277e-05, |
|
"loss": 0.2366, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 13.350877192982455, |
|
"grad_norm": 1.349025011062622, |
|
"learning_rate": 1.0431192660550459e-05, |
|
"loss": 0.0553, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 13.368421052631579, |
|
"grad_norm": 12.248740196228027, |
|
"learning_rate": 1.0403669724770643e-05, |
|
"loss": 0.1111, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 13.385964912280702, |
|
"grad_norm": 5.697534561157227, |
|
"learning_rate": 1.0376146788990826e-05, |
|
"loss": 0.1188, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 13.403508771929825, |
|
"grad_norm": 9.995718955993652, |
|
"learning_rate": 1.034862385321101e-05, |
|
"loss": 0.4405, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 13.421052631578947, |
|
"grad_norm": 3.141644239425659, |
|
"learning_rate": 1.0321100917431192e-05, |
|
"loss": 0.1329, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 13.43859649122807, |
|
"grad_norm": 31.985647201538086, |
|
"learning_rate": 1.0293577981651376e-05, |
|
"loss": 0.1126, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 13.456140350877194, |
|
"grad_norm": 9.96912956237793, |
|
"learning_rate": 1.0266055045871559e-05, |
|
"loss": 0.51, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 13.473684210526315, |
|
"grad_norm": 12.615479469299316, |
|
"learning_rate": 1.0238532110091745e-05, |
|
"loss": 0.286, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 13.491228070175438, |
|
"grad_norm": 6.572673797607422, |
|
"learning_rate": 1.0211009174311927e-05, |
|
"loss": 0.1562, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 13.508771929824562, |
|
"grad_norm": 1.261430025100708, |
|
"learning_rate": 1.0183486238532111e-05, |
|
"loss": 0.078, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 13.526315789473685, |
|
"grad_norm": 11.288647651672363, |
|
"learning_rate": 1.0155963302752294e-05, |
|
"loss": 0.2406, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 13.543859649122806, |
|
"grad_norm": 6.564558506011963, |
|
"learning_rate": 1.0128440366972478e-05, |
|
"loss": 0.2341, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 13.56140350877193, |
|
"grad_norm": 71.70243072509766, |
|
"learning_rate": 1.010091743119266e-05, |
|
"loss": 0.5161, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 13.578947368421053, |
|
"grad_norm": 1.2909406423568726, |
|
"learning_rate": 1.0073394495412845e-05, |
|
"loss": 0.0777, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 13.596491228070175, |
|
"grad_norm": 9.205297470092773, |
|
"learning_rate": 1.0045871559633027e-05, |
|
"loss": 0.2522, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 13.614035087719298, |
|
"grad_norm": 19.708072662353516, |
|
"learning_rate": 1.0018348623853211e-05, |
|
"loss": 0.4493, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 13.631578947368421, |
|
"grad_norm": 5.593297958374023, |
|
"learning_rate": 9.990825688073394e-06, |
|
"loss": 0.1821, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 13.649122807017545, |
|
"grad_norm": 0.7555585503578186, |
|
"learning_rate": 9.963302752293578e-06, |
|
"loss": 0.066, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 13.666666666666666, |
|
"grad_norm": 9.295495986938477, |
|
"learning_rate": 9.935779816513762e-06, |
|
"loss": 0.3145, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 13.68421052631579, |
|
"grad_norm": 12.652939796447754, |
|
"learning_rate": 9.908256880733946e-06, |
|
"loss": 0.2695, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 13.701754385964913, |
|
"grad_norm": 0.517791211605072, |
|
"learning_rate": 9.880733944954129e-06, |
|
"loss": 0.047, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 13.719298245614034, |
|
"grad_norm": 2.591848850250244, |
|
"learning_rate": 9.853211009174313e-06, |
|
"loss": 0.0483, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 13.736842105263158, |
|
"grad_norm": 13.37531566619873, |
|
"learning_rate": 9.825688073394495e-06, |
|
"loss": 0.3604, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 13.75438596491228, |
|
"grad_norm": 9.913411140441895, |
|
"learning_rate": 9.79816513761468e-06, |
|
"loss": 0.1038, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 13.771929824561404, |
|
"grad_norm": 84.06814575195312, |
|
"learning_rate": 9.770642201834862e-06, |
|
"loss": 0.2635, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 13.789473684210526, |
|
"grad_norm": 12.877340316772461, |
|
"learning_rate": 9.743119266055046e-06, |
|
"loss": 0.318, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 13.807017543859649, |
|
"grad_norm": 0.8507923483848572, |
|
"learning_rate": 9.715596330275229e-06, |
|
"loss": 0.049, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 13.824561403508772, |
|
"grad_norm": 47.35422134399414, |
|
"learning_rate": 9.688073394495413e-06, |
|
"loss": 0.4669, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 13.842105263157894, |
|
"grad_norm": 4.622620582580566, |
|
"learning_rate": 9.660550458715597e-06, |
|
"loss": 0.1298, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 13.859649122807017, |
|
"grad_norm": 12.158206939697266, |
|
"learning_rate": 9.633027522935781e-06, |
|
"loss": 0.3268, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 13.87719298245614, |
|
"grad_norm": 6.32510232925415, |
|
"learning_rate": 9.605504587155964e-06, |
|
"loss": 0.1676, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 13.894736842105264, |
|
"grad_norm": 7.81455135345459, |
|
"learning_rate": 9.577981651376148e-06, |
|
"loss": 0.327, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 13.912280701754385, |
|
"grad_norm": 6.204614639282227, |
|
"learning_rate": 9.55045871559633e-06, |
|
"loss": 0.2074, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 13.929824561403509, |
|
"grad_norm": 3.006291389465332, |
|
"learning_rate": 9.522935779816515e-06, |
|
"loss": 0.1931, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 13.947368421052632, |
|
"grad_norm": 27.143718719482422, |
|
"learning_rate": 9.495412844036697e-06, |
|
"loss": 0.2277, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 13.964912280701755, |
|
"grad_norm": 14.346508026123047, |
|
"learning_rate": 9.467889908256881e-06, |
|
"loss": 0.3415, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 13.982456140350877, |
|
"grad_norm": 23.73762321472168, |
|
"learning_rate": 9.440366972477064e-06, |
|
"loss": 0.3348, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 340.23724365234375, |
|
"learning_rate": 9.412844036697248e-06, |
|
"loss": 0.1826, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.685823754789272, |
|
"eval_loss": 1.013038992881775, |
|
"eval_roc_auc": 0.8966899641922588, |
|
"eval_runtime": 8.463, |
|
"eval_samples_per_second": 30.84, |
|
"eval_steps_per_second": 2.009, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 14.017543859649123, |
|
"grad_norm": 9.037409782409668, |
|
"learning_rate": 9.38532110091743e-06, |
|
"loss": 0.2681, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 14.035087719298245, |
|
"grad_norm": 14.613738059997559, |
|
"learning_rate": 9.357798165137616e-06, |
|
"loss": 0.2714, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 14.052631578947368, |
|
"grad_norm": 18.193498611450195, |
|
"learning_rate": 9.330275229357799e-06, |
|
"loss": 0.2295, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 14.070175438596491, |
|
"grad_norm": 19.435922622680664, |
|
"learning_rate": 9.302752293577983e-06, |
|
"loss": 0.568, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 14.087719298245615, |
|
"grad_norm": 5.796917915344238, |
|
"learning_rate": 9.275229357798165e-06, |
|
"loss": 0.1761, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 14.105263157894736, |
|
"grad_norm": 16.154891967773438, |
|
"learning_rate": 9.24770642201835e-06, |
|
"loss": 0.2004, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 14.12280701754386, |
|
"grad_norm": 50.181121826171875, |
|
"learning_rate": 9.220183486238532e-06, |
|
"loss": 0.286, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 14.140350877192983, |
|
"grad_norm": 9.757275581359863, |
|
"learning_rate": 9.192660550458716e-06, |
|
"loss": 0.1081, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 14.157894736842104, |
|
"grad_norm": 10.20756721496582, |
|
"learning_rate": 9.165137614678899e-06, |
|
"loss": 0.1053, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 14.175438596491228, |
|
"grad_norm": 147.6493682861328, |
|
"learning_rate": 9.137614678899083e-06, |
|
"loss": 0.3689, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 14.192982456140351, |
|
"grad_norm": 5.954428195953369, |
|
"learning_rate": 9.110091743119265e-06, |
|
"loss": 0.1967, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 14.210526315789474, |
|
"grad_norm": 0.8736594915390015, |
|
"learning_rate": 9.082568807339451e-06, |
|
"loss": 0.0672, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 14.228070175438596, |
|
"grad_norm": 19.074871063232422, |
|
"learning_rate": 9.055045871559634e-06, |
|
"loss": 0.2219, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 14.24561403508772, |
|
"grad_norm": 24.449487686157227, |
|
"learning_rate": 9.027522935779818e-06, |
|
"loss": 0.3973, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 14.263157894736842, |
|
"grad_norm": 15.369214057922363, |
|
"learning_rate": 9e-06, |
|
"loss": 0.3232, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 14.280701754385966, |
|
"grad_norm": 9.815387725830078, |
|
"learning_rate": 8.972477064220184e-06, |
|
"loss": 0.2076, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 14.298245614035087, |
|
"grad_norm": 4.816423416137695, |
|
"learning_rate": 8.944954128440367e-06, |
|
"loss": 0.0842, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 14.31578947368421, |
|
"grad_norm": 18.863229751586914, |
|
"learning_rate": 8.917431192660551e-06, |
|
"loss": 0.1569, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 14.333333333333334, |
|
"grad_norm": 11.182153701782227, |
|
"learning_rate": 8.889908256880734e-06, |
|
"loss": 0.1107, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 14.350877192982455, |
|
"grad_norm": 1.1255884170532227, |
|
"learning_rate": 8.862385321100918e-06, |
|
"loss": 0.0429, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 14.368421052631579, |
|
"grad_norm": 8.27351188659668, |
|
"learning_rate": 8.8348623853211e-06, |
|
"loss": 0.0553, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 14.385964912280702, |
|
"grad_norm": 2.460218667984009, |
|
"learning_rate": 8.807339449541284e-06, |
|
"loss": 0.0525, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 14.403508771929825, |
|
"grad_norm": 9.96479606628418, |
|
"learning_rate": 8.779816513761469e-06, |
|
"loss": 0.2107, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 14.421052631578947, |
|
"grad_norm": 19.677907943725586, |
|
"learning_rate": 8.752293577981653e-06, |
|
"loss": 0.2648, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 14.43859649122807, |
|
"grad_norm": 10.419135093688965, |
|
"learning_rate": 8.724770642201835e-06, |
|
"loss": 0.1504, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 14.456140350877194, |
|
"grad_norm": 15.515992164611816, |
|
"learning_rate": 8.69724770642202e-06, |
|
"loss": 0.1876, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 14.473684210526315, |
|
"grad_norm": 4.553437232971191, |
|
"learning_rate": 8.669724770642202e-06, |
|
"loss": 0.0726, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 14.491228070175438, |
|
"grad_norm": 6.339976787567139, |
|
"learning_rate": 8.642201834862386e-06, |
|
"loss": 0.2749, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 14.508771929824562, |
|
"grad_norm": 23.903335571289062, |
|
"learning_rate": 8.614678899082568e-06, |
|
"loss": 0.1898, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 14.526315789473685, |
|
"grad_norm": 3.4404420852661133, |
|
"learning_rate": 8.587155963302753e-06, |
|
"loss": 0.145, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 14.543859649122806, |
|
"grad_norm": 0.5167752504348755, |
|
"learning_rate": 8.559633027522935e-06, |
|
"loss": 0.0594, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 14.56140350877193, |
|
"grad_norm": 13.959207534790039, |
|
"learning_rate": 8.53211009174312e-06, |
|
"loss": 0.3462, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 14.578947368421053, |
|
"grad_norm": 5.944933891296387, |
|
"learning_rate": 8.504587155963303e-06, |
|
"loss": 0.0515, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 14.596491228070175, |
|
"grad_norm": 8.689725875854492, |
|
"learning_rate": 8.477064220183488e-06, |
|
"loss": 0.1593, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 14.614035087719298, |
|
"grad_norm": 26.8358097076416, |
|
"learning_rate": 8.44954128440367e-06, |
|
"loss": 0.1423, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 14.631578947368421, |
|
"grad_norm": 3.620288372039795, |
|
"learning_rate": 8.422018348623854e-06, |
|
"loss": 0.0726, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 14.649122807017545, |
|
"grad_norm": 6.725648880004883, |
|
"learning_rate": 8.394495412844037e-06, |
|
"loss": 0.0989, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 14.666666666666666, |
|
"grad_norm": 6.595190048217773, |
|
"learning_rate": 8.366972477064221e-06, |
|
"loss": 0.3077, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 14.68421052631579, |
|
"grad_norm": 14.732500076293945, |
|
"learning_rate": 8.339449541284403e-06, |
|
"loss": 0.1057, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 14.701754385964913, |
|
"grad_norm": 23.38099479675293, |
|
"learning_rate": 8.311926605504588e-06, |
|
"loss": 0.2858, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 14.719298245614034, |
|
"grad_norm": 6.091468334197998, |
|
"learning_rate": 8.28440366972477e-06, |
|
"loss": 0.1929, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 14.736842105263158, |
|
"grad_norm": 19.15097999572754, |
|
"learning_rate": 8.256880733944954e-06, |
|
"loss": 0.1127, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 14.75438596491228, |
|
"grad_norm": 7.495136737823486, |
|
"learning_rate": 8.229357798165137e-06, |
|
"loss": 0.1402, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 14.771929824561404, |
|
"grad_norm": 10.095050811767578, |
|
"learning_rate": 8.201834862385323e-06, |
|
"loss": 0.1918, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 14.789473684210526, |
|
"grad_norm": 10.647483825683594, |
|
"learning_rate": 8.174311926605505e-06, |
|
"loss": 0.4263, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 14.807017543859649, |
|
"grad_norm": 4.994744300842285, |
|
"learning_rate": 8.146788990825689e-06, |
|
"loss": 0.1223, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 14.824561403508772, |
|
"grad_norm": 1.3206747770309448, |
|
"learning_rate": 8.119266055045872e-06, |
|
"loss": 0.0626, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 14.842105263157894, |
|
"grad_norm": 10.279633522033691, |
|
"learning_rate": 8.091743119266056e-06, |
|
"loss": 0.1556, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 14.859649122807017, |
|
"grad_norm": 6.295610427856445, |
|
"learning_rate": 8.064220183486238e-06, |
|
"loss": 0.1125, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 14.87719298245614, |
|
"grad_norm": 6.005034446716309, |
|
"learning_rate": 8.036697247706422e-06, |
|
"loss": 0.1204, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 14.894736842105264, |
|
"grad_norm": 88.43120574951172, |
|
"learning_rate": 8.009174311926605e-06, |
|
"loss": 0.4208, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 14.912280701754385, |
|
"grad_norm": 4.66910982131958, |
|
"learning_rate": 7.981651376146789e-06, |
|
"loss": 0.0938, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 14.929824561403509, |
|
"grad_norm": 10.302392959594727, |
|
"learning_rate": 7.954128440366972e-06, |
|
"loss": 0.2364, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 14.947368421052632, |
|
"grad_norm": 12.315834045410156, |
|
"learning_rate": 7.926605504587157e-06, |
|
"loss": 0.3843, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 14.964912280701755, |
|
"grad_norm": 2.329768180847168, |
|
"learning_rate": 7.89908256880734e-06, |
|
"loss": 0.2024, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 14.982456140350877, |
|
"grad_norm": 5.507812976837158, |
|
"learning_rate": 7.871559633027524e-06, |
|
"loss": 0.2056, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 15.961007118225098, |
|
"learning_rate": 7.844036697247707e-06, |
|
"loss": 0.2684, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.6781609195402298, |
|
"eval_loss": 1.0226866006851196, |
|
"eval_roc_auc": 0.8921742047234581, |
|
"eval_runtime": 9.4145, |
|
"eval_samples_per_second": 27.723, |
|
"eval_steps_per_second": 1.806, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 15.017543859649123, |
|
"grad_norm": 15.226679801940918, |
|
"learning_rate": 7.81651376146789e-06, |
|
"loss": 0.2926, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 15.035087719298245, |
|
"grad_norm": 8.864625930786133, |
|
"learning_rate": 7.788990825688073e-06, |
|
"loss": 0.1756, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 15.052631578947368, |
|
"grad_norm": 1.6936672925949097, |
|
"learning_rate": 7.761467889908257e-06, |
|
"loss": 0.0536, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 15.070175438596491, |
|
"grad_norm": 15.099090576171875, |
|
"learning_rate": 7.73394495412844e-06, |
|
"loss": 0.1459, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 15.087719298245615, |
|
"grad_norm": 5.978076457977295, |
|
"learning_rate": 7.706422018348624e-06, |
|
"loss": 0.116, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 15.105263157894736, |
|
"grad_norm": 136.03578186035156, |
|
"learning_rate": 7.678899082568806e-06, |
|
"loss": 0.1331, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 15.12280701754386, |
|
"grad_norm": 0.6831678152084351, |
|
"learning_rate": 7.65137614678899e-06, |
|
"loss": 0.0381, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 15.140350877192983, |
|
"grad_norm": 16.589181900024414, |
|
"learning_rate": 7.623853211009174e-06, |
|
"loss": 0.2011, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 15.157894736842104, |
|
"grad_norm": 4.638211250305176, |
|
"learning_rate": 7.596330275229358e-06, |
|
"loss": 0.0889, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 15.175438596491228, |
|
"grad_norm": 10.425081253051758, |
|
"learning_rate": 7.5688073394495415e-06, |
|
"loss": 0.1062, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 15.192982456140351, |
|
"grad_norm": 5.109747886657715, |
|
"learning_rate": 7.541284403669726e-06, |
|
"loss": 0.1663, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 15.210526315789474, |
|
"grad_norm": 8.572009086608887, |
|
"learning_rate": 7.513761467889908e-06, |
|
"loss": 0.1877, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 15.228070175438596, |
|
"grad_norm": 7.750594139099121, |
|
"learning_rate": 7.486238532110092e-06, |
|
"loss": 0.1766, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 15.24561403508772, |
|
"grad_norm": 13.109859466552734, |
|
"learning_rate": 7.458715596330276e-06, |
|
"loss": 0.1765, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 15.263157894736842, |
|
"grad_norm": 3.056985855102539, |
|
"learning_rate": 7.431192660550459e-06, |
|
"loss": 0.0814, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 15.280701754385966, |
|
"grad_norm": 0.44665461778640747, |
|
"learning_rate": 7.403669724770643e-06, |
|
"loss": 0.0525, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 15.298245614035087, |
|
"grad_norm": 15.339385032653809, |
|
"learning_rate": 7.376146788990826e-06, |
|
"loss": 0.1396, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 15.31578947368421, |
|
"grad_norm": 6.135118007659912, |
|
"learning_rate": 7.34862385321101e-06, |
|
"loss": 0.1651, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 15.333333333333334, |
|
"grad_norm": 5.053482532501221, |
|
"learning_rate": 7.321100917431193e-06, |
|
"loss": 0.1773, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 15.350877192982455, |
|
"grad_norm": 2.368586301803589, |
|
"learning_rate": 7.293577981651376e-06, |
|
"loss": 0.1684, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 15.368421052631579, |
|
"grad_norm": 39.954166412353516, |
|
"learning_rate": 7.2660550458715605e-06, |
|
"loss": 0.3675, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 15.385964912280702, |
|
"grad_norm": 0.6676788330078125, |
|
"learning_rate": 7.238532110091744e-06, |
|
"loss": 0.0364, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 15.403508771929825, |
|
"grad_norm": 7.863750457763672, |
|
"learning_rate": 7.211009174311927e-06, |
|
"loss": 0.2284, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 15.421052631578947, |
|
"grad_norm": 2.7634315490722656, |
|
"learning_rate": 7.1834862385321105e-06, |
|
"loss": 0.0924, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 15.43859649122807, |
|
"grad_norm": 19.595252990722656, |
|
"learning_rate": 7.155963302752294e-06, |
|
"loss": 0.1652, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 15.456140350877194, |
|
"grad_norm": 43.90449905395508, |
|
"learning_rate": 7.128440366972478e-06, |
|
"loss": 0.3822, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 15.473684210526315, |
|
"grad_norm": 12.782305717468262, |
|
"learning_rate": 7.100917431192661e-06, |
|
"loss": 0.1843, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 15.491228070175438, |
|
"grad_norm": 0.46928420662879944, |
|
"learning_rate": 7.073394495412845e-06, |
|
"loss": 0.0541, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 15.508771929824562, |
|
"grad_norm": 7.558313846588135, |
|
"learning_rate": 7.045871559633028e-06, |
|
"loss": 0.228, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 15.526315789473685, |
|
"grad_norm": 1.2508295774459839, |
|
"learning_rate": 7.018348623853211e-06, |
|
"loss": 0.0703, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 15.543859649122806, |
|
"grad_norm": 14.253005981445312, |
|
"learning_rate": 6.9908256880733955e-06, |
|
"loss": 0.0983, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 15.56140350877193, |
|
"grad_norm": 9.677652359008789, |
|
"learning_rate": 6.963302752293579e-06, |
|
"loss": 0.186, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 15.578947368421053, |
|
"grad_norm": 24.92579460144043, |
|
"learning_rate": 6.935779816513762e-06, |
|
"loss": 0.2001, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 15.596491228070175, |
|
"grad_norm": 40.403839111328125, |
|
"learning_rate": 6.9082568807339454e-06, |
|
"loss": 0.09, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 15.614035087719298, |
|
"grad_norm": 11.924145698547363, |
|
"learning_rate": 6.880733944954129e-06, |
|
"loss": 0.2185, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 15.631578947368421, |
|
"grad_norm": 1.4456658363342285, |
|
"learning_rate": 6.853211009174312e-06, |
|
"loss": 0.2724, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 15.649122807017545, |
|
"grad_norm": 10.164271354675293, |
|
"learning_rate": 6.825688073394496e-06, |
|
"loss": 0.3432, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 15.666666666666666, |
|
"grad_norm": 15.096166610717773, |
|
"learning_rate": 6.7981651376146796e-06, |
|
"loss": 0.2307, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 15.68421052631579, |
|
"grad_norm": 12.728583335876465, |
|
"learning_rate": 6.770642201834863e-06, |
|
"loss": 0.242, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 15.701754385964913, |
|
"grad_norm": 28.29001235961914, |
|
"learning_rate": 6.743119266055046e-06, |
|
"loss": 0.1087, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 15.719298245614034, |
|
"grad_norm": 17.925107955932617, |
|
"learning_rate": 6.7155963302752295e-06, |
|
"loss": 0.4188, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 15.736842105263158, |
|
"grad_norm": 6.433342933654785, |
|
"learning_rate": 6.688073394495414e-06, |
|
"loss": 0.0936, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 15.75438596491228, |
|
"grad_norm": 0.8694811463356018, |
|
"learning_rate": 6.660550458715597e-06, |
|
"loss": 0.0473, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 15.771929824561404, |
|
"grad_norm": 30.822620391845703, |
|
"learning_rate": 6.63302752293578e-06, |
|
"loss": 0.0913, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 15.789473684210526, |
|
"grad_norm": 22.46412467956543, |
|
"learning_rate": 6.605504587155964e-06, |
|
"loss": 0.1278, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 15.807017543859649, |
|
"grad_norm": 19.16153335571289, |
|
"learning_rate": 6.577981651376147e-06, |
|
"loss": 0.2673, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 15.824561403508772, |
|
"grad_norm": 50.9233283996582, |
|
"learning_rate": 6.550458715596331e-06, |
|
"loss": 0.557, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 15.842105263157894, |
|
"grad_norm": 13.619710922241211, |
|
"learning_rate": 6.5229357798165145e-06, |
|
"loss": 0.2187, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 15.859649122807017, |
|
"grad_norm": 3.181476593017578, |
|
"learning_rate": 6.495412844036698e-06, |
|
"loss": 0.1298, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 15.87719298245614, |
|
"grad_norm": 12.634454727172852, |
|
"learning_rate": 6.467889908256881e-06, |
|
"loss": 0.1284, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 15.894736842105264, |
|
"grad_norm": 21.935789108276367, |
|
"learning_rate": 6.4403669724770645e-06, |
|
"loss": 0.5293, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 15.912280701754385, |
|
"grad_norm": 20.671627044677734, |
|
"learning_rate": 6.412844036697249e-06, |
|
"loss": 0.1588, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 15.929824561403509, |
|
"grad_norm": 7.02576208114624, |
|
"learning_rate": 6.385321100917432e-06, |
|
"loss": 0.1236, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 15.947368421052632, |
|
"grad_norm": 10.424530982971191, |
|
"learning_rate": 6.357798165137615e-06, |
|
"loss": 0.4814, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 15.964912280701755, |
|
"grad_norm": 11.366877555847168, |
|
"learning_rate": 6.330275229357799e-06, |
|
"loss": 0.3608, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 15.982456140350877, |
|
"grad_norm": 0.31679338216781616, |
|
"learning_rate": 6.302752293577982e-06, |
|
"loss": 0.0309, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 25.50751495361328, |
|
"learning_rate": 6.275229357798165e-06, |
|
"loss": 0.3512, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.7088122605363985, |
|
"eval_loss": 1.0425840616226196, |
|
"eval_roc_auc": 0.8961770097130725, |
|
"eval_runtime": 9.7922, |
|
"eval_samples_per_second": 26.654, |
|
"eval_steps_per_second": 1.736, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 16.017543859649123, |
|
"grad_norm": 5.3255228996276855, |
|
"learning_rate": 6.247706422018349e-06, |
|
"loss": 0.0517, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 16.035087719298247, |
|
"grad_norm": 9.271040916442871, |
|
"learning_rate": 6.220183486238533e-06, |
|
"loss": 0.1322, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 16.05263157894737, |
|
"grad_norm": 4.202192783355713, |
|
"learning_rate": 6.192660550458716e-06, |
|
"loss": 0.1875, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 16.07017543859649, |
|
"grad_norm": 5.035487174987793, |
|
"learning_rate": 6.165137614678899e-06, |
|
"loss": 0.2728, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 16.087719298245613, |
|
"grad_norm": 4.098052024841309, |
|
"learning_rate": 6.137614678899083e-06, |
|
"loss": 0.1016, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 16.105263157894736, |
|
"grad_norm": 0.5254385471343994, |
|
"learning_rate": 6.110091743119267e-06, |
|
"loss": 0.0444, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 16.12280701754386, |
|
"grad_norm": 3.7572407722473145, |
|
"learning_rate": 6.08256880733945e-06, |
|
"loss": 0.0953, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 16.140350877192983, |
|
"grad_norm": 9.17808723449707, |
|
"learning_rate": 6.0550458715596335e-06, |
|
"loss": 0.0714, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 16.157894736842106, |
|
"grad_norm": 13.068717002868652, |
|
"learning_rate": 6.027522935779817e-06, |
|
"loss": 0.5088, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 16.17543859649123, |
|
"grad_norm": 18.120952606201172, |
|
"learning_rate": 6e-06, |
|
"loss": 0.3557, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 16.19298245614035, |
|
"grad_norm": 17.2962589263916, |
|
"learning_rate": 5.972477064220184e-06, |
|
"loss": 0.2254, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 16.210526315789473, |
|
"grad_norm": 12.725507736206055, |
|
"learning_rate": 5.944954128440368e-06, |
|
"loss": 0.2201, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 16.228070175438596, |
|
"grad_norm": 9.663554191589355, |
|
"learning_rate": 5.917431192660551e-06, |
|
"loss": 0.4431, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 16.24561403508772, |
|
"grad_norm": 10.163387298583984, |
|
"learning_rate": 5.889908256880734e-06, |
|
"loss": 0.1505, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 16.263157894736842, |
|
"grad_norm": 4.416619300842285, |
|
"learning_rate": 5.862385321100918e-06, |
|
"loss": 0.1077, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 16.280701754385966, |
|
"grad_norm": 9.514927864074707, |
|
"learning_rate": 5.834862385321102e-06, |
|
"loss": 0.209, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 16.29824561403509, |
|
"grad_norm": 13.716423988342285, |
|
"learning_rate": 5.807339449541285e-06, |
|
"loss": 0.2191, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 16.31578947368421, |
|
"grad_norm": 6.016623020172119, |
|
"learning_rate": 5.7798165137614684e-06, |
|
"loss": 0.2304, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 16.333333333333332, |
|
"grad_norm": 3.8412370681762695, |
|
"learning_rate": 5.752293577981652e-06, |
|
"loss": 0.1962, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 16.350877192982455, |
|
"grad_norm": 2.5499162673950195, |
|
"learning_rate": 5.724770642201835e-06, |
|
"loss": 0.053, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 16.36842105263158, |
|
"grad_norm": 0.8069320917129517, |
|
"learning_rate": 5.697247706422018e-06, |
|
"loss": 0.0544, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 16.385964912280702, |
|
"grad_norm": 10.786516189575195, |
|
"learning_rate": 5.6697247706422026e-06, |
|
"loss": 0.2443, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 16.403508771929825, |
|
"grad_norm": 5.2906389236450195, |
|
"learning_rate": 5.642201834862386e-06, |
|
"loss": 0.0895, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 16.42105263157895, |
|
"grad_norm": 7.044477939605713, |
|
"learning_rate": 5.614678899082569e-06, |
|
"loss": 0.1128, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 16.43859649122807, |
|
"grad_norm": 4.098491668701172, |
|
"learning_rate": 5.5871559633027525e-06, |
|
"loss": 0.0885, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 16.45614035087719, |
|
"grad_norm": 24.7549991607666, |
|
"learning_rate": 5.559633027522936e-06, |
|
"loss": 0.2579, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 16.473684210526315, |
|
"grad_norm": 9.953288078308105, |
|
"learning_rate": 5.53211009174312e-06, |
|
"loss": 0.2695, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 16.49122807017544, |
|
"grad_norm": 1.9676154851913452, |
|
"learning_rate": 5.504587155963303e-06, |
|
"loss": 0.0861, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 16.50877192982456, |
|
"grad_norm": 39.79118728637695, |
|
"learning_rate": 5.477064220183487e-06, |
|
"loss": 0.2086, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 16.526315789473685, |
|
"grad_norm": 8.34636402130127, |
|
"learning_rate": 5.44954128440367e-06, |
|
"loss": 0.1239, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 16.54385964912281, |
|
"grad_norm": 0.8310889601707458, |
|
"learning_rate": 5.422018348623853e-06, |
|
"loss": 0.0438, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 16.56140350877193, |
|
"grad_norm": 2.904680013656616, |
|
"learning_rate": 5.3944954128440375e-06, |
|
"loss": 0.0959, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 16.57894736842105, |
|
"grad_norm": 16.282615661621094, |
|
"learning_rate": 5.366972477064221e-06, |
|
"loss": 0.2871, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 16.596491228070175, |
|
"grad_norm": 7.053973197937012, |
|
"learning_rate": 5.339449541284404e-06, |
|
"loss": 0.2315, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 16.614035087719298, |
|
"grad_norm": 10.810872077941895, |
|
"learning_rate": 5.3119266055045874e-06, |
|
"loss": 0.1121, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 16.63157894736842, |
|
"grad_norm": 5.931017875671387, |
|
"learning_rate": 5.284403669724771e-06, |
|
"loss": 0.0757, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 16.649122807017545, |
|
"grad_norm": 3.6563878059387207, |
|
"learning_rate": 5.256880733944955e-06, |
|
"loss": 0.0865, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 16.666666666666668, |
|
"grad_norm": 1.590165138244629, |
|
"learning_rate": 5.229357798165138e-06, |
|
"loss": 0.0639, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 16.68421052631579, |
|
"grad_norm": 2.1470823287963867, |
|
"learning_rate": 5.201834862385322e-06, |
|
"loss": 0.0781, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 16.70175438596491, |
|
"grad_norm": 8.965104103088379, |
|
"learning_rate": 5.174311926605505e-06, |
|
"loss": 0.1548, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 16.719298245614034, |
|
"grad_norm": 27.018918991088867, |
|
"learning_rate": 5.146788990825688e-06, |
|
"loss": 0.4119, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 16.736842105263158, |
|
"grad_norm": 7.252739906311035, |
|
"learning_rate": 5.119266055045872e-06, |
|
"loss": 0.2262, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 16.75438596491228, |
|
"grad_norm": 14.698616981506348, |
|
"learning_rate": 5.091743119266056e-06, |
|
"loss": 0.1485, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 16.771929824561404, |
|
"grad_norm": 5.847207546234131, |
|
"learning_rate": 5.064220183486239e-06, |
|
"loss": 0.1799, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 16.789473684210527, |
|
"grad_norm": 5.336987018585205, |
|
"learning_rate": 5.036697247706422e-06, |
|
"loss": 0.0802, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 16.80701754385965, |
|
"grad_norm": 8.911815643310547, |
|
"learning_rate": 5.009174311926606e-06, |
|
"loss": 0.109, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 16.82456140350877, |
|
"grad_norm": 0.5730437636375427, |
|
"learning_rate": 4.981651376146789e-06, |
|
"loss": 0.0404, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 16.842105263157894, |
|
"grad_norm": 1.8662869930267334, |
|
"learning_rate": 4.954128440366973e-06, |
|
"loss": 0.0672, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 16.859649122807017, |
|
"grad_norm": 10.673025131225586, |
|
"learning_rate": 4.9266055045871565e-06, |
|
"loss": 0.3202, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 16.87719298245614, |
|
"grad_norm": 2.2482078075408936, |
|
"learning_rate": 4.89908256880734e-06, |
|
"loss": 0.0618, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 16.894736842105264, |
|
"grad_norm": 15.744501113891602, |
|
"learning_rate": 4.871559633027523e-06, |
|
"loss": 0.5188, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 16.912280701754387, |
|
"grad_norm": 15.033669471740723, |
|
"learning_rate": 4.8440366972477065e-06, |
|
"loss": 0.1138, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 16.92982456140351, |
|
"grad_norm": 13.679003715515137, |
|
"learning_rate": 4.816513761467891e-06, |
|
"loss": 0.3009, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 16.94736842105263, |
|
"grad_norm": 27.240638732910156, |
|
"learning_rate": 4.788990825688074e-06, |
|
"loss": 0.1779, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 16.964912280701753, |
|
"grad_norm": 8.780830383300781, |
|
"learning_rate": 4.761467889908257e-06, |
|
"loss": 0.2533, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 16.982456140350877, |
|
"grad_norm": 2.302766799926758, |
|
"learning_rate": 4.733944954128441e-06, |
|
"loss": 0.0429, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 6.349617958068848, |
|
"learning_rate": 4.706422018348624e-06, |
|
"loss": 0.1867, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.6896551724137931, |
|
"eval_loss": 1.066833734512329, |
|
"eval_roc_auc": 0.8918422445130159, |
|
"eval_runtime": 8.5336, |
|
"eval_samples_per_second": 30.585, |
|
"eval_steps_per_second": 1.992, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 17.017543859649123, |
|
"grad_norm": 0.8767920732498169, |
|
"learning_rate": 4.678899082568808e-06, |
|
"loss": 0.0473, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 17.035087719298247, |
|
"grad_norm": 0.24234896898269653, |
|
"learning_rate": 4.651376146788991e-06, |
|
"loss": 0.0386, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 17.05263157894737, |
|
"grad_norm": 0.7192365527153015, |
|
"learning_rate": 4.623853211009175e-06, |
|
"loss": 0.0648, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 17.07017543859649, |
|
"grad_norm": 13.74758529663086, |
|
"learning_rate": 4.596330275229358e-06, |
|
"loss": 0.1321, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 17.087719298245613, |
|
"grad_norm": 54.41605758666992, |
|
"learning_rate": 4.568807339449541e-06, |
|
"loss": 0.1375, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 17.105263157894736, |
|
"grad_norm": 1.3205739259719849, |
|
"learning_rate": 4.5412844036697256e-06, |
|
"loss": 0.056, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 17.12280701754386, |
|
"grad_norm": 15.213194847106934, |
|
"learning_rate": 4.513761467889909e-06, |
|
"loss": 0.1278, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 17.140350877192983, |
|
"grad_norm": 5.64646577835083, |
|
"learning_rate": 4.486238532110092e-06, |
|
"loss": 0.2903, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 17.157894736842106, |
|
"grad_norm": 24.47264289855957, |
|
"learning_rate": 4.4587155963302755e-06, |
|
"loss": 0.1468, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 17.17543859649123, |
|
"grad_norm": 11.426804542541504, |
|
"learning_rate": 4.431192660550459e-06, |
|
"loss": 0.0811, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 17.19298245614035, |
|
"grad_norm": 62.90053939819336, |
|
"learning_rate": 4.403669724770642e-06, |
|
"loss": 0.1826, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 17.210526315789473, |
|
"grad_norm": 6.824471950531006, |
|
"learning_rate": 4.376146788990826e-06, |
|
"loss": 0.203, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 17.228070175438596, |
|
"grad_norm": 40.639400482177734, |
|
"learning_rate": 4.34862385321101e-06, |
|
"loss": 0.4493, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 17.24561403508772, |
|
"grad_norm": 4.320133686065674, |
|
"learning_rate": 4.321100917431193e-06, |
|
"loss": 0.0976, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 17.263157894736842, |
|
"grad_norm": 15.716089248657227, |
|
"learning_rate": 4.293577981651376e-06, |
|
"loss": 0.1551, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 17.280701754385966, |
|
"grad_norm": 3.049983501434326, |
|
"learning_rate": 4.26605504587156e-06, |
|
"loss": 0.0604, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 17.29824561403509, |
|
"grad_norm": 1.962673306465149, |
|
"learning_rate": 4.238532110091744e-06, |
|
"loss": 0.042, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 17.31578947368421, |
|
"grad_norm": 19.601146697998047, |
|
"learning_rate": 4.211009174311927e-06, |
|
"loss": 0.4181, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 17.333333333333332, |
|
"grad_norm": 16.026573181152344, |
|
"learning_rate": 4.1834862385321104e-06, |
|
"loss": 0.0682, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 17.350877192982455, |
|
"grad_norm": 13.569026947021484, |
|
"learning_rate": 4.155963302752294e-06, |
|
"loss": 0.1055, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 17.36842105263158, |
|
"grad_norm": 0.9620193243026733, |
|
"learning_rate": 4.128440366972477e-06, |
|
"loss": 0.0294, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 17.385964912280702, |
|
"grad_norm": 0.5341245532035828, |
|
"learning_rate": 4.100917431192661e-06, |
|
"loss": 0.0755, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 17.403508771929825, |
|
"grad_norm": 2.195127248764038, |
|
"learning_rate": 4.0733944954128446e-06, |
|
"loss": 0.0427, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 17.42105263157895, |
|
"grad_norm": 45.747459411621094, |
|
"learning_rate": 4.045871559633028e-06, |
|
"loss": 0.2021, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 17.43859649122807, |
|
"grad_norm": 18.44023895263672, |
|
"learning_rate": 4.018348623853211e-06, |
|
"loss": 0.2546, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 17.45614035087719, |
|
"grad_norm": 14.940963745117188, |
|
"learning_rate": 3.9908256880733945e-06, |
|
"loss": 0.0749, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 17.473684210526315, |
|
"grad_norm": 9.153297424316406, |
|
"learning_rate": 3.963302752293579e-06, |
|
"loss": 0.1652, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 17.49122807017544, |
|
"grad_norm": 19.046146392822266, |
|
"learning_rate": 3.935779816513762e-06, |
|
"loss": 0.2192, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 17.50877192982456, |
|
"grad_norm": 24.5616397857666, |
|
"learning_rate": 3.908256880733945e-06, |
|
"loss": 0.0883, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 17.526315789473685, |
|
"grad_norm": 5.023523330688477, |
|
"learning_rate": 3.880733944954129e-06, |
|
"loss": 0.2193, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 17.54385964912281, |
|
"grad_norm": 7.122688293457031, |
|
"learning_rate": 3.853211009174312e-06, |
|
"loss": 0.1682, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 17.56140350877193, |
|
"grad_norm": 4.198907375335693, |
|
"learning_rate": 3.825688073394495e-06, |
|
"loss": 0.0674, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 17.57894736842105, |
|
"grad_norm": 6.622169017791748, |
|
"learning_rate": 3.798165137614679e-06, |
|
"loss": 0.0591, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 17.596491228070175, |
|
"grad_norm": 5.111667156219482, |
|
"learning_rate": 3.770642201834863e-06, |
|
"loss": 0.104, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 17.614035087719298, |
|
"grad_norm": 7.502628326416016, |
|
"learning_rate": 3.743119266055046e-06, |
|
"loss": 0.0722, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 17.63157894736842, |
|
"grad_norm": 0.4268607795238495, |
|
"learning_rate": 3.7155963302752295e-06, |
|
"loss": 0.036, |
|
"step": 1005 |
|
    },
    {
      "epoch": 17.649122807017545,
      "grad_norm": 5.627065181732178,
      "learning_rate": 3.688073394495413e-06,
      "loss": 0.2015,
      "step": 1006
    },
    {
      "epoch": 17.666666666666668,
      "grad_norm": 10.972799301147461,
      "learning_rate": 3.6605504587155965e-06,
      "loss": 0.0586,
      "step": 1007
    },
    {
      "epoch": 17.68421052631579,
      "grad_norm": 14.32602596282959,
      "learning_rate": 3.6330275229357803e-06,
      "loss": 0.2551,
      "step": 1008
    },
    {
      "epoch": 17.70175438596491,
      "grad_norm": 30.01763153076172,
      "learning_rate": 3.6055045871559636e-06,
      "loss": 0.1269,
      "step": 1009
    },
    {
      "epoch": 17.719298245614034,
      "grad_norm": 1.7071233987808228,
      "learning_rate": 3.577981651376147e-06,
      "loss": 0.0459,
      "step": 1010
    },
    {
      "epoch": 17.736842105263158,
      "grad_norm": 67.83972930908203,
      "learning_rate": 3.5504587155963307e-06,
      "loss": 0.2978,
      "step": 1011
    },
    {
      "epoch": 17.75438596491228,
      "grad_norm": 1.1684902906417847,
      "learning_rate": 3.522935779816514e-06,
      "loss": 0.0464,
      "step": 1012
    },
    {
      "epoch": 17.771929824561404,
      "grad_norm": 6.896801948547363,
      "learning_rate": 3.4954128440366977e-06,
      "loss": 0.0724,
      "step": 1013
    },
    {
      "epoch": 17.789473684210527,
      "grad_norm": 49.41828918457031,
      "learning_rate": 3.467889908256881e-06,
      "loss": 0.2514,
      "step": 1014
    },
    {
      "epoch": 17.80701754385965,
      "grad_norm": 1.8706743717193604,
      "learning_rate": 3.4403669724770644e-06,
      "loss": 0.0441,
      "step": 1015
    },
    {
      "epoch": 17.82456140350877,
      "grad_norm": 4.06084680557251,
      "learning_rate": 3.412844036697248e-06,
      "loss": 0.3851,
      "step": 1016
    },
    {
      "epoch": 17.842105263157894,
      "grad_norm": 4.851496696472168,
      "learning_rate": 3.3853211009174314e-06,
      "loss": 0.1896,
      "step": 1017
    },
    {
      "epoch": 17.859649122807017,
      "grad_norm": 22.698795318603516,
      "learning_rate": 3.3577981651376148e-06,
      "loss": 0.1371,
      "step": 1018
    },
    {
      "epoch": 17.87719298245614,
      "grad_norm": 1.251635193824768,
      "learning_rate": 3.3302752293577985e-06,
      "loss": 0.0426,
      "step": 1019
    },
    {
      "epoch": 17.894736842105264,
      "grad_norm": 4.991325378417969,
      "learning_rate": 3.302752293577982e-06,
      "loss": 0.2134,
      "step": 1020
    },
    {
      "epoch": 17.912280701754387,
      "grad_norm": 39.215309143066406,
      "learning_rate": 3.2752293577981656e-06,
      "loss": 0.3352,
      "step": 1021
    },
    {
      "epoch": 17.92982456140351,
      "grad_norm": 25.196969985961914,
      "learning_rate": 3.247706422018349e-06,
      "loss": 0.4699,
      "step": 1022
    },
    {
      "epoch": 17.94736842105263,
      "grad_norm": 15.807039260864258,
      "learning_rate": 3.2201834862385322e-06,
      "loss": 0.3251,
      "step": 1023
    },
    {
      "epoch": 17.964912280701753,
      "grad_norm": 0.42076656222343445,
      "learning_rate": 3.192660550458716e-06,
      "loss": 0.041,
      "step": 1024
    },
    {
      "epoch": 17.982456140350877,
      "grad_norm": 16.122833251953125,
      "learning_rate": 3.1651376146788993e-06,
      "loss": 0.297,
      "step": 1025
    },
    {
      "epoch": 18.0,
      "grad_norm": 0.5662638545036316,
      "learning_rate": 3.1376146788990826e-06,
      "loss": 0.0226,
      "step": 1026
    },
    {
      "epoch": 18.0,
      "eval_accuracy": 0.7126436781609196,
      "eval_loss": 1.0820069313049316,
      "eval_roc_auc": 0.8867955974179313,
      "eval_runtime": 8.3763,
      "eval_samples_per_second": 31.159,
      "eval_steps_per_second": 2.03,
      "step": 1026
    },
    {
      "epoch": 18.017543859649123,
      "grad_norm": 9.32384967803955,
      "learning_rate": 3.1100917431192664e-06,
      "loss": 0.1557,
      "step": 1027
    },
    {
      "epoch": 18.035087719298247,
      "grad_norm": 2.3690552711486816,
      "learning_rate": 3.0825688073394497e-06,
      "loss": 0.0587,
      "step": 1028
    },
    {
      "epoch": 18.05263157894737,
      "grad_norm": 6.92120885848999,
      "learning_rate": 3.0550458715596334e-06,
      "loss": 0.1678,
      "step": 1029
    },
    {
      "epoch": 18.07017543859649,
      "grad_norm": 1.0494810342788696,
      "learning_rate": 3.0275229357798168e-06,
      "loss": 0.0578,
      "step": 1030
    },
    {
      "epoch": 18.087719298245613,
      "grad_norm": 7.725659370422363,
      "learning_rate": 3e-06,
      "loss": 0.1265,
      "step": 1031
    },
    {
      "epoch": 18.105263157894736,
      "grad_norm": 1.051193356513977,
      "learning_rate": 2.972477064220184e-06,
      "loss": 0.0619,
      "step": 1032
    },
    {
      "epoch": 18.12280701754386,
      "grad_norm": 6.573411464691162,
      "learning_rate": 2.944954128440367e-06,
      "loss": 0.0477,
      "step": 1033
    },
    {
      "epoch": 18.140350877192983,
      "grad_norm": 0.6530596017837524,
      "learning_rate": 2.917431192660551e-06,
      "loss": 0.0543,
      "step": 1034
    },
    {
      "epoch": 18.157894736842106,
      "grad_norm": 0.6607857346534729,
      "learning_rate": 2.8899082568807342e-06,
      "loss": 0.0616,
      "step": 1035
    },
    {
      "epoch": 18.17543859649123,
      "grad_norm": 8.91987419128418,
      "learning_rate": 2.8623853211009175e-06,
      "loss": 0.1471,
      "step": 1036
    },
    {
      "epoch": 18.19298245614035,
      "grad_norm": 8.405895233154297,
      "learning_rate": 2.8348623853211013e-06,
      "loss": 0.5738,
      "step": 1037
    },
    {
      "epoch": 18.210526315789473,
      "grad_norm": 5.3688554763793945,
      "learning_rate": 2.8073394495412846e-06,
      "loss": 0.1624,
      "step": 1038
    },
    {
      "epoch": 18.228070175438596,
      "grad_norm": 0.9315479397773743,
      "learning_rate": 2.779816513761468e-06,
      "loss": 0.0663,
      "step": 1039
    },
    {
      "epoch": 18.24561403508772,
      "grad_norm": 11.00497817993164,
      "learning_rate": 2.7522935779816517e-06,
      "loss": 0.0811,
      "step": 1040
    },
    {
      "epoch": 18.263157894736842,
      "grad_norm": 14.122636795043945,
      "learning_rate": 2.724770642201835e-06,
      "loss": 0.0813,
      "step": 1041
    },
    {
      "epoch": 18.280701754385966,
      "grad_norm": 0.315636545419693,
      "learning_rate": 2.6972477064220187e-06,
      "loss": 0.0353,
      "step": 1042
    },
    {
      "epoch": 18.29824561403509,
      "grad_norm": 7.388427257537842,
      "learning_rate": 2.669724770642202e-06,
      "loss": 0.0753,
      "step": 1043
    },
    {
      "epoch": 18.31578947368421,
      "grad_norm": 19.393030166625977,
      "learning_rate": 2.6422018348623854e-06,
      "loss": 0.11,
      "step": 1044
    },
    {
      "epoch": 18.333333333333332,
      "grad_norm": 7.012357234954834,
      "learning_rate": 2.614678899082569e-06,
      "loss": 0.1198,
      "step": 1045
    },
    {
      "epoch": 18.350877192982455,
      "grad_norm": 10.783536911010742,
      "learning_rate": 2.5871559633027525e-06,
      "loss": 0.1551,
      "step": 1046
    },
    {
      "epoch": 18.36842105263158,
      "grad_norm": 11.06667709350586,
      "learning_rate": 2.559633027522936e-06,
      "loss": 0.1406,
      "step": 1047
    },
    {
      "epoch": 18.385964912280702,
      "grad_norm": 3.9254019260406494,
      "learning_rate": 2.5321100917431195e-06,
      "loss": 0.1157,
      "step": 1048
    },
    {
      "epoch": 18.403508771929825,
      "grad_norm": 2.1191844940185547,
      "learning_rate": 2.504587155963303e-06,
      "loss": 0.0628,
      "step": 1049
    },
    {
      "epoch": 18.42105263157895,
      "grad_norm": 6.70393180847168,
      "learning_rate": 2.4770642201834866e-06,
      "loss": 0.1982,
      "step": 1050
    },
    {
      "epoch": 18.43859649122807,
      "grad_norm": 12.729414939880371,
      "learning_rate": 2.44954128440367e-06,
      "loss": 0.2587,
      "step": 1051
    },
    {
      "epoch": 18.45614035087719,
      "grad_norm": 2.260746717453003,
      "learning_rate": 2.4220183486238532e-06,
      "loss": 0.0777,
      "step": 1052
    },
    {
      "epoch": 18.473684210526315,
      "grad_norm": 8.489917755126953,
      "learning_rate": 2.394495412844037e-06,
      "loss": 0.2224,
      "step": 1053
    },
    {
      "epoch": 18.49122807017544,
      "grad_norm": 0.847894012928009,
      "learning_rate": 2.3669724770642203e-06,
      "loss": 0.065,
      "step": 1054
    },
    {
      "epoch": 18.50877192982456,
      "grad_norm": 58.93024826049805,
      "learning_rate": 2.339449541284404e-06,
      "loss": 0.2977,
      "step": 1055
    },
    {
      "epoch": 18.526315789473685,
      "grad_norm": 75.2459487915039,
      "learning_rate": 2.3119266055045874e-06,
      "loss": 0.0925,
      "step": 1056
    },
    {
      "epoch": 18.54385964912281,
      "grad_norm": 7.355698585510254,
      "learning_rate": 2.2844036697247707e-06,
      "loss": 0.2565,
      "step": 1057
    },
    {
      "epoch": 18.56140350877193,
      "grad_norm": 17.787094116210938,
      "learning_rate": 2.2568807339449544e-06,
      "loss": 0.137,
      "step": 1058
    },
    {
      "epoch": 18.57894736842105,
      "grad_norm": 9.457293510437012,
      "learning_rate": 2.2293577981651378e-06,
      "loss": 0.3046,
      "step": 1059
    },
    {
      "epoch": 18.596491228070175,
      "grad_norm": 9.453157424926758,
      "learning_rate": 2.201834862385321e-06,
      "loss": 0.1251,
      "step": 1060
    },
    {
      "epoch": 18.614035087719298,
      "grad_norm": 8.679370880126953,
      "learning_rate": 2.174311926605505e-06,
      "loss": 0.1853,
      "step": 1061
    },
    {
      "epoch": 18.63157894736842,
      "grad_norm": 0.7816475629806519,
      "learning_rate": 2.146788990825688e-06,
      "loss": 0.035,
      "step": 1062
    },
    {
      "epoch": 18.649122807017545,
      "grad_norm": 0.7611159682273865,
      "learning_rate": 2.119266055045872e-06,
      "loss": 0.0264,
      "step": 1063
    },
    {
      "epoch": 18.666666666666668,
      "grad_norm": 3.0506370067596436,
      "learning_rate": 2.0917431192660552e-06,
      "loss": 0.1705,
      "step": 1064
    },
    {
      "epoch": 18.68421052631579,
      "grad_norm": 0.23615388572216034,
      "learning_rate": 2.0642201834862385e-06,
      "loss": 0.0286,
      "step": 1065
    },
    {
      "epoch": 18.70175438596491,
      "grad_norm": 11.163158416748047,
      "learning_rate": 2.0366972477064223e-06,
      "loss": 0.0988,
      "step": 1066
    },
    {
      "epoch": 18.719298245614034,
      "grad_norm": 2.890137195587158,
      "learning_rate": 2.0091743119266056e-06,
      "loss": 0.1659,
      "step": 1067
    },
    {
      "epoch": 18.736842105263158,
      "grad_norm": 1.7421916723251343,
      "learning_rate": 1.9816513761467894e-06,
      "loss": 0.0289,
      "step": 1068
    },
    {
      "epoch": 18.75438596491228,
      "grad_norm": 9.106033325195312,
      "learning_rate": 1.9541284403669727e-06,
      "loss": 0.1112,
      "step": 1069
    },
    {
      "epoch": 18.771929824561404,
      "grad_norm": 15.5883150100708,
      "learning_rate": 1.926605504587156e-06,
      "loss": 0.2536,
      "step": 1070
    },
    {
      "epoch": 18.789473684210527,
      "grad_norm": 77.06293487548828,
      "learning_rate": 1.8990825688073395e-06,
      "loss": 0.6819,
      "step": 1071
    },
    {
      "epoch": 18.80701754385965,
      "grad_norm": 10.116018295288086,
      "learning_rate": 1.871559633027523e-06,
      "loss": 0.0762,
      "step": 1072
    },
    {
      "epoch": 18.82456140350877,
      "grad_norm": 9.84902572631836,
      "learning_rate": 1.8440366972477066e-06,
      "loss": 0.1751,
      "step": 1073
    },
    {
      "epoch": 18.842105263157894,
      "grad_norm": 8.737960815429688,
      "learning_rate": 1.8165137614678901e-06,
      "loss": 0.2718,
      "step": 1074
    },
    {
      "epoch": 18.859649122807017,
      "grad_norm": 3.291921854019165,
      "learning_rate": 1.7889908256880735e-06,
      "loss": 0.0605,
      "step": 1075
    },
    {
      "epoch": 18.87719298245614,
      "grad_norm": 16.412242889404297,
      "learning_rate": 1.761467889908257e-06,
      "loss": 0.2364,
      "step": 1076
    },
    {
      "epoch": 18.894736842105264,
      "grad_norm": 21.54041862487793,
      "learning_rate": 1.7339449541284405e-06,
      "loss": 0.2639,
      "step": 1077
    },
    {
      "epoch": 18.912280701754387,
      "grad_norm": 11.564841270446777,
      "learning_rate": 1.706422018348624e-06,
      "loss": 0.4344,
      "step": 1078
    },
    {
      "epoch": 18.92982456140351,
      "grad_norm": 2.3406782150268555,
      "learning_rate": 1.6788990825688074e-06,
      "loss": 0.0584,
      "step": 1079
    },
    {
      "epoch": 18.94736842105263,
      "grad_norm": 40.721405029296875,
      "learning_rate": 1.651376146788991e-06,
      "loss": 0.3595,
      "step": 1080
    },
    {
      "epoch": 18.964912280701753,
      "grad_norm": 55.34172821044922,
      "learning_rate": 1.6238532110091745e-06,
      "loss": 0.2543,
      "step": 1081
    },
    {
      "epoch": 18.982456140350877,
      "grad_norm": 38.30630874633789,
      "learning_rate": 1.596330275229358e-06,
      "loss": 0.2304,
      "step": 1082
    },
    {
      "epoch": 19.0,
      "grad_norm": 6.872189044952393,
      "learning_rate": 1.5688073394495413e-06,
      "loss": 0.518,
      "step": 1083
    },
    {
      "epoch": 19.0,
      "eval_accuracy": 0.7049808429118773,
      "eval_loss": 1.109078049659729,
      "eval_roc_auc": 0.8868706722016183,
      "eval_runtime": 9.7123,
      "eval_samples_per_second": 26.873,
      "eval_steps_per_second": 1.75,
      "step": 1083
    },
    {
      "epoch": 19.017543859649123,
      "grad_norm": 14.515957832336426,
      "learning_rate": 1.5412844036697248e-06,
      "loss": 0.1908,
      "step": 1084
    },
    {
      "epoch": 19.035087719298247,
      "grad_norm": 81.20832824707031,
      "learning_rate": 1.5137614678899084e-06,
      "loss": 0.2916,
      "step": 1085
    },
    {
      "epoch": 19.05263157894737,
      "grad_norm": 0.3983971178531647,
      "learning_rate": 1.486238532110092e-06,
      "loss": 0.0263,
      "step": 1086
    },
    {
      "epoch": 19.07017543859649,
      "grad_norm": 7.605863571166992,
      "learning_rate": 1.4587155963302754e-06,
      "loss": 0.0736,
      "step": 1087
    },
    {
      "epoch": 19.087719298245613,
      "grad_norm": 0.3646264672279358,
      "learning_rate": 1.4311926605504588e-06,
      "loss": 0.0362,
      "step": 1088
    },
    {
      "epoch": 19.105263157894736,
      "grad_norm": 0.5156516432762146,
      "learning_rate": 1.4036697247706423e-06,
      "loss": 0.0324,
      "step": 1089
    },
    {
      "epoch": 19.12280701754386,
      "grad_norm": 16.83617401123047,
      "learning_rate": 1.3761467889908258e-06,
      "loss": 0.2,
      "step": 1090
    },
    {
      "epoch": 19.140350877192983,
      "grad_norm": 0.9557402729988098,
      "learning_rate": 1.3486238532110094e-06,
      "loss": 0.039,
      "step": 1091
    },
    {
      "epoch": 19.157894736842106,
      "grad_norm": 9.392996788024902,
      "learning_rate": 1.3211009174311927e-06,
      "loss": 0.0845,
      "step": 1092
    },
    {
      "epoch": 19.17543859649123,
      "grad_norm": 7.63551664352417,
      "learning_rate": 1.2935779816513762e-06,
      "loss": 0.2157,
      "step": 1093
    },
    {
      "epoch": 19.19298245614035,
      "grad_norm": 1.086582899093628,
      "learning_rate": 1.2660550458715598e-06,
      "loss": 0.0387,
      "step": 1094
    },
    {
      "epoch": 19.210526315789473,
      "grad_norm": 28.10886573791504,
      "learning_rate": 1.2385321100917433e-06,
      "loss": 0.2219,
      "step": 1095
    },
    {
      "epoch": 19.228070175438596,
      "grad_norm": 1.2850357294082642,
      "learning_rate": 1.2110091743119266e-06,
      "loss": 0.0351,
      "step": 1096
    },
    {
      "epoch": 19.24561403508772,
      "grad_norm": 16.688474655151367,
      "learning_rate": 1.1834862385321102e-06,
      "loss": 0.2437,
      "step": 1097
    },
    {
      "epoch": 19.263157894736842,
      "grad_norm": 18.99778175354004,
      "learning_rate": 1.1559633027522937e-06,
      "loss": 0.0715,
      "step": 1098
    },
    {
      "epoch": 19.280701754385966,
      "grad_norm": 17.268903732299805,
      "learning_rate": 1.1284403669724772e-06,
      "loss": 0.0894,
      "step": 1099
    },
    {
      "epoch": 19.29824561403509,
      "grad_norm": 10.455500602722168,
      "learning_rate": 1.1009174311926605e-06,
      "loss": 0.2691,
      "step": 1100
    },
    {
      "epoch": 19.31578947368421,
      "grad_norm": 18.29836082458496,
      "learning_rate": 1.073394495412844e-06,
      "loss": 0.1576,
      "step": 1101
    },
    {
      "epoch": 19.333333333333332,
      "grad_norm": 4.013389587402344,
      "learning_rate": 1.0458715596330276e-06,
      "loss": 0.2029,
      "step": 1102
    },
    {
      "epoch": 19.350877192982455,
      "grad_norm": 2.9395945072174072,
      "learning_rate": 1.0183486238532111e-06,
      "loss": 0.0777,
      "step": 1103
    },
    {
      "epoch": 19.36842105263158,
      "grad_norm": 10.160873413085938,
      "learning_rate": 9.908256880733947e-07,
      "loss": 0.0871,
      "step": 1104
    },
    {
      "epoch": 19.385964912280702,
      "grad_norm": 7.076898574829102,
      "learning_rate": 9.63302752293578e-07,
      "loss": 0.2227,
      "step": 1105
    },
    {
      "epoch": 19.403508771929825,
      "grad_norm": 6.733850955963135,
      "learning_rate": 9.357798165137615e-07,
      "loss": 0.1146,
      "step": 1106
    },
    {
      "epoch": 19.42105263157895,
      "grad_norm": 10.067296981811523,
      "learning_rate": 9.082568807339451e-07,
      "loss": 0.0321,
      "step": 1107
    },
    {
      "epoch": 19.43859649122807,
      "grad_norm": 17.009822845458984,
      "learning_rate": 8.807339449541285e-07,
      "loss": 0.3582,
      "step": 1108
    },
    {
      "epoch": 19.45614035087719,
      "grad_norm": 2.8545923233032227,
      "learning_rate": 8.53211009174312e-07,
      "loss": 0.0536,
      "step": 1109
    },
    {
      "epoch": 19.473684210526315,
      "grad_norm": 0.6949877738952637,
      "learning_rate": 8.256880733944955e-07,
      "loss": 0.0358,
      "step": 1110
    },
    {
      "epoch": 19.49122807017544,
      "grad_norm": 10.26220703125,
      "learning_rate": 7.98165137614679e-07,
      "loss": 0.1917,
      "step": 1111
    },
    {
      "epoch": 19.50877192982456,
      "grad_norm": 1.4607905149459839,
      "learning_rate": 7.706422018348624e-07,
      "loss": 0.0332,
      "step": 1112
    },
    {
      "epoch": 19.526315789473685,
      "grad_norm": 2.899170398712158,
      "learning_rate": 7.43119266055046e-07,
      "loss": 0.0991,
      "step": 1113
    },
    {
      "epoch": 19.54385964912281,
      "grad_norm": 26.827545166015625,
      "learning_rate": 7.155963302752294e-07,
      "loss": 0.1344,
      "step": 1114
    },
    {
      "epoch": 19.56140350877193,
      "grad_norm": 10.652003288269043,
      "learning_rate": 6.880733944954129e-07,
      "loss": 0.1295,
      "step": 1115
    },
    {
      "epoch": 19.57894736842105,
      "grad_norm": 8.26749324798584,
      "learning_rate": 6.605504587155963e-07,
      "loss": 0.0575,
      "step": 1116
    },
    {
      "epoch": 19.596491228070175,
      "grad_norm": 8.428291320800781,
      "learning_rate": 6.330275229357799e-07,
      "loss": 0.1199,
      "step": 1117
    },
    {
      "epoch": 19.614035087719298,
      "grad_norm": 0.29654863476753235,
      "learning_rate": 6.055045871559633e-07,
      "loss": 0.0405,
      "step": 1118
    },
    {
      "epoch": 19.63157894736842,
      "grad_norm": 833.8351440429688,
      "learning_rate": 5.779816513761468e-07,
      "loss": 0.2409,
      "step": 1119
    },
    {
      "epoch": 19.649122807017545,
      "grad_norm": 59.03996658325195,
      "learning_rate": 5.504587155963303e-07,
      "loss": 0.5176,
      "step": 1120
    },
    {
      "epoch": 19.666666666666668,
      "grad_norm": 13.176654815673828,
      "learning_rate": 5.229357798165138e-07,
      "loss": 0.277,
      "step": 1121
    },
    {
      "epoch": 19.68421052631579,
      "grad_norm": 13.697751998901367,
      "learning_rate": 4.954128440366973e-07,
      "loss": 0.1221,
      "step": 1122
    },
    {
      "epoch": 19.70175438596491,
      "grad_norm": 7.282000541687012,
      "learning_rate": 4.6788990825688077e-07,
      "loss": 0.1029,
      "step": 1123
    },
    {
      "epoch": 19.719298245614034,
      "grad_norm": 1.0873990058898926,
      "learning_rate": 4.4036697247706425e-07,
      "loss": 0.0236,
      "step": 1124
    },
    {
      "epoch": 19.736842105263158,
      "grad_norm": 5.933244705200195,
      "learning_rate": 4.1284403669724773e-07,
      "loss": 0.1786,
      "step": 1125
    },
    {
      "epoch": 19.75438596491228,
      "grad_norm": 5.1867804527282715,
      "learning_rate": 3.853211009174312e-07,
      "loss": 0.2596,
      "step": 1126
    },
    {
      "epoch": 19.771929824561404,
      "grad_norm": 59.37315368652344,
      "learning_rate": 3.577981651376147e-07,
      "loss": 0.4574,
      "step": 1127
    },
    {
      "epoch": 19.789473684210527,
      "grad_norm": 21.83843994140625,
      "learning_rate": 3.3027522935779817e-07,
      "loss": 0.072,
      "step": 1128
    },
    {
      "epoch": 19.80701754385965,
      "grad_norm": 9.869155883789062,
      "learning_rate": 3.0275229357798165e-07,
      "loss": 0.1032,
      "step": 1129
    },
    {
      "epoch": 19.82456140350877,
      "grad_norm": 1.299135684967041,
      "learning_rate": 2.7522935779816514e-07,
      "loss": 0.038,
      "step": 1130
    },
    {
      "epoch": 19.842105263157894,
      "grad_norm": 15.257731437683105,
      "learning_rate": 2.4770642201834867e-07,
      "loss": 0.298,
      "step": 1131
    },
    {
      "epoch": 19.859649122807017,
      "grad_norm": 10.865966796875,
      "learning_rate": 2.2018348623853212e-07,
      "loss": 0.1053,
      "step": 1132
    },
    {
      "epoch": 19.87719298245614,
      "grad_norm": 12.854349136352539,
      "learning_rate": 1.926605504587156e-07,
      "loss": 0.1945,
      "step": 1133
    },
    {
      "epoch": 19.894736842105264,
      "grad_norm": 2.953209400177002,
      "learning_rate": 1.6513761467889909e-07,
      "loss": 0.1341,
      "step": 1134
    },
    {
      "epoch": 19.912280701754387,
      "grad_norm": 9.027359962463379,
      "learning_rate": 1.3761467889908257e-07,
      "loss": 0.1263,
      "step": 1135
    },
    {
      "epoch": 19.92982456140351,
      "grad_norm": 10.229753494262695,
      "learning_rate": 1.1009174311926606e-07,
      "loss": 0.0754,
      "step": 1136
    },
    {
      "epoch": 19.94736842105263,
      "grad_norm": 10.905720710754395,
      "learning_rate": 8.256880733944954e-08,
      "loss": 0.1458,
      "step": 1137
    },
    {
      "epoch": 19.964912280701753,
      "grad_norm": 22.5546875,
      "learning_rate": 5.504587155963303e-08,
      "loss": 0.123,
      "step": 1138
    },
    {
      "epoch": 19.982456140350877,
      "grad_norm": 41.80849838256836,
      "learning_rate": 2.7522935779816516e-08,
      "loss": 0.1547,
      "step": 1139
    },
    {
      "epoch": 20.0,
      "grad_norm": 12.38581657409668,
      "learning_rate": 0.0,
      "loss": 0.0801,
      "step": 1140
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.7011494252873564,
      "eval_loss": 1.0942327976226807,
      "eval_roc_auc": 0.8879257685287163,
      "eval_runtime": 9.8914,
      "eval_samples_per_second": 26.387,
      "eval_steps_per_second": 1.719,
      "step": 1140
    }
  ],
  "logging_steps": 1,
  "max_steps": 1140,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.9624210581696e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}