| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.994263862332696, |
| "eval_steps": 500, |
| "global_step": 522, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0057361376673040155, |
| "grad_norm": 5.963601353196484, |
| "learning_rate": 7.547169811320755e-07, |
| "loss": 0.8872, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.011472275334608031, |
| "grad_norm": 6.333467701978284, |
| "learning_rate": 1.509433962264151e-06, |
| "loss": 0.9165, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.017208413001912046, |
| "grad_norm": 6.231014140658969, |
| "learning_rate": 2.2641509433962266e-06, |
| "loss": 0.9408, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.022944550669216062, |
| "grad_norm": 5.646301524394669, |
| "learning_rate": 3.018867924528302e-06, |
| "loss": 0.8925, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.028680688336520075, |
| "grad_norm": 4.6749029765975045, |
| "learning_rate": 3.7735849056603777e-06, |
| "loss": 0.8569, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.03441682600382409, |
| "grad_norm": 4.477470504435412, |
| "learning_rate": 4.528301886792453e-06, |
| "loss": 0.8331, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.040152963671128104, |
| "grad_norm": 2.469128428269406, |
| "learning_rate": 5.283018867924529e-06, |
| "loss": 0.8192, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.045889101338432124, |
| "grad_norm": 3.1842562805095596, |
| "learning_rate": 6.037735849056604e-06, |
| "loss": 0.7936, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.05162523900573614, |
| "grad_norm": 4.1014744450012195, |
| "learning_rate": 6.792452830188679e-06, |
| "loss": 0.7749, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.05736137667304015, |
| "grad_norm": 4.176176317229083, |
| "learning_rate": 7.5471698113207555e-06, |
| "loss": 0.8084, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.06309751434034416, |
| "grad_norm": 3.5718306849844206, |
| "learning_rate": 8.301886792452832e-06, |
| "loss": 0.7579, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.06883365200764818, |
| "grad_norm": 2.9545908685425166, |
| "learning_rate": 9.056603773584907e-06, |
| "loss": 0.6838, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.0745697896749522, |
| "grad_norm": 2.484202012476629, |
| "learning_rate": 9.811320754716981e-06, |
| "loss": 0.7421, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.08030592734225621, |
| "grad_norm": 1.6256320736281968, |
| "learning_rate": 1.0566037735849058e-05, |
| "loss": 0.6922, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.08604206500956023, |
| "grad_norm": 1.8910779905338204, |
| "learning_rate": 1.1320754716981132e-05, |
| "loss": 0.695, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.09177820267686425, |
| "grad_norm": 2.1618666538191085, |
| "learning_rate": 1.2075471698113209e-05, |
| "loss": 0.6969, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.09751434034416825, |
| "grad_norm": 1.6536038409758815, |
| "learning_rate": 1.2830188679245283e-05, |
| "loss": 0.6608, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.10325047801147227, |
| "grad_norm": 1.1899674167485872, |
| "learning_rate": 1.3584905660377358e-05, |
| "loss": 0.6611, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.1089866156787763, |
| "grad_norm": 1.3839358683063427, |
| "learning_rate": 1.4339622641509435e-05, |
| "loss": 0.5937, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.1147227533460803, |
| "grad_norm": 1.2164557799105977, |
| "learning_rate": 1.5094339622641511e-05, |
| "loss": 0.6311, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.12045889101338432, |
| "grad_norm": 0.9617228211418869, |
| "learning_rate": 1.5849056603773586e-05, |
| "loss": 0.613, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.12619502868068833, |
| "grad_norm": 0.9112538434649583, |
| "learning_rate": 1.6603773584905664e-05, |
| "loss": 0.6066, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.13193116634799235, |
| "grad_norm": 0.9079396604897542, |
| "learning_rate": 1.735849056603774e-05, |
| "loss": 0.5748, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.13766730401529637, |
| "grad_norm": 1.0350466631308726, |
| "learning_rate": 1.8113207547169813e-05, |
| "loss": 0.6075, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.14340344168260039, |
| "grad_norm": 0.7428776620391337, |
| "learning_rate": 1.8867924528301888e-05, |
| "loss": 0.5943, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.1491395793499044, |
| "grad_norm": 0.8363548105185957, |
| "learning_rate": 1.9622641509433963e-05, |
| "loss": 0.5737, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.15487571701720843, |
| "grad_norm": 1.110084813808486, |
| "learning_rate": 2.037735849056604e-05, |
| "loss": 0.6161, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.16061185468451242, |
| "grad_norm": 0.6819468789186882, |
| "learning_rate": 2.1132075471698115e-05, |
| "loss": 0.594, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.16634799235181644, |
| "grad_norm": 1.0895642393194147, |
| "learning_rate": 2.188679245283019e-05, |
| "loss": 0.5809, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.17208413001912046, |
| "grad_norm": 0.9651676521976803, |
| "learning_rate": 2.2641509433962265e-05, |
| "loss": 0.5813, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.17782026768642448, |
| "grad_norm": 0.8124804726360065, |
| "learning_rate": 2.339622641509434e-05, |
| "loss": 0.5523, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.1835564053537285, |
| "grad_norm": 0.851159676858117, |
| "learning_rate": 2.4150943396226418e-05, |
| "loss": 0.5376, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.18929254302103252, |
| "grad_norm": 0.8124902743389897, |
| "learning_rate": 2.4905660377358492e-05, |
| "loss": 0.5652, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.1950286806883365, |
| "grad_norm": 0.8368258661976667, |
| "learning_rate": 2.5660377358490567e-05, |
| "loss": 0.5927, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.20076481835564053, |
| "grad_norm": 0.653616572010454, |
| "learning_rate": 2.641509433962264e-05, |
| "loss": 0.5668, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.20650095602294455, |
| "grad_norm": 0.7162845997163322, |
| "learning_rate": 2.7169811320754716e-05, |
| "loss": 0.5569, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.21223709369024857, |
| "grad_norm": 0.689294680809673, |
| "learning_rate": 2.7924528301886794e-05, |
| "loss": 0.5566, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.2179732313575526, |
| "grad_norm": 0.872345722524905, |
| "learning_rate": 2.867924528301887e-05, |
| "loss": 0.5757, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.2237093690248566, |
| "grad_norm": 0.8894547605087567, |
| "learning_rate": 2.9433962264150944e-05, |
| "loss": 0.5794, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.2294455066921606, |
| "grad_norm": 0.6582795524486181, |
| "learning_rate": 3.0188679245283022e-05, |
| "loss": 0.5319, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.23518164435946462, |
| "grad_norm": 2.7193829449558033, |
| "learning_rate": 3.09433962264151e-05, |
| "loss": 0.587, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.24091778202676864, |
| "grad_norm": 0.9717090471397071, |
| "learning_rate": 3.169811320754717e-05, |
| "loss": 0.5268, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.24665391969407266, |
| "grad_norm": 0.9224645496458574, |
| "learning_rate": 3.245283018867925e-05, |
| "loss": 0.5802, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.25239005736137665, |
| "grad_norm": 0.8938202665523605, |
| "learning_rate": 3.320754716981133e-05, |
| "loss": 0.5766, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.25812619502868067, |
| "grad_norm": 0.9583812403407647, |
| "learning_rate": 3.39622641509434e-05, |
| "loss": 0.561, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.2638623326959847, |
| "grad_norm": 1.228357544928824, |
| "learning_rate": 3.471698113207548e-05, |
| "loss": 0.5193, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.2695984703632887, |
| "grad_norm": 1.1698249381113741, |
| "learning_rate": 3.547169811320755e-05, |
| "loss": 0.5639, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.27533460803059273, |
| "grad_norm": 0.8489845786136891, |
| "learning_rate": 3.6226415094339626e-05, |
| "loss": 0.549, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.28107074569789675, |
| "grad_norm": 1.57687219740748, |
| "learning_rate": 3.6981132075471704e-05, |
| "loss": 0.5742, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.28680688336520077, |
| "grad_norm": 0.8718037736589133, |
| "learning_rate": 3.7735849056603776e-05, |
| "loss": 0.5326, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.2925430210325048, |
| "grad_norm": 1.2871827675643652, |
| "learning_rate": 3.8490566037735854e-05, |
| "loss": 0.5245, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.2982791586998088, |
| "grad_norm": 1.0013962294494378, |
| "learning_rate": 3.9245283018867925e-05, |
| "loss": 0.5621, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.30401529636711283, |
| "grad_norm": 1.1733505550953005, |
| "learning_rate": 4e-05, |
| "loss": 0.5527, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.30975143403441685, |
| "grad_norm": 0.8228183926039204, |
| "learning_rate": 3.999955130375398e-05, |
| "loss": 0.5207, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.3154875717017208, |
| "grad_norm": 0.9720081493962336, |
| "learning_rate": 3.999820523514873e-05, |
| "loss": 0.5388, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.32122370936902483, |
| "grad_norm": 0.8768396168768982, |
| "learning_rate": 3.9995961854581855e-05, |
| "loss": 0.5443, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.32695984703632885, |
| "grad_norm": 0.9885103652652149, |
| "learning_rate": 3.999282126271299e-05, |
| "loss": 0.5475, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.3326959847036329, |
| "grad_norm": 0.7263822248844192, |
| "learning_rate": 3.998878360045933e-05, |
| "loss": 0.5471, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.3384321223709369, |
| "grad_norm": 0.7898997625317963, |
| "learning_rate": 3.998384904898926e-05, |
| "loss": 0.532, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.3441682600382409, |
| "grad_norm": 0.7388781217225304, |
| "learning_rate": 3.997801782971423e-05, |
| "loss": 0.5196, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.34990439770554493, |
| "grad_norm": 0.7257057362841633, |
| "learning_rate": 3.997129020427888e-05, |
| "loss": 0.5485, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.35564053537284895, |
| "grad_norm": 0.6438468755093205, |
| "learning_rate": 3.996366647454924e-05, |
| "loss": 0.5466, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.361376673040153, |
| "grad_norm": 0.6663878490068373, |
| "learning_rate": 3.9955146982599194e-05, |
| "loss": 0.5468, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.367112810707457, |
| "grad_norm": 0.6745599593845647, |
| "learning_rate": 3.9945732110695145e-05, |
| "loss": 0.525, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.372848948374761, |
| "grad_norm": 0.7081809867814687, |
| "learning_rate": 3.993542228127886e-05, |
| "loss": 0.5382, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.37858508604206503, |
| "grad_norm": 0.7366611396520816, |
| "learning_rate": 3.992421795694853e-05, |
| "loss": 0.5431, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.384321223709369, |
| "grad_norm": 0.7027778770800306, |
| "learning_rate": 3.9912119640437965e-05, |
| "loss": 0.5154, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.390057361376673, |
| "grad_norm": 0.6666822720365386, |
| "learning_rate": 3.989912787459409e-05, |
| "loss": 0.527, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.39579349904397704, |
| "grad_norm": 0.5660749653496684, |
| "learning_rate": 3.9885243242352565e-05, |
| "loss": 0.5351, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.40152963671128106, |
| "grad_norm": 0.5576960477632893, |
| "learning_rate": 3.987046636671162e-05, |
| "loss": 0.4824, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.4072657743785851, |
| "grad_norm": 0.658340932840387, |
| "learning_rate": 3.9854797910704124e-05, |
| "loss": 0.5364, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.4130019120458891, |
| "grad_norm": 0.6117526542482664, |
| "learning_rate": 3.9838238577367816e-05, |
| "loss": 0.5014, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.4187380497131931, |
| "grad_norm": 0.5936168327692725, |
| "learning_rate": 3.982078910971376e-05, |
| "loss": 0.5349, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.42447418738049714, |
| "grad_norm": 0.687524043308455, |
| "learning_rate": 3.980245029069302e-05, |
| "loss": 0.5334, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.43021032504780116, |
| "grad_norm": 0.6835492486583609, |
| "learning_rate": 3.978322294316153e-05, |
| "loss": 0.5509, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.4359464627151052, |
| "grad_norm": 0.5286499638619044, |
| "learning_rate": 3.976310792984315e-05, |
| "loss": 0.5323, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.4416826003824092, |
| "grad_norm": 0.6642388755587706, |
| "learning_rate": 3.974210615329098e-05, |
| "loss": 0.5451, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.4474187380497132, |
| "grad_norm": 0.4999354398609915, |
| "learning_rate": 3.9720218555846834e-05, |
| "loss": 0.5339, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.45315487571701724, |
| "grad_norm": 0.642751208835366, |
| "learning_rate": 3.9697446119599015e-05, |
| "loss": 0.544, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.4588910133843212, |
| "grad_norm": 0.719086166881077, |
| "learning_rate": 3.967378986633818e-05, |
| "loss": 0.5256, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.4646271510516252, |
| "grad_norm": 0.6270310635224952, |
| "learning_rate": 3.964925085751152e-05, |
| "loss": 0.53, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.47036328871892924, |
| "grad_norm": 0.6955843238682398, |
| "learning_rate": 3.9623830194175175e-05, |
| "loss": 0.552, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.47609942638623326, |
| "grad_norm": 0.6300967187312061, |
| "learning_rate": 3.9597529016944746e-05, |
| "loss": 0.5265, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.4818355640535373, |
| "grad_norm": 0.6927514450309129, |
| "learning_rate": 3.9570348505944185e-05, |
| "loss": 0.5389, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.4875717017208413, |
| "grad_norm": 0.6581382765482165, |
| "learning_rate": 3.9542289880752827e-05, |
| "loss": 0.5258, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.4933078393881453, |
| "grad_norm": 0.6347863955072047, |
| "learning_rate": 3.951335440035063e-05, |
| "loss": 0.5295, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.49904397705544934, |
| "grad_norm": 0.7550386421946482, |
| "learning_rate": 3.948354336306176e-05, |
| "loss": 0.5516, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.5047801147227533, |
| "grad_norm": 0.5280267134452366, |
| "learning_rate": 3.945285810649626e-05, |
| "loss": 0.5315, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.5105162523900574, |
| "grad_norm": 0.649947445862538, |
| "learning_rate": 3.942130000749008e-05, |
| "loss": 0.5415, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.5162523900573613, |
| "grad_norm": 0.5480518529513726, |
| "learning_rate": 3.938887048204326e-05, |
| "loss": 0.5465, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.5219885277246654, |
| "grad_norm": 0.5610828231392423, |
| "learning_rate": 3.935557098525644e-05, |
| "loss": 0.5005, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.5277246653919694, |
| "grad_norm": 0.5706185940759887, |
| "learning_rate": 3.9321403011265546e-05, |
| "loss": 0.5189, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.5334608030592735, |
| "grad_norm": 0.5626627671265411, |
| "learning_rate": 3.9286368093174745e-05, |
| "loss": 0.5277, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.5391969407265774, |
| "grad_norm": 0.6290463734820702, |
| "learning_rate": 3.925046780298764e-05, |
| "loss": 0.5237, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.5449330783938815, |
| "grad_norm": 0.5701628353252369, |
| "learning_rate": 3.921370375153681e-05, |
| "loss": 0.5348, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.5506692160611855, |
| "grad_norm": 0.6186142129858236, |
| "learning_rate": 3.917607758841141e-05, |
| "loss": 0.5349, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.5564053537284895, |
| "grad_norm": 0.4973723214529781, |
| "learning_rate": 3.913759100188327e-05, |
| "loss": 0.5061, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.5621414913957935, |
| "grad_norm": 0.682743340827704, |
| "learning_rate": 3.9098245718831076e-05, |
| "loss": 0.5615, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.5678776290630975, |
| "grad_norm": 0.6357989822661191, |
| "learning_rate": 3.905804350466291e-05, |
| "loss": 0.5243, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.5736137667304015, |
| "grad_norm": 0.5449769248133205, |
| "learning_rate": 3.901698616323703e-05, |
| "loss": 0.5155, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.5793499043977055, |
| "grad_norm": 0.6266775770075623, |
| "learning_rate": 3.897507553678093e-05, |
| "loss": 0.5451, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.5850860420650096, |
| "grad_norm": 0.7475144523577113, |
| "learning_rate": 3.893231350580869e-05, |
| "loss": 0.5383, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.5908221797323135, |
| "grad_norm": 0.5434812815586433, |
| "learning_rate": 3.888870198903658e-05, |
| "loss": 0.5316, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.5965583173996176, |
| "grad_norm": 0.6008597770179454, |
| "learning_rate": 3.8844242943297e-05, |
| "loss": 0.523, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.6022944550669216, |
| "grad_norm": 0.6384325056734049, |
| "learning_rate": 3.879893836345062e-05, |
| "loss": 0.5153, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.6080305927342257, |
| "grad_norm": 0.5480218749548357, |
| "learning_rate": 3.875279028229695e-05, |
| "loss": 0.5432, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.6137667304015296, |
| "grad_norm": 0.6160281910387104, |
| "learning_rate": 3.870580077048307e-05, |
| "loss": 0.5296, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.6195028680688337, |
| "grad_norm": 0.6894563819781858, |
| "learning_rate": 3.865797193641072e-05, |
| "loss": 0.5355, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.6252390057361377, |
| "grad_norm": 0.4774944165107399, |
| "learning_rate": 3.8609305926141735e-05, |
| "loss": 0.4964, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.6309751434034416, |
| "grad_norm": 0.7099537904032711, |
| "learning_rate": 3.855980492330173e-05, |
| "loss": 0.5399, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.6367112810707457, |
| "grad_norm": 0.6001030131052923, |
| "learning_rate": 3.850947114898212e-05, |
| "loss": 0.54, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.6424474187380497, |
| "grad_norm": 0.6169400599723738, |
| "learning_rate": 3.8458306861640465e-05, |
| "loss": 0.5478, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.6481835564053537, |
| "grad_norm": 0.6331995497335126, |
| "learning_rate": 3.840631435699912e-05, |
| "loss": 0.5089, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.6539196940726577, |
| "grad_norm": 0.6758544384585873, |
| "learning_rate": 3.835349596794226e-05, |
| "loss": 0.524, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.6596558317399618, |
| "grad_norm": 0.6066427921666631, |
| "learning_rate": 3.829985406441118e-05, |
| "loss": 0.5371, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.6653919694072657, |
| "grad_norm": 0.6205375496679291, |
| "learning_rate": 3.8245391053297936e-05, |
| "loss": 0.4979, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.6711281070745698, |
| "grad_norm": 0.5806033479161776, |
| "learning_rate": 3.8190109378337413e-05, |
| "loss": 0.5239, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.6768642447418738, |
| "grad_norm": 0.6460511560696024, |
| "learning_rate": 3.813401151999759e-05, |
| "loss": 0.5475, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.6826003824091779, |
| "grad_norm": 0.5453484149762959, |
| "learning_rate": 3.807709999536834e-05, |
| "loss": 0.5395, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.6883365200764818, |
| "grad_norm": 0.5318167624766734, |
| "learning_rate": 3.801937735804838e-05, |
| "loss": 0.5087, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.6940726577437859, |
| "grad_norm": 0.8271638416761657, |
| "learning_rate": 3.7960846198030804e-05, |
| "loss": 0.5453, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.6998087954110899, |
| "grad_norm": 0.6479532137119753, |
| "learning_rate": 3.790150914158677e-05, |
| "loss": 0.5043, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.7055449330783938, |
| "grad_norm": 0.7435964195190743, |
| "learning_rate": 3.784136885114773e-05, |
| "loss": 0.5129, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.7112810707456979, |
| "grad_norm": 0.8946003605016197, |
| "learning_rate": 3.7780428025185954e-05, |
| "loss": 0.5533, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.7170172084130019, |
| "grad_norm": 0.6464116296407747, |
| "learning_rate": 3.77186893980934e-05, |
| "loss": 0.5421, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.722753346080306, |
| "grad_norm": 0.7126674023402929, |
| "learning_rate": 3.765615574005911e-05, |
| "loss": 0.5139, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.7284894837476099, |
| "grad_norm": 0.8135984381109922, |
| "learning_rate": 3.7592829856944835e-05, |
| "loss": 0.511, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.734225621414914, |
| "grad_norm": 0.7828468225319082, |
| "learning_rate": 3.752871459015918e-05, |
| "loss": 0.5286, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.739961759082218, |
| "grad_norm": 0.642287015277123, |
| "learning_rate": 3.74638128165301e-05, |
| "loss": 0.5329, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.745697896749522, |
| "grad_norm": 0.5442493722807862, |
| "learning_rate": 3.739812744817581e-05, |
| "loss": 0.5362, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.751434034416826, |
| "grad_norm": 0.5568845351714643, |
| "learning_rate": 3.733166143237413e-05, |
| "loss": 0.5035, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.7571701720841301, |
| "grad_norm": 0.49191832987393536, |
| "learning_rate": 3.7264417751430244e-05, |
| "loss": 0.5085, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.762906309751434, |
| "grad_norm": 0.6805242608433807, |
| "learning_rate": 3.7196399422542866e-05, |
| "loss": 0.5065, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.768642447418738, |
| "grad_norm": 0.5897633089426585, |
| "learning_rate": 3.712760949766888e-05, |
| "loss": 0.5038, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.7743785850860421, |
| "grad_norm": 0.6094976285430489, |
| "learning_rate": 3.70580510633864e-05, |
| "loss": 0.5008, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.780114722753346, |
| "grad_norm": 0.5467744310721907, |
| "learning_rate": 3.6987727240756246e-05, |
| "loss": 0.5142, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.7858508604206501, |
| "grad_norm": 0.5880627703365717, |
| "learning_rate": 3.691664118518195e-05, |
| "loss": 0.4994, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.7915869980879541, |
| "grad_norm": 0.5006717341236084, |
| "learning_rate": 3.6844796086268136e-05, |
| "loss": 0.5164, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.7973231357552581, |
| "grad_norm": 0.5642072424368477, |
| "learning_rate": 3.677219516767743e-05, |
| "loss": 0.5349, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.8030592734225621, |
| "grad_norm": 0.5741831253551103, |
| "learning_rate": 3.669884168698578e-05, |
| "loss": 0.559, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.8087954110898662, |
| "grad_norm": 0.5577991231255239, |
| "learning_rate": 3.6624738935536336e-05, |
| "loss": 0.5082, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.8145315487571702, |
| "grad_norm": 0.6263509656905389, |
| "learning_rate": 3.654989023829174e-05, |
| "loss": 0.5401, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.8202676864244742, |
| "grad_norm": 0.562818325762248, |
| "learning_rate": 3.6474298953684945e-05, |
| "loss": 0.5071, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.8260038240917782, |
| "grad_norm": 0.5540399435211395, |
| "learning_rate": 3.639796847346849e-05, |
| "loss": 0.5343, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.8317399617590823, |
| "grad_norm": 0.5580743271553504, |
| "learning_rate": 3.6320902222562405e-05, |
| "loss": 0.515, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.8374760994263862, |
| "grad_norm": 0.5846568206692573, |
| "learning_rate": 3.62431036589004e-05, |
| "loss": 0.5235, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.8432122370936902, |
| "grad_norm": 0.6959565954811155, |
| "learning_rate": 3.616457627327484e-05, |
| "loss": 0.5075, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.8489483747609943, |
| "grad_norm": 0.565057362201928, |
| "learning_rate": 3.6085323589180046e-05, |
| "loss": 0.5092, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.8546845124282982, |
| "grad_norm": 0.6281295561065597, |
| "learning_rate": 3.600534916265419e-05, |
| "loss": 0.4943, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.8604206500956023, |
| "grad_norm": 0.6023614171022365, |
| "learning_rate": 3.592465658211977e-05, |
| "loss": 0.5133, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.8661567877629063, |
| "grad_norm": 0.6315201878036348, |
| "learning_rate": 3.5843249468222585e-05, |
| "loss": 0.5264, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.8718929254302104, |
| "grad_norm": 0.6634687345841984, |
| "learning_rate": 3.5761131473669285e-05, |
| "loss": 0.4759, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.8776290630975143, |
| "grad_norm": 0.6850113724699497, |
| "learning_rate": 3.567830628306344e-05, |
| "loss": 0.5335, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.8833652007648184, |
| "grad_norm": 0.7083577814147579, |
| "learning_rate": 3.5594777612740263e-05, |
| "loss": 0.4993, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.8891013384321224, |
| "grad_norm": 0.7378709536755256, |
| "learning_rate": 3.551054921059985e-05, |
| "loss": 0.5176, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.8948374760994264, |
| "grad_norm": 0.7648244850245753, |
| "learning_rate": 3.542562485593897e-05, |
| "loss": 0.5238, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.9005736137667304, |
| "grad_norm": 0.6956157652186921, |
| "learning_rate": 3.5340008359281546e-05, |
| "loss": 0.501, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.9063097514340345, |
| "grad_norm": 0.7645790590701518, |
| "learning_rate": 3.525370356220764e-05, |
| "loss": 0.5345, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.9120458891013384, |
| "grad_norm": 0.6517490678032604, |
| "learning_rate": 3.5166714337181104e-05, |
| "loss": 0.5018, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.9177820267686424, |
| "grad_norm": 0.5764447491349046, |
| "learning_rate": 3.50790445873758e-05, |
| "loss": 0.5078, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.9235181644359465, |
| "grad_norm": 0.6747308853100928, |
| "learning_rate": 3.4990698246500484e-05, |
| "loss": 0.5256, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.9292543021032504, |
| "grad_norm": 0.47711018256334037, |
| "learning_rate": 3.490167927862233e-05, |
| "loss": 0.4722, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.9349904397705545, |
| "grad_norm": 0.6778658770774263, |
| "learning_rate": 3.481199167798899e-05, |
| "loss": 0.5223, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.9407265774378585, |
| "grad_norm": 0.4558533116863511, |
| "learning_rate": 3.4721639468849436e-05, |
| "loss": 0.5213, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.9464627151051626, |
| "grad_norm": 0.6385762668945828, |
| "learning_rate": 3.463062670527338e-05, |
| "loss": 0.5273, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.9521988527724665, |
| "grad_norm": 0.4616066439052544, |
| "learning_rate": 3.4538957470969365e-05, |
| "loss": 0.4904, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.9579349904397706, |
| "grad_norm": 0.6031035010764613, |
| "learning_rate": 3.444663587910151e-05, |
| "loss": 0.4922, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.9636711281070746, |
| "grad_norm": 0.4890495218308744, |
| "learning_rate": 3.435366607210499e-05, |
| "loss": 0.4877, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.9694072657743786, |
| "grad_norm": 0.6620804135308059, |
| "learning_rate": 3.426005222150014e-05, |
| "loss": 0.5106, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.9751434034416826, |
| "grad_norm": 0.5095511322768239, |
| "learning_rate": 3.41657985277053e-05, |
| "loss": 0.5008, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.9808795411089866, |
| "grad_norm": 0.6827188978217906, |
| "learning_rate": 3.407090921984832e-05, |
| "loss": 0.5477, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.9866156787762906, |
| "grad_norm": 0.5541219385903691, |
| "learning_rate": 3.397538855557684e-05, |
| "loss": 0.5009, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.9923518164435946, |
| "grad_norm": 0.6443008399746032, |
| "learning_rate": 3.387924082086718e-05, |
| "loss": 0.5332, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.9980879541108987, |
| "grad_norm": 0.4936165614613112, |
| "learning_rate": 3.378247032983213e-05, |
| "loss": 0.522, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.0038240917782026, |
| "grad_norm": 0.9746986187989416, |
| "learning_rate": 3.368508142452728e-05, |
| "loss": 0.7212, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.0095602294455066, |
| "grad_norm": 0.6720620316515331, |
| "learning_rate": 3.358707847475626e-05, |
| "loss": 0.4198, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.0152963671128108, |
| "grad_norm": 0.6268806895754554, |
| "learning_rate": 3.348846587787462e-05, |
| "loss": 0.461, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.0210325047801148, |
| "grad_norm": 0.7216148037661133, |
| "learning_rate": 3.3389248058592576e-05, |
| "loss": 0.4066, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.0267686424474187, |
| "grad_norm": 0.6818445339544676, |
| "learning_rate": 3.328942946877644e-05, |
| "loss": 0.3977, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.0325047801147227, |
| "grad_norm": 0.6950993166160578, |
| "learning_rate": 3.318901458724885e-05, |
| "loss": 0.4345, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.0382409177820269, |
| "grad_norm": 0.6428355368495283, |
| "learning_rate": 3.308800791958785e-05, |
| "loss": 0.3577, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.0439770554493308, |
| "grad_norm": 0.5373947494002963, |
| "learning_rate": 3.2986413997924706e-05, |
| "loss": 0.4369, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.0497131931166348, |
| "grad_norm": 0.6925796288170799, |
| "learning_rate": 3.2884237380740545e-05, |
| "loss": 0.4328, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.0554493307839388, |
| "grad_norm": 0.5233829993400329, |
| "learning_rate": 3.278148265266182e-05, |
| "loss": 0.4174, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.0611854684512427, |
| "grad_norm": 0.6185297505581466, |
| "learning_rate": 3.267815442425459e-05, |
| "loss": 0.3828, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.066921606118547, |
| "grad_norm": 0.6274152460265636, |
| "learning_rate": 3.25742573318177e-05, |
| "loss": 0.4549, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.0726577437858509, |
| "grad_norm": 0.6366347848494356, |
| "learning_rate": 3.246979603717467e-05, |
| "loss": 0.4203, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.0783938814531548, |
| "grad_norm": 0.6227490389766099, |
| "learning_rate": 3.236477522746459e-05, |
| "loss": 0.4675, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.0841300191204588, |
| "grad_norm": 0.5094295149535436, |
| "learning_rate": 3.2259199614931745e-05, |
| "loss": 0.3542, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.089866156787763, |
| "grad_norm": 0.7037793847977521, |
| "learning_rate": 3.215307393671426e-05, |
| "loss": 0.4785, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.095602294455067, |
| "grad_norm": 0.5619740614381676, |
| "learning_rate": 3.204640295463146e-05, |
| "loss": 0.3886, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.101338432122371, |
| "grad_norm": 0.519062950155675, |
| "learning_rate": 3.193919145497028e-05, |
| "loss": 0.3842, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.107074569789675, |
| "grad_norm": 0.5853788545600012, |
| "learning_rate": 3.1831444248270455e-05, |
| "loss": 0.423, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.1128107074569789, |
| "grad_norm": 0.4775535757004622, |
| "learning_rate": 3.17231661691087e-05, |
| "loss": 0.4239, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.118546845124283, |
| "grad_norm": 0.5172569912420082, |
| "learning_rate": 3.161436207588178e-05, |
| "loss": 0.425, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.124282982791587, |
| "grad_norm": 0.5147855471578512, |
| "learning_rate": 3.1505036850588516e-05, |
| "loss": 0.3557, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.130019120458891, |
| "grad_norm": 0.5476099573273011, |
| "learning_rate": 3.139519539861074e-05, |
| "loss": 0.4791, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.135755258126195, |
| "grad_norm": 0.5619357244873963, |
| "learning_rate": 3.128484264849314e-05, |
| "loss": 0.4214, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.1414913957934991, |
| "grad_norm": 0.48517078514017925, |
| "learning_rate": 3.1173983551722205e-05, |
| "loss": 0.3698, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.147227533460803, |
| "grad_norm": 0.6343332709061799, |
| "learning_rate": 3.106262308250399e-05, |
| "loss": 0.3882, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.152963671128107, |
| "grad_norm": 0.4706809203743212, |
| "learning_rate": 3.0950766237540946e-05, |
| "loss": 0.3937, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.158699808795411, |
| "grad_norm": 0.6399176928230911, |
| "learning_rate": 3.083841803580771e-05, |
| "loss": 0.4585, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.1644359464627152, |
| "grad_norm": 0.4919902699693636, |
| "learning_rate": 3.072558351832592e-05, |
| "loss": 0.3764, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.1701720841300192, |
| "grad_norm": 0.6185212786022399, |
| "learning_rate": 3.0612267747938015e-05, |
| "loss": 0.4736, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.1759082217973231, |
| "grad_norm": 0.5805762135464895, |
| "learning_rate": 3.0498475809080082e-05, |
| "loss": 0.4208, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.181644359464627, |
| "grad_norm": 0.5097240529240963, |
| "learning_rate": 3.03842128075537e-05, |
| "loss": 0.4024, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.1873804971319313, |
| "grad_norm": 0.591913249803808, |
| "learning_rate": 3.026948387029684e-05, |
| "loss": 0.4055, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.1931166347992352, |
| "grad_norm": 0.5073228529458789, |
| "learning_rate": 3.0154294145153865e-05, |
| "loss": 0.4051, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.1988527724665392, |
| "grad_norm": 0.5492611084308193, |
| "learning_rate": 3.003864880064449e-05, |
| "loss": 0.41, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.2045889101338432, |
| "grad_norm": 0.49487668182682537, |
| "learning_rate": 2.9922553025731907e-05, |
| "loss": 0.4053, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.2103250478011471, |
| "grad_norm": 0.5668757218011339, |
| "learning_rate": 2.9806012029589966e-05, |
| "loss": 0.4272, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.2160611854684513, |
| "grad_norm": 0.4975892481813516, |
| "learning_rate": 2.9689031041369406e-05, |
| "loss": 0.4182, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.2217973231357553, |
| "grad_norm": 0.5294244433263008, |
| "learning_rate": 2.9571615309963255e-05, |
| "loss": 0.3871, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.2275334608030593, |
| "grad_norm": 0.5243327767554817, |
| "learning_rate": 2.9453770103771308e-05, |
| "loss": 0.39, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.2332695984703632, |
| "grad_norm": 0.48558339355845753, |
| "learning_rate": 2.9335500710463725e-05, |
| "loss": 0.435, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.2390057361376674, |
| "grad_norm": 0.5519168520623398, |
| "learning_rate": 2.9216812436743788e-05, |
| "loss": 0.4313, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.2447418738049714, |
| "grad_norm": 0.4381015639162095, |
| "learning_rate": 2.9097710608109776e-05, |
| "loss": 0.3601, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.2504780114722753, |
| "grad_norm": 0.5354350555855661, |
| "learning_rate": 2.8978200568616036e-05, |
| "loss": 0.3711, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.2562141491395793, |
| "grad_norm": 0.48392108469684075, |
| "learning_rate": 2.885828768063317e-05, |
| "loss": 0.4385, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.2619502868068833, |
| "grad_norm": 0.5147905754022055, |
| "learning_rate": 2.8737977324607466e-05, |
| "loss": 0.4318, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.2676864244741874, |
| "grad_norm": 0.45875460028412374, |
| "learning_rate": 2.861727489881941e-05, |
| "loss": 0.4165, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.2734225621414914, |
| "grad_norm": 0.46767203325180406, |
| "learning_rate": 2.8496185819141547e-05, |
| "loss": 0.4346, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.2791586998087954, |
| "grad_norm": 0.5325780804765026, |
| "learning_rate": 2.837471551879543e-05, |
| "loss": 0.4488, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.2848948374760996, |
| "grad_norm": 0.45604591940023564, |
| "learning_rate": 2.825286944810783e-05, |
| "loss": 0.4172, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.2906309751434035, |
| "grad_norm": 0.48182478279469737, |
| "learning_rate": 2.8130653074266188e-05, |
| "loss": 0.3899, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.2963671128107075, |
| "grad_norm": 0.4285203303941567, |
| "learning_rate": 2.8008071881073342e-05, |
| "loss": 0.4119, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.3021032504780115, |
| "grad_norm": 0.5203451219224136, |
| "learning_rate": 2.78851313687014e-05, |
| "loss": 0.4167, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.3078393881453154, |
| "grad_norm": 0.44867616077783923, |
| "learning_rate": 2.7761837053444994e-05, |
| "loss": 0.4428, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.3135755258126194, |
| "grad_norm": 0.5176774633567216, |
| "learning_rate": 2.7638194467473776e-05, |
| "loss": 0.4623, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.3193116634799236, |
| "grad_norm": 0.41453804129004856, |
| "learning_rate": 2.7514209158584164e-05, |
| "loss": 0.3855, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.3250478011472275, |
| "grad_norm": 0.44139671451368523, |
| "learning_rate": 2.7389886689950428e-05, |
| "loss": 0.3704, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.3307839388145315, |
| "grad_norm": 0.5091495369537983, |
| "learning_rate": 2.7265232639875043e-05, |
| "loss": 0.4346, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.3365200764818357, |
| "grad_norm": 0.4627275608117313, |
| "learning_rate": 2.7140252601538466e-05, |
| "loss": 0.4339, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.3422562141491396, |
| "grad_norm": 0.48547168450981104, |
| "learning_rate": 2.7014952182748083e-05, |
| "loss": 0.4307, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.3479923518164436, |
| "grad_norm": 0.47854324223280925, |
| "learning_rate": 2.688933700568666e-05, |
| "loss": 0.4091, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.3537284894837476, |
| "grad_norm": 0.4845854941728673, |
| "learning_rate": 2.676341270666003e-05, |
| "loss": 0.4544, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.3594646271510515, |
| "grad_norm": 0.45196894051562736, |
| "learning_rate": 2.663718493584422e-05, |
| "loss": 0.3461, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.3652007648183555, |
| "grad_norm": 0.543415301575274, |
| "learning_rate": 2.6510659357031917e-05, |
| "loss": 0.4336, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.3709369024856597, |
| "grad_norm": 0.4471607463074475, |
| "learning_rate": 2.6383841647378356e-05, |
| "loss": 0.38, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.3766730401529637, |
| "grad_norm": 0.6022022438673463, |
| "learning_rate": 2.6256737497146544e-05, |
| "loss": 0.4709, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.3824091778202676, |
| "grad_norm": 0.44800638598268433, |
| "learning_rate": 2.6129352609452003e-05, |
| "loss": 0.4105, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.3881453154875718, |
| "grad_norm": 0.4385968231933793, |
| "learning_rate": 2.600169270000682e-05, |
| "loss": 0.3782, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.3938814531548758, |
| "grad_norm": 0.48914377606117937, |
| "learning_rate": 2.5873763496863203e-05, |
| "loss": 0.3954, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.3996175908221797, |
| "grad_norm": 0.5851945762803631, |
| "learning_rate": 2.574557074015648e-05, |
| "loss": 0.458, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.4053537284894837, |
| "grad_norm": 0.46457478071444297, |
| "learning_rate": 2.561712018184752e-05, |
| "loss": 0.438, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.4110898661567877, |
| "grad_norm": 0.5204965149805247, |
| "learning_rate": 2.5488417585464648e-05, |
| "loss": 0.4171, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.4168260038240919, |
| "grad_norm": 0.47781885084741565, |
| "learning_rate": 2.535946872584506e-05, |
| "loss": 0.4265, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.4225621414913958, |
| "grad_norm": 0.5009930687067327, |
| "learning_rate": 2.523027938887567e-05, |
| "loss": 0.39, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.4282982791586998, |
| "grad_norm": 0.5407445516705907, |
| "learning_rate": 2.5100855371233533e-05, |
| "loss": 0.4255, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.4340344168260037, |
| "grad_norm": 0.4288547555150977, |
| "learning_rate": 2.4971202480125737e-05, |
| "loss": 0.3925, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.439770554493308, |
| "grad_norm": 0.4567682163636214, |
| "learning_rate": 2.4841326533028838e-05, |
| "loss": 0.4283, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.445506692160612, |
| "grad_norm": 0.47732054782696987, |
| "learning_rate": 2.4711233357427817e-05, |
| "loss": 0.429, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.4512428298279159, |
| "grad_norm": 0.3827981714632121, |
| "learning_rate": 2.458092879055464e-05, |
| "loss": 0.3684, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.4569789674952198, |
| "grad_norm": 0.4566303964863475, |
| "learning_rate": 2.445041867912629e-05, |
| "loss": 0.4141, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.4627151051625238, |
| "grad_norm": 0.41932149526107115, |
| "learning_rate": 2.431970887908249e-05, |
| "loss": 0.428, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.468451242829828, |
| "grad_norm": 0.4777606018140004, |
| "learning_rate": 2.4188805255322895e-05, |
| "loss": 0.422, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.474187380497132, |
| "grad_norm": 0.4146884687116362, |
| "learning_rate": 2.405771368144395e-05, |
| "loss": 0.4407, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.479923518164436, |
| "grad_norm": 0.43435634926442834, |
| "learning_rate": 2.3926440039475382e-05, |
| "loss": 0.4004, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.48565965583174, |
| "grad_norm": 0.4184729261005328, |
| "learning_rate": 2.3794990219616217e-05, |
| "loss": 0.433, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.491395793499044, |
| "grad_norm": 0.4475976249485021, |
| "learning_rate": 2.3663370119970527e-05, |
| "loss": 0.4281, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.497131931166348, |
| "grad_norm": 0.4366125484429576, |
| "learning_rate": 2.353158564628277e-05, |
| "loss": 0.3873, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.502868068833652, |
| "grad_norm": 0.4323743190887814, |
| "learning_rate": 2.339964271167282e-05, |
| "loss": 0.3906, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.508604206500956, |
| "grad_norm": 0.4349582181643138, |
| "learning_rate": 2.3267547236370604e-05, |
| "loss": 0.4153, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.51434034416826, |
| "grad_norm": 0.4300055874488965, |
| "learning_rate": 2.3135305147450525e-05, |
| "loss": 0.4243, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.520076481835564, |
| "grad_norm": 0.41191944546871323, |
| "learning_rate": 2.3002922378565468e-05, |
| "loss": 0.379, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.525812619502868, |
| "grad_norm": 0.4205849071289299, |
| "learning_rate": 2.2870404869680573e-05, |
| "loss": 0.3992, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.5315487571701722, |
| "grad_norm": 0.4167651488072966, |
| "learning_rate": 2.273775856680672e-05, |
| "loss": 0.4029, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.5372848948374762, |
| "grad_norm": 0.39362974229347514, |
| "learning_rate": 2.2604989421733718e-05, |
| "loss": 0.3964, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.5430210325047802, |
| "grad_norm": 0.4554936101687405, |
| "learning_rate": 2.2472103391763268e-05, |
| "loss": 0.4573, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.5487571701720841, |
| "grad_norm": 0.4225697521408909, |
| "learning_rate": 2.2339106439441656e-05, |
| "loss": 0.4331, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.554493307839388, |
| "grad_norm": 0.3839293540385542, |
| "learning_rate": 2.2206004532292192e-05, |
| "loss": 0.4301, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.560229445506692, |
| "grad_norm": 0.39780099781282624, |
| "learning_rate": 2.20728036425475e-05, |
| "loss": 0.4029, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.565965583173996, |
| "grad_norm": 0.39896699881977027, |
| "learning_rate": 2.1939509746881486e-05, |
| "loss": 0.4204, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.5717017208413002, |
| "grad_norm": 0.4011711852263823, |
| "learning_rate": 2.1806128826141223e-05, |
| "loss": 0.4556, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.5774378585086042, |
| "grad_norm": 0.3780449285964431, |
| "learning_rate": 2.1672666865078544e-05, |
| "loss": 0.3854, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.5831739961759084, |
| "grad_norm": 0.3815103294469426, |
| "learning_rate": 2.153912985208154e-05, |
| "loss": 0.4304, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.5889101338432123, |
| "grad_norm": 0.39420124826230263, |
| "learning_rate": 2.140552377890586e-05, |
| "loss": 0.413, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.5946462715105163, |
| "grad_norm": 0.37547444058482843, |
| "learning_rate": 2.1271854640405856e-05, |
| "loss": 0.4033, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.6003824091778203, |
| "grad_norm": 0.3676214604101024, |
| "learning_rate": 2.1138128434265583e-05, |
| "loss": 0.3809, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.6061185468451242, |
| "grad_norm": 0.402628978813413, |
| "learning_rate": 2.1004351160729714e-05, |
| "loss": 0.4356, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.6118546845124282, |
| "grad_norm": 0.4259466481121817, |
| "learning_rate": 2.0870528822334296e-05, |
| "loss": 0.4071, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.6175908221797322, |
| "grad_norm": 0.3980303656074581, |
| "learning_rate": 2.073666742363742e-05, |
| "loss": 0.4566, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.6233269598470363, |
| "grad_norm": 0.3793866215066068, |
| "learning_rate": 2.0602772970949777e-05, |
| "loss": 0.4084, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.6290630975143403, |
| "grad_norm": 0.4043370635632085, |
| "learning_rate": 2.0468851472065213e-05, |
| "loss": 0.4276, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.6347992351816445, |
| "grad_norm": 0.4295543543276556, |
| "learning_rate": 2.03349089359911e-05, |
| "loss": 0.3797, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.6405353728489485, |
| "grad_norm": 0.4415259564693047, |
| "learning_rate": 2.020095137267876e-05, |
| "loss": 0.4344, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.6462715105162524, |
| "grad_norm": 0.3894742791133544, |
| "learning_rate": 2.0066984792753757e-05, |
| "loss": 0.4187, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.6520076481835564, |
| "grad_norm": 0.4128327836910975, |
| "learning_rate": 1.993301520724625e-05, |
| "loss": 0.4036, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.6577437858508604, |
| "grad_norm": 0.35013244959729184, |
| "learning_rate": 1.9799048627321247e-05, |
| "loss": 0.4141, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.6634799235181643, |
| "grad_norm": 0.3924223109946273, |
| "learning_rate": 1.966509106400891e-05, |
| "loss": 0.4098, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.6692160611854685, |
| "grad_norm": 0.375681252496487, |
| "learning_rate": 1.9531148527934794e-05, |
| "loss": 0.372, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.6749521988527725, |
| "grad_norm": 0.3583704837247699, |
| "learning_rate": 1.9397227029050233e-05, |
| "loss": 0.3977, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.6806883365200764, |
| "grad_norm": 0.4225858356870719, |
| "learning_rate": 1.9263332576362586e-05, |
| "loss": 0.4654, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.6864244741873806, |
| "grad_norm": 0.43936703075105055, |
| "learning_rate": 1.9129471177665714e-05, |
| "loss": 0.5054, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.6921606118546846, |
| "grad_norm": 0.36215467983870736, |
| "learning_rate": 1.899564883927029e-05, |
| "loss": 0.3574, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.6978967495219885, |
| "grad_norm": 0.3732898278812787, |
| "learning_rate": 1.8861871565734427e-05, |
| "loss": 0.408, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.7036328871892925, |
| "grad_norm": 0.4033046527093117, |
| "learning_rate": 1.8728145359594147e-05, |
| "loss": 0.414, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.7093690248565965, |
| "grad_norm": 0.48233034016482407, |
| "learning_rate": 1.859447622109415e-05, |
| "loss": 0.4452, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.7151051625239004, |
| "grad_norm": 0.35022422393865005, |
| "learning_rate": 1.8460870147918464e-05, |
| "loss": 0.4098, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.7208413001912046, |
| "grad_norm": 0.3503756135613716, |
| "learning_rate": 1.832733313492147e-05, |
| "loss": 0.3665, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.7265774378585086, |
| "grad_norm": 0.453240573589918, |
| "learning_rate": 1.8193871173858784e-05, |
| "loss": 0.4017, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.7323135755258128, |
| "grad_norm": 0.4106965211287676, |
| "learning_rate": 1.8060490253118524e-05, |
| "loss": 0.457, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.7380497131931167, |
| "grad_norm": 0.3580924920858784, |
| "learning_rate": 1.7927196357452507e-05, |
| "loss": 0.3524, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.7437858508604207, |
| "grad_norm": 0.40465378202399943, |
| "learning_rate": 1.7793995467707808e-05, |
| "loss": 0.4125, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.7495219885277247, |
| "grad_norm": 0.42671419730243126, |
| "learning_rate": 1.766089356055835e-05, |
| "loss": 0.4564, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.7552581261950286, |
| "grad_norm": 0.392872725834381, |
| "learning_rate": 1.7527896608236735e-05, |
| "loss": 0.3906, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.7609942638623326, |
| "grad_norm": 0.37117518719191533, |
| "learning_rate": 1.7395010578266292e-05, |
| "loss": 0.4225, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.7667304015296366, |
| "grad_norm": 0.36968574125016307, |
| "learning_rate": 1.7262241433193284e-05, |
| "loss": 0.3477, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.7724665391969407, |
| "grad_norm": 0.38953378877838585, |
| "learning_rate": 1.712959513031943e-05, |
| "loss": 0.3823, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.7782026768642447, |
| "grad_norm": 0.36085297573229624, |
| "learning_rate": 1.6997077621434535e-05, |
| "loss": 0.4066, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.783938814531549, |
| "grad_norm": 0.4252176810480222, |
| "learning_rate": 1.6864694852549478e-05, |
| "loss": 0.4268, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.7896749521988529, |
| "grad_norm": 0.4412549537519878, |
| "learning_rate": 1.6732452763629396e-05, |
| "loss": 0.4326, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.7954110898661568, |
| "grad_norm": 0.3812059474272243, |
| "learning_rate": 1.6600357288327186e-05, |
| "loss": 0.379, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.8011472275334608, |
| "grad_norm": 0.47503505255362904, |
| "learning_rate": 1.646841435371723e-05, |
| "loss": 0.432, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.8068833652007648, |
| "grad_norm": 0.41481678543056555, |
| "learning_rate": 1.6336629880029483e-05, |
| "loss": 0.4023, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.8126195028680687, |
| "grad_norm": 0.4124532353847639, |
| "learning_rate": 1.6205009780383783e-05, |
| "loss": 0.4308, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.8183556405353727, |
| "grad_norm": 0.38098821964138535, |
| "learning_rate": 1.6073559960524624e-05, |
| "loss": 0.3976, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.8240917782026769, |
| "grad_norm": 0.3811565535669034, |
| "learning_rate": 1.594228631855605e-05, |
| "loss": 0.3835, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.8298279158699808, |
| "grad_norm": 0.36589564234813393, |
| "learning_rate": 1.5811194744677116e-05, |
| "loss": 0.4129, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.835564053537285, |
| "grad_norm": 0.3653636034932473, |
| "learning_rate": 1.5680291120917512e-05, |
| "loss": 0.36, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.841300191204589, |
| "grad_norm": 0.40122714496956696, |
| "learning_rate": 1.5549581320873715e-05, |
| "loss": 0.3892, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.847036328871893, |
| "grad_norm": 0.38832451390149825, |
| "learning_rate": 1.541907120944537e-05, |
| "loss": 0.3865, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.852772466539197, |
| "grad_norm": 0.437571883751778, |
| "learning_rate": 1.528876664257219e-05, |
| "loss": 0.4356, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.8585086042065009, |
| "grad_norm": 0.36968778383694306, |
| "learning_rate": 1.5158673466971168e-05, |
| "loss": 0.4257, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.8642447418738048, |
| "grad_norm": 0.35073154051519584, |
| "learning_rate": 1.502879751987427e-05, |
| "loss": 0.3947, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.869980879541109, |
| "grad_norm": 0.3805701203026406, |
| "learning_rate": 1.4899144628766473e-05, |
| "loss": 0.4197, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.875717017208413, |
| "grad_norm": 0.36051788842397653, |
| "learning_rate": 1.4769720611124342e-05, |
| "loss": 0.3862, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.8814531548757172, |
| "grad_norm": 0.390822308734316, |
| "learning_rate": 1.4640531274154946e-05, |
| "loss": 0.4547, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.8871892925430211, |
| "grad_norm": 0.35188013033472365, |
| "learning_rate": 1.4511582414535359e-05, |
| "loss": 0.372, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.892925430210325, |
| "grad_norm": 0.3492988111143338, |
| "learning_rate": 1.4382879818152486e-05, |
| "loss": 0.391, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.898661567877629, |
| "grad_norm": 0.3506998309752745, |
| "learning_rate": 1.425442925984353e-05, |
| "loss": 0.3807, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.904397705544933, |
| "grad_norm": 0.4030721263174961, |
| "learning_rate": 1.41262365031368e-05, |
| "loss": 0.376, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.910133843212237, |
| "grad_norm": 0.3504037718749043, |
| "learning_rate": 1.399830729999319e-05, |
| "loss": 0.3761, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.915869980879541, |
| "grad_norm": 0.3571642792832744, |
| "learning_rate": 1.3870647390548004e-05, |
| "loss": 0.3989, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.9216061185468452, |
| "grad_norm": 0.3995469257986992, |
| "learning_rate": 1.3743262502853458e-05, |
| "loss": 0.4614, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.9273422562141491, |
| "grad_norm": 0.3374515530857516, |
| "learning_rate": 1.3616158352621653e-05, |
| "loss": 0.3545, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.9330783938814533, |
| "grad_norm": 0.3785045797006251, |
| "learning_rate": 1.3489340642968088e-05, |
| "loss": 0.3725, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.9388145315487573, |
| "grad_norm": 0.3651730179303691, |
| "learning_rate": 1.336281506415579e-05, |
| "loss": 0.4091, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.9445506692160612, |
| "grad_norm": 0.35782792229426036, |
| "learning_rate": 1.3236587293339974e-05, |
| "loss": 0.4097, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.9502868068833652, |
| "grad_norm": 0.3715183571626738, |
| "learning_rate": 1.3110662994313343e-05, |
| "loss": 0.4238, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.9560229445506692, |
| "grad_norm": 0.3754585864645323, |
| "learning_rate": 1.2985047817251916e-05, |
| "loss": 0.4284, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.9617590822179731, |
| "grad_norm": 0.37208739976821165, |
| "learning_rate": 1.2859747398461542e-05, |
| "loss": 0.4178, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.967495219885277, |
| "grad_norm": 0.3608587542718569, |
| "learning_rate": 1.2734767360124955e-05, |
| "loss": 0.3667, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.9732313575525813, |
| "grad_norm": 0.37784527768886184, |
| "learning_rate": 1.261011331004958e-05, |
| "loss": 0.3512, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.9789674952198852, |
| "grad_norm": 0.37238567685296753, |
| "learning_rate": 1.2485790841415834e-05, |
| "loss": 0.4461, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.9847036328871894, |
| "grad_norm": 0.3926981132666437, |
| "learning_rate": 1.2361805532526225e-05, |
| "loss": 0.4303, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.9904397705544934, |
| "grad_norm": 0.386796605292204, |
| "learning_rate": 1.2238162946555004e-05, |
| "loss": 0.4182, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.9961759082217974, |
| "grad_norm": 0.32573202133902224, |
| "learning_rate": 1.2114868631298608e-05, |
| "loss": 0.3843, |
| "step": 348 |
| }, |
| { |
| "epoch": 2.0019120458891013, |
| "grad_norm": 0.856534667430809, |
| "learning_rate": 1.1991928118926661e-05, |
| "loss": 0.5702, |
| "step": 349 |
| }, |
| { |
| "epoch": 2.0076481835564053, |
| "grad_norm": 0.4595192074096811, |
| "learning_rate": 1.1869346925733813e-05, |
| "loss": 0.3478, |
| "step": 350 |
| }, |
| { |
| "epoch": 2.0133843212237093, |
| "grad_norm": 0.8854412030509314, |
| "learning_rate": 1.1747130551892176e-05, |
| "loss": 0.3504, |
| "step": 351 |
| }, |
| { |
| "epoch": 2.019120458891013, |
| "grad_norm": 0.5095848588351017, |
| "learning_rate": 1.1625284481204577e-05, |
| "loss": 0.3247, |
| "step": 352 |
| }, |
| { |
| "epoch": 2.024856596558317, |
| "grad_norm": 0.45215495379418424, |
| "learning_rate": 1.1503814180858451e-05, |
| "loss": 0.3293, |
| "step": 353 |
| }, |
| { |
| "epoch": 2.0305927342256216, |
| "grad_norm": 0.4983547198588867, |
| "learning_rate": 1.1382725101180593e-05, |
| "loss": 0.3068, |
| "step": 354 |
| }, |
| { |
| "epoch": 2.0363288718929256, |
| "grad_norm": 0.4664207227186065, |
| "learning_rate": 1.1262022675392544e-05, |
| "loss": 0.3031, |
| "step": 355 |
| }, |
| { |
| "epoch": 2.0420650095602295, |
| "grad_norm": 0.4171482692909067, |
| "learning_rate": 1.1141712319366835e-05, |
| "loss": 0.3104, |
| "step": 356 |
| }, |
| { |
| "epoch": 2.0478011472275335, |
| "grad_norm": 0.48388082613280514, |
| "learning_rate": 1.1021799431383969e-05, |
| "loss": 0.3355, |
| "step": 357 |
| }, |
| { |
| "epoch": 2.0535372848948374, |
| "grad_norm": 0.4405839370097391, |
| "learning_rate": 1.0902289391890232e-05, |
| "loss": 0.3181, |
| "step": 358 |
| }, |
| { |
| "epoch": 2.0592734225621414, |
| "grad_norm": 0.38572618966224764, |
| "learning_rate": 1.0783187563256218e-05, |
| "loss": 0.255, |
| "step": 359 |
| }, |
| { |
| "epoch": 2.0650095602294454, |
| "grad_norm": 0.4124079647312535, |
| "learning_rate": 1.0664499289536283e-05, |
| "loss": 0.3179, |
| "step": 360 |
| }, |
| { |
| "epoch": 2.0707456978967493, |
| "grad_norm": 0.36220901351046625, |
| "learning_rate": 1.0546229896228692e-05, |
| "loss": 0.2607, |
| "step": 361 |
| }, |
| { |
| "epoch": 2.0764818355640537, |
| "grad_norm": 0.4049509171035621, |
| "learning_rate": 1.0428384690036749e-05, |
| "loss": 0.2878, |
| "step": 362 |
| }, |
| { |
| "epoch": 2.0822179732313577, |
| "grad_norm": 0.48057213951850675, |
| "learning_rate": 1.0310968958630601e-05, |
| "loss": 0.3607, |
| "step": 363 |
| }, |
| { |
| "epoch": 2.0879541108986617, |
| "grad_norm": 0.33983057117406296, |
| "learning_rate": 1.0193987970410046e-05, |
| "loss": 0.2914, |
| "step": 364 |
| }, |
| { |
| "epoch": 2.0936902485659656, |
| "grad_norm": 0.4031998241867442, |
| "learning_rate": 1.0077446974268098e-05, |
| "loss": 0.3372, |
| "step": 365 |
| }, |
| { |
| "epoch": 2.0994263862332696, |
| "grad_norm": 0.3811905438226048, |
| "learning_rate": 9.961351199355513e-06, |
| "loss": 0.311, |
| "step": 366 |
| }, |
| { |
| "epoch": 2.1051625239005736, |
| "grad_norm": 0.38408888784962325, |
| "learning_rate": 9.84570585484614e-06, |
| "loss": 0.3563, |
| "step": 367 |
| }, |
| { |
| "epoch": 2.1108986615678775, |
| "grad_norm": 0.33981693233156296, |
| "learning_rate": 9.730516129703158e-06, |
| "loss": 0.2871, |
| "step": 368 |
| }, |
| { |
| "epoch": 2.1166347992351815, |
| "grad_norm": 0.3518672236516868, |
| "learning_rate": 9.615787192446304e-06, |
| "loss": 0.3125, |
| "step": 369 |
| }, |
| { |
| "epoch": 2.1223709369024855, |
| "grad_norm": 0.3802739825741615, |
| "learning_rate": 9.50152419091992e-06, |
| "loss": 0.3254, |
| "step": 370 |
| }, |
| { |
| "epoch": 2.12810707456979, |
| "grad_norm": 0.3350283722430752, |
| "learning_rate": 9.38773225206199e-06, |
| "loss": 0.2669, |
| "step": 371 |
| }, |
| { |
| "epoch": 2.133843212237094, |
| "grad_norm": 0.37579814126338135, |
| "learning_rate": 9.274416481674084e-06, |
| "loss": 0.334, |
| "step": 372 |
| }, |
| { |
| "epoch": 2.139579349904398, |
| "grad_norm": 0.32704016552087445, |
| "learning_rate": 9.161581964192298e-06, |
| "loss": 0.2568, |
| "step": 373 |
| }, |
| { |
| "epoch": 2.1453154875717018, |
| "grad_norm": 0.414672276708601, |
| "learning_rate": 9.049233762459057e-06, |
| "loss": 0.4001, |
| "step": 374 |
| }, |
| { |
| "epoch": 2.1510516252390057, |
| "grad_norm": 0.3112648437196225, |
| "learning_rate": 8.937376917496012e-06, |
| "loss": 0.2447, |
| "step": 375 |
| }, |
| { |
| "epoch": 2.1567877629063097, |
| "grad_norm": 0.3535914113780811, |
| "learning_rate": 8.826016448277795e-06, |
| "loss": 0.3157, |
| "step": 376 |
| }, |
| { |
| "epoch": 2.1625239005736137, |
| "grad_norm": 0.3434081422193142, |
| "learning_rate": 8.715157351506864e-06, |
| "loss": 0.2992, |
| "step": 377 |
| }, |
| { |
| "epoch": 2.1682600382409176, |
| "grad_norm": 0.3422884659233757, |
| "learning_rate": 8.604804601389271e-06, |
| "loss": 0.3132, |
| "step": 378 |
| }, |
| { |
| "epoch": 2.173996175908222, |
| "grad_norm": 0.3430270987205145, |
| "learning_rate": 8.494963149411489e-06, |
| "loss": 0.2712, |
| "step": 379 |
| }, |
| { |
| "epoch": 2.179732313575526, |
| "grad_norm": 0.36832674053444375, |
| "learning_rate": 8.385637924118224e-06, |
| "loss": 0.2803, |
| "step": 380 |
| }, |
| { |
| "epoch": 2.18546845124283, |
| "grad_norm": 0.338652106601731, |
| "learning_rate": 8.276833830891312e-06, |
| "loss": 0.2937, |
| "step": 381 |
| }, |
| { |
| "epoch": 2.191204588910134, |
| "grad_norm": 0.347795356928778, |
| "learning_rate": 8.168555751729552e-06, |
| "loss": 0.3159, |
| "step": 382 |
| }, |
| { |
| "epoch": 2.196940726577438, |
| "grad_norm": 0.37649404917042467, |
| "learning_rate": 8.060808545029727e-06, |
| "loss": 0.3159, |
| "step": 383 |
| }, |
| { |
| "epoch": 2.202676864244742, |
| "grad_norm": 0.4275027691702314, |
| "learning_rate": 7.95359704536854e-06, |
| "loss": 0.3051, |
| "step": 384 |
| }, |
| { |
| "epoch": 2.208413001912046, |
| "grad_norm": 0.36709280229253105, |
| "learning_rate": 7.846926063285745e-06, |
| "loss": 0.3082, |
| "step": 385 |
| }, |
| { |
| "epoch": 2.21414913957935, |
| "grad_norm": 0.3197758439418705, |
| "learning_rate": 7.740800385068256e-06, |
| "loss": 0.3044, |
| "step": 386 |
| }, |
| { |
| "epoch": 2.2198852772466537, |
| "grad_norm": 0.36467098789844404, |
| "learning_rate": 7.63522477253542e-06, |
| "loss": 0.3192, |
| "step": 387 |
| }, |
| { |
| "epoch": 2.2256214149139577, |
| "grad_norm": 0.3792955542610236, |
| "learning_rate": 7.530203962825331e-06, |
| "loss": 0.3374, |
| "step": 388 |
| }, |
| { |
| "epoch": 2.231357552581262, |
| "grad_norm": 0.33901384143705177, |
| "learning_rate": 7.425742668182308e-06, |
| "loss": 0.3056, |
| "step": 389 |
| }, |
| { |
| "epoch": 2.237093690248566, |
| "grad_norm": 0.33544808925545067, |
| "learning_rate": 7.3218455757454125e-06, |
| "loss": 0.2674, |
| "step": 390 |
| }, |
| { |
| "epoch": 2.24282982791587, |
| "grad_norm": 0.32597707317340413, |
| "learning_rate": 7.218517347338194e-06, |
| "loss": 0.2843, |
| "step": 391 |
| }, |
| { |
| "epoch": 2.248565965583174, |
| "grad_norm": 0.3555015121926318, |
| "learning_rate": 7.115762619259459e-06, |
| "loss": 0.3234, |
| "step": 392 |
| }, |
| { |
| "epoch": 2.254302103250478, |
| "grad_norm": 0.3593977217633907, |
| "learning_rate": 7.013586002075297e-06, |
| "loss": 0.3573, |
| "step": 393 |
| }, |
| { |
| "epoch": 2.260038240917782, |
| "grad_norm": 0.326928535932432, |
| "learning_rate": 6.911992080412153e-06, |
| "loss": 0.3053, |
| "step": 394 |
| }, |
| { |
| "epoch": 2.265774378585086, |
| "grad_norm": 0.3661810608640239, |
| "learning_rate": 6.810985412751159e-06, |
| "loss": 0.3411, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.27151051625239, |
| "grad_norm": 0.33182512098109557, |
| "learning_rate": 6.710570531223568e-06, |
| "loss": 0.2988, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.2772466539196943, |
| "grad_norm": 0.37198993314527246, |
| "learning_rate": 6.610751941407423e-06, |
| "loss": 0.2987, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.2829827915869982, |
| "grad_norm": 0.3605089217084053, |
| "learning_rate": 6.511534122125385e-06, |
| "loss": 0.3126, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.288718929254302, |
| "grad_norm": 0.35417912926194517, |
| "learning_rate": 6.412921525243747e-06, |
| "loss": 0.3179, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.294455066921606, |
| "grad_norm": 0.34739676511363254, |
| "learning_rate": 6.314918575472724e-06, |
| "loss": 0.3213, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.30019120458891, |
| "grad_norm": 0.34538986538232874, |
| "learning_rate": 6.2175296701678765e-06, |
| "loss": 0.3267, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.305927342256214, |
| "grad_norm": 0.33419006023335046, |
| "learning_rate": 6.120759179132825e-06, |
| "loss": 0.3022, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.311663479923518, |
| "grad_norm": 0.3490791821453638, |
| "learning_rate": 6.024611444423167e-06, |
| "loss": 0.3331, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.317399617590822, |
| "grad_norm": 0.3423057899783048, |
| "learning_rate": 5.929090780151683e-06, |
| "loss": 0.3235, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.323135755258126, |
| "grad_norm": 0.3160266995434583, |
| "learning_rate": 5.8342014722947025e-06, |
| "loss": 0.2911, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.3288718929254304, |
| "grad_norm": 0.3271039700649619, |
| "learning_rate": 5.739947778499866e-06, |
| "loss": 0.2625, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.3346080305927344, |
| "grad_norm": 0.357397125079873, |
| "learning_rate": 5.6463339278950135e-06, |
| "loss": 0.2686, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.3403441682600383, |
| "grad_norm": 0.3626417105072833, |
| "learning_rate": 5.553364120898495e-06, |
| "loss": 0.3078, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.3460803059273423, |
| "grad_norm": 0.330368230833986, |
| "learning_rate": 5.461042529030643e-06, |
| "loss": 0.2669, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.3518164435946463, |
| "grad_norm": 0.35751740719995606, |
| "learning_rate": 5.369373294726625e-06, |
| "loss": 0.3035, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.35755258126195, |
| "grad_norm": 0.35220579168854316, |
| "learning_rate": 5.2783605311505705e-06, |
| "loss": 0.2959, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.363288718929254, |
| "grad_norm": 0.3719450476826788, |
| "learning_rate": 5.188008322011022e-06, |
| "loss": 0.3271, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.369024856596558, |
| "grad_norm": 0.3413354729055759, |
| "learning_rate": 5.098320721377677e-06, |
| "loss": 0.2821, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.3747609942638626, |
| "grad_norm": 0.31970016663963907, |
| "learning_rate": 5.00930175349952e-06, |
| "loss": 0.2978, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.3804971319311665, |
| "grad_norm": 0.3405014390669701, |
| "learning_rate": 4.920955412624206e-06, |
| "loss": 0.3298, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.3862332695984705, |
| "grad_norm": 0.32022737076210306, |
| "learning_rate": 4.833285662818903e-06, |
| "loss": 0.3205, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.3919694072657744, |
| "grad_norm": 0.326492045130511, |
| "learning_rate": 4.746296437792364e-06, |
| "loss": 0.3073, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.3977055449330784, |
| "grad_norm": 0.32202773276443253, |
| "learning_rate": 4.65999164071846e-06, |
| "loss": 0.3132, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.4034416826003824, |
| "grad_norm": 0.30026610991923225, |
| "learning_rate": 4.5743751440610315e-06, |
| "loss": 0.2999, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.4091778202676863, |
| "grad_norm": 0.3263265838303172, |
| "learning_rate": 4.489450789400158e-06, |
| "loss": 0.3396, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.4149139579349903, |
| "grad_norm": 0.31887864810796523, |
| "learning_rate": 4.405222387259737e-06, |
| "loss": 0.2785, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.4206500956022943, |
| "grad_norm": 0.33803434408500127, |
| "learning_rate": 4.32169371693657e-06, |
| "loss": 0.3396, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.4263862332695982, |
| "grad_norm": 0.30385763232746726, |
| "learning_rate": 4.238868526330722e-06, |
| "loss": 0.2888, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.4321223709369026, |
| "grad_norm": 0.32177073146894075, |
| "learning_rate": 4.156750531777414e-06, |
| "loss": 0.2814, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.4378585086042066, |
| "grad_norm": 0.3331670974871312, |
| "learning_rate": 4.075343417880233e-06, |
| "loss": 0.3166, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.4435946462715106, |
| "grad_norm": 0.34367786083186397, |
| "learning_rate": 3.994650837345817e-06, |
| "loss": 0.2981, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.4493307839388145, |
| "grad_norm": 0.3379368579815228, |
| "learning_rate": 3.914676410819957e-06, |
| "loss": 0.3439, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.4550669216061185, |
| "grad_norm": 0.32413796456612975, |
| "learning_rate": 3.835423726725162e-06, |
| "loss": 0.3026, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.4608030592734225, |
| "grad_norm": 0.33686529783092345, |
| "learning_rate": 3.7568963410996028e-06, |
| "loss": 0.3366, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.4665391969407264, |
| "grad_norm": 0.29819612441201854, |
| "learning_rate": 3.6790977774376013e-06, |
| "loss": 0.2838, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.472275334608031, |
| "grad_norm": 0.33348914715995587, |
| "learning_rate": 3.6020315265315087e-06, |
| "loss": 0.3133, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.478011472275335, |
| "grad_norm": 0.30518415557095047, |
| "learning_rate": 3.5257010463150642e-06, |
| "loss": 0.3132, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.4837476099426388, |
| "grad_norm": 0.33485851224923785, |
| "learning_rate": 3.4501097617082648e-06, |
| "loss": 0.3567, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.4894837476099427, |
| "grad_norm": 0.3176169459499719, |
| "learning_rate": 3.375261064463666e-06, |
| "loss": 0.3047, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.4952198852772467, |
| "grad_norm": 0.35821675236801026, |
| "learning_rate": 3.3011583130142277e-06, |
| "loss": 0.3054, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.5009560229445507, |
| "grad_norm": 0.31447192397671203, |
| "learning_rate": 3.2278048323225764e-06, |
| "loss": 0.3002, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.5066921606118546, |
| "grad_norm": 0.29814490505492547, |
| "learning_rate": 3.155203913731868e-06, |
| "loss": 0.3145, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.5124282982791586, |
| "grad_norm": 0.31338084805310423, |
| "learning_rate": 3.0833588148180516e-06, |
| "loss": 0.316, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.5181644359464626, |
| "grad_norm": 0.33273295840043154, |
| "learning_rate": 3.0122727592437574e-06, |
| "loss": 0.3052, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.5239005736137665, |
| "grad_norm": 0.3586562727340161, |
| "learning_rate": 2.941948936613608e-06, |
| "loss": 0.3584, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.5296367112810705, |
| "grad_norm": 0.3149636280687063, |
| "learning_rate": 2.8723905023311237e-06, |
| "loss": 0.3323, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.535372848948375, |
| "grad_norm": 0.29354487416787345, |
| "learning_rate": 2.8036005774571373e-06, |
| "loss": 0.27, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.541108986615679, |
| "grad_norm": 0.32300649834326484, |
| "learning_rate": 2.7355822485697615e-06, |
| "loss": 0.2927, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.546845124282983, |
| "grad_norm": 0.32585893999440413, |
| "learning_rate": 2.668338567625872e-06, |
| "loss": 0.2917, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.552581261950287, |
| "grad_norm": 0.3206957295285485, |
| "learning_rate": 2.601872551824196e-06, |
| "loss": 0.3305, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.5583173996175907, |
| "grad_norm": 0.31706654167791043, |
| "learning_rate": 2.536187183469905e-06, |
| "loss": 0.3036, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.5640535372848947, |
| "grad_norm": 0.29637565255206266, |
| "learning_rate": 2.4712854098408245e-06, |
| "loss": 0.296, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.569789674952199, |
| "grad_norm": 0.3242672724124806, |
| "learning_rate": 2.407170143055173e-06, |
| "loss": 0.3165, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.575525812619503, |
| "grad_norm": 0.31283308105656626, |
| "learning_rate": 2.3438442599408995e-06, |
| "loss": 0.2685, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.581261950286807, |
| "grad_norm": 0.31850570032908976, |
| "learning_rate": 2.2813106019066055e-06, |
| "loss": 0.2677, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.586998087954111, |
| "grad_norm": 0.33792122182008616, |
| "learning_rate": 2.219571974814059e-06, |
| "loss": 0.3382, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.592734225621415, |
| "grad_norm": 0.31634274129393675, |
| "learning_rate": 2.1586311488522705e-06, |
| "loss": 0.2952, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.598470363288719, |
| "grad_norm": 0.3044886559468434, |
| "learning_rate": 2.0984908584132356e-06, |
| "loss": 0.322, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.604206500956023, |
| "grad_norm": 0.3025610232808951, |
| "learning_rate": 2.0391538019691983e-06, |
| "loss": 0.2895, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.609942638623327, |
| "grad_norm": 0.31674447409176193, |
| "learning_rate": 1.9806226419516195e-06, |
| "loss": 0.3372, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.615678776290631, |
| "grad_norm": 0.3163602428641736, |
| "learning_rate": 1.922900004631667e-06, |
| "loss": 0.2956, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.621414913957935, |
| "grad_norm": 0.29617871455956285, |
| "learning_rate": 1.8659884800024119e-06, |
| "loss": 0.281, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.6271510516252388, |
| "grad_norm": 0.3054337634119753, |
| "learning_rate": 1.8098906216625934e-06, |
| "loss": 0.2779, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.632887189292543, |
| "grad_norm": 0.3473718579857251, |
| "learning_rate": 1.7546089467020677e-06, |
| "loss": 0.3322, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.638623326959847, |
| "grad_norm": 0.3369008816792107, |
| "learning_rate": 1.700145935588826e-06, |
| "loss": 0.333, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.644359464627151, |
| "grad_norm": 0.3127691638231103, |
| "learning_rate": 1.6465040320577408e-06, |
| "loss": 0.3012, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.650095602294455, |
| "grad_norm": 0.30464242733494856, |
| "learning_rate": 1.593685643000884e-06, |
| "loss": 0.2875, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.655831739961759, |
| "grad_norm": 0.33106117803370116, |
| "learning_rate": 1.5416931383595436e-06, |
| "loss": 0.3258, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.661567877629063, |
| "grad_norm": 0.3246679490688405, |
| "learning_rate": 1.490528851017885e-06, |
| "loss": 0.3027, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.667304015296367, |
| "grad_norm": 0.30458861657265374, |
| "learning_rate": 1.440195076698272e-06, |
| "loss": 0.2916, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.6730401529636714, |
| "grad_norm": 0.3186347058123626, |
| "learning_rate": 1.3906940738582698e-06, |
| "loss": 0.2863, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.6787762906309753, |
| "grad_norm": 0.31369585810148204, |
| "learning_rate": 1.3420280635892847e-06, |
| "loss": 0.3104, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.6845124282982793, |
| "grad_norm": 0.316446279408055, |
| "learning_rate": 1.2941992295169369e-06, |
| "loss": 0.3303, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.6902485659655833, |
| "grad_norm": 0.32320749119463515, |
| "learning_rate": 1.2472097177030485e-06, |
| "loss": 0.2923, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.6959847036328872, |
| "grad_norm": 0.3397456933911356, |
| "learning_rate": 1.2010616365493811e-06, |
| "loss": 0.3006, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.701720841300191, |
| "grad_norm": 0.3135400345614286, |
| "learning_rate": 1.1557570567030108e-06, |
| "loss": 0.2936, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.707456978967495, |
| "grad_norm": 0.3021698093355975, |
| "learning_rate": 1.1112980109634263e-06, |
| "loss": 0.3341, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.713193116634799, |
| "grad_norm": 0.30592288821677366, |
| "learning_rate": 1.067686494191318e-06, |
| "loss": 0.318, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.718929254302103, |
| "grad_norm": 0.3320062239605175, |
| "learning_rate": 1.0249244632190769e-06, |
| "loss": 0.3299, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.724665391969407, |
| "grad_norm": 0.30860296916444196, |
| "learning_rate": 9.83013836762976e-07, |
| "loss": 0.2836, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.730401529636711, |
| "grad_norm": 0.3361749125512604, |
| "learning_rate": 9.419564953370952e-07, |
| "loss": 0.3298, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.7361376673040154, |
| "grad_norm": 0.30689246179518487, |
| "learning_rate": 9.017542811689272e-07, |
| "loss": 0.3033, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.7418738049713194, |
| "grad_norm": 0.2838154253297985, |
| "learning_rate": 8.624089981167349e-07, |
| "loss": 0.2356, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.7476099426386233, |
| "grad_norm": 0.31969377880658495, |
| "learning_rate": 8.239224115885957e-07, |
| "loss": 0.3049, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.7533460803059273, |
| "grad_norm": 0.30060258657145644, |
| "learning_rate": 7.862962484631986e-07, |
| "loss": 0.3063, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.7590822179732313, |
| "grad_norm": 0.3022284718933698, |
| "learning_rate": 7.49532197012357e-07, |
| "loss": 0.2886, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.7648183556405352, |
| "grad_norm": 0.31537349530205033, |
| "learning_rate": 7.136319068252629e-07, |
| "loss": 0.327, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.7705544933078396, |
| "grad_norm": 0.3214439225809495, |
| "learning_rate": 6.785969887344546e-07, |
| "loss": 0.3239, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.7762906309751436, |
| "grad_norm": 0.30881041163907574, |
| "learning_rate": 6.444290147435617e-07, |
| "loss": 0.2923, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.7820267686424476, |
| "grad_norm": 0.2968321026529436, |
| "learning_rate": 6.111295179567434e-07, |
| "loss": 0.2962, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.7877629063097515, |
| "grad_norm": 0.32452460610512424, |
| "learning_rate": 5.786999925099257e-07, |
| "loss": 0.3019, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.7934990439770555, |
| "grad_norm": 0.28214977997629037, |
| "learning_rate": 5.471418935037398e-07, |
| "loss": 0.2949, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.7992351816443595, |
| "grad_norm": 0.30056333465804613, |
| "learning_rate": 5.164566369382407e-07, |
| "loss": 0.3396, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.8049713193116634, |
| "grad_norm": 0.3025564251244908, |
| "learning_rate": 4.866455996493691e-07, |
| "loss": 0.2909, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.8107074569789674, |
| "grad_norm": 0.30674628776531615, |
| "learning_rate": 4.577101192471811e-07, |
| "loss": 0.3128, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.8164435946462714, |
| "grad_norm": 0.31971943977257217, |
| "learning_rate": 4.296514940558161e-07, |
| "loss": 0.3299, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.8221797323135753, |
| "grad_norm": 0.32117510140100713, |
| "learning_rate": 4.0247098305525645e-07, |
| "loss": 0.3237, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.8279158699808793, |
| "grad_norm": 0.30441550056900496, |
| "learning_rate": 3.7616980582482866e-07, |
| "loss": 0.3431, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.8336520076481837, |
| "grad_norm": 0.2937689706082808, |
| "learning_rate": 3.507491424884779e-07, |
| "loss": 0.279, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.8393881453154877, |
| "grad_norm": 0.3243702100672169, |
| "learning_rate": 3.262101336618262e-07, |
| "loss": 0.3258, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.8451242829827916, |
| "grad_norm": 0.29048595506865943, |
| "learning_rate": 3.0255388040098864e-07, |
| "loss": 0.3202, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.8508604206500956, |
| "grad_norm": 0.32709366282528524, |
| "learning_rate": 2.7978144415316656e-07, |
| "loss": 0.3105, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.8565965583173996, |
| "grad_norm": 0.28160883870197856, |
| "learning_rate": 2.5789384670902753e-07, |
| "loss": 0.2349, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.8623326959847035, |
| "grad_norm": 0.32964689233644207, |
| "learning_rate": 2.3689207015685334e-07, |
| "loss": 0.3767, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.8680688336520075, |
| "grad_norm": 0.30156232678290884, |
| "learning_rate": 2.1677705683847082e-07, |
| "loss": 0.2827, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.873804971319312, |
| "grad_norm": 0.29788501422144353, |
| "learning_rate": 1.9754970930698115e-07, |
| "loss": 0.2988, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.879541108986616, |
| "grad_norm": 0.3025299588676407, |
| "learning_rate": 1.792108902862455e-07, |
| "loss": 0.2925, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.88527724665392, |
| "grad_norm": 0.3248363141852775, |
| "learning_rate": 1.6176142263219173e-07, |
| "loss": 0.3148, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.891013384321224, |
| "grad_norm": 0.3189914861271819, |
| "learning_rate": 1.4520208929587942e-07, |
| "loss": 0.2563, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.8967495219885278, |
| "grad_norm": 0.2861804455486256, |
| "learning_rate": 1.2953363328838342e-07, |
| "loss": 0.2708, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.9024856596558317, |
| "grad_norm": 0.3137865936214283, |
| "learning_rate": 1.1475675764743843e-07, |
| "loss": 0.3803, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.9082217973231357, |
| "grad_norm": 0.321251565354294, |
| "learning_rate": 1.0087212540591307e-07, |
| "loss": 0.3431, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.9139579349904396, |
| "grad_norm": 0.2951230413126827, |
| "learning_rate": 8.78803595620381e-08, |
| "loss": 0.27, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.9196940726577436, |
| "grad_norm": 0.3247014517591854, |
| "learning_rate": 7.57820430514733e-08, |
| "loss": 0.2853, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.9254302103250476, |
| "grad_norm": 0.3229719006514364, |
| "learning_rate": 6.457771872113716e-08, |
| "loss": 0.3274, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.9311663479923515, |
| "grad_norm": 0.30781258564862424, |
| "learning_rate": 5.4267889304859824e-08, |
| "loss": 0.2917, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.936902485659656, |
| "grad_norm": 0.3008043286280689, |
| "learning_rate": 4.485301740080994e-08, |
| "loss": 0.3029, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.94263862332696, |
| "grad_norm": 0.2969030376672524, |
| "learning_rate": 3.633352545076241e-08, |
| "loss": 0.2627, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.948374760994264, |
| "grad_norm": 0.2974333758480683, |
| "learning_rate": 2.8709795721117984e-08, |
| "loss": 0.3265, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.954110898661568, |
| "grad_norm": 0.3173221368962474, |
| "learning_rate": 2.198217028577254e-08, |
| "loss": 0.3039, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.959847036328872, |
| "grad_norm": 0.33002144156652646, |
| "learning_rate": 1.6150951010747152e-08, |
| "loss": 0.2785, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.9655831739961758, |
| "grad_norm": 0.30529921643631025, |
| "learning_rate": 1.1216399540669998e-08, |
| "loss": 0.2788, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.97131931166348, |
| "grad_norm": 0.31427963839680284, |
| "learning_rate": 7.178737287005799e-09, |
| "loss": 0.3469, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.977055449330784, |
| "grad_norm": 0.31476300056888123, |
| "learning_rate": 4.038145418148176e-09, |
| "loss": 0.2671, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.982791586998088, |
| "grad_norm": 0.3064284484610742, |
| "learning_rate": 1.7947648512728343e-09, |
| "loss": 0.32, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.988527724665392, |
| "grad_norm": 0.30798998882484835, |
| "learning_rate": 4.486962460270583e-10, |
| "loss": 0.3146, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.994263862332696, |
| "grad_norm": 0.28978511666305123, |
| "learning_rate": 0.0, |
| "loss": 0.2861, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.994263862332696, |
| "step": 522, |
| "total_flos": 5.620362022127534e+17, |
| "train_loss": 0.4276364711452261, |
| "train_runtime": 8560.5176, |
| "train_samples_per_second": 5.856, |
| "train_steps_per_second": 0.061 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 522, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 5.620362022127534e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |