{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.933333333333334,
  "eval_steps": 150,
  "global_step": 335,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014814814814814815,
      "grad_norm": 24.245065689086914,
      "learning_rate": 0.0,
      "loss": 1.5986,
      "step": 1
    },
    {
      "epoch": 0.02962962962962963,
      "grad_norm": 24.59937286376953,
      "learning_rate": 1.965616322328226e-06,
      "loss": 1.5387,
      "step": 2
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 22.017257690429688,
      "learning_rate": 3.1154281616956676e-06,
      "loss": 1.4677,
      "step": 3
    },
    {
      "epoch": 0.05925925925925926,
      "grad_norm": 19.979198455810547,
      "learning_rate": 3.931232644656452e-06,
      "loss": 1.3971,
      "step": 4
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 10.355873107910156,
      "learning_rate": 4.5640197625830816e-06,
      "loss": 1.1965,
      "step": 5
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 7.20529842376709,
      "learning_rate": 5.081044484023894e-06,
      "loss": 1.0327,
      "step": 6
    },
    {
      "epoch": 0.1037037037037037,
      "grad_norm": 6.543303489685059,
      "learning_rate": 5.518182657364912e-06,
      "loss": 1.0091,
      "step": 7
    },
    {
      "epoch": 0.11851851851851852,
      "grad_norm": 5.638514041900635,
      "learning_rate": 5.896848966984678e-06,
      "loss": 0.8685,
      "step": 8
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 4.796297073364258,
      "learning_rate": 6.230856323391335e-06,
      "loss": 0.816,
      "step": 9
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 6.3762431144714355,
      "learning_rate": 6.5296360849113085e-06,
      "loss": 0.8679,
      "step": 10
    },
    {
      "epoch": 0.16296296296296298,
      "grad_norm": 4.701241493225098,
      "learning_rate": 6.7999152555718276e-06,
      "loss": 0.7973,
      "step": 11
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 4.334161281585693,
      "learning_rate": 7.04666080635212e-06,
      "loss": 0.6289,
      "step": 12
    },
    {
      "epoch": 0.1925925925925926,
      "grad_norm": 2.9507179260253906,
      "learning_rate": 7.273644709769792e-06,
      "loss": 0.7202,
      "step": 13
    },
    {
      "epoch": 0.2074074074074074,
      "grad_norm": 3.3083744049072266,
      "learning_rate": 7.483798979693137e-06,
      "loss": 0.6864,
      "step": 14
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 3.1791484355926514,
      "learning_rate": 7.679447924278749e-06,
      "loss": 0.7458,
      "step": 15
    },
    {
      "epoch": 0.23703703703703705,
      "grad_norm": 2.6846699714660645,
      "learning_rate": 7.862465289312904e-06,
      "loss": 0.6543,
      "step": 16
    },
    {
      "epoch": 0.2518518518518518,
      "grad_norm": 3.1359522342681885,
      "learning_rate": 8.034383677671775e-06,
      "loss": 0.531,
      "step": 17
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 3.0577006340026855,
      "learning_rate": 8.19647264571956e-06,
      "loss": 0.6345,
      "step": 18
    },
    {
      "epoch": 0.2814814814814815,
      "grad_norm": 3.066653251647949,
      "learning_rate": 8.349795656491867e-06,
      "loss": 0.5942,
      "step": 19
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 3.3706552982330322,
      "learning_rate": 8.495252407239533e-06,
      "loss": 0.638,
      "step": 20
    },
    {
      "epoch": 0.3111111111111111,
      "grad_norm": 2.55362606048584,
      "learning_rate": 8.633610819060579e-06,
      "loss": 0.602,
      "step": 21
    },
    {
      "epoch": 0.32592592592592595,
      "grad_norm": 2.864042282104492,
      "learning_rate": 8.765531577900054e-06,
      "loss": 0.6849,
      "step": 22
    },
    {
      "epoch": 0.34074074074074073,
      "grad_norm": 2.357649326324463,
      "learning_rate": 8.891587215888663e-06,
      "loss": 0.5194,
      "step": 23
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 2.659815549850464,
      "learning_rate": 9.012277128680346e-06,
      "loss": 0.4647,
      "step": 24
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 3.5252552032470703,
      "learning_rate": 9.128039525166163e-06,
      "loss": 0.6621,
      "step": 25
    },
    {
      "epoch": 0.3851851851851852,
      "grad_norm": 2.833387613296509,
      "learning_rate": 9.239261032098019e-06,
      "loss": 0.5758,
      "step": 26
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.5673093795776367,
      "learning_rate": 9.346284485087002e-06,
      "loss": 0.4742,
      "step": 27
    },
    {
      "epoch": 0.4148148148148148,
      "grad_norm": 3.8754830360412598,
      "learning_rate": 9.449415302021363e-06,
      "loss": 0.5632,
      "step": 28
    },
    {
      "epoch": 0.42962962962962964,
      "grad_norm": 2.8832461833953857,
      "learning_rate": 9.548926737583076e-06,
      "loss": 0.5039,
      "step": 29
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.340471029281616,
      "learning_rate": 9.645064246606975e-06,
      "loss": 0.4832,
      "step": 30
    },
    {
      "epoch": 0.45925925925925926,
      "grad_norm": 3.4840638637542725,
      "learning_rate": 9.738049131714717e-06,
      "loss": 0.6216,
      "step": 31
    },
    {
      "epoch": 0.4740740740740741,
      "grad_norm": 1.927802562713623,
      "learning_rate": 9.828081611641132e-06,
      "loss": 0.4316,
      "step": 32
    },
    {
      "epoch": 0.4888888888888889,
      "grad_norm": 1.9146533012390137,
      "learning_rate": 9.915343417267494e-06,
      "loss": 0.4261,
      "step": 33
    },
    {
      "epoch": 0.5037037037037037,
      "grad_norm": 2.636239767074585,
      "learning_rate": 1e-05,
      "loss": 0.4445,
      "step": 34
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 2.7775421142578125,
      "learning_rate": 1e-05,
      "loss": 0.5895,
      "step": 35
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 3.3831987380981445,
      "learning_rate": 9.966777408637874e-06,
      "loss": 0.5247,
      "step": 36
    },
    {
      "epoch": 0.5481481481481482,
      "grad_norm": 2.883329391479492,
      "learning_rate": 9.933554817275748e-06,
      "loss": 0.4712,
      "step": 37
    },
    {
      "epoch": 0.562962962962963,
      "grad_norm": 2.908726215362549,
      "learning_rate": 9.900332225913623e-06,
      "loss": 0.4991,
      "step": 38
    },
    {
      "epoch": 0.5777777777777777,
      "grad_norm": 2.307657480239868,
      "learning_rate": 9.867109634551495e-06,
      "loss": 0.4174,
      "step": 39
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 2.3641955852508545,
      "learning_rate": 9.83388704318937e-06,
      "loss": 0.5014,
      "step": 40
    },
    {
      "epoch": 0.6074074074074074,
      "grad_norm": 2.850147247314453,
      "learning_rate": 9.800664451827243e-06,
      "loss": 0.5025,
      "step": 41
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 2.9469687938690186,
      "learning_rate": 9.767441860465117e-06,
      "loss": 0.4127,
      "step": 42
    },
    {
      "epoch": 0.6370370370370371,
      "grad_norm": 2.0221896171569824,
      "learning_rate": 9.734219269102992e-06,
      "loss": 0.4793,
      "step": 43
    },
    {
      "epoch": 0.6518518518518519,
      "grad_norm": 2.2807424068450928,
      "learning_rate": 9.700996677740865e-06,
      "loss": 0.4363,
      "step": 44
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.9812514781951904,
      "learning_rate": 9.66777408637874e-06,
      "loss": 0.472,
      "step": 45
    },
    {
      "epoch": 0.6814814814814815,
      "grad_norm": 2.3653407096862793,
      "learning_rate": 9.634551495016612e-06,
      "loss": 0.4747,
      "step": 46
    },
    {
      "epoch": 0.6962962962962963,
      "grad_norm": 1.875173807144165,
      "learning_rate": 9.601328903654485e-06,
      "loss": 0.4466,
      "step": 47
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 2.5644772052764893,
      "learning_rate": 9.56810631229236e-06,
      "loss": 0.4908,
      "step": 48
    },
    {
      "epoch": 0.725925925925926,
      "grad_norm": 2.5701663494110107,
      "learning_rate": 9.534883720930234e-06,
      "loss": 0.5709,
      "step": 49
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 3.4693243503570557,
      "learning_rate": 9.501661129568107e-06,
      "loss": 0.4436,
      "step": 50
    },
    {
      "epoch": 0.7555555555555555,
      "grad_norm": 1.7804033756256104,
      "learning_rate": 9.468438538205981e-06,
      "loss": 0.5096,
      "step": 51
    },
    {
      "epoch": 0.7703703703703704,
      "grad_norm": 2.171896457672119,
      "learning_rate": 9.435215946843854e-06,
      "loss": 0.4063,
      "step": 52
    },
    {
      "epoch": 0.7851851851851852,
      "grad_norm": 2.830214500427246,
      "learning_rate": 9.401993355481728e-06,
      "loss": 0.6981,
      "step": 53
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.3130273818969727,
      "learning_rate": 9.368770764119603e-06,
      "loss": 0.5553,
      "step": 54
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 2.5522916316986084,
      "learning_rate": 9.335548172757476e-06,
      "loss": 0.5471,
      "step": 55
    },
    {
      "epoch": 0.8296296296296296,
      "grad_norm": 1.8053007125854492,
      "learning_rate": 9.30232558139535e-06,
      "loss": 0.4449,
      "step": 56
    },
    {
      "epoch": 0.8444444444444444,
      "grad_norm": 2.2297627925872803,
      "learning_rate": 9.269102990033223e-06,
      "loss": 0.4782,
      "step": 57
    },
    {
      "epoch": 0.8592592592592593,
      "grad_norm": 2.388592004776001,
      "learning_rate": 9.235880398671098e-06,
      "loss": 0.4333,
      "step": 58
    },
    {
      "epoch": 0.8740740740740741,
      "grad_norm": 2.2160580158233643,
      "learning_rate": 9.20265780730897e-06,
      "loss": 0.397,
      "step": 59
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 2.1153295040130615,
      "learning_rate": 9.169435215946845e-06,
      "loss": 0.4976,
      "step": 60
    },
    {
      "epoch": 0.9037037037037037,
      "grad_norm": 2.1571428775787354,
      "learning_rate": 9.136212624584718e-06,
      "loss": 0.5009,
      "step": 61
    },
    {
      "epoch": 0.9185185185185185,
      "grad_norm": 1.9368093013763428,
      "learning_rate": 9.102990033222592e-06,
      "loss": 0.4225,
      "step": 62
    },
    {
      "epoch": 0.9333333333333333,
      "grad_norm": 1.793280839920044,
      "learning_rate": 9.069767441860465e-06,
      "loss": 0.4407,
      "step": 63
    },
    {
      "epoch": 0.9481481481481482,
      "grad_norm": 1.7864512205123901,
      "learning_rate": 9.03654485049834e-06,
      "loss": 0.481,
      "step": 64
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 2.982163429260254,
      "learning_rate": 9.003322259136214e-06,
      "loss": 0.5735,
      "step": 65
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 2.1836278438568115,
      "learning_rate": 8.970099667774087e-06,
      "loss": 0.4532,
      "step": 66
    },
    {
      "epoch": 0.9925925925925926,
      "grad_norm": 2.726743221282959,
      "learning_rate": 8.93687707641196e-06,
      "loss": 0.5093,
      "step": 67
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.726743221282959,
      "learning_rate": 8.93687707641196e-06,
      "loss": 0.4299,
      "step": 68
    },
    {
      "epoch": 1.0148148148148148,
      "grad_norm": 2.7928483486175537,
      "learning_rate": 8.903654485049834e-06,
      "loss": 0.3207,
      "step": 69
    },
    {
      "epoch": 1.0296296296296297,
      "grad_norm": 1.4182758331298828,
      "learning_rate": 8.870431893687709e-06,
      "loss": 0.4154,
      "step": 70
    },
    {
      "epoch": 1.0444444444444445,
      "grad_norm": 2.5209672451019287,
      "learning_rate": 8.837209302325582e-06,
      "loss": 0.4017,
      "step": 71
    },
    {
      "epoch": 1.0592592592592593,
      "grad_norm": 1.528294324874878,
      "learning_rate": 8.803986710963456e-06,
      "loss": 0.3515,
      "step": 72
    },
    {
      "epoch": 1.074074074074074,
      "grad_norm": 2.7746169567108154,
      "learning_rate": 8.770764119601329e-06,
      "loss": 0.4338,
      "step": 73
    },
    {
      "epoch": 1.0888888888888888,
      "grad_norm": 1.9682352542877197,
      "learning_rate": 8.737541528239203e-06,
      "loss": 0.4154,
      "step": 74
    },
    {
      "epoch": 1.1037037037037036,
      "grad_norm": 2.2559337615966797,
      "learning_rate": 8.704318936877078e-06,
      "loss": 0.368,
      "step": 75
    },
    {
      "epoch": 1.1185185185185185,
      "grad_norm": 1.3220306634902954,
      "learning_rate": 8.67109634551495e-06,
      "loss": 0.3142,
      "step": 76
    },
    {
      "epoch": 1.1333333333333333,
      "grad_norm": 2.455972671508789,
      "learning_rate": 8.637873754152825e-06,
      "loss": 0.4777,
      "step": 77
    },
    {
      "epoch": 1.1481481481481481,
      "grad_norm": 2.32432222366333,
      "learning_rate": 8.604651162790698e-06,
      "loss": 0.4585,
      "step": 78
    },
    {
      "epoch": 1.162962962962963,
      "grad_norm": 2.2095751762390137,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.4513,
      "step": 79
    },
    {
      "epoch": 1.1777777777777778,
      "grad_norm": 2.29775071144104,
      "learning_rate": 8.538205980066447e-06,
      "loss": 0.3315,
      "step": 80
    },
    {
      "epoch": 1.1925925925925926,
      "grad_norm": 2.390573740005493,
      "learning_rate": 8.50498338870432e-06,
      "loss": 0.4328,
      "step": 81
    },
    {
      "epoch": 1.2074074074074075,
      "grad_norm": 2.1682214736938477,
      "learning_rate": 8.471760797342193e-06,
      "loss": 0.3708,
      "step": 82
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 2.305809736251831,
      "learning_rate": 8.438538205980067e-06,
      "loss": 0.333,
      "step": 83
    },
    {
      "epoch": 1.237037037037037,
      "grad_norm": 1.9154317378997803,
      "learning_rate": 8.40531561461794e-06,
      "loss": 0.3514,
      "step": 84
    },
    {
      "epoch": 1.2518518518518518,
      "grad_norm": 2.1949751377105713,
      "learning_rate": 8.372093023255815e-06,
      "loss": 0.3423,
      "step": 85
    },
    {
      "epoch": 1.2666666666666666,
      "grad_norm": 1.4846103191375732,
      "learning_rate": 8.338870431893689e-06,
      "loss": 0.3935,
      "step": 86
    },
    {
      "epoch": 1.2814814814814814,
      "grad_norm": 2.3148951530456543,
      "learning_rate": 8.305647840531562e-06,
      "loss": 0.3683,
      "step": 87
    },
    {
      "epoch": 1.2962962962962963,
      "grad_norm": 1.7715332508087158,
      "learning_rate": 8.272425249169436e-06,
      "loss": 0.3415,
      "step": 88
    },
    {
      "epoch": 1.3111111111111111,
      "grad_norm": 1.6804319620132446,
      "learning_rate": 8.23920265780731e-06,
      "loss": 0.4075,
      "step": 89
    },
    {
      "epoch": 1.325925925925926,
      "grad_norm": 1.976668119430542,
      "learning_rate": 8.205980066445184e-06,
      "loss": 0.295,
      "step": 90
    },
    {
      "epoch": 1.3407407407407408,
      "grad_norm": 1.8023229837417603,
      "learning_rate": 8.172757475083057e-06,
      "loss": 0.389,
      "step": 91
    },
    {
      "epoch": 1.3555555555555556,
      "grad_norm": 1.7759190797805786,
      "learning_rate": 8.139534883720931e-06,
      "loss": 0.3517,
      "step": 92
    },
    {
      "epoch": 1.3703703703703702,
      "grad_norm": 1.3482650518417358,
      "learning_rate": 8.106312292358804e-06,
      "loss": 0.3606,
      "step": 93
    },
    {
      "epoch": 1.3851851851851853,
      "grad_norm": 2.5421998500823975,
      "learning_rate": 8.073089700996678e-06,
      "loss": 0.5241,
      "step": 94
    },
    {
      "epoch": 1.4,
      "grad_norm": 3.181865692138672,
      "learning_rate": 8.039867109634553e-06,
      "loss": 0.375,
      "step": 95
    },
    {
      "epoch": 1.4148148148148147,
      "grad_norm": 1.7613658905029297,
      "learning_rate": 8.006644518272426e-06,
      "loss": 0.4339,
      "step": 96
    },
    {
      "epoch": 1.4296296296296296,
      "grad_norm": 1.4631813764572144,
      "learning_rate": 7.9734219269103e-06,
      "loss": 0.3762,
      "step": 97
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 1.71363365650177,
      "learning_rate": 7.940199335548173e-06,
      "loss": 0.3224,
      "step": 98
    },
    {
      "epoch": 1.4592592592592593,
      "grad_norm": 2.1795740127563477,
      "learning_rate": 7.906976744186048e-06,
      "loss": 0.3816,
      "step": 99
    },
    {
      "epoch": 1.474074074074074,
      "grad_norm": 1.5822690725326538,
      "learning_rate": 7.873754152823922e-06,
      "loss": 0.4523,
      "step": 100
    },
    {
      "epoch": 1.488888888888889,
      "grad_norm": 1.7851877212524414,
      "learning_rate": 7.840531561461795e-06,
      "loss": 0.3665,
      "step": 101
    },
    {
      "epoch": 1.5037037037037035,
      "grad_norm": 1.671633243560791,
      "learning_rate": 7.807308970099668e-06,
      "loss": 0.3217,
      "step": 102
    },
    {
      "epoch": 1.5185185185185186,
      "grad_norm": 2.666489601135254,
      "learning_rate": 7.774086378737542e-06,
      "loss": 0.4801,
      "step": 103
    },
    {
      "epoch": 1.5333333333333332,
      "grad_norm": 1.3500992059707642,
      "learning_rate": 7.740863787375415e-06,
      "loss": 0.3407,
      "step": 104
    },
    {
      "epoch": 1.5481481481481483,
      "grad_norm": 1.4269295930862427,
      "learning_rate": 7.70764119601329e-06,
      "loss": 0.3082,
      "step": 105
    },
    {
      "epoch": 1.5629629629629629,
      "grad_norm": 1.4882391691207886,
      "learning_rate": 7.674418604651164e-06,
      "loss": 0.3497,
      "step": 106
    },
    {
      "epoch": 1.5777777777777777,
      "grad_norm": 1.4180535078048706,
      "learning_rate": 7.641196013289037e-06,
      "loss": 0.3963,
      "step": 107
    },
    {
      "epoch": 1.5925925925925926,
      "grad_norm": 1.4838252067565918,
      "learning_rate": 7.6079734219269106e-06,
      "loss": 0.3956,
      "step": 108
    },
    {
      "epoch": 1.6074074074074074,
      "grad_norm": 2.0815930366516113,
      "learning_rate": 7.574750830564784e-06,
      "loss": 0.4667,
      "step": 109
    },
    {
      "epoch": 1.6222222222222222,
      "grad_norm": 1.80450439453125,
      "learning_rate": 7.541528239202659e-06,
      "loss": 0.5329,
      "step": 110
    },
    {
      "epoch": 1.637037037037037,
      "grad_norm": 2.5666868686676025,
      "learning_rate": 7.508305647840532e-06,
      "loss": 0.5433,
      "step": 111
    },
    {
      "epoch": 1.651851851851852,
      "grad_norm": 1.733236312866211,
      "learning_rate": 7.475083056478406e-06,
      "loss": 0.4304,
      "step": 112
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 2.059544086456299,
      "learning_rate": 7.44186046511628e-06,
      "loss": 0.3601,
      "step": 113
    },
    {
      "epoch": 1.6814814814814816,
      "grad_norm": 1.8904452323913574,
      "learning_rate": 7.408637873754153e-06,
      "loss": 0.385,
      "step": 114
    },
    {
      "epoch": 1.6962962962962962,
      "grad_norm": 1.3712379932403564,
      "learning_rate": 7.375415282392027e-06,
      "loss": 0.2972,
      "step": 115
    },
    {
      "epoch": 1.7111111111111112,
      "grad_norm": 1.5603693723678589,
      "learning_rate": 7.342192691029902e-06,
      "loss": 0.3953,
      "step": 116
    },
    {
      "epoch": 1.7259259259259259,
      "grad_norm": 1.4126758575439453,
      "learning_rate": 7.308970099667775e-06,
      "loss": 0.3582,
      "step": 117
    },
    {
      "epoch": 1.7407407407407407,
      "grad_norm": 2.0345335006713867,
      "learning_rate": 7.275747508305648e-06,
      "loss": 0.5015,
      "step": 118
    },
    {
      "epoch": 1.7555555555555555,
      "grad_norm": 2.6806693077087402,
      "learning_rate": 7.242524916943522e-06,
      "loss": 0.4006,
      "step": 119
    },
    {
      "epoch": 1.7703703703703704,
      "grad_norm": 1.6245332956314087,
      "learning_rate": 7.209302325581395e-06,
      "loss": 0.4188,
      "step": 120
    },
    {
      "epoch": 1.7851851851851852,
      "grad_norm": 1.6593953371047974,
      "learning_rate": 7.17607973421927e-06,
      "loss": 0.3694,
      "step": 121
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.595201849937439,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.2838,
      "step": 122
    },
    {
      "epoch": 1.8148148148148149,
      "grad_norm": 1.921108603477478,
      "learning_rate": 7.109634551495017e-06,
      "loss": 0.3044,
      "step": 123
    },
    {
      "epoch": 1.8296296296296295,
      "grad_norm": 1.5152816772460938,
      "learning_rate": 7.076411960132891e-06,
      "loss": 0.3705,
      "step": 124
    },
    {
      "epoch": 1.8444444444444446,
      "grad_norm": 1.7763299942016602,
      "learning_rate": 7.0431893687707646e-06,
      "loss": 0.3935,
      "step": 125
    },
    {
      "epoch": 1.8592592592592592,
      "grad_norm": 1.3404872417449951,
      "learning_rate": 7.009966777408639e-06,
      "loss": 0.3951,
      "step": 126
    },
    {
      "epoch": 1.8740740740740742,
      "grad_norm": 1.7480947971343994,
      "learning_rate": 6.976744186046513e-06,
      "loss": 0.3694,
      "step": 127
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 1.3903143405914307,
      "learning_rate": 6.9435215946843855e-06,
      "loss": 0.3963,
      "step": 128
    },
    {
      "epoch": 1.9037037037037037,
      "grad_norm": 1.579624056816101,
      "learning_rate": 6.910299003322259e-06,
      "loss": 0.4147,
      "step": 129
    },
    {
      "epoch": 1.9185185185185185,
      "grad_norm": 1.6633076667785645,
      "learning_rate": 6.877076411960133e-06,
      "loss": 0.415,
      "step": 130
    },
    {
      "epoch": 1.9333333333333333,
      "grad_norm": 1.4897836446762085,
      "learning_rate": 6.843853820598007e-06,
      "loss": 0.3438,
      "step": 131
    },
    {
      "epoch": 1.9481481481481482,
      "grad_norm": 1.7405959367752075,
      "learning_rate": 6.810631229235881e-06,
      "loss": 0.3973,
      "step": 132
    },
    {
      "epoch": 1.9629629629629628,
      "grad_norm": 1.6084774732589722,
      "learning_rate": 6.777408637873755e-06,
      "loss": 0.3904,
      "step": 133
    },
    {
      "epoch": 1.9777777777777779,
      "grad_norm": 1.4988147020339966,
      "learning_rate": 6.744186046511628e-06,
      "loss": 0.4101,
      "step": 134
    },
    {
      "epoch": 1.9925925925925925,
      "grad_norm": 2.449335813522339,
      "learning_rate": 6.710963455149502e-06,
      "loss": 0.3886,
      "step": 135
    },
    {
      "epoch": 2.0,
      "grad_norm": 2.2566375732421875,
      "learning_rate": 6.6777408637873766e-06,
      "loss": 0.3651,
      "step": 136
    },
    {
      "epoch": 2.0148148148148146,
      "grad_norm": 1.5977928638458252,
      "learning_rate": 6.64451827242525e-06,
      "loss": 0.3709,
      "step": 137
    },
    {
      "epoch": 2.0296296296296297,
      "grad_norm": 2.1500344276428223,
      "learning_rate": 6.611295681063124e-06,
      "loss": 0.4124,
      "step": 138
    },
    {
      "epoch": 2.0444444444444443,
      "grad_norm": 1.3736720085144043,
      "learning_rate": 6.578073089700997e-06,
      "loss": 0.3334,
      "step": 139
    },
    {
      "epoch": 2.0592592592592593,
      "grad_norm": 1.6602332592010498,
      "learning_rate": 6.54485049833887e-06,
      "loss": 0.3443,
      "step": 140
    },
    {
      "epoch": 2.074074074074074,
      "grad_norm": 1.5972793102264404,
      "learning_rate": 6.511627906976745e-06,
      "loss": 0.3969,
      "step": 141
    },
    {
      "epoch": 2.088888888888889,
      "grad_norm": 1.5588147640228271,
      "learning_rate": 6.4784053156146185e-06,
      "loss": 0.2632,
      "step": 142
    },
    {
      "epoch": 2.1037037037037036,
      "grad_norm": 1.4944829940795898,
      "learning_rate": 6.445182724252492e-06,
      "loss": 0.3403,
      "step": 143
    },
    {
      "epoch": 2.1185185185185187,
      "grad_norm": 1.799228310585022,
      "learning_rate": 6.411960132890366e-06,
      "loss": 0.3669,
      "step": 144
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 1.5002415180206299,
      "learning_rate": 6.3787375415282395e-06,
      "loss": 0.3037,
      "step": 145
    },
    {
      "epoch": 2.148148148148148,
      "grad_norm": 1.5463218688964844,
      "learning_rate": 6.345514950166114e-06,
      "loss": 0.2999,
      "step": 146
    },
    {
      "epoch": 2.162962962962963,
      "grad_norm": 1.8339827060699463,
      "learning_rate": 6.312292358803988e-06,
      "loss": 0.3781,
      "step": 147
    },
    {
      "epoch": 2.1777777777777776,
      "grad_norm": 1.9693403244018555,
      "learning_rate": 6.279069767441861e-06,
      "loss": 0.3527,
      "step": 148
    },
    {
      "epoch": 2.1925925925925926,
      "grad_norm": 1.3716813325881958,
      "learning_rate": 6.245847176079734e-06,
      "loss": 0.3193,
      "step": 149
    },
    {
      "epoch": 2.2074074074074073,
      "grad_norm": 1.2879470586776733,
      "learning_rate": 6.212624584717608e-06,
      "loss": 0.3079,
      "step": 150
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 1.3284484148025513,
      "learning_rate": 6.179401993355482e-06,
      "loss": 0.3552,
      "step": 151
    },
    {
      "epoch": 2.237037037037037,
      "grad_norm": 1.1561126708984375,
      "learning_rate": 6.146179401993356e-06,
      "loss": 0.2879,
      "step": 152
    },
    {
      "epoch": 2.251851851851852,
      "grad_norm": 1.2936991453170776,
      "learning_rate": 6.11295681063123e-06,
      "loss": 0.2926,
      "step": 153
    },
    {
      "epoch": 2.2666666666666666,
      "grad_norm": 1.5603609085083008,
      "learning_rate": 6.079734219269103e-06,
      "loss": 0.3591,
      "step": 154
    },
    {
      "epoch": 2.2814814814814817,
      "grad_norm": 1.408933162689209,
      "learning_rate": 6.046511627906977e-06,
      "loss": 0.3313,
      "step": 155
    },
    {
      "epoch": 2.2962962962962963,
      "grad_norm": 1.9709101915359497,
      "learning_rate": 6.0132890365448515e-06,
      "loss": 0.3948,
      "step": 156
    },
    {
      "epoch": 2.311111111111111,
      "grad_norm": 1.5349321365356445,
      "learning_rate": 5.980066445182725e-06,
      "loss": 0.3227,
      "step": 157
    },
    {
      "epoch": 2.325925925925926,
      "grad_norm": 1.5960361957550049,
      "learning_rate": 5.946843853820599e-06,
      "loss": 0.3532,
      "step": 158
    },
    {
      "epoch": 2.3407407407407406,
      "grad_norm": 1.8431493043899536,
      "learning_rate": 5.9136212624584725e-06,
      "loss": 0.3621,
      "step": 159
    },
    {
      "epoch": 2.3555555555555556,
      "grad_norm": 1.875801920890808,
      "learning_rate": 5.880398671096345e-06,
      "loss": 0.4026,
      "step": 160
    },
    {
      "epoch": 2.3703703703703702,
      "grad_norm": 1.7672104835510254,
      "learning_rate": 5.847176079734221e-06,
      "loss": 0.3889,
      "step": 161
    },
    {
      "epoch": 2.3851851851851853,
      "grad_norm": 1.5263267755508423,
      "learning_rate": 5.8139534883720935e-06,
      "loss": 0.332,
      "step": 162
    },
    {
      "epoch": 2.4,
      "grad_norm": 1.624315857887268,
      "learning_rate": 5.780730897009967e-06,
      "loss": 0.3167,
      "step": 163
    },
    {
      "epoch": 2.414814814814815,
      "grad_norm": 1.9534611701965332,
      "learning_rate": 5.747508305647841e-06,
      "loss": 0.3733,
      "step": 164
    },
    {
      "epoch": 2.4296296296296296,
      "grad_norm": 1.6925545930862427,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.3631,
      "step": 165
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 1.6101276874542236,
      "learning_rate": 5.681063122923588e-06,
      "loss": 0.3561,
      "step": 166
    },
    {
      "epoch": 2.4592592592592593,
      "grad_norm": 1.5481266975402832,
      "learning_rate": 5.647840531561463e-06,
      "loss": 0.349,
      "step": 167
    },
    {
      "epoch": 2.474074074074074,
      "grad_norm": 1.6006097793579102,
      "learning_rate": 5.614617940199336e-06,
      "loss": 0.3862,
      "step": 168
    },
    {
      "epoch": 2.488888888888889,
      "grad_norm": 1.4279389381408691,
      "learning_rate": 5.58139534883721e-06,
      "loss": 0.3236,
      "step": 169
    },
    {
      "epoch": 2.5037037037037035,
      "grad_norm": 1.2405160665512085,
      "learning_rate": 5.548172757475083e-06,
      "loss": 0.3013,
      "step": 170
    },
    {
      "epoch": 2.5185185185185186,
      "grad_norm": 1.3768985271453857,
      "learning_rate": 5.5149501661129565e-06,
      "loss": 0.3626,
      "step": 171
    },
    {
      "epoch": 2.533333333333333,
      "grad_norm": 1.6408584117889404,
      "learning_rate": 5.481727574750831e-06,
      "loss": 0.3196,
      "step": 172
    },
    {
      "epoch": 2.5481481481481483,
      "grad_norm": 1.6769694089889526,
      "learning_rate": 5.448504983388705e-06,
      "loss": 0.3694,
      "step": 173
    },
    {
      "epoch": 2.562962962962963,
      "grad_norm": 1.3804996013641357,
      "learning_rate": 5.415282392026578e-06,
      "loss": 0.3502,
      "step": 174
    },
    {
      "epoch": 2.5777777777777775,
      "grad_norm": 1.4021036624908447,
      "learning_rate": 5.382059800664452e-06,
      "loss": 0.3306,
      "step": 175
    },
    {
      "epoch": 2.5925925925925926,
      "grad_norm": 1.2684727907180786,
      "learning_rate": 5.348837209302326e-06,
      "loss": 0.3106,
      "step": 176
    },
    {
      "epoch": 2.6074074074074076,
      "grad_norm": 1.5925308465957642,
      "learning_rate": 5.3156146179402e-06,
      "loss": 0.4211,
      "step": 177
    },
    {
      "epoch": 2.6222222222222222,
      "grad_norm": 1.5546250343322754,
      "learning_rate": 5.282392026578074e-06,
      "loss": 0.3301,
      "step": 178
    },
    {
      "epoch": 2.637037037037037,
      "grad_norm": 1.4001022577285767,
      "learning_rate": 5.2491694352159475e-06,
      "loss": 0.3746,
      "step": 179
    },
    {
      "epoch": 2.651851851851852,
      "grad_norm": 1.4525564908981323,
      "learning_rate": 5.215946843853821e-06,
      "loss": 0.2681,
      "step": 180
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 1.27578604221344,
      "learning_rate": 5.182724252491694e-06,
      "loss": 0.3106,
      "step": 181
    },
    {
      "epoch": 2.6814814814814816,
      "grad_norm": 1.4694350957870483,
      "learning_rate": 5.149501661129569e-06,
      "loss": 0.3065,
      "step": 182
    },
    {
      "epoch": 2.696296296296296,
      "grad_norm": 1.3258838653564453,
      "learning_rate": 5.116279069767442e-06,
      "loss": 0.3233,
      "step": 183
    },
    {
      "epoch": 2.7111111111111112,
      "grad_norm": 1.1991218328475952,
      "learning_rate": 5.083056478405316e-06,
      "loss": 0.2786,
      "step": 184
    },
    {
      "epoch": 2.725925925925926,
      "grad_norm": 1.134046196937561,
      "learning_rate": 5.0498338870431895e-06,
      "loss": 0.2788,
      "step": 185
    },
    {
      "epoch": 2.7407407407407405,
      "grad_norm": 1.1837220191955566,
      "learning_rate": 5.016611295681063e-06,
      "loss": 0.2802,
      "step": 186
    },
    {
      "epoch": 2.7555555555555555,
      "grad_norm": 1.5560253858566284,
      "learning_rate": 4.983388704318937e-06,
      "loss": 0.3686,
      "step": 187
    },
    {
      "epoch": 2.7703703703703706,
      "grad_norm": 1.442020297050476,
      "learning_rate": 4.950166112956811e-06,
      "loss": 0.3648,
      "step": 188
    },
    {
      "epoch": 2.785185185185185,
      "grad_norm": 1.2866986989974976,
      "learning_rate": 4.916943521594685e-06,
      "loss": 0.335,
      "step": 189
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.9921827912330627,
      "learning_rate": 4.883720930232559e-06,
      "loss": 0.3052,
      "step": 190
    },
    {
      "epoch": 2.814814814814815,
      "grad_norm": 1.3171212673187256,
      "learning_rate": 4.850498338870432e-06,
      "loss": 0.3365,
      "step": 191
    },
    {
      "epoch": 2.8296296296296295,
      "grad_norm": 1.202288031578064,
      "learning_rate": 4.817275747508306e-06,
      "loss": 0.2726,
      "step": 192
    },
    {
      "epoch": 2.8444444444444446,
      "grad_norm": 1.5031251907348633,
      "learning_rate": 4.78405315614618e-06,
      "loss": 0.366,
      "step": 193
    },
    {
      "epoch": 2.859259259259259,
      "grad_norm": 1.2141317129135132,
      "learning_rate": 4.750830564784053e-06,
      "loss": 0.2861,
      "step": 194
    },
    {
      "epoch": 2.8740740740740742,
      "grad_norm": 1.64356529712677,
      "learning_rate": 4.717607973421927e-06,
      "loss": 0.3415,
      "step": 195
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 1.2897413969039917,
      "learning_rate": 4.6843853820598015e-06,
      "loss": 0.3334,
      "step": 196
    },
    {
      "epoch": 2.9037037037037035,
      "grad_norm": 1.192264199256897,
      "learning_rate": 4.651162790697675e-06,
      "loss": 0.2927,
      "step": 197
    },
    {
      "epoch": 2.9185185185185185,
      "grad_norm": 1.36125648021698,
      "learning_rate": 4.617940199335549e-06,
      "loss": 0.3129,
      "step": 198
    },
    {
      "epoch": 2.9333333333333336,
      "grad_norm": 1.3408805131912231,
      "learning_rate": 4.5847176079734225e-06,
      "loss": 0.3277,
      "step": 199
    },
    {
      "epoch": 2.948148148148148,
      "grad_norm": 1.3519293069839478,
      "learning_rate": 4.551495016611296e-06,
      "loss": 0.2881,
      "step": 200
    },
    {
      "epoch": 2.962962962962963,
      "grad_norm": 1.2503290176391602,
      "learning_rate": 4.51827242524917e-06,
      "loss": 0.3282,
      "step": 201
    },
    {
      "epoch": 2.977777777777778,
      "grad_norm": 1.357706069946289,
      "learning_rate": 4.4850498338870435e-06,
      "loss": 0.3319,
      "step": 202
    },
    {
      "epoch": 2.9925925925925925,
      "grad_norm": 1.173913598060608,
      "learning_rate": 4.451827242524917e-06,
      "loss": 0.3099,
      "step": 203
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.173913598060608,
      "learning_rate": 4.451827242524917e-06,
      "loss": 0.2291,
      "step": 204
    },
    {
      "epoch": 3.0148148148148146,
      "grad_norm": 1.3551340103149414,
      "learning_rate": 4.418604651162791e-06,
      "loss": 0.3068,
      "step": 205
    },
    {
      "epoch": 3.0296296296296297,
      "grad_norm": 1.3206164836883545,
      "learning_rate": 4.3853820598006645e-06,
      "loss": 0.3416,
      "step": 206
    },
    {
      "epoch": 3.0444444444444443,
      "grad_norm": 1.417013168334961,
      "learning_rate": 4.352159468438539e-06,
      "loss": 0.2771,
      "step": 207
    },
    {
      "epoch": 3.0592592592592593,
      "grad_norm": 1.115646243095398,
      "learning_rate": 4.318936877076413e-06,
      "loss": 0.2828,
      "step": 208
    },
    {
      "epoch": 3.074074074074074,
      "grad_norm": 1.4733177423477173,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.3038,
      "step": 209
    },
    {
      "epoch": 3.088888888888889,
      "grad_norm": 1.3604916334152222,
      "learning_rate": 4.25249169435216e-06,
      "loss": 0.3128,
      "step": 210
    },
    {
      "epoch": 3.1037037037037036,
      "grad_norm": 1.6025428771972656,
      "learning_rate": 4.219269102990034e-06,
      "loss": 0.2808,
      "step": 211
    },
    {
      "epoch": 3.1185185185185187,
      "grad_norm": 1.5683448314666748,
      "learning_rate": 4.186046511627907e-06,
      "loss": 0.3538,
      "step": 212
    },
    {
      "epoch": 3.1333333333333333,
      "grad_norm": 1.5745620727539062,
      "learning_rate": 4.152823920265781e-06,
      "loss": 0.3071,
      "step": 213
    },
    {
      "epoch": 3.148148148148148,
      "grad_norm": 1.26665461063385,
      "learning_rate": 4.119601328903655e-06,
      "loss": 0.2568,
      "step": 214
    },
    {
      "epoch": 3.162962962962963,
      "grad_norm": 1.1559772491455078,
      "learning_rate": 4.086378737541528e-06,
      "loss": 0.2655,
      "step": 215
    },
    {
      "epoch": 3.1777777777777776,
      "grad_norm": 1.3667874336242676,
      "learning_rate": 4.053156146179402e-06,
      "loss": 0.2943,
      "step": 216
    },
    {
      "epoch": 3.1925925925925926,
      "grad_norm": 1.4076658487319946,
      "learning_rate": 4.0199335548172765e-06,
      "loss": 0.242,
      "step": 217
    },
    {
      "epoch": 3.2074074074074073,
      "grad_norm": 1.461408257484436,
      "learning_rate": 3.98671096345515e-06,
      "loss": 0.2792,
      "step": 218
    },
    {
      "epoch": 3.2222222222222223,
      "grad_norm": 1.3601031303405762,
      "learning_rate": 3.953488372093024e-06,
      "loss": 0.2788,
      "step": 219
    },
    {
      "epoch": 3.237037037037037,
      "grad_norm": 1.9735394716262817,
      "learning_rate": 3.9202657807308975e-06,
      "loss": 0.3367,
      "step": 220
    },
    {
      "epoch": 3.251851851851852,
      "grad_norm": 1.5413546562194824,
      "learning_rate": 3.887043189368771e-06,
      "loss": 0.2887,
      "step": 221
    },
    {
      "epoch": 3.2666666666666666,
      "grad_norm": 1.5698870420455933,
      "learning_rate": 3.853820598006645e-06,
      "loss": 0.3121,
      "step": 222
    },
    {
      "epoch": 3.2814814814814817,
      "grad_norm": 1.4351308345794678,
      "learning_rate": 3.8205980066445185e-06,
      "loss": 0.2859,
      "step": 223
    },
    {
      "epoch": 3.2962962962962963,
      "grad_norm": 1.5523098707199097,
      "learning_rate": 3.787375415282392e-06,
      "loss": 0.3496,
      "step": 224
    },
    {
      "epoch": 3.311111111111111,
      "grad_norm": 1.5026061534881592,
      "learning_rate": 3.754152823920266e-06,
      "loss": 0.2744,
      "step": 225
    },
    {
      "epoch": 3.325925925925926,
      "grad_norm": 1.565753698348999,
      "learning_rate": 3.72093023255814e-06,
      "loss": 0.2761,
      "step": 226
    },
    {
      "epoch": 3.3407407407407406,
      "grad_norm": 1.514609456062317,
      "learning_rate": 3.6877076411960135e-06,
      "loss": 0.2385,
      "step": 227
    },
    {
      "epoch": 3.3555555555555556,
      "grad_norm": 1.233588695526123,
      "learning_rate": 3.6544850498338876e-06,
      "loss": 0.2849,
      "step": 228
    },
    {
      "epoch": 3.3703703703703702,
      "grad_norm": 1.3447370529174805,
      "learning_rate": 3.621262458471761e-06,
      "loss": 0.3197,
      "step": 229
    },
    {
      "epoch": 3.3851851851851853,
      "grad_norm": 1.3895092010498047,
      "learning_rate": 3.588039867109635e-06,
      "loss": 0.2869,
      "step": 230
    },
    {
      "epoch": 3.4,
      "grad_norm": 1.326200246810913,
      "learning_rate": 3.5548172757475086e-06,
      "loss": 0.2937,
      "step": 231
    },
    {
      "epoch": 3.414814814814815,
      "grad_norm": 1.2751388549804688,
      "learning_rate": 3.5215946843853823e-06,
      "loss": 0.2611,
      "step": 232
    },
    {
      "epoch": 3.4296296296296296,
      "grad_norm": 1.1317442655563354,
      "learning_rate": 3.4883720930232564e-06,
      "loss": 0.2733,
      "step": 233
    },
    {
      "epoch": 3.4444444444444446,
      "grad_norm": 1.3669071197509766,
      "learning_rate": 3.4551495016611296e-06,
      "loss": 0.2967,
      "step": 234
    },
    {
      "epoch": 3.4592592592592593,
      "grad_norm": 1.451213002204895,
      "learning_rate": 3.4219269102990037e-06,
      "loss": 0.3398,
      "step": 235
    },
    {
      "epoch": 3.474074074074074,
      "grad_norm": 2.680572748184204,
      "learning_rate": 3.3887043189368774e-06,
      "loss": 0.2973,
      "step": 236
    },
    {
      "epoch": 3.488888888888889,
      "grad_norm": 1.5603687763214111,
      "learning_rate": 3.355481727574751e-06,
      "loss": 0.2669,
      "step": 237
    },
    {
      "epoch": 3.5037037037037035,
      "grad_norm": 1.8472412824630737,
      "learning_rate": 3.322259136212625e-06,
      "loss": 0.2956,
      "step": 238
    },
    {
      "epoch": 3.5185185185185186,
      "grad_norm": 1.6677846908569336,
      "learning_rate": 3.2890365448504984e-06,
      "loss": 0.329,
      "step": 239
    },
    {
      "epoch": 3.533333333333333,
      "grad_norm": 1.8696357011795044,
      "learning_rate": 3.2558139534883724e-06,
      "loss": 0.2735,
      "step": 240
    },
    {
      "epoch": 3.5481481481481483,
      "grad_norm": 1.2237964868545532,
      "learning_rate": 3.222591362126246e-06,
      "loss": 0.3011,
      "step": 241
    },
    {
      "epoch": 3.562962962962963,
      "grad_norm": 1.2561728954315186,
      "learning_rate": 3.1893687707641198e-06,
      "loss": 0.2568,
      "step": 242
    },
    {
      "epoch": 3.5777777777777775,
      "grad_norm": 1.1599830389022827,
      "learning_rate": 3.156146179401994e-06,
      "loss": 0.3025,
      "step": 243
    },
    {
      "epoch": 3.5925925925925926,
      "grad_norm": 1.7549623250961304,
      "learning_rate": 3.122923588039867e-06,
      "loss": 0.3252,
      "step": 244
    },
    {
      "epoch": 3.6074074074074076,
      "grad_norm": 1.5652899742126465,
      "learning_rate": 3.089700996677741e-06,
      "loss": 0.341,
      "step": 245
    },
    {
      "epoch": 3.6222222222222222,
      "grad_norm": 1.7946059703826904,
      "learning_rate": 3.056478405315615e-06,
      "loss": 0.3244,
      "step": 246
    },
    {
      "epoch": 3.637037037037037,
      "grad_norm": 1.5564749240875244,
      "learning_rate": 3.0232558139534885e-06,
      "loss": 0.2686,
      "step": 247
    },
    {
      "epoch": 3.651851851851852,
      "grad_norm": 1.439971923828125,
      "learning_rate": 2.9900332225913626e-06,
      "loss": 0.3198,
      "step": 248
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 1.590828537940979,
      "learning_rate": 2.9568106312292363e-06,
      "loss": 0.2874,
      "step": 249
    },
    {
      "epoch": 3.6814814814814816,
      "grad_norm": 1.2883110046386719,
      "learning_rate": 2.9235880398671104e-06,
      "loss": 0.2475,
      "step": 250
    },
    {
      "epoch": 3.696296296296296,
      "grad_norm": 1.2013444900512695,
      "learning_rate": 2.8903654485049836e-06,
      "loss": 0.2911,
      "step": 251
    },
    {
      "epoch": 3.7111111111111112,
      "grad_norm": 1.5711296796798706,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.3113,
      "step": 252
    },
    {
      "epoch": 3.725925925925926,
      "grad_norm": 1.5199639797210693,
      "learning_rate": 2.8239202657807313e-06,
      "loss": 0.2462,
      "step": 253
    },
    {
      "epoch": 3.7407407407407405,
      "grad_norm": 1.2884533405303955,
      "learning_rate": 2.790697674418605e-06,
      "loss": 0.245,
      "step": 254
    },
    {
      "epoch": 3.7555555555555555,
      "grad_norm": 1.4477325677871704,
      "learning_rate": 2.7574750830564782e-06,
      "loss": 0.2409,
      "step": 255
    },
    {
      "epoch": 3.7703703703703706,
      "grad_norm": 1.2022032737731934,
      "learning_rate": 2.7242524916943523e-06,
      "loss": 0.2806,
      "step": 256
    },
    {
      "epoch": 3.785185185185185,
      "grad_norm": 1.467168927192688,
      "learning_rate": 2.691029900332226e-06,
      "loss": 0.2866,
      "step": 257
    },
    {
      "epoch": 3.8,
      "grad_norm": 2.0435290336608887,
      "learning_rate": 2.6578073089701e-06,
      "loss": 0.3211,
      "step": 258
    },
    {
      "epoch": 3.814814814814815,
      "grad_norm": 1.2129874229431152,
      "learning_rate": 2.6245847176079738e-06,
      "loss": 0.2765,
      "step": 259
    },
    {
      "epoch": 3.8296296296296295,
      "grad_norm": 1.4887040853500366,
      "learning_rate": 2.591362126245847e-06,
      "loss": 0.282,
      "step": 260
    },
    {
      "epoch": 3.8444444444444446,
      "grad_norm": 1.3326512575149536,
      "learning_rate": 2.558139534883721e-06,
      "loss": 0.307,
      "step": 261
    },
    {
      "epoch": 3.859259259259259,
      "grad_norm": 1.2139685153961182,
      "learning_rate": 2.5249169435215947e-06,
      "loss": 0.3053,
      "step": 262
    },
    {
      "epoch": 3.8740740740740742,
      "grad_norm": 1.6648317575454712,
      "learning_rate": 2.4916943521594684e-06,
      "loss": 0.2859,
      "step": 263
    },
    {
      "epoch": 3.888888888888889,
      "grad_norm": 1.550366997718811,
      "learning_rate": 2.4584717607973425e-06,
      "loss": 0.3527,
      "step": 264
    },
    {
      "epoch": 3.9037037037037035,
      "grad_norm": 1.641121506690979,
      "learning_rate": 2.425249169435216e-06,
      "loss": 0.2735,
      "step": 265
    },
    {
      "epoch": 3.9185185185185185,
      "grad_norm": 1.244675874710083,
      "learning_rate": 2.39202657807309e-06,
      "loss": 0.3208,
      "step": 266
    },
    {
      "epoch": 3.9333333333333336,
      "grad_norm": 1.4783977270126343,
      "learning_rate": 2.3588039867109635e-06,
      "loss": 0.2869,
      "step": 267
    },
    {
      "epoch": 3.948148148148148,
      "grad_norm": 1.5731481313705444,
      "learning_rate": 2.3255813953488376e-06,
      "loss": 0.4189,
      "step": 268
    },
    {
      "epoch": 3.962962962962963,
      "grad_norm": 1.654645562171936,
      "learning_rate": 2.2923588039867112e-06,
      "loss": 0.2766,
      "step": 269
    },
    {
      "epoch": 3.977777777777778,
      "grad_norm": 1.6379601955413818,
      "learning_rate": 2.259136212624585e-06,
      "loss": 0.3227,
      "step": 270
    },
    {
      "epoch": 3.9925925925925925,
      "grad_norm": 1.3923341035842896,
      "learning_rate": 2.2259136212624586e-06,
      "loss": 0.3051,
      "step": 271
    },
    {
      "epoch": 4.0,
      "grad_norm": 1.9158486127853394,
      "learning_rate": 2.1926910299003322e-06,
      "loss": 0.2157,
      "step": 272
    },
    {
      "epoch": 4.014814814814815,
      "grad_norm": 1.2792537212371826,
      "learning_rate": 2.1594684385382063e-06,
      "loss": 0.2568,
      "step": 273
    },
    {
      "epoch": 4.029629629629629,
      "grad_norm": 1.1235581636428833,
      "learning_rate": 2.12624584717608e-06,
      "loss": 0.2664,
      "step": 274
    },
    {
      "epoch": 4.044444444444444,
      "grad_norm": 1.2936333417892456,
      "learning_rate": 2.0930232558139536e-06,
      "loss": 0.2966,
      "step": 275
    },
    {
      "epoch": 4.059259259259259,
      "grad_norm": 1.2000664472579956,
      "learning_rate": 2.0598006644518273e-06,
      "loss": 0.2487,
      "step": 276
    },
    {
      "epoch": 4.074074074074074,
      "grad_norm": 1.4936013221740723,
      "learning_rate": 2.026578073089701e-06,
      "loss": 0.283,
      "step": 277
    },
    {
      "epoch": 4.088888888888889,
      "grad_norm": 1.551798939704895,
      "learning_rate": 1.993355481727575e-06,
      "loss": 0.275,
      "step": 278
    },
    {
      "epoch": 4.103703703703704,
      "grad_norm": 1.8705427646636963,
      "learning_rate": 1.9601328903654487e-06,
      "loss": 0.3045,
      "step": 279
    },
    {
      "epoch": 4.118518518518519,
      "grad_norm": 1.5146915912628174,
      "learning_rate": 1.9269102990033224e-06,
      "loss": 0.2575,
      "step": 280
    },
    {
      "epoch": 4.133333333333334,
      "grad_norm": 2.1336557865142822,
      "learning_rate": 1.893687707641196e-06,
      "loss": 0.293,
      "step": 281
    },
    {
      "epoch": 4.148148148148148,
      "grad_norm": 1.673853874206543,
      "learning_rate": 1.86046511627907e-06,
      "loss": 0.2228,
      "step": 282
    },
    {
      "epoch": 4.162962962962963,
      "grad_norm": 1.4035648107528687,
      "learning_rate": 1.8272425249169438e-06,
      "loss": 0.236,
      "step": 283
    },
    {
      "epoch": 4.177777777777778,
      "grad_norm": 1.5520694255828857,
      "learning_rate": 1.7940199335548175e-06,
      "loss": 0.2381,
      "step": 284
    },
    {
      "epoch": 4.192592592592592,
      "grad_norm": 1.8831232786178589,
      "learning_rate": 1.7607973421926911e-06,
      "loss": 0.3056,
      "step": 285
    },
    {
      "epoch": 4.207407407407407,
      "grad_norm": 1.524223804473877,
      "learning_rate": 1.7275747508305648e-06,
      "loss": 0.2288,
      "step": 286
    },
    {
      "epoch": 4.222222222222222,
      "grad_norm": 1.7227911949157715,
      "learning_rate": 1.6943521594684387e-06,
      "loss": 0.3001,
      "step": 287
    },
    {
      "epoch": 4.237037037037037,
      "grad_norm": 1.6464650630950928,
      "learning_rate": 1.6611295681063126e-06,
      "loss": 0.2767,
      "step": 288
    },
    {
      "epoch": 4.2518518518518515,
      "grad_norm": 1.462727665901184,
      "learning_rate": 1.6279069767441862e-06,
      "loss": 0.2583,
      "step": 289
    },
    {
      "epoch": 4.266666666666667,
      "grad_norm": 1.3373020887374878,
      "learning_rate": 1.5946843853820599e-06,
      "loss": 0.1861,
      "step": 290
    },
    {
      "epoch": 4.281481481481482,
      "grad_norm": 1.3069071769714355,
      "learning_rate": 1.5614617940199335e-06,
      "loss": 0.2279,
      "step": 291
    },
    {
      "epoch": 4.296296296296296,
      "grad_norm": 2.0804059505462646,
      "learning_rate": 1.5282392026578074e-06,
      "loss": 0.3373,
      "step": 292
    },
    {
      "epoch": 4.311111111111111,
      "grad_norm": 1.5276566743850708,
      "learning_rate": 1.4950166112956813e-06,
      "loss": 0.2211,
      "step": 293
    },
    {
      "epoch": 4.325925925925926,
      "grad_norm": 1.7238777875900269,
      "learning_rate": 1.4617940199335552e-06,
      "loss": 0.2492,
      "step": 294
    },
    {
      "epoch": 4.340740740740741,
      "grad_norm": 1.7892625331878662,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.2521,
      "step": 295
    },
    {
      "epoch": 4.355555555555555,
      "grad_norm": 1.83062744140625,
      "learning_rate": 1.3953488372093025e-06,
      "loss": 0.2445,
      "step": 296
    },
    {
      "epoch": 4.37037037037037,
      "grad_norm": 1.665940284729004,
      "learning_rate": 1.3621262458471762e-06,
      "loss": 0.2188,
      "step": 297
    },
    {
      "epoch": 4.385185185185185,
      "grad_norm": 1.7071560621261597,
      "learning_rate": 1.32890365448505e-06,
      "loss": 0.2294,
      "step": 298
    },
    {
      "epoch": 4.4,
      "grad_norm": 1.5902904272079468,
      "learning_rate": 1.2956810631229235e-06,
      "loss": 0.2443,
      "step": 299
    },
    {
      "epoch": 4.4148148148148145,
      "grad_norm": 1.6071012020111084,
      "learning_rate": 1.2624584717607974e-06,
      "loss": 0.2268,
      "step": 300
    },
    {
      "epoch": 4.42962962962963,
      "grad_norm": 1.481217384338379,
      "learning_rate": 1.2292358803986712e-06,
      "loss": 0.2245,
      "step": 301
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 1.7338011264801025,
      "learning_rate": 1.196013289036545e-06,
      "loss": 0.2481,
      "step": 302
    },
    {
      "epoch": 4.459259259259259,
      "grad_norm": 1.83079195022583,
      "learning_rate": 1.1627906976744188e-06,
      "loss": 0.2771,
      "step": 303
    },
    {
      "epoch": 4.474074074074074,
      "grad_norm": 1.6256608963012695,
      "learning_rate": 1.1295681063122925e-06,
      "loss": 0.2418,
      "step": 304
    },
    {
      "epoch": 4.488888888888889,
      "grad_norm": 1.7368595600128174,
      "learning_rate": 1.0963455149501661e-06,
      "loss": 0.2325,
      "step": 305
    },
    {
      "epoch": 4.503703703703704,
      "grad_norm": 1.8986256122589111,
      "learning_rate": 1.06312292358804e-06,
      "loss": 0.2685,
      "step": 306
    },
    {
      "epoch": 4.518518518518518,
      "grad_norm": 1.9530152082443237,
      "learning_rate": 1.0299003322259137e-06,
      "loss": 0.2734,
      "step": 307
    },
    {
      "epoch": 4.533333333333333,
      "grad_norm": 2.2950448989868164,
      "learning_rate": 9.966777408637875e-07,
      "loss": 0.2775,
      "step": 308
    },
    {
      "epoch": 4.548148148148148,
      "grad_norm": 1.7885551452636719,
      "learning_rate": 9.634551495016612e-07,
      "loss": 0.2299,
      "step": 309
    },
    {
      "epoch": 4.562962962962963,
      "grad_norm": 1.8106021881103516,
      "learning_rate": 9.30232558139535e-07,
      "loss": 0.2665,
      "step": 310
    },
    {
      "epoch": 4.5777777777777775,
      "grad_norm": 1.7523531913757324,
      "learning_rate": 8.970099667774087e-07,
      "loss": 0.2514,
      "step": 311
    },
    {
      "epoch": 4.592592592592593,
      "grad_norm": 1.5606251955032349,
      "learning_rate": 8.637873754152824e-07,
      "loss": 0.216,
      "step": 312
    },
    {
      "epoch": 4.607407407407408,
      "grad_norm": 1.3957760334014893,
      "learning_rate": 8.305647840531563e-07,
      "loss": 0.2133,
      "step": 313
    },
    {
      "epoch": 4.622222222222222,
      "grad_norm": 1.4215636253356934,
      "learning_rate": 7.973421926910299e-07,
      "loss": 0.2159,
      "step": 314
    },
    {
      "epoch": 4.637037037037037,
      "grad_norm": 1.9139257669448853,
      "learning_rate": 7.641196013289037e-07,
      "loss": 0.2799,
      "step": 315
    },
    {
      "epoch": 4.651851851851852,
      "grad_norm": 1.5956664085388184,
      "learning_rate": 7.308970099667776e-07,
      "loss": 0.2386,
      "step": 316
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 1.7055957317352295,
      "learning_rate": 6.976744186046513e-07,
      "loss": 0.2256,
      "step": 317
    },
    {
      "epoch": 4.681481481481481,
      "grad_norm": 2.2495598793029785,
      "learning_rate": 6.64451827242525e-07,
      "loss": 0.2795,
      "step": 318
    },
    {
      "epoch": 4.696296296296296,
      "grad_norm": 1.4281206130981445,
      "learning_rate": 6.312292358803987e-07,
      "loss": 0.2295,
      "step": 319
    },
    {
      "epoch": 4.711111111111111,
      "grad_norm": 1.7233028411865234,
      "learning_rate": 5.980066445182725e-07,
      "loss": 0.2586,
      "step": 320
    },
    {
      "epoch": 4.725925925925926,
      "grad_norm": 2.274329662322998,
      "learning_rate": 5.647840531561462e-07,
      "loss": 0.2477,
      "step": 321
    },
    {
      "epoch": 4.7407407407407405,
      "grad_norm": 1.6101205348968506,
      "learning_rate": 5.3156146179402e-07,
      "loss": 0.2304,
      "step": 322
    },
    {
      "epoch": 4.7555555555555555,
      "grad_norm": 1.9844639301300049,
      "learning_rate": 4.983388704318938e-07,
      "loss": 0.2888,
      "step": 323
    },
    {
      "epoch": 4.770370370370371,
      "grad_norm": 1.7177938222885132,
      "learning_rate": 4.651162790697675e-07,
      "loss": 0.2543,
      "step": 324
    },
    {
      "epoch": 4.785185185185185,
      "grad_norm": 1.7451181411743164,
      "learning_rate": 4.318936877076412e-07,
      "loss": 0.2362,
      "step": 325
    },
    {
      "epoch": 4.8,
      "grad_norm": 1.9255115985870361,
      "learning_rate": 3.9867109634551497e-07,
      "loss": 0.2689,
      "step": 326
    },
    {
      "epoch": 4.814814814814815,
      "grad_norm": 1.6680419445037842,
      "learning_rate": 3.654485049833888e-07,
      "loss": 0.2505,
      "step": 327
    },
    {
      "epoch": 4.82962962962963,
      "grad_norm": 1.8660964965820312,
      "learning_rate": 3.322259136212625e-07,
      "loss": 0.2671,
      "step": 328
    },
    {
      "epoch": 4.844444444444444,
      "grad_norm": 1.9916861057281494,
      "learning_rate": 2.9900332225913623e-07,
      "loss": 0.2519,
      "step": 329
    },
    {
      "epoch": 4.859259259259259,
      "grad_norm": 1.5888378620147705,
      "learning_rate": 2.6578073089701e-07,
      "loss": 0.2505,
      "step": 330
    },
    {
      "epoch": 4.874074074074074,
      "grad_norm": 1.654725193977356,
      "learning_rate": 2.3255813953488374e-07,
      "loss": 0.2466,
      "step": 331
    },
    {
      "epoch": 4.888888888888889,
      "grad_norm": 1.4593290090560913,
      "learning_rate": 1.9933554817275749e-07,
      "loss": 0.2039,
      "step": 332
    },
    {
      "epoch": 4.9037037037037035,
      "grad_norm": 1.5956478118896484,
      "learning_rate": 1.6611295681063126e-07,
      "loss": 0.2221,
      "step": 333
    },
    {
      "epoch": 4.9185185185185185,
      "grad_norm": 1.33909273147583,
      "learning_rate": 1.32890365448505e-07,
      "loss": 0.192,
      "step": 334
    },
    {
      "epoch": 4.933333333333334,
      "grad_norm": 1.7868711948394775,
      "learning_rate": 9.966777408637874e-08,
      "loss": 0.2477,
      "step": 335
    }
  ],
  "logging_steps": 1,
  "max_steps": 335,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 150,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4351170969600.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}