diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,7801 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.49992924461551524, + "eval_steps": 276, + "global_step": 1104, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00045283446070245947, + "grad_norm": 4.009160995483398, + "learning_rate": 1.5e-06, + "loss": 1.777, + "step": 1 + }, + { + "epoch": 0.00045283446070245947, + "eval_loss": 1.9080619812011719, + "eval_runtime": 19.3772, + "eval_samples_per_second": 5.883, + "eval_steps_per_second": 0.774, + "step": 1 + }, + { + "epoch": 0.0009056689214049189, + "grad_norm": 3.954909086227417, + "learning_rate": 3e-06, + "loss": 1.7658, + "step": 2 + }, + { + "epoch": 0.0013585033821073783, + "grad_norm": 3.939889907836914, + "learning_rate": 4.5e-06, + "loss": 1.7708, + "step": 3 + }, + { + "epoch": 0.0018113378428098379, + "grad_norm": 2.411039352416992, + "learning_rate": 6e-06, + "loss": 1.7529, + "step": 4 + }, + { + "epoch": 0.0022641723035122974, + "grad_norm": 1.929020643234253, + "learning_rate": 7.5e-06, + "loss": 1.6849, + "step": 5 + }, + { + "epoch": 0.0027170067642147567, + "grad_norm": 2.1016135215759277, + "learning_rate": 9e-06, + "loss": 1.7142, + "step": 6 + }, + { + "epoch": 0.003169841224917216, + "grad_norm": 2.308230400085449, + "learning_rate": 1.05e-05, + "loss": 1.7518, + "step": 7 + }, + { + "epoch": 0.0036226756856196757, + "grad_norm": 1.8725852966308594, + "learning_rate": 1.2e-05, + "loss": 1.7033, + "step": 8 + }, + { + "epoch": 0.004075510146322135, + "grad_norm": 2.1712286472320557, + "learning_rate": 1.3500000000000001e-05, + "loss": 1.6949, + "step": 9 + }, + { + "epoch": 0.004528344607024595, + "grad_norm": 1.3823119401931763, + "learning_rate": 1.5e-05, + "loss": 1.647, + "step": 10 + }, + { + "epoch": 0.004981179067727054, + "grad_norm": 1.3454489707946777, + "learning_rate": 1.4999999153937153e-05, + "loss": 1.6661, + "step": 11 + }, + { + "epoch": 0.005434013528429513, + "grad_norm": 1.0993516445159912, + "learning_rate": 1.4999996615748808e-05, + "loss": 1.5813, + "step": 12 + }, + { + "epoch": 0.005886847989131973, + "grad_norm": 1.3689382076263428, + "learning_rate": 1.4999992385435533e-05, + "loss": 1.5947, + "step": 13 + }, + { + "epoch": 0.006339682449834432, + "grad_norm": 1.1131012439727783, + "learning_rate": 1.4999986462998284e-05, + "loss": 1.6002, + "step": 14 + }, + { + "epoch": 0.006792516910536892, + "grad_norm": 1.140008807182312, + "learning_rate": 1.4999978848438399e-05, + "loss": 1.5594, + "step": 15 + }, + { + "epoch": 0.0072453513712393515, + "grad_norm": 0.921599268913269, + "learning_rate": 1.4999969541757593e-05, + "loss": 1.5514, + "step": 16 + }, + { + "epoch": 0.007698185831941811, + "grad_norm": 0.9127187132835388, + "learning_rate": 1.4999958542957966e-05, + "loss": 1.5352, + "step": 17 + }, + { + "epoch": 0.00815102029264427, + "grad_norm": 0.7846677303314209, + "learning_rate": 1.4999945852042004e-05, + "loss": 1.5188, + "step": 18 + }, + { + "epoch": 0.00860385475334673, + "grad_norm": 0.843215823173523, + "learning_rate": 1.4999931469012563e-05, + "loss": 1.5185, + "step": 19 + }, + { + "epoch": 0.00905668921404919, + "grad_norm": 0.8540645241737366, + "learning_rate": 1.4999915393872895e-05, + "loss": 1.5068, + "step": 20 + }, + { + "epoch": 0.00950952367475165, + "grad_norm": 0.7134804725646973, + "learning_rate": 
1.4999897626626621e-05, + "loss": 1.515, + "step": 21 + }, + { + "epoch": 0.009962358135454107, + "grad_norm": 0.870681643486023, + "learning_rate": 1.4999878167277757e-05, + "loss": 1.4962, + "step": 22 + }, + { + "epoch": 0.010415192596156567, + "grad_norm": 0.7976385951042175, + "learning_rate": 1.4999857015830684e-05, + "loss": 1.4902, + "step": 23 + }, + { + "epoch": 0.010868027056859027, + "grad_norm": 0.6572742462158203, + "learning_rate": 1.4999834172290182e-05, + "loss": 1.4845, + "step": 24 + }, + { + "epoch": 0.011320861517561486, + "grad_norm": 0.7284879684448242, + "learning_rate": 1.49998096366614e-05, + "loss": 1.4778, + "step": 25 + }, + { + "epoch": 0.011773695978263946, + "grad_norm": 0.7442657351493835, + "learning_rate": 1.4999783408949875e-05, + "loss": 1.4279, + "step": 26 + }, + { + "epoch": 0.012226530438966406, + "grad_norm": 0.7158825993537903, + "learning_rate": 1.4999755489161527e-05, + "loss": 1.4394, + "step": 27 + }, + { + "epoch": 0.012679364899668864, + "grad_norm": 0.6678286790847778, + "learning_rate": 1.4999725877302652e-05, + "loss": 1.4384, + "step": 28 + }, + { + "epoch": 0.013132199360371324, + "grad_norm": 0.6753094792366028, + "learning_rate": 1.4999694573379931e-05, + "loss": 1.4938, + "step": 29 + }, + { + "epoch": 0.013585033821073783, + "grad_norm": 0.8098469972610474, + "learning_rate": 1.499966157740043e-05, + "loss": 1.413, + "step": 30 + }, + { + "epoch": 0.014037868281776243, + "grad_norm": 0.6817219257354736, + "learning_rate": 1.4999626889371588e-05, + "loss": 1.4387, + "step": 31 + }, + { + "epoch": 0.014490702742478703, + "grad_norm": 0.6587890982627869, + "learning_rate": 1.4999590509301237e-05, + "loss": 1.3972, + "step": 32 + }, + { + "epoch": 0.014943537203181163, + "grad_norm": 0.6853736042976379, + "learning_rate": 1.4999552437197582e-05, + "loss": 1.4371, + "step": 33 + }, + { + "epoch": 0.015396371663883622, + "grad_norm": 0.6654878854751587, + "learning_rate": 1.4999512673069211e-05, + "loss": 1.3717, + "step": 34 + }, + { + "epoch": 0.015849206124586082, + "grad_norm": 0.6802534461021423, + "learning_rate": 1.4999471216925097e-05, + "loss": 1.3965, + "step": 35 + }, + { + "epoch": 0.01630204058528854, + "grad_norm": 0.638325035572052, + "learning_rate": 1.4999428068774597e-05, + "loss": 1.3658, + "step": 36 + }, + { + "epoch": 0.016754875045991, + "grad_norm": 0.6029091477394104, + "learning_rate": 1.499938322862744e-05, + "loss": 1.3593, + "step": 37 + }, + { + "epoch": 0.01720770950669346, + "grad_norm": 0.6502748727798462, + "learning_rate": 1.4999336696493746e-05, + "loss": 1.3406, + "step": 38 + }, + { + "epoch": 0.017660543967395918, + "grad_norm": 0.5874539613723755, + "learning_rate": 1.4999288472384011e-05, + "loss": 1.3959, + "step": 39 + }, + { + "epoch": 0.01811337842809838, + "grad_norm": 0.5745269060134888, + "learning_rate": 1.499923855630912e-05, + "loss": 1.3568, + "step": 40 + }, + { + "epoch": 0.018566212888800837, + "grad_norm": 0.6025118827819824, + "learning_rate": 1.4999186948280329e-05, + "loss": 1.3679, + "step": 41 + }, + { + "epoch": 0.0190190473495033, + "grad_norm": 0.6154868006706238, + "learning_rate": 1.4999133648309286e-05, + "loss": 1.362, + "step": 42 + }, + { + "epoch": 0.019471881810205757, + "grad_norm": 0.5797818303108215, + "learning_rate": 1.4999078656408013e-05, + "loss": 1.3357, + "step": 43 + }, + { + "epoch": 0.019924716270908215, + "grad_norm": 0.6571888327598572, + "learning_rate": 1.499902197258892e-05, + "loss": 1.3554, + "step": 44 + }, + { + "epoch": 0.020377550731610676, + 
"grad_norm": 0.6282890439033508, + "learning_rate": 1.4998963596864794e-05, + "loss": 1.3436, + "step": 45 + }, + { + "epoch": 0.020830385192313134, + "grad_norm": 0.541675865650177, + "learning_rate": 1.4998903529248806e-05, + "loss": 1.3465, + "step": 46 + }, + { + "epoch": 0.021283219653015596, + "grad_norm": 0.629550576210022, + "learning_rate": 1.499884176975451e-05, + "loss": 1.3405, + "step": 47 + }, + { + "epoch": 0.021736054113718054, + "grad_norm": 0.6273293495178223, + "learning_rate": 1.4998778318395837e-05, + "loss": 1.3425, + "step": 48 + }, + { + "epoch": 0.022188888574420515, + "grad_norm": 0.5797371864318848, + "learning_rate": 1.4998713175187105e-05, + "loss": 1.3066, + "step": 49 + }, + { + "epoch": 0.022641723035122973, + "grad_norm": 0.6254112720489502, + "learning_rate": 1.499864634014301e-05, + "loss": 1.287, + "step": 50 + }, + { + "epoch": 0.02309455749582543, + "grad_norm": 0.5634319186210632, + "learning_rate": 1.4998577813278631e-05, + "loss": 1.3163, + "step": 51 + }, + { + "epoch": 0.023547391956527892, + "grad_norm": 0.634515106678009, + "learning_rate": 1.4998507594609432e-05, + "loss": 1.293, + "step": 52 + }, + { + "epoch": 0.02400022641723035, + "grad_norm": 0.6572280526161194, + "learning_rate": 1.499843568415125e-05, + "loss": 1.3049, + "step": 53 + }, + { + "epoch": 0.024453060877932812, + "grad_norm": 0.627619206905365, + "learning_rate": 1.4998362081920313e-05, + "loss": 1.293, + "step": 54 + }, + { + "epoch": 0.02490589533863527, + "grad_norm": 0.5970916152000427, + "learning_rate": 1.4998286787933226e-05, + "loss": 1.3552, + "step": 55 + }, + { + "epoch": 0.025358729799337728, + "grad_norm": 0.5540199875831604, + "learning_rate": 1.4998209802206977e-05, + "loss": 1.3124, + "step": 56 + }, + { + "epoch": 0.02581156426004019, + "grad_norm": 0.5490045547485352, + "learning_rate": 1.4998131124758935e-05, + "loss": 1.2801, + "step": 57 + }, + { + "epoch": 0.026264398720742647, + "grad_norm": 0.6576207280158997, + "learning_rate": 1.4998050755606851e-05, + "loss": 1.2807, + "step": 58 + }, + { + "epoch": 0.02671723318144511, + "grad_norm": 0.617120623588562, + "learning_rate": 1.4997968694768856e-05, + "loss": 1.2434, + "step": 59 + }, + { + "epoch": 0.027170067642147567, + "grad_norm": 0.6149354577064514, + "learning_rate": 1.4997884942263468e-05, + "loss": 1.2947, + "step": 60 + }, + { + "epoch": 0.02762290210285003, + "grad_norm": 0.6856628060340881, + "learning_rate": 1.499779949810958e-05, + "loss": 1.304, + "step": 61 + }, + { + "epoch": 0.028075736563552486, + "grad_norm": 0.5651207566261292, + "learning_rate": 1.4997712362326468e-05, + "loss": 1.2328, + "step": 62 + }, + { + "epoch": 0.028528571024254944, + "grad_norm": 0.6854087114334106, + "learning_rate": 1.4997623534933796e-05, + "loss": 1.2766, + "step": 63 + }, + { + "epoch": 0.028981405484957406, + "grad_norm": 0.609596848487854, + "learning_rate": 1.4997533015951603e-05, + "loss": 1.2682, + "step": 64 + }, + { + "epoch": 0.029434239945659864, + "grad_norm": 0.575474202632904, + "learning_rate": 1.499744080540031e-05, + "loss": 1.1962, + "step": 65 + }, + { + "epoch": 0.029887074406362325, + "grad_norm": 0.6126519441604614, + "learning_rate": 1.4997346903300723e-05, + "loss": 1.2239, + "step": 66 + }, + { + "epoch": 0.030339908867064783, + "grad_norm": 0.5966687798500061, + "learning_rate": 1.4997251309674028e-05, + "loss": 1.2515, + "step": 67 + }, + { + "epoch": 0.030792743327767245, + "grad_norm": 0.7234688997268677, + "learning_rate": 1.499715402454179e-05, + "loss": 1.2173, + "step": 
68 + }, + { + "epoch": 0.031245577788469703, + "grad_norm": 0.5534050464630127, + "learning_rate": 1.4997055047925962e-05, + "loss": 1.2269, + "step": 69 + }, + { + "epoch": 0.031698412249172164, + "grad_norm": 0.7033390402793884, + "learning_rate": 1.499695437984887e-05, + "loss": 1.2371, + "step": 70 + }, + { + "epoch": 0.03215124670987462, + "grad_norm": 0.7099838852882385, + "learning_rate": 1.4996852020333232e-05, + "loss": 1.2639, + "step": 71 + }, + { + "epoch": 0.03260408117057708, + "grad_norm": 0.5796670913696289, + "learning_rate": 1.4996747969402139e-05, + "loss": 1.2291, + "step": 72 + }, + { + "epoch": 0.03305691563127954, + "grad_norm": 0.6576344966888428, + "learning_rate": 1.4996642227079065e-05, + "loss": 1.1869, + "step": 73 + }, + { + "epoch": 0.033509750091982, + "grad_norm": 0.6396910548210144, + "learning_rate": 1.4996534793387871e-05, + "loss": 1.2227, + "step": 74 + }, + { + "epoch": 0.03396258455268446, + "grad_norm": 0.6648237109184265, + "learning_rate": 1.4996425668352793e-05, + "loss": 1.2715, + "step": 75 + }, + { + "epoch": 0.03441541901338692, + "grad_norm": 0.6780776381492615, + "learning_rate": 1.4996314851998453e-05, + "loss": 1.226, + "step": 76 + }, + { + "epoch": 0.03486825347408938, + "grad_norm": 0.5589417815208435, + "learning_rate": 1.4996202344349853e-05, + "loss": 1.2053, + "step": 77 + }, + { + "epoch": 0.035321087934791835, + "grad_norm": 0.5351216793060303, + "learning_rate": 1.4996088145432375e-05, + "loss": 1.2103, + "step": 78 + }, + { + "epoch": 0.0357739223954943, + "grad_norm": 0.4985811114311218, + "learning_rate": 1.4995972255271787e-05, + "loss": 1.2327, + "step": 79 + }, + { + "epoch": 0.03622675685619676, + "grad_norm": 0.5302553176879883, + "learning_rate": 1.499585467389423e-05, + "loss": 1.1468, + "step": 80 + }, + { + "epoch": 0.03667959131689921, + "grad_norm": 0.6337281465530396, + "learning_rate": 1.499573540132624e-05, + "loss": 1.2117, + "step": 81 + }, + { + "epoch": 0.037132425777601674, + "grad_norm": 0.8564434051513672, + "learning_rate": 1.4995614437594721e-05, + "loss": 1.1857, + "step": 82 + }, + { + "epoch": 0.037585260238304136, + "grad_norm": 0.7630729675292969, + "learning_rate": 1.4995491782726968e-05, + "loss": 1.2389, + "step": 83 + }, + { + "epoch": 0.0380380946990066, + "grad_norm": 0.7095062732696533, + "learning_rate": 1.499536743675065e-05, + "loss": 1.162, + "step": 84 + }, + { + "epoch": 0.03849092915970905, + "grad_norm": 0.7915695309638977, + "learning_rate": 1.4995241399693827e-05, + "loss": 1.2049, + "step": 85 + }, + { + "epoch": 0.03894376362041151, + "grad_norm": 0.6757071018218994, + "learning_rate": 1.4995113671584933e-05, + "loss": 1.2344, + "step": 86 + }, + { + "epoch": 0.039396598081113975, + "grad_norm": 0.8018056750297546, + "learning_rate": 1.4994984252452782e-05, + "loss": 1.1507, + "step": 87 + }, + { + "epoch": 0.03984943254181643, + "grad_norm": 0.8417953848838806, + "learning_rate": 1.4994853142326578e-05, + "loss": 1.1799, + "step": 88 + }, + { + "epoch": 0.04030226700251889, + "grad_norm": 0.7193191051483154, + "learning_rate": 1.4994720341235898e-05, + "loss": 1.1755, + "step": 89 + }, + { + "epoch": 0.04075510146322135, + "grad_norm": 1.2904716730117798, + "learning_rate": 1.4994585849210707e-05, + "loss": 1.1643, + "step": 90 + }, + { + "epoch": 0.041207935923923814, + "grad_norm": 0.7058910727500916, + "learning_rate": 1.4994449666281348e-05, + "loss": 1.1737, + "step": 91 + }, + { + "epoch": 0.04166077038462627, + "grad_norm": 0.9418469667434692, + "learning_rate": 
1.4994311792478543e-05, + "loss": 1.1885, + "step": 92 + }, + { + "epoch": 0.04211360484532873, + "grad_norm": 0.8901066780090332, + "learning_rate": 1.4994172227833402e-05, + "loss": 1.1702, + "step": 93 + }, + { + "epoch": 0.04256643930603119, + "grad_norm": 0.5510401129722595, + "learning_rate": 1.4994030972377414e-05, + "loss": 1.1792, + "step": 94 + }, + { + "epoch": 0.043019273766733646, + "grad_norm": 0.7584707140922546, + "learning_rate": 1.4993888026142448e-05, + "loss": 1.1627, + "step": 95 + }, + { + "epoch": 0.04347210822743611, + "grad_norm": 0.7134795188903809, + "learning_rate": 1.499374338916075e-05, + "loss": 1.236, + "step": 96 + }, + { + "epoch": 0.04392494268813857, + "grad_norm": 0.6071385145187378, + "learning_rate": 1.499359706146496e-05, + "loss": 1.1651, + "step": 97 + }, + { + "epoch": 0.04437777714884103, + "grad_norm": 0.6142598390579224, + "learning_rate": 1.4993449043088088e-05, + "loss": 1.129, + "step": 98 + }, + { + "epoch": 0.044830611609543485, + "grad_norm": 0.713774561882019, + "learning_rate": 1.4993299334063528e-05, + "loss": 1.1959, + "step": 99 + }, + { + "epoch": 0.045283446070245946, + "grad_norm": 0.6220033764839172, + "learning_rate": 1.499314793442506e-05, + "loss": 1.1464, + "step": 100 + }, + { + "epoch": 0.04573628053094841, + "grad_norm": 0.9813457727432251, + "learning_rate": 1.4992994844206843e-05, + "loss": 1.1228, + "step": 101 + }, + { + "epoch": 0.04618911499165086, + "grad_norm": 0.9431173205375671, + "learning_rate": 1.499284006344341e-05, + "loss": 1.1478, + "step": 102 + }, + { + "epoch": 0.04664194945235332, + "grad_norm": 0.5895239114761353, + "learning_rate": 1.4992683592169691e-05, + "loss": 1.1035, + "step": 103 + }, + { + "epoch": 0.047094783913055785, + "grad_norm": 1.2074604034423828, + "learning_rate": 1.4992525430420984e-05, + "loss": 1.1472, + "step": 104 + }, + { + "epoch": 0.047547618373758246, + "grad_norm": 0.8212599158287048, + "learning_rate": 1.499236557823297e-05, + "loss": 1.1222, + "step": 105 + }, + { + "epoch": 0.0480004528344607, + "grad_norm": 0.7460811138153076, + "learning_rate": 1.4992204035641721e-05, + "loss": 1.1551, + "step": 106 + }, + { + "epoch": 0.04845328729516316, + "grad_norm": 0.9674639105796814, + "learning_rate": 1.499204080268368e-05, + "loss": 1.15, + "step": 107 + }, + { + "epoch": 0.048906121755865624, + "grad_norm": 0.7379394173622131, + "learning_rate": 1.4991875879395677e-05, + "loss": 1.096, + "step": 108 + }, + { + "epoch": 0.04935895621656808, + "grad_norm": 0.7826714515686035, + "learning_rate": 1.4991709265814918e-05, + "loss": 1.1086, + "step": 109 + }, + { + "epoch": 0.04981179067727054, + "grad_norm": 0.7406508326530457, + "learning_rate": 1.4991540961978997e-05, + "loss": 1.1203, + "step": 110 + }, + { + "epoch": 0.050264625137973, + "grad_norm": 0.6145086884498596, + "learning_rate": 1.4991370967925882e-05, + "loss": 1.1688, + "step": 111 + }, + { + "epoch": 0.050717459598675456, + "grad_norm": 0.7194088697433472, + "learning_rate": 1.4991199283693933e-05, + "loss": 1.0994, + "step": 112 + }, + { + "epoch": 0.05117029405937792, + "grad_norm": 0.6828657984733582, + "learning_rate": 1.499102590932188e-05, + "loss": 1.1362, + "step": 113 + }, + { + "epoch": 0.05162312852008038, + "grad_norm": 0.6402639150619507, + "learning_rate": 1.4990850844848841e-05, + "loss": 1.1146, + "step": 114 + }, + { + "epoch": 0.05207596298078284, + "grad_norm": 0.7166230082511902, + "learning_rate": 1.4990674090314313e-05, + "loss": 1.085, + "step": 115 + }, + { + "epoch": 0.052528797441485295, + 
"grad_norm": 0.6384372711181641, + "learning_rate": 1.4990495645758174e-05, + "loss": 1.0982, + "step": 116 + }, + { + "epoch": 0.052981631902187756, + "grad_norm": 0.6809221506118774, + "learning_rate": 1.4990315511220685e-05, + "loss": 1.1414, + "step": 117 + }, + { + "epoch": 0.05343446636289022, + "grad_norm": 0.6124834418296814, + "learning_rate": 1.4990133686742488e-05, + "loss": 1.0886, + "step": 118 + }, + { + "epoch": 0.05388730082359267, + "grad_norm": 0.7249141335487366, + "learning_rate": 1.4989950172364603e-05, + "loss": 1.1568, + "step": 119 + }, + { + "epoch": 0.054340135284295134, + "grad_norm": 0.6189077496528625, + "learning_rate": 1.498976496812844e-05, + "loss": 1.0731, + "step": 120 + }, + { + "epoch": 0.054792969744997595, + "grad_norm": 0.8025009036064148, + "learning_rate": 1.4989578074075777e-05, + "loss": 1.1252, + "step": 121 + }, + { + "epoch": 0.05524580420570006, + "grad_norm": 0.6181627511978149, + "learning_rate": 1.4989389490248783e-05, + "loss": 1.0713, + "step": 122 + }, + { + "epoch": 0.05569863866640251, + "grad_norm": 0.8665769696235657, + "learning_rate": 1.4989199216690006e-05, + "loss": 1.1481, + "step": 123 + }, + { + "epoch": 0.05615147312710497, + "grad_norm": 0.7832885384559631, + "learning_rate": 1.4989007253442377e-05, + "loss": 1.0775, + "step": 124 + }, + { + "epoch": 0.056604307587807434, + "grad_norm": 0.7431057095527649, + "learning_rate": 1.4988813600549202e-05, + "loss": 1.1013, + "step": 125 + }, + { + "epoch": 0.05705714204850989, + "grad_norm": 1.1883816719055176, + "learning_rate": 1.4988618258054176e-05, + "loss": 1.1003, + "step": 126 + }, + { + "epoch": 0.05750997650921235, + "grad_norm": 0.6886972188949585, + "learning_rate": 1.498842122600137e-05, + "loss": 1.0754, + "step": 127 + }, + { + "epoch": 0.05796281096991481, + "grad_norm": 0.8941016793251038, + "learning_rate": 1.4988222504435235e-05, + "loss": 1.0577, + "step": 128 + }, + { + "epoch": 0.05841564543061727, + "grad_norm": 0.9873454570770264, + "learning_rate": 1.498802209340061e-05, + "loss": 1.089, + "step": 129 + }, + { + "epoch": 0.05886847989131973, + "grad_norm": 0.6695548892021179, + "learning_rate": 1.4987819992942712e-05, + "loss": 1.0799, + "step": 130 + }, + { + "epoch": 0.05932131435202219, + "grad_norm": 0.9906266331672668, + "learning_rate": 1.4987616203107134e-05, + "loss": 1.0753, + "step": 131 + }, + { + "epoch": 0.05977414881272465, + "grad_norm": 0.6169360876083374, + "learning_rate": 1.4987410723939857e-05, + "loss": 1.0863, + "step": 132 + }, + { + "epoch": 0.060226983273427105, + "grad_norm": 1.0210751295089722, + "learning_rate": 1.4987203555487242e-05, + "loss": 1.0493, + "step": 133 + }, + { + "epoch": 0.06067981773412957, + "grad_norm": 0.675287663936615, + "learning_rate": 1.4986994697796022e-05, + "loss": 1.0669, + "step": 134 + }, + { + "epoch": 0.06113265219483203, + "grad_norm": 0.8825522661209106, + "learning_rate": 1.4986784150913329e-05, + "loss": 1.0482, + "step": 135 + }, + { + "epoch": 0.06158548665553449, + "grad_norm": 0.6397138237953186, + "learning_rate": 1.4986571914886662e-05, + "loss": 1.0544, + "step": 136 + }, + { + "epoch": 0.062038321116236944, + "grad_norm": 1.0741486549377441, + "learning_rate": 1.4986357989763901e-05, + "loss": 1.037, + "step": 137 + }, + { + "epoch": 0.062491155576939406, + "grad_norm": 0.6419923901557922, + "learning_rate": 1.4986142375593315e-05, + "loss": 1.0751, + "step": 138 + }, + { + "epoch": 0.06294399003764187, + "grad_norm": 0.8368469476699829, + "learning_rate": 1.498592507242355e-05, + 
"loss": 1.0357, + "step": 139 + }, + { + "epoch": 0.06339682449834433, + "grad_norm": 0.7393881678581238, + "learning_rate": 1.4985706080303634e-05, + "loss": 1.0733, + "step": 140 + }, + { + "epoch": 0.06384965895904679, + "grad_norm": 0.8628896474838257, + "learning_rate": 1.4985485399282973e-05, + "loss": 1.0808, + "step": 141 + }, + { + "epoch": 0.06430249341974924, + "grad_norm": 0.6672838926315308, + "learning_rate": 1.4985263029411356e-05, + "loss": 1.0266, + "step": 142 + }, + { + "epoch": 0.0647553278804517, + "grad_norm": 0.7643623352050781, + "learning_rate": 1.4985038970738959e-05, + "loss": 1.0044, + "step": 143 + }, + { + "epoch": 0.06520816234115416, + "grad_norm": 0.8692808747291565, + "learning_rate": 1.4984813223316326e-05, + "loss": 1.0128, + "step": 144 + }, + { + "epoch": 0.06566099680185662, + "grad_norm": 0.7556530833244324, + "learning_rate": 1.4984585787194392e-05, + "loss": 1.0772, + "step": 145 + }, + { + "epoch": 0.06611383126255908, + "grad_norm": 0.8598385453224182, + "learning_rate": 1.4984356662424473e-05, + "loss": 1.0511, + "step": 146 + }, + { + "epoch": 0.06656666572326154, + "grad_norm": 0.6620029807090759, + "learning_rate": 1.498412584905826e-05, + "loss": 1.0401, + "step": 147 + }, + { + "epoch": 0.067019500183964, + "grad_norm": 0.8549764752388, + "learning_rate": 1.498389334714783e-05, + "loss": 1.0172, + "step": 148 + }, + { + "epoch": 0.06747233464466645, + "grad_norm": 0.6814466714859009, + "learning_rate": 1.498365915674564e-05, + "loss": 1.0638, + "step": 149 + }, + { + "epoch": 0.06792516910536892, + "grad_norm": 0.7864624261856079, + "learning_rate": 1.4983423277904526e-05, + "loss": 1.0069, + "step": 150 + }, + { + "epoch": 0.06837800356607138, + "grad_norm": 0.7862728238105774, + "learning_rate": 1.4983185710677705e-05, + "loss": 1.0092, + "step": 151 + }, + { + "epoch": 0.06883083802677384, + "grad_norm": 0.656066358089447, + "learning_rate": 1.498294645511878e-05, + "loss": 0.9948, + "step": 152 + }, + { + "epoch": 0.0692836724874763, + "grad_norm": 0.8510375022888184, + "learning_rate": 1.4982705511281728e-05, + "loss": 1.0411, + "step": 153 + }, + { + "epoch": 0.06973650694817876, + "grad_norm": 0.6606243848800659, + "learning_rate": 1.4982462879220911e-05, + "loss": 1.0054, + "step": 154 + }, + { + "epoch": 0.07018934140888121, + "grad_norm": 0.7300797700881958, + "learning_rate": 1.498221855899107e-05, + "loss": 0.9801, + "step": 155 + }, + { + "epoch": 0.07064217586958367, + "grad_norm": 0.8046183586120605, + "learning_rate": 1.4981972550647328e-05, + "loss": 0.9794, + "step": 156 + }, + { + "epoch": 0.07109501033028613, + "grad_norm": 0.6511421203613281, + "learning_rate": 1.4981724854245189e-05, + "loss": 1.065, + "step": 157 + }, + { + "epoch": 0.0715478447909886, + "grad_norm": 0.8642223477363586, + "learning_rate": 1.4981475469840538e-05, + "loss": 0.9808, + "step": 158 + }, + { + "epoch": 0.07200067925169105, + "grad_norm": 0.7534111738204956, + "learning_rate": 1.498122439748964e-05, + "loss": 1.0371, + "step": 159 + }, + { + "epoch": 0.07245351371239352, + "grad_norm": 0.7241458892822266, + "learning_rate": 1.4980971637249141e-05, + "loss": 0.9568, + "step": 160 + }, + { + "epoch": 0.07290634817309598, + "grad_norm": 0.6701076030731201, + "learning_rate": 1.4980717189176066e-05, + "loss": 1.0158, + "step": 161 + }, + { + "epoch": 0.07335918263379843, + "grad_norm": 0.6040867567062378, + "learning_rate": 1.4980461053327829e-05, + "loss": 0.9966, + "step": 162 + }, + { + "epoch": 0.07381201709450089, + "grad_norm": 
0.7389962077140808, + "learning_rate": 1.4980203229762208e-05, + "loss": 0.962, + "step": 163 + }, + { + "epoch": 0.07426485155520335, + "grad_norm": 0.6864858269691467, + "learning_rate": 1.4979943718537383e-05, + "loss": 1.0563, + "step": 164 + }, + { + "epoch": 0.07471768601590581, + "grad_norm": 0.6471516489982605, + "learning_rate": 1.4979682519711897e-05, + "loss": 1.0448, + "step": 165 + }, + { + "epoch": 0.07517052047660827, + "grad_norm": 0.7508234977722168, + "learning_rate": 1.4979419633344686e-05, + "loss": 1.0353, + "step": 166 + }, + { + "epoch": 0.07562335493731073, + "grad_norm": 0.6425490379333496, + "learning_rate": 1.4979155059495056e-05, + "loss": 0.9819, + "step": 167 + }, + { + "epoch": 0.0760761893980132, + "grad_norm": 0.6431488990783691, + "learning_rate": 1.4978888798222703e-05, + "loss": 1.0394, + "step": 168 + }, + { + "epoch": 0.07652902385871564, + "grad_norm": 0.5777195692062378, + "learning_rate": 1.49786208495877e-05, + "loss": 0.9881, + "step": 169 + }, + { + "epoch": 0.0769818583194181, + "grad_norm": 0.7131640315055847, + "learning_rate": 1.4978351213650498e-05, + "loss": 1.0054, + "step": 170 + }, + { + "epoch": 0.07743469278012056, + "grad_norm": 0.7139365673065186, + "learning_rate": 1.4978079890471935e-05, + "loss": 1.0341, + "step": 171 + }, + { + "epoch": 0.07788752724082303, + "grad_norm": 0.6020310521125793, + "learning_rate": 1.4977806880113223e-05, + "loss": 0.9785, + "step": 172 + }, + { + "epoch": 0.07834036170152549, + "grad_norm": 0.7464612126350403, + "learning_rate": 1.4977532182635963e-05, + "loss": 0.9976, + "step": 173 + }, + { + "epoch": 0.07879319616222795, + "grad_norm": 0.8518685698509216, + "learning_rate": 1.4977255798102122e-05, + "loss": 0.9976, + "step": 174 + }, + { + "epoch": 0.07924603062293041, + "grad_norm": 0.6868304014205933, + "learning_rate": 1.4976977726574065e-05, + "loss": 0.9989, + "step": 175 + }, + { + "epoch": 0.07969886508363286, + "grad_norm": 0.7775158882141113, + "learning_rate": 1.4976697968114529e-05, + "loss": 0.9655, + "step": 176 + }, + { + "epoch": 0.08015169954433532, + "grad_norm": 0.9228640794754028, + "learning_rate": 1.4976416522786626e-05, + "loss": 0.9497, + "step": 177 + }, + { + "epoch": 0.08060453400503778, + "grad_norm": 0.6497952342033386, + "learning_rate": 1.4976133390653861e-05, + "loss": 0.94, + "step": 178 + }, + { + "epoch": 0.08105736846574024, + "grad_norm": 0.853291928768158, + "learning_rate": 1.497584857178011e-05, + "loss": 0.9267, + "step": 179 + }, + { + "epoch": 0.0815102029264427, + "grad_norm": 1.0284061431884766, + "learning_rate": 1.4975562066229635e-05, + "loss": 0.9772, + "step": 180 + }, + { + "epoch": 0.08196303738714517, + "grad_norm": 0.7046072483062744, + "learning_rate": 1.4975273874067078e-05, + "loss": 1.0039, + "step": 181 + }, + { + "epoch": 0.08241587184784763, + "grad_norm": 0.8511521220207214, + "learning_rate": 1.4974983995357456e-05, + "loss": 0.935, + "step": 182 + }, + { + "epoch": 0.08286870630855007, + "grad_norm": 0.9605690240859985, + "learning_rate": 1.497469243016617e-05, + "loss": 0.9885, + "step": 183 + }, + { + "epoch": 0.08332154076925254, + "grad_norm": 0.7198604345321655, + "learning_rate": 1.4974399178559007e-05, + "loss": 0.9757, + "step": 184 + }, + { + "epoch": 0.083774375229955, + "grad_norm": 0.8301750421524048, + "learning_rate": 1.4974104240602127e-05, + "loss": 0.9232, + "step": 185 + }, + { + "epoch": 0.08422720969065746, + "grad_norm": 0.8821402788162231, + "learning_rate": 1.497380761636207e-05, + "loss": 0.9657, + "step": 186 + 
}, + { + "epoch": 0.08468004415135992, + "grad_norm": 1.1182292699813843, + "learning_rate": 1.4973509305905762e-05, + "loss": 1.005, + "step": 187 + }, + { + "epoch": 0.08513287861206238, + "grad_norm": 0.7202256917953491, + "learning_rate": 1.4973209309300508e-05, + "loss": 0.916, + "step": 188 + }, + { + "epoch": 0.08558571307276484, + "grad_norm": 1.0314316749572754, + "learning_rate": 1.4972907626613993e-05, + "loss": 0.9722, + "step": 189 + }, + { + "epoch": 0.08603854753346729, + "grad_norm": 0.8789144158363342, + "learning_rate": 1.4972604257914277e-05, + "loss": 0.9835, + "step": 190 + }, + { + "epoch": 0.08649138199416975, + "grad_norm": 0.8964241743087769, + "learning_rate": 1.497229920326981e-05, + "loss": 0.9562, + "step": 191 + }, + { + "epoch": 0.08694421645487221, + "grad_norm": 0.9407457709312439, + "learning_rate": 1.4971992462749413e-05, + "loss": 0.935, + "step": 192 + }, + { + "epoch": 0.08739705091557468, + "grad_norm": 0.7482190132141113, + "learning_rate": 1.4971684036422295e-05, + "loss": 0.9561, + "step": 193 + }, + { + "epoch": 0.08784988537627714, + "grad_norm": 0.7358856201171875, + "learning_rate": 1.4971373924358041e-05, + "loss": 0.9106, + "step": 194 + }, + { + "epoch": 0.0883027198369796, + "grad_norm": 0.7911369204521179, + "learning_rate": 1.4971062126626617e-05, + "loss": 0.9625, + "step": 195 + }, + { + "epoch": 0.08875555429768206, + "grad_norm": 0.6739444136619568, + "learning_rate": 1.497074864329837e-05, + "loss": 0.936, + "step": 196 + }, + { + "epoch": 0.08920838875838451, + "grad_norm": 0.7796293497085571, + "learning_rate": 1.497043347444403e-05, + "loss": 0.9645, + "step": 197 + }, + { + "epoch": 0.08966122321908697, + "grad_norm": 0.7281574010848999, + "learning_rate": 1.4970116620134701e-05, + "loss": 0.985, + "step": 198 + }, + { + "epoch": 0.09011405767978943, + "grad_norm": 0.7205932140350342, + "learning_rate": 1.4969798080441872e-05, + "loss": 0.9253, + "step": 199 + }, + { + "epoch": 0.09056689214049189, + "grad_norm": 0.8215734958648682, + "learning_rate": 1.496947785543741e-05, + "loss": 0.9391, + "step": 200 + }, + { + "epoch": 0.09101972660119435, + "grad_norm": 0.6455587148666382, + "learning_rate": 1.4969155945193562e-05, + "loss": 0.8646, + "step": 201 + }, + { + "epoch": 0.09147256106189681, + "grad_norm": 0.6836207509040833, + "learning_rate": 1.496883234978296e-05, + "loss": 0.9728, + "step": 202 + }, + { + "epoch": 0.09192539552259928, + "grad_norm": 0.6182245016098022, + "learning_rate": 1.496850706927861e-05, + "loss": 0.8912, + "step": 203 + }, + { + "epoch": 0.09237822998330172, + "grad_norm": 1.1609197854995728, + "learning_rate": 1.4968180103753901e-05, + "loss": 0.9035, + "step": 204 + }, + { + "epoch": 0.09283106444400419, + "grad_norm": 0.7769509553909302, + "learning_rate": 1.4967851453282601e-05, + "loss": 0.8651, + "step": 205 + }, + { + "epoch": 0.09328389890470665, + "grad_norm": 0.8045849204063416, + "learning_rate": 1.4967521117938861e-05, + "loss": 0.9432, + "step": 206 + }, + { + "epoch": 0.09373673336540911, + "grad_norm": 0.747963547706604, + "learning_rate": 1.496718909779721e-05, + "loss": 0.9142, + "step": 207 + }, + { + "epoch": 0.09418956782611157, + "grad_norm": 0.8653061389923096, + "learning_rate": 1.4966855392932558e-05, + "loss": 0.9138, + "step": 208 + }, + { + "epoch": 0.09464240228681403, + "grad_norm": 0.7377287745475769, + "learning_rate": 1.4966520003420191e-05, + "loss": 0.9411, + "step": 209 + }, + { + "epoch": 0.09509523674751649, + "grad_norm": 0.8704388737678528, + "learning_rate": 
1.496618292933578e-05, + "loss": 0.9223, + "step": 210 + }, + { + "epoch": 0.09554807120821894, + "grad_norm": 0.8722823262214661, + "learning_rate": 1.4965844170755376e-05, + "loss": 0.9261, + "step": 211 + }, + { + "epoch": 0.0960009056689214, + "grad_norm": 0.6991630792617798, + "learning_rate": 1.4965503727755408e-05, + "loss": 0.9053, + "step": 212 + }, + { + "epoch": 0.09645374012962386, + "grad_norm": 0.762243926525116, + "learning_rate": 1.4965161600412686e-05, + "loss": 0.8989, + "step": 213 + }, + { + "epoch": 0.09690657459032632, + "grad_norm": 0.6228227019309998, + "learning_rate": 1.4964817788804398e-05, + "loss": 0.897, + "step": 214 + }, + { + "epoch": 0.09735940905102879, + "grad_norm": 0.871157705783844, + "learning_rate": 1.4964472293008114e-05, + "loss": 0.9551, + "step": 215 + }, + { + "epoch": 0.09781224351173125, + "grad_norm": 0.7100203037261963, + "learning_rate": 1.4964125113101787e-05, + "loss": 0.9334, + "step": 216 + }, + { + "epoch": 0.0982650779724337, + "grad_norm": 0.6398739218711853, + "learning_rate": 1.4963776249163742e-05, + "loss": 0.9256, + "step": 217 + }, + { + "epoch": 0.09871791243313616, + "grad_norm": 0.7361502051353455, + "learning_rate": 1.496342570127269e-05, + "loss": 0.9207, + "step": 218 + }, + { + "epoch": 0.09917074689383862, + "grad_norm": 0.6598243713378906, + "learning_rate": 1.4963073469507722e-05, + "loss": 0.9029, + "step": 219 + }, + { + "epoch": 0.09962358135454108, + "grad_norm": 0.7405319213867188, + "learning_rate": 1.4962719553948306e-05, + "loss": 0.9661, + "step": 220 + }, + { + "epoch": 0.10007641581524354, + "grad_norm": 0.7439802289009094, + "learning_rate": 1.4962363954674294e-05, + "loss": 0.9133, + "step": 221 + }, + { + "epoch": 0.100529250275946, + "grad_norm": 0.7841500639915466, + "learning_rate": 1.4962006671765911e-05, + "loss": 0.8958, + "step": 222 + }, + { + "epoch": 0.10098208473664846, + "grad_norm": 0.8495052456855774, + "learning_rate": 1.4961647705303765e-05, + "loss": 0.9755, + "step": 223 + }, + { + "epoch": 0.10143491919735091, + "grad_norm": 0.7986276149749756, + "learning_rate": 1.4961287055368853e-05, + "loss": 0.994, + "step": 224 + }, + { + "epoch": 0.10188775365805337, + "grad_norm": 0.8611103892326355, + "learning_rate": 1.4960924722042536e-05, + "loss": 0.8454, + "step": 225 + }, + { + "epoch": 0.10234058811875583, + "grad_norm": 1.167007565498352, + "learning_rate": 1.4960560705406563e-05, + "loss": 0.9154, + "step": 226 + }, + { + "epoch": 0.1027934225794583, + "grad_norm": 0.7711225748062134, + "learning_rate": 1.4960195005543066e-05, + "loss": 0.8714, + "step": 227 + }, + { + "epoch": 0.10324625704016076, + "grad_norm": 0.7879083752632141, + "learning_rate": 1.4959827622534551e-05, + "loss": 0.8909, + "step": 228 + }, + { + "epoch": 0.10369909150086322, + "grad_norm": 0.964492678642273, + "learning_rate": 1.4959458556463905e-05, + "loss": 0.8881, + "step": 229 + }, + { + "epoch": 0.10415192596156568, + "grad_norm": 1.0750067234039307, + "learning_rate": 1.4959087807414397e-05, + "loss": 0.8857, + "step": 230 + }, + { + "epoch": 0.10460476042226813, + "grad_norm": 0.7446492910385132, + "learning_rate": 1.4958715375469674e-05, + "loss": 0.9002, + "step": 231 + }, + { + "epoch": 0.10505759488297059, + "grad_norm": 1.0399891138076782, + "learning_rate": 1.4958341260713762e-05, + "loss": 0.9695, + "step": 232 + }, + { + "epoch": 0.10551042934367305, + "grad_norm": 0.7288771867752075, + "learning_rate": 1.495796546323107e-05, + "loss": 0.9033, + "step": 233 + }, + { + "epoch": 
0.10596326380437551, + "grad_norm": 1.0252315998077393, + "learning_rate": 1.495758798310638e-05, + "loss": 0.8462, + "step": 234 + }, + { + "epoch": 0.10641609826507797, + "grad_norm": 0.7485429644584656, + "learning_rate": 1.4957208820424859e-05, + "loss": 0.8822, + "step": 235 + }, + { + "epoch": 0.10686893272578044, + "grad_norm": 0.9481981992721558, + "learning_rate": 1.4956827975272054e-05, + "loss": 0.8892, + "step": 236 + }, + { + "epoch": 0.1073217671864829, + "grad_norm": 0.8089492917060852, + "learning_rate": 1.495644544773389e-05, + "loss": 0.8245, + "step": 237 + }, + { + "epoch": 0.10777460164718534, + "grad_norm": 0.9913555383682251, + "learning_rate": 1.4956061237896671e-05, + "loss": 0.8442, + "step": 238 + }, + { + "epoch": 0.1082274361078878, + "grad_norm": 0.8869263529777527, + "learning_rate": 1.4955675345847084e-05, + "loss": 0.9323, + "step": 239 + }, + { + "epoch": 0.10868027056859027, + "grad_norm": 0.8241413235664368, + "learning_rate": 1.4955287771672187e-05, + "loss": 0.8985, + "step": 240 + }, + { + "epoch": 0.10913310502929273, + "grad_norm": 0.7058445811271667, + "learning_rate": 1.4954898515459428e-05, + "loss": 0.817, + "step": 241 + }, + { + "epoch": 0.10958593948999519, + "grad_norm": 0.7939592599868774, + "learning_rate": 1.4954507577296628e-05, + "loss": 0.7496, + "step": 242 + }, + { + "epoch": 0.11003877395069765, + "grad_norm": 0.8440166115760803, + "learning_rate": 1.4954114957271988e-05, + "loss": 0.9368, + "step": 243 + }, + { + "epoch": 0.11049160841140011, + "grad_norm": 0.8787747621536255, + "learning_rate": 1.4953720655474092e-05, + "loss": 0.8562, + "step": 244 + }, + { + "epoch": 0.11094444287210256, + "grad_norm": 0.7784985899925232, + "learning_rate": 1.4953324671991897e-05, + "loss": 0.8718, + "step": 245 + }, + { + "epoch": 0.11139727733280502, + "grad_norm": 0.9408149719238281, + "learning_rate": 1.495292700691475e-05, + "loss": 0.9013, + "step": 246 + }, + { + "epoch": 0.11185011179350748, + "grad_norm": 0.8225075602531433, + "learning_rate": 1.4952527660332368e-05, + "loss": 0.8769, + "step": 247 + }, + { + "epoch": 0.11230294625420995, + "grad_norm": 0.7701689004898071, + "learning_rate": 1.4952126632334847e-05, + "loss": 0.8824, + "step": 248 + }, + { + "epoch": 0.11275578071491241, + "grad_norm": 0.7905251979827881, + "learning_rate": 1.4951723923012667e-05, + "loss": 0.8753, + "step": 249 + }, + { + "epoch": 0.11320861517561487, + "grad_norm": 0.7921510338783264, + "learning_rate": 1.495131953245669e-05, + "loss": 0.8127, + "step": 250 + }, + { + "epoch": 0.11366144963631733, + "grad_norm": 0.8098307251930237, + "learning_rate": 1.495091346075815e-05, + "loss": 0.9178, + "step": 251 + }, + { + "epoch": 0.11411428409701978, + "grad_norm": 0.789347767829895, + "learning_rate": 1.4950505708008665e-05, + "loss": 0.8268, + "step": 252 + }, + { + "epoch": 0.11456711855772224, + "grad_norm": 0.8333251476287842, + "learning_rate": 1.495009627430023e-05, + "loss": 0.8422, + "step": 253 + }, + { + "epoch": 0.1150199530184247, + "grad_norm": 0.9456163048744202, + "learning_rate": 1.4949685159725219e-05, + "loss": 0.8445, + "step": 254 + }, + { + "epoch": 0.11547278747912716, + "grad_norm": 0.7319315075874329, + "learning_rate": 1.4949272364376389e-05, + "loss": 0.8195, + "step": 255 + }, + { + "epoch": 0.11592562193982962, + "grad_norm": 0.9746232628822327, + "learning_rate": 1.494885788834687e-05, + "loss": 0.8909, + "step": 256 + }, + { + "epoch": 0.11637845640053208, + "grad_norm": 0.9130688905715942, + "learning_rate": 
1.4948441731730177e-05, + "loss": 0.8685, + "step": 257 + }, + { + "epoch": 0.11683129086123455, + "grad_norm": 0.7087278962135315, + "learning_rate": 1.4948023894620201e-05, + "loss": 0.9413, + "step": 258 + }, + { + "epoch": 0.117284125321937, + "grad_norm": 0.9176363348960876, + "learning_rate": 1.4947604377111216e-05, + "loss": 0.8186, + "step": 259 + }, + { + "epoch": 0.11773695978263946, + "grad_norm": 0.8463951945304871, + "learning_rate": 1.494718317929787e-05, + "loss": 0.8986, + "step": 260 + }, + { + "epoch": 0.11818979424334192, + "grad_norm": 0.8302780389785767, + "learning_rate": 1.494676030127519e-05, + "loss": 0.8204, + "step": 261 + }, + { + "epoch": 0.11864262870404438, + "grad_norm": 0.7446224093437195, + "learning_rate": 1.4946335743138587e-05, + "loss": 0.8383, + "step": 262 + }, + { + "epoch": 0.11909546316474684, + "grad_norm": 0.7819026112556458, + "learning_rate": 1.4945909504983848e-05, + "loss": 0.8248, + "step": 263 + }, + { + "epoch": 0.1195482976254493, + "grad_norm": 0.7390825152397156, + "learning_rate": 1.4945481586907141e-05, + "loss": 0.8604, + "step": 264 + }, + { + "epoch": 0.12000113208615176, + "grad_norm": 0.7803400158882141, + "learning_rate": 1.494505198900501e-05, + "loss": 0.8302, + "step": 265 + }, + { + "epoch": 0.12045396654685421, + "grad_norm": 0.9145985841751099, + "learning_rate": 1.4944620711374377e-05, + "loss": 0.8732, + "step": 266 + }, + { + "epoch": 0.12090680100755667, + "grad_norm": 0.7339340448379517, + "learning_rate": 1.494418775411255e-05, + "loss": 0.8042, + "step": 267 + }, + { + "epoch": 0.12135963546825913, + "grad_norm": 0.7937312722206116, + "learning_rate": 1.4943753117317208e-05, + "loss": 0.8347, + "step": 268 + }, + { + "epoch": 0.1218124699289616, + "grad_norm": 0.7886615991592407, + "learning_rate": 1.4943316801086414e-05, + "loss": 0.8511, + "step": 269 + }, + { + "epoch": 0.12226530438966406, + "grad_norm": 0.8435689210891724, + "learning_rate": 1.4942878805518608e-05, + "loss": 0.8187, + "step": 270 + }, + { + "epoch": 0.12271813885036652, + "grad_norm": 0.6933366060256958, + "learning_rate": 1.494243913071261e-05, + "loss": 0.762, + "step": 271 + }, + { + "epoch": 0.12317097331106898, + "grad_norm": 0.743684709072113, + "learning_rate": 1.4941997776767616e-05, + "loss": 0.8686, + "step": 272 + }, + { + "epoch": 0.12362380777177143, + "grad_norm": 0.772577702999115, + "learning_rate": 1.4941554743783204e-05, + "loss": 0.8271, + "step": 273 + }, + { + "epoch": 0.12407664223247389, + "grad_norm": 0.7139607667922974, + "learning_rate": 1.4941110031859327e-05, + "loss": 0.7724, + "step": 274 + }, + { + "epoch": 0.12452947669317635, + "grad_norm": 0.6617469191551208, + "learning_rate": 1.4940663641096325e-05, + "loss": 0.8092, + "step": 275 + }, + { + "epoch": 0.12498231115387881, + "grad_norm": 0.841748833656311, + "learning_rate": 1.4940215571594908e-05, + "loss": 0.9361, + "step": 276 + }, + { + "epoch": 0.12498231115387881, + "eval_loss": 0.8086027503013611, + "eval_runtime": 19.4718, + "eval_samples_per_second": 5.855, + "eval_steps_per_second": 0.77, + "step": 276 + }, + { + "epoch": 0.12543514561458127, + "grad_norm": 0.6689454913139343, + "learning_rate": 1.4939765823456168e-05, + "loss": 0.8363, + "step": 277 + }, + { + "epoch": 0.12588798007528373, + "grad_norm": 0.7700071334838867, + "learning_rate": 1.4939314396781575e-05, + "loss": 0.8393, + "step": 278 + }, + { + "epoch": 0.1263408145359862, + "grad_norm": 0.7219974994659424, + "learning_rate": 1.4938861291672983e-05, + "loss": 0.8039, + "step": 279 + }, 
+ { + "epoch": 0.12679364899668866, + "grad_norm": 0.6459200382232666, + "learning_rate": 1.4938406508232613e-05, + "loss": 0.819, + "step": 280 + }, + { + "epoch": 0.12724648345739112, + "grad_norm": 0.7447069883346558, + "learning_rate": 1.4937950046563075e-05, + "loss": 0.8567, + "step": 281 + }, + { + "epoch": 0.12769931791809358, + "grad_norm": 0.8317793011665344, + "learning_rate": 1.4937491906767357e-05, + "loss": 0.8555, + "step": 282 + }, + { + "epoch": 0.128152152378796, + "grad_norm": 0.6332393884658813, + "learning_rate": 1.4937032088948819e-05, + "loss": 0.851, + "step": 283 + }, + { + "epoch": 0.12860498683949848, + "grad_norm": 0.6920958757400513, + "learning_rate": 1.4936570593211203e-05, + "loss": 0.7737, + "step": 284 + }, + { + "epoch": 0.12905782130020094, + "grad_norm": 0.7432802319526672, + "learning_rate": 1.4936107419658635e-05, + "loss": 0.8177, + "step": 285 + }, + { + "epoch": 0.1295106557609034, + "grad_norm": 0.7308900952339172, + "learning_rate": 1.4935642568395613e-05, + "loss": 0.751, + "step": 286 + }, + { + "epoch": 0.12996349022160586, + "grad_norm": 0.9042837619781494, + "learning_rate": 1.4935176039527014e-05, + "loss": 0.8192, + "step": 287 + }, + { + "epoch": 0.13041632468230832, + "grad_norm": 0.7064964175224304, + "learning_rate": 1.4934707833158094e-05, + "loss": 0.7918, + "step": 288 + }, + { + "epoch": 0.13086915914301078, + "grad_norm": 1.04722261428833, + "learning_rate": 1.4934237949394492e-05, + "loss": 0.8003, + "step": 289 + }, + { + "epoch": 0.13132199360371324, + "grad_norm": 0.6335585117340088, + "learning_rate": 1.4933766388342215e-05, + "loss": 0.7773, + "step": 290 + }, + { + "epoch": 0.1317748280644157, + "grad_norm": 0.8007393479347229, + "learning_rate": 1.4933293150107663e-05, + "loss": 0.8016, + "step": 291 + }, + { + "epoch": 0.13222766252511817, + "grad_norm": 0.7477773427963257, + "learning_rate": 1.49328182347976e-05, + "loss": 0.7772, + "step": 292 + }, + { + "epoch": 0.13268049698582063, + "grad_norm": 0.8117051124572754, + "learning_rate": 1.4932341642519178e-05, + "loss": 0.8021, + "step": 293 + }, + { + "epoch": 0.1331333314465231, + "grad_norm": 1.086212158203125, + "learning_rate": 1.4931863373379923e-05, + "loss": 0.8271, + "step": 294 + }, + { + "epoch": 0.13358616590722555, + "grad_norm": 0.6811758279800415, + "learning_rate": 1.4931383427487741e-05, + "loss": 0.7714, + "step": 295 + }, + { + "epoch": 0.134039000367928, + "grad_norm": 0.810197114944458, + "learning_rate": 1.4930901804950918e-05, + "loss": 0.7556, + "step": 296 + }, + { + "epoch": 0.13449183482863045, + "grad_norm": 0.656467080116272, + "learning_rate": 1.4930418505878113e-05, + "loss": 0.7308, + "step": 297 + }, + { + "epoch": 0.1349446692893329, + "grad_norm": 0.7282403111457825, + "learning_rate": 1.4929933530378367e-05, + "loss": 0.7529, + "step": 298 + }, + { + "epoch": 0.13539750375003537, + "grad_norm": 0.7933338284492493, + "learning_rate": 1.4929446878561098e-05, + "loss": 0.8193, + "step": 299 + }, + { + "epoch": 0.13585033821073783, + "grad_norm": 0.7075956463813782, + "learning_rate": 1.4928958550536107e-05, + "loss": 0.7736, + "step": 300 + }, + { + "epoch": 0.1363031726714403, + "grad_norm": 0.6675400137901306, + "learning_rate": 1.4928468546413563e-05, + "loss": 0.7477, + "step": 301 + }, + { + "epoch": 0.13675600713214275, + "grad_norm": 0.8855708241462708, + "learning_rate": 1.4927976866304024e-05, + "loss": 0.7988, + "step": 302 + }, + { + "epoch": 0.13720884159284522, + "grad_norm": 0.7367263436317444, + "learning_rate": 
1.4927483510318417e-05, + "loss": 0.8018, + "step": 303 + }, + { + "epoch": 0.13766167605354768, + "grad_norm": 0.7198050022125244, + "learning_rate": 1.4926988478568055e-05, + "loss": 0.8311, + "step": 304 + }, + { + "epoch": 0.13811451051425014, + "grad_norm": 0.6773768067359924, + "learning_rate": 1.4926491771164623e-05, + "loss": 0.8388, + "step": 305 + }, + { + "epoch": 0.1385673449749526, + "grad_norm": 0.7650995850563049, + "learning_rate": 1.4925993388220189e-05, + "loss": 0.7779, + "step": 306 + }, + { + "epoch": 0.13902017943565506, + "grad_norm": 0.7893260717391968, + "learning_rate": 1.4925493329847195e-05, + "loss": 0.7559, + "step": 307 + }, + { + "epoch": 0.13947301389635752, + "grad_norm": 0.7186692357063293, + "learning_rate": 1.4924991596158462e-05, + "loss": 0.8107, + "step": 308 + }, + { + "epoch": 0.13992584835705998, + "grad_norm": 0.7286010980606079, + "learning_rate": 1.492448818726719e-05, + "loss": 0.6999, + "step": 309 + }, + { + "epoch": 0.14037868281776242, + "grad_norm": 0.7160384058952332, + "learning_rate": 1.4923983103286957e-05, + "loss": 0.7403, + "step": 310 + }, + { + "epoch": 0.14083151727846488, + "grad_norm": 0.858772337436676, + "learning_rate": 1.492347634433172e-05, + "loss": 0.7424, + "step": 311 + }, + { + "epoch": 0.14128435173916734, + "grad_norm": 0.7329276204109192, + "learning_rate": 1.4922967910515809e-05, + "loss": 0.7232, + "step": 312 + }, + { + "epoch": 0.1417371861998698, + "grad_norm": 0.8894422054290771, + "learning_rate": 1.4922457801953934e-05, + "loss": 0.7737, + "step": 313 + }, + { + "epoch": 0.14219002066057226, + "grad_norm": 0.7823007106781006, + "learning_rate": 1.492194601876119e-05, + "loss": 0.7835, + "step": 314 + }, + { + "epoch": 0.14264285512127473, + "grad_norm": 0.8172221779823303, + "learning_rate": 1.492143256105304e-05, + "loss": 0.7721, + "step": 315 + }, + { + "epoch": 0.1430956895819772, + "grad_norm": 0.9521918892860413, + "learning_rate": 1.492091742894533e-05, + "loss": 0.7202, + "step": 316 + }, + { + "epoch": 0.14354852404267965, + "grad_norm": 0.6912105679512024, + "learning_rate": 1.492040062255428e-05, + "loss": 0.7555, + "step": 317 + }, + { + "epoch": 0.1440013585033821, + "grad_norm": 0.713433563709259, + "learning_rate": 1.4919882141996493e-05, + "loss": 0.7422, + "step": 318 + }, + { + "epoch": 0.14445419296408457, + "grad_norm": 0.6809953451156616, + "learning_rate": 1.4919361987388942e-05, + "loss": 0.7298, + "step": 319 + }, + { + "epoch": 0.14490702742478703, + "grad_norm": 0.8265045285224915, + "learning_rate": 1.491884015884899e-05, + "loss": 0.8056, + "step": 320 + }, + { + "epoch": 0.1453598618854895, + "grad_norm": 0.7027236819267273, + "learning_rate": 1.491831665649437e-05, + "loss": 0.7647, + "step": 321 + }, + { + "epoch": 0.14581269634619196, + "grad_norm": 0.6985670328140259, + "learning_rate": 1.4917791480443183e-05, + "loss": 0.7859, + "step": 322 + }, + { + "epoch": 0.14626553080689442, + "grad_norm": 0.6507831811904907, + "learning_rate": 1.4917264630813925e-05, + "loss": 0.7143, + "step": 323 + }, + { + "epoch": 0.14671836526759685, + "grad_norm": 0.8120051026344299, + "learning_rate": 1.4916736107725463e-05, + "loss": 0.7825, + "step": 324 + }, + { + "epoch": 0.1471711997282993, + "grad_norm": 0.7564501166343689, + "learning_rate": 1.4916205911297039e-05, + "loss": 0.8156, + "step": 325 + }, + { + "epoch": 0.14762403418900177, + "grad_norm": 0.7937654852867126, + "learning_rate": 1.4915674041648274e-05, + "loss": 0.7779, + "step": 326 + }, + { + "epoch": 0.14807686864970424, 
+ "grad_norm": 0.6390685439109802, + "learning_rate": 1.4915140498899165e-05, + "loss": 0.7297, + "step": 327 + }, + { + "epoch": 0.1485297031104067, + "grad_norm": 0.8413376212120056, + "learning_rate": 1.4914605283170092e-05, + "loss": 0.7459, + "step": 328 + }, + { + "epoch": 0.14898253757110916, + "grad_norm": 0.8402100801467896, + "learning_rate": 1.4914068394581805e-05, + "loss": 0.7285, + "step": 329 + }, + { + "epoch": 0.14943537203181162, + "grad_norm": 0.8007932901382446, + "learning_rate": 1.4913529833255436e-05, + "loss": 0.8058, + "step": 330 + }, + { + "epoch": 0.14988820649251408, + "grad_norm": 0.913092851638794, + "learning_rate": 1.4912989599312496e-05, + "loss": 0.7506, + "step": 331 + }, + { + "epoch": 0.15034104095321654, + "grad_norm": 0.9280517101287842, + "learning_rate": 1.4912447692874865e-05, + "loss": 0.8402, + "step": 332 + }, + { + "epoch": 0.150793875413919, + "grad_norm": 0.8286890387535095, + "learning_rate": 1.4911904114064816e-05, + "loss": 0.6831, + "step": 333 + }, + { + "epoch": 0.15124670987462147, + "grad_norm": 1.3162356615066528, + "learning_rate": 1.491135886300498e-05, + "loss": 0.7654, + "step": 334 + }, + { + "epoch": 0.15169954433532393, + "grad_norm": 0.6534262895584106, + "learning_rate": 1.4910811939818376e-05, + "loss": 0.7818, + "step": 335 + }, + { + "epoch": 0.1521523787960264, + "grad_norm": 1.4352598190307617, + "learning_rate": 1.4910263344628405e-05, + "loss": 0.7497, + "step": 336 + }, + { + "epoch": 0.15260521325672885, + "grad_norm": 0.7645979523658752, + "learning_rate": 1.4909713077558834e-05, + "loss": 0.7298, + "step": 337 + }, + { + "epoch": 0.15305804771743128, + "grad_norm": 1.1644870042800903, + "learning_rate": 1.4909161138733815e-05, + "loss": 0.7725, + "step": 338 + }, + { + "epoch": 0.15351088217813375, + "grad_norm": 0.7654008865356445, + "learning_rate": 1.4908607528277873e-05, + "loss": 0.6841, + "step": 339 + }, + { + "epoch": 0.1539637166388362, + "grad_norm": 0.9176437258720398, + "learning_rate": 1.4908052246315915e-05, + "loss": 0.7965, + "step": 340 + }, + { + "epoch": 0.15441655109953867, + "grad_norm": 0.8297829031944275, + "learning_rate": 1.4907495292973215e-05, + "loss": 0.7624, + "step": 341 + }, + { + "epoch": 0.15486938556024113, + "grad_norm": 0.7733472585678101, + "learning_rate": 1.490693666837544e-05, + "loss": 0.7679, + "step": 342 + }, + { + "epoch": 0.1553222200209436, + "grad_norm": 0.8300292491912842, + "learning_rate": 1.4906376372648618e-05, + "loss": 0.7582, + "step": 343 + }, + { + "epoch": 0.15577505448164605, + "grad_norm": 0.7907745838165283, + "learning_rate": 1.4905814405919163e-05, + "loss": 0.6966, + "step": 344 + }, + { + "epoch": 0.15622788894234851, + "grad_norm": 1.0779638290405273, + "learning_rate": 1.4905250768313865e-05, + "loss": 0.698, + "step": 345 + }, + { + "epoch": 0.15668072340305098, + "grad_norm": 0.7394607663154602, + "learning_rate": 1.490468545995989e-05, + "loss": 0.7973, + "step": 346 + }, + { + "epoch": 0.15713355786375344, + "grad_norm": 0.9171550869941711, + "learning_rate": 1.4904118480984782e-05, + "loss": 0.7185, + "step": 347 + }, + { + "epoch": 0.1575863923244559, + "grad_norm": 0.7766843438148499, + "learning_rate": 1.4903549831516459e-05, + "loss": 0.692, + "step": 348 + }, + { + "epoch": 0.15803922678515836, + "grad_norm": 0.8801367282867432, + "learning_rate": 1.490297951168322e-05, + "loss": 0.7797, + "step": 349 + }, + { + "epoch": 0.15849206124586082, + "grad_norm": 0.8276110887527466, + "learning_rate": 1.4902407521613735e-05, + "loss": 0.7405, 
+ "step": 350 + }, + { + "epoch": 0.15894489570656328, + "grad_norm": 0.8306746482849121, + "learning_rate": 1.4901833861437058e-05, + "loss": 0.7268, + "step": 351 + }, + { + "epoch": 0.15939773016726572, + "grad_norm": 0.9609989523887634, + "learning_rate": 1.4901258531282616e-05, + "loss": 0.7233, + "step": 352 + }, + { + "epoch": 0.15985056462796818, + "grad_norm": 0.8433880805969238, + "learning_rate": 1.4900681531280212e-05, + "loss": 0.7076, + "step": 353 + }, + { + "epoch": 0.16030339908867064, + "grad_norm": 0.7875558137893677, + "learning_rate": 1.4900102861560027e-05, + "loss": 0.6939, + "step": 354 + }, + { + "epoch": 0.1607562335493731, + "grad_norm": 0.7184025645256042, + "learning_rate": 1.4899522522252618e-05, + "loss": 0.7195, + "step": 355 + }, + { + "epoch": 0.16120906801007556, + "grad_norm": 0.7774180173873901, + "learning_rate": 1.4898940513488921e-05, + "loss": 0.713, + "step": 356 + }, + { + "epoch": 0.16166190247077802, + "grad_norm": 0.8321492075920105, + "learning_rate": 1.4898356835400247e-05, + "loss": 0.7299, + "step": 357 + }, + { + "epoch": 0.16211473693148049, + "grad_norm": 0.8344357013702393, + "learning_rate": 1.4897771488118281e-05, + "loss": 0.723, + "step": 358 + }, + { + "epoch": 0.16256757139218295, + "grad_norm": 0.8810961842536926, + "learning_rate": 1.489718447177509e-05, + "loss": 0.723, + "step": 359 + }, + { + "epoch": 0.1630204058528854, + "grad_norm": 0.6815357804298401, + "learning_rate": 1.4896595786503114e-05, + "loss": 0.7178, + "step": 360 + }, + { + "epoch": 0.16347324031358787, + "grad_norm": 0.7081362009048462, + "learning_rate": 1.4896005432435168e-05, + "loss": 0.6913, + "step": 361 + }, + { + "epoch": 0.16392607477429033, + "grad_norm": 0.712780237197876, + "learning_rate": 1.4895413409704448e-05, + "loss": 0.7063, + "step": 362 + }, + { + "epoch": 0.1643789092349928, + "grad_norm": 0.7857349514961243, + "learning_rate": 1.4894819718444525e-05, + "loss": 0.6999, + "step": 363 + }, + { + "epoch": 0.16483174369569525, + "grad_norm": 0.7282727360725403, + "learning_rate": 1.4894224358789344e-05, + "loss": 0.7216, + "step": 364 + }, + { + "epoch": 0.16528457815639772, + "grad_norm": 0.7465353012084961, + "learning_rate": 1.4893627330873227e-05, + "loss": 0.7342, + "step": 365 + }, + { + "epoch": 0.16573741261710015, + "grad_norm": 0.8547371029853821, + "learning_rate": 1.4893028634830877e-05, + "loss": 0.6788, + "step": 366 + }, + { + "epoch": 0.1661902470778026, + "grad_norm": 0.7159935832023621, + "learning_rate": 1.4892428270797368e-05, + "loss": 0.6689, + "step": 367 + }, + { + "epoch": 0.16664308153850507, + "grad_norm": 0.8785684108734131, + "learning_rate": 1.489182623890815e-05, + "loss": 0.7402, + "step": 368 + }, + { + "epoch": 0.16709591599920753, + "grad_norm": 0.8778204917907715, + "learning_rate": 1.4891222539299058e-05, + "loss": 0.6354, + "step": 369 + }, + { + "epoch": 0.16754875045991, + "grad_norm": 0.8214927315711975, + "learning_rate": 1.4890617172106286e-05, + "loss": 0.7109, + "step": 370 + }, + { + "epoch": 0.16800158492061246, + "grad_norm": 0.7724870443344116, + "learning_rate": 1.4890010137466428e-05, + "loss": 0.7082, + "step": 371 + }, + { + "epoch": 0.16845441938131492, + "grad_norm": 0.7708100080490112, + "learning_rate": 1.4889401435516431e-05, + "loss": 0.6824, + "step": 372 + }, + { + "epoch": 0.16890725384201738, + "grad_norm": 0.6783519387245178, + "learning_rate": 1.4888791066393632e-05, + "loss": 0.733, + "step": 373 + }, + { + "epoch": 0.16936008830271984, + "grad_norm": 0.7518247961997986, + 
"learning_rate": 1.488817903023574e-05, + "loss": 0.7549, + "step": 374 + }, + { + "epoch": 0.1698129227634223, + "grad_norm": 0.7920773029327393, + "learning_rate": 1.4887565327180842e-05, + "loss": 0.7505, + "step": 375 + }, + { + "epoch": 0.17026575722412476, + "grad_norm": 0.7498000860214233, + "learning_rate": 1.4886949957367398e-05, + "loss": 0.738, + "step": 376 + }, + { + "epoch": 0.17071859168482723, + "grad_norm": 0.682931661605835, + "learning_rate": 1.4886332920934247e-05, + "loss": 0.6858, + "step": 377 + }, + { + "epoch": 0.1711714261455297, + "grad_norm": 0.705777108669281, + "learning_rate": 1.4885714218020604e-05, + "loss": 0.7319, + "step": 378 + }, + { + "epoch": 0.17162426060623212, + "grad_norm": 0.7821267247200012, + "learning_rate": 1.4885093848766055e-05, + "loss": 0.6815, + "step": 379 + }, + { + "epoch": 0.17207709506693458, + "grad_norm": 0.7366465330123901, + "learning_rate": 1.4884471813310567e-05, + "loss": 0.7158, + "step": 380 + }, + { + "epoch": 0.17252992952763704, + "grad_norm": 0.7276479601860046, + "learning_rate": 1.4883848111794484e-05, + "loss": 0.6813, + "step": 381 + }, + { + "epoch": 0.1729827639883395, + "grad_norm": 0.6883667707443237, + "learning_rate": 1.488322274435852e-05, + "loss": 0.6472, + "step": 382 + }, + { + "epoch": 0.17343559844904197, + "grad_norm": 0.6681039333343506, + "learning_rate": 1.4882595711143772e-05, + "loss": 0.6808, + "step": 383 + }, + { + "epoch": 0.17388843290974443, + "grad_norm": 0.9255726933479309, + "learning_rate": 1.4881967012291707e-05, + "loss": 0.6653, + "step": 384 + }, + { + "epoch": 0.1743412673704469, + "grad_norm": 0.7656347155570984, + "learning_rate": 1.488133664794417e-05, + "loss": 0.6502, + "step": 385 + }, + { + "epoch": 0.17479410183114935, + "grad_norm": 0.6879505515098572, + "learning_rate": 1.4880704618243382e-05, + "loss": 0.6785, + "step": 386 + }, + { + "epoch": 0.1752469362918518, + "grad_norm": 0.7322476506233215, + "learning_rate": 1.4880070923331942e-05, + "loss": 0.6697, + "step": 387 + }, + { + "epoch": 0.17569977075255427, + "grad_norm": 0.7081751823425293, + "learning_rate": 1.4879435563352815e-05, + "loss": 0.6706, + "step": 388 + }, + { + "epoch": 0.17615260521325674, + "grad_norm": 0.9385679364204407, + "learning_rate": 1.4878798538449358e-05, + "loss": 0.6689, + "step": 389 + }, + { + "epoch": 0.1766054396739592, + "grad_norm": 1.0547739267349243, + "learning_rate": 1.4878159848765288e-05, + "loss": 0.7344, + "step": 390 + }, + { + "epoch": 0.17705827413466166, + "grad_norm": 0.772883951663971, + "learning_rate": 1.4877519494444707e-05, + "loss": 0.7109, + "step": 391 + }, + { + "epoch": 0.17751110859536412, + "grad_norm": 0.9401255249977112, + "learning_rate": 1.4876877475632089e-05, + "loss": 0.6846, + "step": 392 + }, + { + "epoch": 0.17796394305606655, + "grad_norm": 0.6944053769111633, + "learning_rate": 1.4876233792472284e-05, + "loss": 0.6735, + "step": 393 + }, + { + "epoch": 0.17841677751676902, + "grad_norm": 1.1043004989624023, + "learning_rate": 1.4875588445110517e-05, + "loss": 0.6569, + "step": 394 + }, + { + "epoch": 0.17886961197747148, + "grad_norm": 0.6451114416122437, + "learning_rate": 1.4874941433692393e-05, + "loss": 0.6748, + "step": 395 + }, + { + "epoch": 0.17932244643817394, + "grad_norm": 0.8441924452781677, + "learning_rate": 1.4874292758363882e-05, + "loss": 0.729, + "step": 396 + }, + { + "epoch": 0.1797752808988764, + "grad_norm": 0.7901274561882019, + "learning_rate": 1.4873642419271342e-05, + "loss": 0.7124, + "step": 397 + }, + { + "epoch": 
0.18022811535957886, + "grad_norm": 1.0363836288452148, + "learning_rate": 1.4872990416561499e-05, + "loss": 0.6816, + "step": 398 + }, + { + "epoch": 0.18068094982028132, + "grad_norm": 0.7908300161361694, + "learning_rate": 1.4872336750381452e-05, + "loss": 0.6721, + "step": 399 + }, + { + "epoch": 0.18113378428098378, + "grad_norm": 0.8819090127944946, + "learning_rate": 1.4871681420878683e-05, + "loss": 0.6521, + "step": 400 + }, + { + "epoch": 0.18158661874168625, + "grad_norm": 1.1245052814483643, + "learning_rate": 1.4871024428201043e-05, + "loss": 0.7597, + "step": 401 + }, + { + "epoch": 0.1820394532023887, + "grad_norm": 0.962951123714447, + "learning_rate": 1.4870365772496764e-05, + "loss": 0.7093, + "step": 402 + }, + { + "epoch": 0.18249228766309117, + "grad_norm": 1.236630916595459, + "learning_rate": 1.4869705453914446e-05, + "loss": 0.7013, + "step": 403 + }, + { + "epoch": 0.18294512212379363, + "grad_norm": 0.8700730204582214, + "learning_rate": 1.486904347260307e-05, + "loss": 0.6539, + "step": 404 + }, + { + "epoch": 0.1833979565844961, + "grad_norm": 1.0793607234954834, + "learning_rate": 1.4868379828711991e-05, + "loss": 0.6784, + "step": 405 + }, + { + "epoch": 0.18385079104519855, + "grad_norm": 0.9725819826126099, + "learning_rate": 1.4867714522390934e-05, + "loss": 0.7212, + "step": 406 + }, + { + "epoch": 0.184303625505901, + "grad_norm": 0.9909386038780212, + "learning_rate": 1.4867047553790007e-05, + "loss": 0.67, + "step": 407 + }, + { + "epoch": 0.18475645996660345, + "grad_norm": 0.993817925453186, + "learning_rate": 1.4866378923059687e-05, + "loss": 0.6186, + "step": 408 + }, + { + "epoch": 0.1852092944273059, + "grad_norm": 1.0599784851074219, + "learning_rate": 1.4865708630350834e-05, + "loss": 0.674, + "step": 409 + }, + { + "epoch": 0.18566212888800837, + "grad_norm": 1.0941828489303589, + "learning_rate": 1.4865036675814669e-05, + "loss": 0.6587, + "step": 410 + }, + { + "epoch": 0.18611496334871083, + "grad_norm": 3.841005802154541, + "learning_rate": 1.48643630596028e-05, + "loss": 0.729, + "step": 411 + }, + { + "epoch": 0.1865677978094133, + "grad_norm": 1.5443971157073975, + "learning_rate": 1.4863687781867209e-05, + "loss": 0.6768, + "step": 412 + }, + { + "epoch": 0.18702063227011576, + "grad_norm": 0.9371129870414734, + "learning_rate": 1.4863010842760246e-05, + "loss": 0.6169, + "step": 413 + }, + { + "epoch": 0.18747346673081822, + "grad_norm": 1.437848448753357, + "learning_rate": 1.4862332242434639e-05, + "loss": 0.7218, + "step": 414 + }, + { + "epoch": 0.18792630119152068, + "grad_norm": 1.4850507974624634, + "learning_rate": 1.4861651981043495e-05, + "loss": 0.6684, + "step": 415 + }, + { + "epoch": 0.18837913565222314, + "grad_norm": 0.7159014940261841, + "learning_rate": 1.486097005874029e-05, + "loss": 0.6782, + "step": 416 + }, + { + "epoch": 0.1888319701129256, + "grad_norm": 1.2282770872116089, + "learning_rate": 1.4860286475678877e-05, + "loss": 0.7224, + "step": 417 + }, + { + "epoch": 0.18928480457362806, + "grad_norm": 1.0234930515289307, + "learning_rate": 1.4859601232013488e-05, + "loss": 0.693, + "step": 418 + }, + { + "epoch": 0.18973763903433052, + "grad_norm": 0.8315566778182983, + "learning_rate": 1.485891432789872e-05, + "loss": 0.6348, + "step": 419 + }, + { + "epoch": 0.19019047349503299, + "grad_norm": 1.353355884552002, + "learning_rate": 1.4858225763489552e-05, + "loss": 0.7412, + "step": 420 + }, + { + "epoch": 0.19064330795573542, + "grad_norm": 0.8341365456581116, + "learning_rate": 1.4857535538941339e-05, + 
"loss": 0.6768, + "step": 421 + }, + { + "epoch": 0.19109614241643788, + "grad_norm": 0.8955153226852417, + "learning_rate": 1.4856843654409802e-05, + "loss": 0.65, + "step": 422 + }, + { + "epoch": 0.19154897687714034, + "grad_norm": 0.8571435809135437, + "learning_rate": 1.4856150110051044e-05, + "loss": 0.6437, + "step": 423 + }, + { + "epoch": 0.1920018113378428, + "grad_norm": 0.76616370677948, + "learning_rate": 1.485545490602154e-05, + "loss": 0.6599, + "step": 424 + }, + { + "epoch": 0.19245464579854527, + "grad_norm": 0.7934353947639465, + "learning_rate": 1.485475804247814e-05, + "loss": 0.6271, + "step": 425 + }, + { + "epoch": 0.19290748025924773, + "grad_norm": 0.8939810395240784, + "learning_rate": 1.4854059519578069e-05, + "loss": 0.659, + "step": 426 + }, + { + "epoch": 0.1933603147199502, + "grad_norm": 0.6890648007392883, + "learning_rate": 1.4853359337478923e-05, + "loss": 0.7456, + "step": 427 + }, + { + "epoch": 0.19381314918065265, + "grad_norm": 0.8249607682228088, + "learning_rate": 1.4852657496338678e-05, + "loss": 0.6701, + "step": 428 + }, + { + "epoch": 0.1942659836413551, + "grad_norm": 0.8182385563850403, + "learning_rate": 1.4851953996315678e-05, + "loss": 0.6081, + "step": 429 + }, + { + "epoch": 0.19471881810205757, + "grad_norm": 0.7631829977035522, + "learning_rate": 1.4851248837568646e-05, + "loss": 0.5855, + "step": 430 + }, + { + "epoch": 0.19517165256276003, + "grad_norm": 0.9246631860733032, + "learning_rate": 1.4850542020256677e-05, + "loss": 0.6469, + "step": 431 + }, + { + "epoch": 0.1956244870234625, + "grad_norm": 0.7482291460037231, + "learning_rate": 1.4849833544539242e-05, + "loss": 0.6801, + "step": 432 + }, + { + "epoch": 0.19607732148416496, + "grad_norm": 0.8039886951446533, + "learning_rate": 1.4849123410576183e-05, + "loss": 0.71, + "step": 433 + }, + { + "epoch": 0.1965301559448674, + "grad_norm": 0.7532901763916016, + "learning_rate": 1.484841161852772e-05, + "loss": 0.6619, + "step": 434 + }, + { + "epoch": 0.19698299040556985, + "grad_norm": 0.7386736273765564, + "learning_rate": 1.4847698168554447e-05, + "loss": 0.69, + "step": 435 + }, + { + "epoch": 0.1974358248662723, + "grad_norm": 0.6776562929153442, + "learning_rate": 1.4846983060817324e-05, + "loss": 0.6575, + "step": 436 + }, + { + "epoch": 0.19788865932697478, + "grad_norm": 0.7260106205940247, + "learning_rate": 1.4846266295477698e-05, + "loss": 0.6311, + "step": 437 + }, + { + "epoch": 0.19834149378767724, + "grad_norm": 0.7524548172950745, + "learning_rate": 1.4845547872697279e-05, + "loss": 0.6287, + "step": 438 + }, + { + "epoch": 0.1987943282483797, + "grad_norm": 0.8071796298027039, + "learning_rate": 1.4844827792638158e-05, + "loss": 0.6258, + "step": 439 + }, + { + "epoch": 0.19924716270908216, + "grad_norm": 0.8051711320877075, + "learning_rate": 1.4844106055462793e-05, + "loss": 0.6966, + "step": 440 + }, + { + "epoch": 0.19969999716978462, + "grad_norm": 0.8135205507278442, + "learning_rate": 1.4843382661334025e-05, + "loss": 0.6637, + "step": 441 + }, + { + "epoch": 0.20015283163048708, + "grad_norm": 0.8501531481742859, + "learning_rate": 1.4842657610415061e-05, + "loss": 0.6969, + "step": 442 + }, + { + "epoch": 0.20060566609118954, + "grad_norm": 0.7646431922912598, + "learning_rate": 1.4841930902869486e-05, + "loss": 0.6786, + "step": 443 + }, + { + "epoch": 0.201058500551892, + "grad_norm": 0.7486881613731384, + "learning_rate": 1.4841202538861255e-05, + "loss": 0.6009, + "step": 444 + }, + { + "epoch": 0.20151133501259447, + "grad_norm": 
0.7672154307365417, + "learning_rate": 1.4840472518554702e-05, + "loss": 0.6538, + "step": 445 + }, + { + "epoch": 0.20196416947329693, + "grad_norm": 0.7490094900131226, + "learning_rate": 1.4839740842114529e-05, + "loss": 0.624, + "step": 446 + }, + { + "epoch": 0.2024170039339994, + "grad_norm": 0.6690830588340759, + "learning_rate": 1.4839007509705819e-05, + "loss": 0.6335, + "step": 447 + }, + { + "epoch": 0.20286983839470182, + "grad_norm": 0.7209163308143616, + "learning_rate": 1.483827252149402e-05, + "loss": 0.596, + "step": 448 + }, + { + "epoch": 0.20332267285540429, + "grad_norm": 0.7429115772247314, + "learning_rate": 1.4837535877644957e-05, + "loss": 0.6344, + "step": 449 + }, + { + "epoch": 0.20377550731610675, + "grad_norm": 0.7298805713653564, + "learning_rate": 1.4836797578324833e-05, + "loss": 0.6197, + "step": 450 + }, + { + "epoch": 0.2042283417768092, + "grad_norm": 0.750446081161499, + "learning_rate": 1.4836057623700218e-05, + "loss": 0.5782, + "step": 451 + }, + { + "epoch": 0.20468117623751167, + "grad_norm": 0.811336100101471, + "learning_rate": 1.483531601393806e-05, + "loss": 0.6411, + "step": 452 + }, + { + "epoch": 0.20513401069821413, + "grad_norm": 0.7158386707305908, + "learning_rate": 1.4834572749205675e-05, + "loss": 0.6706, + "step": 453 + }, + { + "epoch": 0.2055868451589166, + "grad_norm": 0.6980316042900085, + "learning_rate": 1.483382782967076e-05, + "loss": 0.607, + "step": 454 + }, + { + "epoch": 0.20603967961961905, + "grad_norm": 0.7448428273200989, + "learning_rate": 1.483308125550138e-05, + "loss": 0.666, + "step": 455 + }, + { + "epoch": 0.20649251408032152, + "grad_norm": 0.6907296776771545, + "learning_rate": 1.4832333026865973e-05, + "loss": 0.6145, + "step": 456 + }, + { + "epoch": 0.20694534854102398, + "grad_norm": 0.7163165211677551, + "learning_rate": 1.4831583143933353e-05, + "loss": 0.6306, + "step": 457 + }, + { + "epoch": 0.20739818300172644, + "grad_norm": 0.6344444155693054, + "learning_rate": 1.483083160687271e-05, + "loss": 0.619, + "step": 458 + }, + { + "epoch": 0.2078510174624289, + "grad_norm": 0.6727322936058044, + "learning_rate": 1.4830078415853596e-05, + "loss": 0.6618, + "step": 459 + }, + { + "epoch": 0.20830385192313136, + "grad_norm": 0.7381990551948547, + "learning_rate": 1.482932357104595e-05, + "loss": 0.5994, + "step": 460 + }, + { + "epoch": 0.20875668638383382, + "grad_norm": 0.7138816118240356, + "learning_rate": 1.4828567072620074e-05, + "loss": 0.6549, + "step": 461 + }, + { + "epoch": 0.20920952084453626, + "grad_norm": 0.708865761756897, + "learning_rate": 1.4827808920746646e-05, + "loss": 0.6603, + "step": 462 + }, + { + "epoch": 0.20966235530523872, + "grad_norm": 0.7122054696083069, + "learning_rate": 1.4827049115596722e-05, + "loss": 0.6423, + "step": 463 + }, + { + "epoch": 0.21011518976594118, + "grad_norm": 0.7929750680923462, + "learning_rate": 1.4826287657341723e-05, + "loss": 0.6772, + "step": 464 + }, + { + "epoch": 0.21056802422664364, + "grad_norm": 0.7480528354644775, + "learning_rate": 1.4825524546153449e-05, + "loss": 0.6118, + "step": 465 + }, + { + "epoch": 0.2110208586873461, + "grad_norm": 0.8834244608879089, + "learning_rate": 1.4824759782204068e-05, + "loss": 0.622, + "step": 466 + }, + { + "epoch": 0.21147369314804856, + "grad_norm": 0.6908849477767944, + "learning_rate": 1.4823993365666127e-05, + "loss": 0.6625, + "step": 467 + }, + { + "epoch": 0.21192652760875103, + "grad_norm": 0.7661241888999939, + "learning_rate": 1.4823225296712539e-05, + "loss": 0.6409, + "step": 468 + }, + 
{ + "epoch": 0.2123793620694535, + "grad_norm": 0.6954145431518555, + "learning_rate": 1.4822455575516595e-05, + "loss": 0.5925, + "step": 469 + }, + { + "epoch": 0.21283219653015595, + "grad_norm": 0.6936039924621582, + "learning_rate": 1.4821684202251958e-05, + "loss": 0.6555, + "step": 470 + }, + { + "epoch": 0.2132850309908584, + "grad_norm": 0.8155099749565125, + "learning_rate": 1.482091117709266e-05, + "loss": 0.6868, + "step": 471 + }, + { + "epoch": 0.21373786545156087, + "grad_norm": 0.6670776605606079, + "learning_rate": 1.4820136500213112e-05, + "loss": 0.602, + "step": 472 + }, + { + "epoch": 0.21419069991226333, + "grad_norm": 0.6749069690704346, + "learning_rate": 1.4819360171788092e-05, + "loss": 0.5757, + "step": 473 + }, + { + "epoch": 0.2146435343729658, + "grad_norm": 0.8901804685592651, + "learning_rate": 1.4818582191992752e-05, + "loss": 0.6199, + "step": 474 + }, + { + "epoch": 0.21509636883366826, + "grad_norm": 1.0478390455245972, + "learning_rate": 1.4817802561002619e-05, + "loss": 0.5611, + "step": 475 + }, + { + "epoch": 0.2155492032943707, + "grad_norm": 0.679456353187561, + "learning_rate": 1.4817021278993588e-05, + "loss": 0.5717, + "step": 476 + }, + { + "epoch": 0.21600203775507315, + "grad_norm": 1.1234318017959595, + "learning_rate": 1.4816238346141934e-05, + "loss": 0.6477, + "step": 477 + }, + { + "epoch": 0.2164548722157756, + "grad_norm": 0.7910907864570618, + "learning_rate": 1.4815453762624295e-05, + "loss": 0.5973, + "step": 478 + }, + { + "epoch": 0.21690770667647807, + "grad_norm": 0.8617794513702393, + "learning_rate": 1.481466752861769e-05, + "loss": 0.5653, + "step": 479 + }, + { + "epoch": 0.21736054113718054, + "grad_norm": 0.7190772294998169, + "learning_rate": 1.4813879644299502e-05, + "loss": 0.6063, + "step": 480 + }, + { + "epoch": 0.217813375597883, + "grad_norm": 0.9855300188064575, + "learning_rate": 1.4813090109847495e-05, + "loss": 0.6402, + "step": 481 + }, + { + "epoch": 0.21826621005858546, + "grad_norm": 0.7089081406593323, + "learning_rate": 1.4812298925439799e-05, + "loss": 0.6129, + "step": 482 + }, + { + "epoch": 0.21871904451928792, + "grad_norm": 0.8527977466583252, + "learning_rate": 1.4811506091254922e-05, + "loss": 0.6116, + "step": 483 + }, + { + "epoch": 0.21917187897999038, + "grad_norm": 0.8156578540802002, + "learning_rate": 1.4810711607471737e-05, + "loss": 0.6608, + "step": 484 + }, + { + "epoch": 0.21962471344069284, + "grad_norm": 0.7411708235740662, + "learning_rate": 1.4809915474269493e-05, + "loss": 0.5877, + "step": 485 + }, + { + "epoch": 0.2200775479013953, + "grad_norm": 0.9931669235229492, + "learning_rate": 1.4809117691827812e-05, + "loss": 0.6118, + "step": 486 + }, + { + "epoch": 0.22053038236209777, + "grad_norm": 0.79165118932724, + "learning_rate": 1.4808318260326687e-05, + "loss": 0.5565, + "step": 487 + }, + { + "epoch": 0.22098321682280023, + "grad_norm": 0.8213533163070679, + "learning_rate": 1.4807517179946483e-05, + "loss": 0.6048, + "step": 488 + }, + { + "epoch": 0.2214360512835027, + "grad_norm": 0.7628992795944214, + "learning_rate": 1.4806714450867937e-05, + "loss": 0.5837, + "step": 489 + }, + { + "epoch": 0.22188888574420512, + "grad_norm": 0.7759009003639221, + "learning_rate": 1.4805910073272159e-05, + "loss": 0.6224, + "step": 490 + }, + { + "epoch": 0.22234172020490758, + "grad_norm": 0.7115432024002075, + "learning_rate": 1.4805104047340628e-05, + "loss": 0.6427, + "step": 491 + }, + { + "epoch": 0.22279455466561005, + "grad_norm": 0.756812334060669, + "learning_rate": 
1.4804296373255201e-05, + "loss": 0.6536, + "step": 492 + }, + { + "epoch": 0.2232473891263125, + "grad_norm": 0.7481681108474731, + "learning_rate": 1.48034870511981e-05, + "loss": 0.6257, + "step": 493 + }, + { + "epoch": 0.22370022358701497, + "grad_norm": 0.7567417621612549, + "learning_rate": 1.4802676081351922e-05, + "loss": 0.5999, + "step": 494 + }, + { + "epoch": 0.22415305804771743, + "grad_norm": 0.8136939406394958, + "learning_rate": 1.4801863463899634e-05, + "loss": 0.6432, + "step": 495 + }, + { + "epoch": 0.2246058925084199, + "grad_norm": 0.7277612686157227, + "learning_rate": 1.4801049199024577e-05, + "loss": 0.6153, + "step": 496 + }, + { + "epoch": 0.22505872696912235, + "grad_norm": 0.8077271580696106, + "learning_rate": 1.4800233286910462e-05, + "loss": 0.5886, + "step": 497 + }, + { + "epoch": 0.22551156142982481, + "grad_norm": 0.8224574327468872, + "learning_rate": 1.4799415727741376e-05, + "loss": 0.5642, + "step": 498 + }, + { + "epoch": 0.22596439589052728, + "grad_norm": 0.7653172612190247, + "learning_rate": 1.479859652170177e-05, + "loss": 0.605, + "step": 499 + }, + { + "epoch": 0.22641723035122974, + "grad_norm": 0.74990314245224, + "learning_rate": 1.4797775668976473e-05, + "loss": 0.619, + "step": 500 + }, + { + "epoch": 0.2268700648119322, + "grad_norm": 0.8035469055175781, + "learning_rate": 1.4796953169750684e-05, + "loss": 0.5863, + "step": 501 + }, + { + "epoch": 0.22732289927263466, + "grad_norm": 0.7987021207809448, + "learning_rate": 1.4796129024209968e-05, + "loss": 0.5786, + "step": 502 + }, + { + "epoch": 0.2277757337333371, + "grad_norm": 0.7279565334320068, + "learning_rate": 1.4795303232540272e-05, + "loss": 0.646, + "step": 503 + }, + { + "epoch": 0.22822856819403955, + "grad_norm": 0.753925621509552, + "learning_rate": 1.4794475794927902e-05, + "loss": 0.6106, + "step": 504 + }, + { + "epoch": 0.22868140265474202, + "grad_norm": 0.8243687152862549, + "learning_rate": 1.4793646711559549e-05, + "loss": 0.5846, + "step": 505 + }, + { + "epoch": 0.22913423711544448, + "grad_norm": 0.7034870982170105, + "learning_rate": 1.4792815982622264e-05, + "loss": 0.6537, + "step": 506 + }, + { + "epoch": 0.22958707157614694, + "grad_norm": 0.7104423642158508, + "learning_rate": 1.4791983608303472e-05, + "loss": 0.6009, + "step": 507 + }, + { + "epoch": 0.2300399060368494, + "grad_norm": 0.8007641434669495, + "learning_rate": 1.4791149588790973e-05, + "loss": 0.5634, + "step": 508 + }, + { + "epoch": 0.23049274049755186, + "grad_norm": 1.0584112405776978, + "learning_rate": 1.4790313924272935e-05, + "loss": 0.5732, + "step": 509 + }, + { + "epoch": 0.23094557495825432, + "grad_norm": 0.7870271801948547, + "learning_rate": 1.4789476614937899e-05, + "loss": 0.6032, + "step": 510 + }, + { + "epoch": 0.23139840941895679, + "grad_norm": 0.8935226202011108, + "learning_rate": 1.4788637660974773e-05, + "loss": 0.5845, + "step": 511 + }, + { + "epoch": 0.23185124387965925, + "grad_norm": 0.7717947363853455, + "learning_rate": 1.4787797062572842e-05, + "loss": 0.6126, + "step": 512 + }, + { + "epoch": 0.2323040783403617, + "grad_norm": 0.8335773944854736, + "learning_rate": 1.4786954819921759e-05, + "loss": 0.6342, + "step": 513 + }, + { + "epoch": 0.23275691280106417, + "grad_norm": 0.7727064490318298, + "learning_rate": 1.4786110933211548e-05, + "loss": 0.5998, + "step": 514 + }, + { + "epoch": 0.23320974726176663, + "grad_norm": 0.8422814011573792, + "learning_rate": 1.4785265402632602e-05, + "loss": 0.6381, + "step": 515 + }, + { + "epoch": 0.2336625817224691, 
+ "grad_norm": 0.8143975138664246, + "learning_rate": 1.4784418228375688e-05, + "loss": 0.5694, + "step": 516 + }, + { + "epoch": 0.23411541618317153, + "grad_norm": 0.764120876789093, + "learning_rate": 1.4783569410631942e-05, + "loss": 0.566, + "step": 517 + }, + { + "epoch": 0.234568250643874, + "grad_norm": 0.8712199926376343, + "learning_rate": 1.4782718949592873e-05, + "loss": 0.6068, + "step": 518 + }, + { + "epoch": 0.23502108510457645, + "grad_norm": 0.8048343062400818, + "learning_rate": 1.4781866845450361e-05, + "loss": 0.5919, + "step": 519 + }, + { + "epoch": 0.2354739195652789, + "grad_norm": 0.8512641191482544, + "learning_rate": 1.478101309839665e-05, + "loss": 0.6131, + "step": 520 + }, + { + "epoch": 0.23592675402598137, + "grad_norm": 0.7001386880874634, + "learning_rate": 1.4780157708624364e-05, + "loss": 0.5971, + "step": 521 + }, + { + "epoch": 0.23637958848668383, + "grad_norm": 0.7913312911987305, + "learning_rate": 1.477930067632649e-05, + "loss": 0.6156, + "step": 522 + }, + { + "epoch": 0.2368324229473863, + "grad_norm": 0.7557013034820557, + "learning_rate": 1.4778442001696392e-05, + "loss": 0.5684, + "step": 523 + }, + { + "epoch": 0.23728525740808876, + "grad_norm": 0.6847025156021118, + "learning_rate": 1.4777581684927797e-05, + "loss": 0.6304, + "step": 524 + }, + { + "epoch": 0.23773809186879122, + "grad_norm": 0.7465041875839233, + "learning_rate": 1.4776719726214811e-05, + "loss": 0.5996, + "step": 525 + }, + { + "epoch": 0.23819092632949368, + "grad_norm": 0.776152491569519, + "learning_rate": 1.4775856125751907e-05, + "loss": 0.6457, + "step": 526 + }, + { + "epoch": 0.23864376079019614, + "grad_norm": 0.7176225185394287, + "learning_rate": 1.4774990883733924e-05, + "loss": 0.6206, + "step": 527 + }, + { + "epoch": 0.2390965952508986, + "grad_norm": 0.6723232865333557, + "learning_rate": 1.4774124000356074e-05, + "loss": 0.5649, + "step": 528 + }, + { + "epoch": 0.23954942971160106, + "grad_norm": 0.7886945605278015, + "learning_rate": 1.4773255475813948e-05, + "loss": 0.6057, + "step": 529 + }, + { + "epoch": 0.24000226417230353, + "grad_norm": 0.8538393974304199, + "learning_rate": 1.4772385310303492e-05, + "loss": 0.5672, + "step": 530 + }, + { + "epoch": 0.24045509863300596, + "grad_norm": 0.8311111927032471, + "learning_rate": 1.4771513504021034e-05, + "loss": 0.6067, + "step": 531 + }, + { + "epoch": 0.24090793309370842, + "grad_norm": 0.6744326949119568, + "learning_rate": 1.4770640057163265e-05, + "loss": 0.5392, + "step": 532 + }, + { + "epoch": 0.24136076755441088, + "grad_norm": 0.8035153150558472, + "learning_rate": 1.4769764969927255e-05, + "loss": 0.5968, + "step": 533 + }, + { + "epoch": 0.24181360201511334, + "grad_norm": 0.9007952213287354, + "learning_rate": 1.4768888242510433e-05, + "loss": 0.5998, + "step": 534 + }, + { + "epoch": 0.2422664364758158, + "grad_norm": 1.0340895652770996, + "learning_rate": 1.4768009875110603e-05, + "loss": 0.6269, + "step": 535 + }, + { + "epoch": 0.24271927093651827, + "grad_norm": 0.7285747528076172, + "learning_rate": 1.4767129867925942e-05, + "loss": 0.6182, + "step": 536 + }, + { + "epoch": 0.24317210539722073, + "grad_norm": 0.7769334316253662, + "learning_rate": 1.4766248221154996e-05, + "loss": 0.6133, + "step": 537 + }, + { + "epoch": 0.2436249398579232, + "grad_norm": 0.8429601788520813, + "learning_rate": 1.4765364934996674e-05, + "loss": 0.5653, + "step": 538 + }, + { + "epoch": 0.24407777431862565, + "grad_norm": 0.7434231638908386, + "learning_rate": 1.4764480009650264e-05, + "loss": 
0.5946, + "step": 539 + }, + { + "epoch": 0.2445306087793281, + "grad_norm": 0.8999215960502625, + "learning_rate": 1.4763593445315422e-05, + "loss": 0.5909, + "step": 540 + }, + { + "epoch": 0.24498344324003057, + "grad_norm": 0.749334454536438, + "learning_rate": 1.4762705242192165e-05, + "loss": 0.6052, + "step": 541 + }, + { + "epoch": 0.24543627770073304, + "grad_norm": 0.8988975882530212, + "learning_rate": 1.4761815400480892e-05, + "loss": 0.5615, + "step": 542 + }, + { + "epoch": 0.2458891121614355, + "grad_norm": 0.7588968276977539, + "learning_rate": 1.4760923920382364e-05, + "loss": 0.5623, + "step": 543 + }, + { + "epoch": 0.24634194662213796, + "grad_norm": 0.8823333978652954, + "learning_rate": 1.4760030802097715e-05, + "loss": 0.5761, + "step": 544 + }, + { + "epoch": 0.2467947810828404, + "grad_norm": 0.6606689095497131, + "learning_rate": 1.4759136045828447e-05, + "loss": 0.5995, + "step": 545 + }, + { + "epoch": 0.24724761554354285, + "grad_norm": 0.9664527773857117, + "learning_rate": 1.4758239651776434e-05, + "loss": 0.6049, + "step": 546 + }, + { + "epoch": 0.24770045000424532, + "grad_norm": 0.6853055357933044, + "learning_rate": 1.4757341620143913e-05, + "loss": 0.6096, + "step": 547 + }, + { + "epoch": 0.24815328446494778, + "grad_norm": 0.845271110534668, + "learning_rate": 1.4756441951133497e-05, + "loss": 0.607, + "step": 548 + }, + { + "epoch": 0.24860611892565024, + "grad_norm": 0.7086828947067261, + "learning_rate": 1.4755540644948168e-05, + "loss": 0.5752, + "step": 549 + }, + { + "epoch": 0.2490589533863527, + "grad_norm": 0.7899136543273926, + "learning_rate": 1.4754637701791273e-05, + "loss": 0.5464, + "step": 550 + }, + { + "epoch": 0.24951178784705516, + "grad_norm": 0.7464292645454407, + "learning_rate": 1.4753733121866534e-05, + "loss": 0.5789, + "step": 551 + }, + { + "epoch": 0.24996462230775762, + "grad_norm": 0.8088657259941101, + "learning_rate": 1.4752826905378039e-05, + "loss": 0.6007, + "step": 552 + }, + { + "epoch": 0.24996462230775762, + "eval_loss": 0.5881260633468628, + "eval_runtime": 19.4408, + "eval_samples_per_second": 5.864, + "eval_steps_per_second": 0.772, + "step": 552 + }, + { + "epoch": 0.25041745676846006, + "grad_norm": 0.8311989307403564, + "learning_rate": 1.4751919052530245e-05, + "loss": 0.5806, + "step": 553 + }, + { + "epoch": 0.25087029122916255, + "grad_norm": 0.9002801179885864, + "learning_rate": 1.4751009563527977e-05, + "loss": 0.5398, + "step": 554 + }, + { + "epoch": 0.251323125689865, + "grad_norm": 0.8495317697525024, + "learning_rate": 1.4750098438576434e-05, + "loss": 0.6073, + "step": 555 + }, + { + "epoch": 0.25177596015056747, + "grad_norm": 0.7978039979934692, + "learning_rate": 1.474918567788118e-05, + "loss": 0.6079, + "step": 556 + }, + { + "epoch": 0.2522287946112699, + "grad_norm": 0.7676923274993896, + "learning_rate": 1.4748271281648145e-05, + "loss": 0.633, + "step": 557 + }, + { + "epoch": 0.2526816290719724, + "grad_norm": 0.7194515466690063, + "learning_rate": 1.4747355250083639e-05, + "loss": 0.5659, + "step": 558 + }, + { + "epoch": 0.2531344635326748, + "grad_norm": 0.8001179099082947, + "learning_rate": 1.474643758339433e-05, + "loss": 0.56, + "step": 559 + }, + { + "epoch": 0.2535872979933773, + "grad_norm": 0.7714060544967651, + "learning_rate": 1.4745518281787259e-05, + "loss": 0.553, + "step": 560 + }, + { + "epoch": 0.25404013245407975, + "grad_norm": 0.8208261132240295, + "learning_rate": 1.474459734546984e-05, + "loss": 0.614, + "step": 561 + }, + { + "epoch": 0.25449296691478224, + 
"grad_norm": 0.6942229866981506, + "learning_rate": 1.4743674774649845e-05, + "loss": 0.5483, + "step": 562 + }, + { + "epoch": 0.25494580137548467, + "grad_norm": 0.7569373250007629, + "learning_rate": 1.4742750569535425e-05, + "loss": 0.615, + "step": 563 + }, + { + "epoch": 0.25539863583618716, + "grad_norm": 0.7039315700531006, + "learning_rate": 1.4741824730335098e-05, + "loss": 0.5252, + "step": 564 + }, + { + "epoch": 0.2558514702968896, + "grad_norm": 0.8170211315155029, + "learning_rate": 1.4740897257257745e-05, + "loss": 0.6154, + "step": 565 + }, + { + "epoch": 0.256304304757592, + "grad_norm": 0.688449501991272, + "learning_rate": 1.4739968150512622e-05, + "loss": 0.5537, + "step": 566 + }, + { + "epoch": 0.2567571392182945, + "grad_norm": 0.74798983335495, + "learning_rate": 1.473903741030935e-05, + "loss": 0.5854, + "step": 567 + }, + { + "epoch": 0.25720997367899695, + "grad_norm": 0.7894384264945984, + "learning_rate": 1.4738105036857921e-05, + "loss": 0.5017, + "step": 568 + }, + { + "epoch": 0.25766280813969944, + "grad_norm": 0.8515793085098267, + "learning_rate": 1.4737171030368693e-05, + "loss": 0.5644, + "step": 569 + }, + { + "epoch": 0.2581156426004019, + "grad_norm": 0.9285520315170288, + "learning_rate": 1.4736235391052393e-05, + "loss": 0.5714, + "step": 570 + }, + { + "epoch": 0.25856847706110436, + "grad_norm": 0.7274629473686218, + "learning_rate": 1.4735298119120116e-05, + "loss": 0.4978, + "step": 571 + }, + { + "epoch": 0.2590213115218068, + "grad_norm": 1.1050032377243042, + "learning_rate": 1.473435921478333e-05, + "loss": 0.5982, + "step": 572 + }, + { + "epoch": 0.2594741459825093, + "grad_norm": 0.8430832028388977, + "learning_rate": 1.4733418678253865e-05, + "loss": 0.5126, + "step": 573 + }, + { + "epoch": 0.2599269804432117, + "grad_norm": 0.7927145957946777, + "learning_rate": 1.4732476509743919e-05, + "loss": 0.5066, + "step": 574 + }, + { + "epoch": 0.2603798149039142, + "grad_norm": 0.8087596893310547, + "learning_rate": 1.473153270946607e-05, + "loss": 0.6461, + "step": 575 + }, + { + "epoch": 0.26083264936461664, + "grad_norm": 1.0339022874832153, + "learning_rate": 1.4730587277633245e-05, + "loss": 0.6035, + "step": 576 + }, + { + "epoch": 0.26128548382531913, + "grad_norm": 0.8909124732017517, + "learning_rate": 1.4729640214458754e-05, + "loss": 0.573, + "step": 577 + }, + { + "epoch": 0.26173831828602157, + "grad_norm": 1.0848407745361328, + "learning_rate": 1.4728691520156269e-05, + "loss": 0.5968, + "step": 578 + }, + { + "epoch": 0.262191152746724, + "grad_norm": 0.8249455094337463, + "learning_rate": 1.4727741194939832e-05, + "loss": 0.5785, + "step": 579 + }, + { + "epoch": 0.2626439872074265, + "grad_norm": 1.190679669380188, + "learning_rate": 1.4726789239023856e-05, + "loss": 0.597, + "step": 580 + }, + { + "epoch": 0.2630968216681289, + "grad_norm": 0.784512460231781, + "learning_rate": 1.4725835652623112e-05, + "loss": 0.534, + "step": 581 + }, + { + "epoch": 0.2635496561288314, + "grad_norm": 0.8939950466156006, + "learning_rate": 1.4724880435952747e-05, + "loss": 0.5271, + "step": 582 + }, + { + "epoch": 0.26400249058953384, + "grad_norm": 0.7243357300758362, + "learning_rate": 1.4723923589228276e-05, + "loss": 0.5108, + "step": 583 + }, + { + "epoch": 0.26445532505023633, + "grad_norm": 0.7685579657554626, + "learning_rate": 1.4722965112665577e-05, + "loss": 0.5129, + "step": 584 + }, + { + "epoch": 0.26490815951093877, + "grad_norm": 0.8200727105140686, + "learning_rate": 1.47220050064809e-05, + "loss": 0.5488, + "step": 585 + 
}, + { + "epoch": 0.26536099397164126, + "grad_norm": 0.8088552355766296, + "learning_rate": 1.4721043270890862e-05, + "loss": 0.503, + "step": 586 + }, + { + "epoch": 0.2658138284323437, + "grad_norm": 0.7941896915435791, + "learning_rate": 1.4720079906112444e-05, + "loss": 0.5661, + "step": 587 + }, + { + "epoch": 0.2662666628930462, + "grad_norm": 0.7164261937141418, + "learning_rate": 1.4719114912363e-05, + "loss": 0.5767, + "step": 588 + }, + { + "epoch": 0.2667194973537486, + "grad_norm": 0.92717444896698, + "learning_rate": 1.4718148289860247e-05, + "loss": 0.5572, + "step": 589 + }, + { + "epoch": 0.2671723318144511, + "grad_norm": 0.7904344797134399, + "learning_rate": 1.471718003882227e-05, + "loss": 0.5009, + "step": 590 + }, + { + "epoch": 0.26762516627515354, + "grad_norm": 0.8072219491004944, + "learning_rate": 1.4716210159467527e-05, + "loss": 0.5609, + "step": 591 + }, + { + "epoch": 0.268078000735856, + "grad_norm": 0.7373447418212891, + "learning_rate": 1.4715238652014834e-05, + "loss": 0.5378, + "step": 592 + }, + { + "epoch": 0.26853083519655846, + "grad_norm": 0.8908963203430176, + "learning_rate": 1.4714265516683383e-05, + "loss": 0.5076, + "step": 593 + }, + { + "epoch": 0.2689836696572609, + "grad_norm": 0.7174452543258667, + "learning_rate": 1.4713290753692728e-05, + "loss": 0.5344, + "step": 594 + }, + { + "epoch": 0.2694365041179634, + "grad_norm": 0.7640471458435059, + "learning_rate": 1.471231436326279e-05, + "loss": 0.5525, + "step": 595 + }, + { + "epoch": 0.2698893385786658, + "grad_norm": 0.8122929334640503, + "learning_rate": 1.4711336345613864e-05, + "loss": 0.5774, + "step": 596 + }, + { + "epoch": 0.2703421730393683, + "grad_norm": 0.740322470664978, + "learning_rate": 1.4710356700966603e-05, + "loss": 0.5363, + "step": 597 + }, + { + "epoch": 0.27079500750007074, + "grad_norm": 0.7563768625259399, + "learning_rate": 1.4709375429542034e-05, + "loss": 0.5507, + "step": 598 + }, + { + "epoch": 0.27124784196077323, + "grad_norm": 0.6652012467384338, + "learning_rate": 1.4708392531561544e-05, + "loss": 0.538, + "step": 599 + }, + { + "epoch": 0.27170067642147566, + "grad_norm": 0.785305917263031, + "learning_rate": 1.4707408007246898e-05, + "loss": 0.6097, + "step": 600 + }, + { + "epoch": 0.27215351088217815, + "grad_norm": 0.7112635970115662, + "learning_rate": 1.4706421856820214e-05, + "loss": 0.5993, + "step": 601 + }, + { + "epoch": 0.2726063453428806, + "grad_norm": 0.7882818579673767, + "learning_rate": 1.4705434080503988e-05, + "loss": 0.5176, + "step": 602 + }, + { + "epoch": 0.2730591798035831, + "grad_norm": 1.0064949989318848, + "learning_rate": 1.470444467852108e-05, + "loss": 0.5876, + "step": 603 + }, + { + "epoch": 0.2735120142642855, + "grad_norm": 0.8223081827163696, + "learning_rate": 1.4703453651094713e-05, + "loss": 0.5901, + "step": 604 + }, + { + "epoch": 0.273964848724988, + "grad_norm": 0.9124444723129272, + "learning_rate": 1.4702460998448482e-05, + "loss": 0.5698, + "step": 605 + }, + { + "epoch": 0.27441768318569043, + "grad_norm": 0.8722511529922485, + "learning_rate": 1.4701466720806342e-05, + "loss": 0.6134, + "step": 606 + }, + { + "epoch": 0.27487051764639286, + "grad_norm": 0.6496700048446655, + "learning_rate": 1.4700470818392621e-05, + "loss": 0.5107, + "step": 607 + }, + { + "epoch": 0.27532335210709535, + "grad_norm": 0.696812629699707, + "learning_rate": 1.4699473291432012e-05, + "loss": 0.5341, + "step": 608 + }, + { + "epoch": 0.2757761865677978, + "grad_norm": 0.7689089775085449, + "learning_rate": 
1.4698474140149576e-05, + "loss": 0.5457, + "step": 609 + }, + { + "epoch": 0.2762290210285003, + "grad_norm": 0.7031781673431396, + "learning_rate": 1.4697473364770731e-05, + "loss": 0.5142, + "step": 610 + }, + { + "epoch": 0.2766818554892027, + "grad_norm": 0.8002460598945618, + "learning_rate": 1.4696470965521276e-05, + "loss": 0.585, + "step": 611 + }, + { + "epoch": 0.2771346899499052, + "grad_norm": 0.8532388210296631, + "learning_rate": 1.4695466942627365e-05, + "loss": 0.5656, + "step": 612 + }, + { + "epoch": 0.27758752441060763, + "grad_norm": 0.7644308805465698, + "learning_rate": 1.4694461296315525e-05, + "loss": 0.5082, + "step": 613 + }, + { + "epoch": 0.2780403588713101, + "grad_norm": 0.8224950432777405, + "learning_rate": 1.4693454026812643e-05, + "loss": 0.5058, + "step": 614 + }, + { + "epoch": 0.27849319333201256, + "grad_norm": 0.6774716973304749, + "learning_rate": 1.4692445134345981e-05, + "loss": 0.4961, + "step": 615 + }, + { + "epoch": 0.27894602779271505, + "grad_norm": 0.8179674744606018, + "learning_rate": 1.4691434619143159e-05, + "loss": 0.4869, + "step": 616 + }, + { + "epoch": 0.2793988622534175, + "grad_norm": 0.6929678320884705, + "learning_rate": 1.4690422481432164e-05, + "loss": 0.5274, + "step": 617 + }, + { + "epoch": 0.27985169671411997, + "grad_norm": 0.7661529779434204, + "learning_rate": 1.4689408721441357e-05, + "loss": 0.4941, + "step": 618 + }, + { + "epoch": 0.2803045311748224, + "grad_norm": 0.7640644907951355, + "learning_rate": 1.4688393339399453e-05, + "loss": 0.5352, + "step": 619 + }, + { + "epoch": 0.28075736563552484, + "grad_norm": 1.1076126098632812, + "learning_rate": 1.4687376335535543e-05, + "loss": 0.5875, + "step": 620 + }, + { + "epoch": 0.2812102000962273, + "grad_norm": 0.7371631860733032, + "learning_rate": 1.468635771007908e-05, + "loss": 0.524, + "step": 621 + }, + { + "epoch": 0.28166303455692976, + "grad_norm": 0.7571913003921509, + "learning_rate": 1.468533746325988e-05, + "loss": 0.5778, + "step": 622 + }, + { + "epoch": 0.28211586901763225, + "grad_norm": 0.7033771276473999, + "learning_rate": 1.4684315595308132e-05, + "loss": 0.5334, + "step": 623 + }, + { + "epoch": 0.2825687034783347, + "grad_norm": 1.0408718585968018, + "learning_rate": 1.4683292106454383e-05, + "loss": 0.5654, + "step": 624 + }, + { + "epoch": 0.28302153793903717, + "grad_norm": 0.7843034267425537, + "learning_rate": 1.4682266996929551e-05, + "loss": 0.5262, + "step": 625 + }, + { + "epoch": 0.2834743723997396, + "grad_norm": 0.8728121519088745, + "learning_rate": 1.4681240266964919e-05, + "loss": 0.51, + "step": 626 + }, + { + "epoch": 0.2839272068604421, + "grad_norm": 0.640317976474762, + "learning_rate": 1.4680211916792133e-05, + "loss": 0.5431, + "step": 627 + }, + { + "epoch": 0.2843800413211445, + "grad_norm": 0.9380479454994202, + "learning_rate": 1.4679181946643205e-05, + "loss": 0.539, + "step": 628 + }, + { + "epoch": 0.284832875781847, + "grad_norm": 0.7024577856063843, + "learning_rate": 1.4678150356750514e-05, + "loss": 0.4875, + "step": 629 + }, + { + "epoch": 0.28528571024254945, + "grad_norm": 0.7149611711502075, + "learning_rate": 1.4677117147346806e-05, + "loss": 0.4813, + "step": 630 + }, + { + "epoch": 0.28573854470325194, + "grad_norm": 0.6758112907409668, + "learning_rate": 1.4676082318665188e-05, + "loss": 0.5813, + "step": 631 + }, + { + "epoch": 0.2861913791639544, + "grad_norm": 0.733073890209198, + "learning_rate": 1.4675045870939137e-05, + "loss": 0.5057, + "step": 632 + }, + { + "epoch": 0.28664421362465686, + 
"grad_norm": 0.9430336356163025, + "learning_rate": 1.467400780440249e-05, + "loss": 0.5309, + "step": 633 + }, + { + "epoch": 0.2870970480853593, + "grad_norm": 0.779950737953186, + "learning_rate": 1.4672968119289455e-05, + "loss": 0.5352, + "step": 634 + }, + { + "epoch": 0.28754988254606173, + "grad_norm": 0.804973840713501, + "learning_rate": 1.4671926815834602e-05, + "loss": 0.5636, + "step": 635 + }, + { + "epoch": 0.2880027170067642, + "grad_norm": 0.6549612283706665, + "learning_rate": 1.4670883894272865e-05, + "loss": 0.5356, + "step": 636 + }, + { + "epoch": 0.28845555146746665, + "grad_norm": 0.7596065998077393, + "learning_rate": 1.4669839354839545e-05, + "loss": 0.5719, + "step": 637 + }, + { + "epoch": 0.28890838592816914, + "grad_norm": 0.8659469485282898, + "learning_rate": 1.4668793197770308e-05, + "loss": 0.5028, + "step": 638 + }, + { + "epoch": 0.2893612203888716, + "grad_norm": 0.8582921028137207, + "learning_rate": 1.4667745423301185e-05, + "loss": 0.509, + "step": 639 + }, + { + "epoch": 0.28981405484957407, + "grad_norm": 0.751609206199646, + "learning_rate": 1.466669603166857e-05, + "loss": 0.5419, + "step": 640 + }, + { + "epoch": 0.2902668893102765, + "grad_norm": 1.0048867464065552, + "learning_rate": 1.4665645023109226e-05, + "loss": 0.5745, + "step": 641 + }, + { + "epoch": 0.290719723770979, + "grad_norm": 0.7976627945899963, + "learning_rate": 1.4664592397860274e-05, + "loss": 0.4956, + "step": 642 + }, + { + "epoch": 0.2911725582316814, + "grad_norm": 0.7978916168212891, + "learning_rate": 1.4663538156159208e-05, + "loss": 0.5252, + "step": 643 + }, + { + "epoch": 0.2916253926923839, + "grad_norm": 0.824474573135376, + "learning_rate": 1.466248229824388e-05, + "loss": 0.5338, + "step": 644 + }, + { + "epoch": 0.29207822715308634, + "grad_norm": 0.7580894827842712, + "learning_rate": 1.4661424824352512e-05, + "loss": 0.5074, + "step": 645 + }, + { + "epoch": 0.29253106161378883, + "grad_norm": 0.7472822070121765, + "learning_rate": 1.4660365734723684e-05, + "loss": 0.4755, + "step": 646 + }, + { + "epoch": 0.29298389607449127, + "grad_norm": 0.7068562507629395, + "learning_rate": 1.465930502959635e-05, + "loss": 0.5129, + "step": 647 + }, + { + "epoch": 0.2934367305351937, + "grad_norm": 1.2026171684265137, + "learning_rate": 1.4658242709209815e-05, + "loss": 0.538, + "step": 648 + }, + { + "epoch": 0.2938895649958962, + "grad_norm": 0.7542515397071838, + "learning_rate": 1.4657178773803761e-05, + "loss": 0.5344, + "step": 649 + }, + { + "epoch": 0.2943423994565986, + "grad_norm": 1.083905816078186, + "learning_rate": 1.4656113223618232e-05, + "loss": 0.5295, + "step": 650 + }, + { + "epoch": 0.2947952339173011, + "grad_norm": 0.7265732884407043, + "learning_rate": 1.4655046058893629e-05, + "loss": 0.5152, + "step": 651 + }, + { + "epoch": 0.29524806837800355, + "grad_norm": 0.8517547249794006, + "learning_rate": 1.4653977279870725e-05, + "loss": 0.5316, + "step": 652 + }, + { + "epoch": 0.29570090283870604, + "grad_norm": 0.7927708625793457, + "learning_rate": 1.4652906886790654e-05, + "loss": 0.5193, + "step": 653 + }, + { + "epoch": 0.29615373729940847, + "grad_norm": 0.661551296710968, + "learning_rate": 1.4651834879894914e-05, + "loss": 0.5148, + "step": 654 + }, + { + "epoch": 0.29660657176011096, + "grad_norm": 0.7670228481292725, + "learning_rate": 1.4650761259425369e-05, + "loss": 0.5274, + "step": 655 + }, + { + "epoch": 0.2970594062208134, + "grad_norm": 0.7227180004119873, + "learning_rate": 1.4649686025624244e-05, + "loss": 0.5116, + "step": 
656 + }, + { + "epoch": 0.2975122406815159, + "grad_norm": 0.8273769617080688, + "learning_rate": 1.4648609178734132e-05, + "loss": 0.4723, + "step": 657 + }, + { + "epoch": 0.2979650751422183, + "grad_norm": 0.6958010196685791, + "learning_rate": 1.4647530718997987e-05, + "loss": 0.5077, + "step": 658 + }, + { + "epoch": 0.2984179096029208, + "grad_norm": 0.8402749300003052, + "learning_rate": 1.4646450646659126e-05, + "loss": 0.5319, + "step": 659 + }, + { + "epoch": 0.29887074406362324, + "grad_norm": 0.7076095938682556, + "learning_rate": 1.4645368961961232e-05, + "loss": 0.5306, + "step": 660 + }, + { + "epoch": 0.29932357852432573, + "grad_norm": 0.8241704702377319, + "learning_rate": 1.4644285665148352e-05, + "loss": 0.5652, + "step": 661 + }, + { + "epoch": 0.29977641298502816, + "grad_norm": 0.6605017781257629, + "learning_rate": 1.4643200756464895e-05, + "loss": 0.4739, + "step": 662 + }, + { + "epoch": 0.3002292474457306, + "grad_norm": 0.7162265181541443, + "learning_rate": 1.4642114236155635e-05, + "loss": 0.5339, + "step": 663 + }, + { + "epoch": 0.3006820819064331, + "grad_norm": 0.920584499835968, + "learning_rate": 1.464102610446571e-05, + "loss": 0.477, + "step": 664 + }, + { + "epoch": 0.3011349163671355, + "grad_norm": 1.0123887062072754, + "learning_rate": 1.463993636164062e-05, + "loss": 0.5005, + "step": 665 + }, + { + "epoch": 0.301587750827838, + "grad_norm": 0.8752802610397339, + "learning_rate": 1.4638845007926232e-05, + "loss": 0.5035, + "step": 666 + }, + { + "epoch": 0.30204058528854044, + "grad_norm": 0.9289608001708984, + "learning_rate": 1.4637752043568767e-05, + "loss": 0.4911, + "step": 667 + }, + { + "epoch": 0.30249341974924293, + "grad_norm": 0.8423551321029663, + "learning_rate": 1.463665746881482e-05, + "loss": 0.5333, + "step": 668 + }, + { + "epoch": 0.30294625420994536, + "grad_norm": 1.0525884628295898, + "learning_rate": 1.463556128391135e-05, + "loss": 0.5097, + "step": 669 + }, + { + "epoch": 0.30339908867064785, + "grad_norm": 0.909200131893158, + "learning_rate": 1.4634463489105666e-05, + "loss": 0.5584, + "step": 670 + }, + { + "epoch": 0.3038519231313503, + "grad_norm": 1.0725759267807007, + "learning_rate": 1.4633364084645457e-05, + "loss": 0.5722, + "step": 671 + }, + { + "epoch": 0.3043047575920528, + "grad_norm": 0.7647678256034851, + "learning_rate": 1.4632263070778758e-05, + "loss": 0.5064, + "step": 672 + }, + { + "epoch": 0.3047575920527552, + "grad_norm": 1.1567573547363281, + "learning_rate": 1.4631160447753985e-05, + "loss": 0.5108, + "step": 673 + }, + { + "epoch": 0.3052104265134577, + "grad_norm": 1.012374758720398, + "learning_rate": 1.4630056215819904e-05, + "loss": 0.5589, + "step": 674 + }, + { + "epoch": 0.30566326097416013, + "grad_norm": 1.4147967100143433, + "learning_rate": 1.4628950375225648e-05, + "loss": 0.5355, + "step": 675 + }, + { + "epoch": 0.30611609543486257, + "grad_norm": 0.9129096269607544, + "learning_rate": 1.4627842926220715e-05, + "loss": 0.5529, + "step": 676 + }, + { + "epoch": 0.30656892989556506, + "grad_norm": 0.9252450466156006, + "learning_rate": 1.4626733869054964e-05, + "loss": 0.4604, + "step": 677 + }, + { + "epoch": 0.3070217643562675, + "grad_norm": 1.167197585105896, + "learning_rate": 1.4625623203978614e-05, + "loss": 0.5238, + "step": 678 + }, + { + "epoch": 0.30747459881697, + "grad_norm": 0.672999918460846, + "learning_rate": 1.4624510931242252e-05, + "loss": 0.4724, + "step": 679 + }, + { + "epoch": 0.3079274332776724, + "grad_norm": 0.8699397444725037, + "learning_rate": 
1.4623397051096825e-05, + "loss": 0.5158, + "step": 680 + }, + { + "epoch": 0.3083802677383749, + "grad_norm": 0.7273291945457458, + "learning_rate": 1.4622281563793643e-05, + "loss": 0.5246, + "step": 681 + }, + { + "epoch": 0.30883310219907734, + "grad_norm": 0.9489529728889465, + "learning_rate": 1.4621164469584378e-05, + "loss": 0.5135, + "step": 682 + }, + { + "epoch": 0.3092859366597798, + "grad_norm": 0.786284863948822, + "learning_rate": 1.4620045768721065e-05, + "loss": 0.4938, + "step": 683 + }, + { + "epoch": 0.30973877112048226, + "grad_norm": 0.894683837890625, + "learning_rate": 1.4618925461456106e-05, + "loss": 0.5306, + "step": 684 + }, + { + "epoch": 0.31019160558118475, + "grad_norm": 0.9071069955825806, + "learning_rate": 1.4617803548042254e-05, + "loss": 0.5439, + "step": 685 + }, + { + "epoch": 0.3106444400418872, + "grad_norm": 0.8423410654067993, + "learning_rate": 1.4616680028732637e-05, + "loss": 0.4799, + "step": 686 + }, + { + "epoch": 0.31109727450258967, + "grad_norm": 0.7595556378364563, + "learning_rate": 1.4615554903780738e-05, + "loss": 0.4923, + "step": 687 + }, + { + "epoch": 0.3115501089632921, + "grad_norm": 0.7766554355621338, + "learning_rate": 1.4614428173440403e-05, + "loss": 0.5725, + "step": 688 + }, + { + "epoch": 0.31200294342399454, + "grad_norm": 0.8758528828620911, + "learning_rate": 1.4613299837965843e-05, + "loss": 0.5016, + "step": 689 + }, + { + "epoch": 0.31245577788469703, + "grad_norm": 0.8516387343406677, + "learning_rate": 1.4612169897611626e-05, + "loss": 0.511, + "step": 690 + }, + { + "epoch": 0.31290861234539946, + "grad_norm": 0.7681635022163391, + "learning_rate": 1.461103835263269e-05, + "loss": 0.4992, + "step": 691 + }, + { + "epoch": 0.31336144680610195, + "grad_norm": 0.7421332597732544, + "learning_rate": 1.4609905203284329e-05, + "loss": 0.4717, + "step": 692 + }, + { + "epoch": 0.3138142812668044, + "grad_norm": 0.7615931630134583, + "learning_rate": 1.46087704498222e-05, + "loss": 0.505, + "step": 693 + }, + { + "epoch": 0.3142671157275069, + "grad_norm": 0.7673407793045044, + "learning_rate": 1.460763409250232e-05, + "loss": 0.4748, + "step": 694 + }, + { + "epoch": 0.3147199501882093, + "grad_norm": 0.8670549988746643, + "learning_rate": 1.4606496131581075e-05, + "loss": 0.496, + "step": 695 + }, + { + "epoch": 0.3151727846489118, + "grad_norm": 0.8095021843910217, + "learning_rate": 1.4605356567315204e-05, + "loss": 0.4385, + "step": 696 + }, + { + "epoch": 0.31562561910961423, + "grad_norm": 0.7151927947998047, + "learning_rate": 1.4604215399961815e-05, + "loss": 0.5262, + "step": 697 + }, + { + "epoch": 0.3160784535703167, + "grad_norm": 1.0873347520828247, + "learning_rate": 1.4603072629778372e-05, + "loss": 0.444, + "step": 698 + }, + { + "epoch": 0.31653128803101915, + "grad_norm": 0.8482450246810913, + "learning_rate": 1.4601928257022705e-05, + "loss": 0.5739, + "step": 699 + }, + { + "epoch": 0.31698412249172164, + "grad_norm": 0.7151737213134766, + "learning_rate": 1.4600782281953e-05, + "loss": 0.4712, + "step": 700 + }, + { + "epoch": 0.3174369569524241, + "grad_norm": 0.685892641544342, + "learning_rate": 1.4599634704827813e-05, + "loss": 0.4681, + "step": 701 + }, + { + "epoch": 0.31788979141312657, + "grad_norm": 0.8122037649154663, + "learning_rate": 1.4598485525906055e-05, + "loss": 0.4768, + "step": 702 + }, + { + "epoch": 0.318342625873829, + "grad_norm": 0.6169959902763367, + "learning_rate": 1.4597334745447e-05, + "loss": 0.4869, + "step": 703 + }, + { + "epoch": 0.31879546033453143, + 
"grad_norm": 0.7096338272094727, + "learning_rate": 1.4596182363710281e-05, + "loss": 0.4805, + "step": 704 + }, + { + "epoch": 0.3192482947952339, + "grad_norm": 0.6837482452392578, + "learning_rate": 1.4595028380955897e-05, + "loss": 0.5615, + "step": 705 + }, + { + "epoch": 0.31970112925593636, + "grad_norm": 0.7366883158683777, + "learning_rate": 1.4593872797444206e-05, + "loss": 0.4706, + "step": 706 + }, + { + "epoch": 0.32015396371663885, + "grad_norm": 0.8051954507827759, + "learning_rate": 1.4592715613435927e-05, + "loss": 0.4847, + "step": 707 + }, + { + "epoch": 0.3206067981773413, + "grad_norm": 0.8133533596992493, + "learning_rate": 1.4591556829192137e-05, + "loss": 0.4954, + "step": 708 + }, + { + "epoch": 0.32105963263804377, + "grad_norm": 0.7073555588722229, + "learning_rate": 1.4590396444974284e-05, + "loss": 0.4308, + "step": 709 + }, + { + "epoch": 0.3215124670987462, + "grad_norm": 0.8296173810958862, + "learning_rate": 1.4589234461044162e-05, + "loss": 0.5252, + "step": 710 + }, + { + "epoch": 0.3219653015594487, + "grad_norm": 0.7163283824920654, + "learning_rate": 1.458807087766394e-05, + "loss": 0.5206, + "step": 711 + }, + { + "epoch": 0.3224181360201511, + "grad_norm": 0.8291743993759155, + "learning_rate": 1.4586905695096137e-05, + "loss": 0.4864, + "step": 712 + }, + { + "epoch": 0.3228709704808536, + "grad_norm": 0.7736324667930603, + "learning_rate": 1.4585738913603644e-05, + "loss": 0.5039, + "step": 713 + }, + { + "epoch": 0.32332380494155605, + "grad_norm": 0.7839064002037048, + "learning_rate": 1.45845705334497e-05, + "loss": 0.4585, + "step": 714 + }, + { + "epoch": 0.32377663940225854, + "grad_norm": 0.782141923904419, + "learning_rate": 1.4583400554897916e-05, + "loss": 0.5271, + "step": 715 + }, + { + "epoch": 0.32422947386296097, + "grad_norm": 0.7646268010139465, + "learning_rate": 1.4582228978212256e-05, + "loss": 0.5123, + "step": 716 + }, + { + "epoch": 0.3246823083236634, + "grad_norm": 0.8933156728744507, + "learning_rate": 1.458105580365705e-05, + "loss": 0.4291, + "step": 717 + }, + { + "epoch": 0.3251351427843659, + "grad_norm": 0.796796441078186, + "learning_rate": 1.4579881031496982e-05, + "loss": 0.5057, + "step": 718 + }, + { + "epoch": 0.3255879772450683, + "grad_norm": 0.8032305836677551, + "learning_rate": 1.45787046619971e-05, + "loss": 0.5003, + "step": 719 + }, + { + "epoch": 0.3260408117057708, + "grad_norm": 0.788154125213623, + "learning_rate": 1.457752669542282e-05, + "loss": 0.4792, + "step": 720 + }, + { + "epoch": 0.32649364616647325, + "grad_norm": 0.75339674949646, + "learning_rate": 1.4576347132039902e-05, + "loss": 0.4287, + "step": 721 + }, + { + "epoch": 0.32694648062717574, + "grad_norm": 1.0328612327575684, + "learning_rate": 1.457516597211448e-05, + "loss": 0.5249, + "step": 722 + }, + { + "epoch": 0.3273993150878782, + "grad_norm": 1.01662278175354, + "learning_rate": 1.4573983215913043e-05, + "loss": 0.5471, + "step": 723 + }, + { + "epoch": 0.32785214954858066, + "grad_norm": 0.8054636120796204, + "learning_rate": 1.4572798863702442e-05, + "loss": 0.4786, + "step": 724 + }, + { + "epoch": 0.3283049840092831, + "grad_norm": 0.8108761310577393, + "learning_rate": 1.4571612915749885e-05, + "loss": 0.4995, + "step": 725 + }, + { + "epoch": 0.3287578184699856, + "grad_norm": 1.0297499895095825, + "learning_rate": 1.457042537232294e-05, + "loss": 0.5023, + "step": 726 + }, + { + "epoch": 0.329210652930688, + "grad_norm": 0.8287637829780579, + "learning_rate": 1.4569236233689537e-05, + "loss": 0.4844, + "step": 727 + }, 
+ { + "epoch": 0.3296634873913905, + "grad_norm": 0.8962579965591431, + "learning_rate": 1.456804550011797e-05, + "loss": 0.4787, + "step": 728 + }, + { + "epoch": 0.33011632185209294, + "grad_norm": 0.7159314751625061, + "learning_rate": 1.4566853171876886e-05, + "loss": 0.4616, + "step": 729 + }, + { + "epoch": 0.33056915631279543, + "grad_norm": 0.9780300855636597, + "learning_rate": 1.456565924923529e-05, + "loss": 0.4748, + "step": 730 + }, + { + "epoch": 0.33102199077349787, + "grad_norm": 0.8780568242073059, + "learning_rate": 1.4564463732462559e-05, + "loss": 0.5119, + "step": 731 + }, + { + "epoch": 0.3314748252342003, + "grad_norm": 0.7616591453552246, + "learning_rate": 1.4563266621828413e-05, + "loss": 0.4763, + "step": 732 + }, + { + "epoch": 0.3319276596949028, + "grad_norm": 0.913154661655426, + "learning_rate": 1.4562067917602947e-05, + "loss": 0.5038, + "step": 733 + }, + { + "epoch": 0.3323804941556052, + "grad_norm": 0.7419286370277405, + "learning_rate": 1.4560867620056608e-05, + "loss": 0.5218, + "step": 734 + }, + { + "epoch": 0.3328333286163077, + "grad_norm": 0.759284496307373, + "learning_rate": 1.4559665729460198e-05, + "loss": 0.4869, + "step": 735 + }, + { + "epoch": 0.33328616307701014, + "grad_norm": 0.885164201259613, + "learning_rate": 1.455846224608489e-05, + "loss": 0.5, + "step": 736 + }, + { + "epoch": 0.33373899753771263, + "grad_norm": 0.7694923281669617, + "learning_rate": 1.4557257170202207e-05, + "loss": 0.4784, + "step": 737 + }, + { + "epoch": 0.33419183199841507, + "grad_norm": 0.7753260731697083, + "learning_rate": 1.4556050502084034e-05, + "loss": 0.4699, + "step": 738 + }, + { + "epoch": 0.33464466645911756, + "grad_norm": 0.6947426795959473, + "learning_rate": 1.4554842242002614e-05, + "loss": 0.4374, + "step": 739 + }, + { + "epoch": 0.33509750091982, + "grad_norm": 0.796204686164856, + "learning_rate": 1.4553632390230554e-05, + "loss": 0.5163, + "step": 740 + }, + { + "epoch": 0.3355503353805225, + "grad_norm": 0.7171505093574524, + "learning_rate": 1.4552420947040817e-05, + "loss": 0.484, + "step": 741 + }, + { + "epoch": 0.3360031698412249, + "grad_norm": 0.7220250964164734, + "learning_rate": 1.4551207912706725e-05, + "loss": 0.4531, + "step": 742 + }, + { + "epoch": 0.3364560043019274, + "grad_norm": 0.7847347855567932, + "learning_rate": 1.4549993287501955e-05, + "loss": 0.5114, + "step": 743 + }, + { + "epoch": 0.33690883876262984, + "grad_norm": 0.7722388505935669, + "learning_rate": 1.4548777071700549e-05, + "loss": 0.4938, + "step": 744 + }, + { + "epoch": 0.33736167322333227, + "grad_norm": 0.7462444305419922, + "learning_rate": 1.4547559265576907e-05, + "loss": 0.4641, + "step": 745 + }, + { + "epoch": 0.33781450768403476, + "grad_norm": 0.8149163722991943, + "learning_rate": 1.4546339869405783e-05, + "loss": 0.4787, + "step": 746 + }, + { + "epoch": 0.3382673421447372, + "grad_norm": 0.8563286066055298, + "learning_rate": 1.4545118883462298e-05, + "loss": 0.4962, + "step": 747 + }, + { + "epoch": 0.3387201766054397, + "grad_norm": 0.7843456268310547, + "learning_rate": 1.4543896308021924e-05, + "loss": 0.4664, + "step": 748 + }, + { + "epoch": 0.3391730110661421, + "grad_norm": 0.7675468325614929, + "learning_rate": 1.4542672143360493e-05, + "loss": 0.4841, + "step": 749 + }, + { + "epoch": 0.3396258455268446, + "grad_norm": 0.8849632740020752, + "learning_rate": 1.4541446389754201e-05, + "loss": 0.5167, + "step": 750 + }, + { + "epoch": 0.34007867998754704, + "grad_norm": 0.778249979019165, + "learning_rate": 
1.4540219047479597e-05, + "loss": 0.4565, + "step": 751 + }, + { + "epoch": 0.34053151444824953, + "grad_norm": 0.7831906080245972, + "learning_rate": 1.4538990116813589e-05, + "loss": 0.4558, + "step": 752 + }, + { + "epoch": 0.34098434890895196, + "grad_norm": 0.777973473072052, + "learning_rate": 1.4537759598033446e-05, + "loss": 0.4822, + "step": 753 + }, + { + "epoch": 0.34143718336965445, + "grad_norm": 0.6689452528953552, + "learning_rate": 1.453652749141679e-05, + "loss": 0.4402, + "step": 754 + }, + { + "epoch": 0.3418900178303569, + "grad_norm": 0.6991743445396423, + "learning_rate": 1.453529379724161e-05, + "loss": 0.4195, + "step": 755 + }, + { + "epoch": 0.3423428522910594, + "grad_norm": 0.8085342049598694, + "learning_rate": 1.4534058515786245e-05, + "loss": 0.4789, + "step": 756 + }, + { + "epoch": 0.3427956867517618, + "grad_norm": 0.7133055329322815, + "learning_rate": 1.4532821647329397e-05, + "loss": 0.4706, + "step": 757 + }, + { + "epoch": 0.34324852121246424, + "grad_norm": 0.7966130375862122, + "learning_rate": 1.4531583192150119e-05, + "loss": 0.4782, + "step": 758 + }, + { + "epoch": 0.34370135567316673, + "grad_norm": 0.7512375712394714, + "learning_rate": 1.4530343150527836e-05, + "loss": 0.4674, + "step": 759 + }, + { + "epoch": 0.34415419013386916, + "grad_norm": 0.9407236576080322, + "learning_rate": 1.4529101522742311e-05, + "loss": 0.4598, + "step": 760 + }, + { + "epoch": 0.34460702459457165, + "grad_norm": 0.8138443231582642, + "learning_rate": 1.4527858309073688e-05, + "loss": 0.4619, + "step": 761 + }, + { + "epoch": 0.3450598590552741, + "grad_norm": 0.6962406039237976, + "learning_rate": 1.452661350980245e-05, + "loss": 0.4936, + "step": 762 + }, + { + "epoch": 0.3455126935159766, + "grad_norm": 0.7135865092277527, + "learning_rate": 1.4525367125209445e-05, + "loss": 0.4329, + "step": 763 + }, + { + "epoch": 0.345965527976679, + "grad_norm": 0.8204096555709839, + "learning_rate": 1.452411915557588e-05, + "loss": 0.4502, + "step": 764 + }, + { + "epoch": 0.3464183624373815, + "grad_norm": 0.7781338691711426, + "learning_rate": 1.4522869601183318e-05, + "loss": 0.4148, + "step": 765 + }, + { + "epoch": 0.34687119689808393, + "grad_norm": 0.7633736729621887, + "learning_rate": 1.4521618462313679e-05, + "loss": 0.4297, + "step": 766 + }, + { + "epoch": 0.3473240313587864, + "grad_norm": 0.904326856136322, + "learning_rate": 1.4520365739249236e-05, + "loss": 0.4972, + "step": 767 + }, + { + "epoch": 0.34777686581948886, + "grad_norm": 0.7865485548973083, + "learning_rate": 1.4519111432272632e-05, + "loss": 0.4718, + "step": 768 + }, + { + "epoch": 0.34822970028019135, + "grad_norm": 0.9248954057693481, + "learning_rate": 1.4517855541666855e-05, + "loss": 0.4736, + "step": 769 + }, + { + "epoch": 0.3486825347408938, + "grad_norm": 0.8533254861831665, + "learning_rate": 1.4516598067715257e-05, + "loss": 0.4452, + "step": 770 + }, + { + "epoch": 0.34913536920159627, + "grad_norm": 0.9142417311668396, + "learning_rate": 1.4515339010701544e-05, + "loss": 0.3997, + "step": 771 + }, + { + "epoch": 0.3495882036622987, + "grad_norm": 0.9811474680900574, + "learning_rate": 1.4514078370909782e-05, + "loss": 0.4839, + "step": 772 + }, + { + "epoch": 0.35004103812300114, + "grad_norm": 0.8523197770118713, + "learning_rate": 1.451281614862439e-05, + "loss": 0.5039, + "step": 773 + }, + { + "epoch": 0.3504938725837036, + "grad_norm": 0.9271774888038635, + "learning_rate": 1.4511552344130147e-05, + "loss": 0.526, + "step": 774 + }, + { + "epoch": 0.35094670704440606, + 
"grad_norm": 0.7626346349716187, + "learning_rate": 1.4510286957712192e-05, + "loss": 0.4031, + "step": 775 + }, + { + "epoch": 0.35139954150510855, + "grad_norm": 0.9117299318313599, + "learning_rate": 1.4509019989656013e-05, + "loss": 0.4756, + "step": 776 + }, + { + "epoch": 0.351852375965811, + "grad_norm": 0.7210586071014404, + "learning_rate": 1.4507751440247461e-05, + "loss": 0.4667, + "step": 777 + }, + { + "epoch": 0.35230521042651347, + "grad_norm": 0.7991892695426941, + "learning_rate": 1.4506481309772743e-05, + "loss": 0.4789, + "step": 778 + }, + { + "epoch": 0.3527580448872159, + "grad_norm": 0.7282071113586426, + "learning_rate": 1.4505209598518418e-05, + "loss": 0.4288, + "step": 779 + }, + { + "epoch": 0.3532108793479184, + "grad_norm": 0.8729153275489807, + "learning_rate": 1.450393630677141e-05, + "loss": 0.4637, + "step": 780 + }, + { + "epoch": 0.3536637138086208, + "grad_norm": 0.8181951642036438, + "learning_rate": 1.4502661434818992e-05, + "loss": 0.4957, + "step": 781 + }, + { + "epoch": 0.3541165482693233, + "grad_norm": 0.8652138710021973, + "learning_rate": 1.4501384982948799e-05, + "loss": 0.4715, + "step": 782 + }, + { + "epoch": 0.35456938273002575, + "grad_norm": 0.9875885248184204, + "learning_rate": 1.4500106951448816e-05, + "loss": 0.4996, + "step": 783 + }, + { + "epoch": 0.35502221719072824, + "grad_norm": 0.7315845489501953, + "learning_rate": 1.4498827340607393e-05, + "loss": 0.46, + "step": 784 + }, + { + "epoch": 0.3554750516514307, + "grad_norm": 0.93685382604599, + "learning_rate": 1.4497546150713227e-05, + "loss": 0.4265, + "step": 785 + }, + { + "epoch": 0.3559278861121331, + "grad_norm": 0.7243474125862122, + "learning_rate": 1.449626338205538e-05, + "loss": 0.4844, + "step": 786 + }, + { + "epoch": 0.3563807205728356, + "grad_norm": 0.8693944215774536, + "learning_rate": 1.4494979034923265e-05, + "loss": 0.5018, + "step": 787 + }, + { + "epoch": 0.35683355503353803, + "grad_norm": 0.7501546144485474, + "learning_rate": 1.4493693109606649e-05, + "loss": 0.4865, + "step": 788 + }, + { + "epoch": 0.3572863894942405, + "grad_norm": 0.8437265157699585, + "learning_rate": 1.4492405606395663e-05, + "loss": 0.495, + "step": 789 + }, + { + "epoch": 0.35773922395494295, + "grad_norm": 0.782289981842041, + "learning_rate": 1.4491116525580786e-05, + "loss": 0.4567, + "step": 790 + }, + { + "epoch": 0.35819205841564544, + "grad_norm": 0.755082368850708, + "learning_rate": 1.4489825867452858e-05, + "loss": 0.4891, + "step": 791 + }, + { + "epoch": 0.3586448928763479, + "grad_norm": 0.7294925451278687, + "learning_rate": 1.4488533632303072e-05, + "loss": 0.4613, + "step": 792 + }, + { + "epoch": 0.35909772733705037, + "grad_norm": 0.8621608018875122, + "learning_rate": 1.448723982042298e-05, + "loss": 0.4479, + "step": 793 + }, + { + "epoch": 0.3595505617977528, + "grad_norm": 0.7976484298706055, + "learning_rate": 1.4485944432104483e-05, + "loss": 0.4942, + "step": 794 + }, + { + "epoch": 0.3600033962584553, + "grad_norm": 0.6837937235832214, + "learning_rate": 1.4484647467639848e-05, + "loss": 0.4596, + "step": 795 + }, + { + "epoch": 0.3604562307191577, + "grad_norm": 0.7584710717201233, + "learning_rate": 1.4483348927321688e-05, + "loss": 0.4767, + "step": 796 + }, + { + "epoch": 0.3609090651798602, + "grad_norm": 0.8236560225486755, + "learning_rate": 1.4482048811442978e-05, + "loss": 0.4923, + "step": 797 + }, + { + "epoch": 0.36136189964056264, + "grad_norm": 0.7171486020088196, + "learning_rate": 1.4480747120297043e-05, + "loss": 0.4381, + "step": 
798 + }, + { + "epoch": 0.3618147341012651, + "grad_norm": 0.7314976453781128, + "learning_rate": 1.447944385417757e-05, + "loss": 0.4573, + "step": 799 + }, + { + "epoch": 0.36226756856196757, + "grad_norm": 0.7224847674369812, + "learning_rate": 1.4478139013378597e-05, + "loss": 0.5023, + "step": 800 + }, + { + "epoch": 0.36272040302267, + "grad_norm": 0.7627956867218018, + "learning_rate": 1.4476832598194514e-05, + "loss": 0.4326, + "step": 801 + }, + { + "epoch": 0.3631732374833725, + "grad_norm": 0.8306934237480164, + "learning_rate": 1.4475524608920071e-05, + "loss": 0.5251, + "step": 802 + }, + { + "epoch": 0.3636260719440749, + "grad_norm": 0.8524587750434875, + "learning_rate": 1.4474215045850377e-05, + "loss": 0.4546, + "step": 803 + }, + { + "epoch": 0.3640789064047774, + "grad_norm": 0.6962935924530029, + "learning_rate": 1.4472903909280888e-05, + "loss": 0.4495, + "step": 804 + }, + { + "epoch": 0.36453174086547985, + "grad_norm": 0.847118616104126, + "learning_rate": 1.4471591199507417e-05, + "loss": 0.4508, + "step": 805 + }, + { + "epoch": 0.36498457532618234, + "grad_norm": 0.7857109308242798, + "learning_rate": 1.4470276916826138e-05, + "loss": 0.456, + "step": 806 + }, + { + "epoch": 0.36543740978688477, + "grad_norm": 0.9544152021408081, + "learning_rate": 1.446896106153357e-05, + "loss": 0.4344, + "step": 807 + }, + { + "epoch": 0.36589024424758726, + "grad_norm": 0.6988970041275024, + "learning_rate": 1.4467643633926598e-05, + "loss": 0.4755, + "step": 808 + }, + { + "epoch": 0.3663430787082897, + "grad_norm": 0.9396758675575256, + "learning_rate": 1.4466324634302448e-05, + "loss": 0.4444, + "step": 809 + }, + { + "epoch": 0.3667959131689922, + "grad_norm": 0.813913106918335, + "learning_rate": 1.4465004062958716e-05, + "loss": 0.4326, + "step": 810 + }, + { + "epoch": 0.3672487476296946, + "grad_norm": 0.8034855723381042, + "learning_rate": 1.4463681920193341e-05, + "loss": 0.4905, + "step": 811 + }, + { + "epoch": 0.3677015820903971, + "grad_norm": 0.7723264694213867, + "learning_rate": 1.446235820630462e-05, + "loss": 0.376, + "step": 812 + }, + { + "epoch": 0.36815441655109954, + "grad_norm": 0.8094017505645752, + "learning_rate": 1.4461032921591207e-05, + "loss": 0.5052, + "step": 813 + }, + { + "epoch": 0.368607251011802, + "grad_norm": 0.7081817984580994, + "learning_rate": 1.4459706066352105e-05, + "loss": 0.4714, + "step": 814 + }, + { + "epoch": 0.36906008547250446, + "grad_norm": 0.7871371507644653, + "learning_rate": 1.4458377640886682e-05, + "loss": 0.4692, + "step": 815 + }, + { + "epoch": 0.3695129199332069, + "grad_norm": 0.8632165789604187, + "learning_rate": 1.4457047645494645e-05, + "loss": 0.4475, + "step": 816 + }, + { + "epoch": 0.3699657543939094, + "grad_norm": 0.875834047794342, + "learning_rate": 1.4455716080476068e-05, + "loss": 0.4508, + "step": 817 + }, + { + "epoch": 0.3704185888546118, + "grad_norm": 0.7545289397239685, + "learning_rate": 1.4454382946131373e-05, + "loss": 0.4841, + "step": 818 + }, + { + "epoch": 0.3708714233153143, + "grad_norm": 0.8614468574523926, + "learning_rate": 1.4453048242761337e-05, + "loss": 0.477, + "step": 819 + }, + { + "epoch": 0.37132425777601674, + "grad_norm": 0.9642696380615234, + "learning_rate": 1.4451711970667094e-05, + "loss": 0.4879, + "step": 820 + }, + { + "epoch": 0.37177709223671923, + "grad_norm": 0.7030223608016968, + "learning_rate": 1.4450374130150124e-05, + "loss": 0.4212, + "step": 821 + }, + { + "epoch": 0.37222992669742166, + "grad_norm": 0.9804565906524658, + "learning_rate": 
1.4449034721512271e-05, + "loss": 0.4439, + "step": 822 + }, + { + "epoch": 0.37268276115812415, + "grad_norm": 0.732269287109375, + "learning_rate": 1.4447693745055728e-05, + "loss": 0.4869, + "step": 823 + }, + { + "epoch": 0.3731355956188266, + "grad_norm": 0.914057195186615, + "learning_rate": 1.444635120108304e-05, + "loss": 0.4113, + "step": 824 + }, + { + "epoch": 0.3735884300795291, + "grad_norm": 0.8915912508964539, + "learning_rate": 1.4445007089897108e-05, + "loss": 0.4566, + "step": 825 + }, + { + "epoch": 0.3740412645402315, + "grad_norm": 0.803848147392273, + "learning_rate": 1.4443661411801183e-05, + "loss": 0.4411, + "step": 826 + }, + { + "epoch": 0.37449409900093394, + "grad_norm": 0.8054463267326355, + "learning_rate": 1.4442314167098878e-05, + "loss": 0.4521, + "step": 827 + }, + { + "epoch": 0.37494693346163643, + "grad_norm": 0.6809094548225403, + "learning_rate": 1.444096535609415e-05, + "loss": 0.4083, + "step": 828 + }, + { + "epoch": 0.37494693346163643, + "eval_loss": 0.4509795606136322, + "eval_runtime": 19.4619, + "eval_samples_per_second": 5.858, + "eval_steps_per_second": 0.771, + "step": 828 + }, + { + "epoch": 0.37539976792233887, + "grad_norm": 0.759226381778717, + "learning_rate": 1.4439614979091316e-05, + "loss": 0.3904, + "step": 829 + }, + { + "epoch": 0.37585260238304136, + "grad_norm": 0.7871491312980652, + "learning_rate": 1.4438263036395045e-05, + "loss": 0.4172, + "step": 830 + }, + { + "epoch": 0.3763054368437438, + "grad_norm": 0.6610317230224609, + "learning_rate": 1.4436909528310352e-05, + "loss": 0.4328, + "step": 831 + }, + { + "epoch": 0.3767582713044463, + "grad_norm": 0.7470182776451111, + "learning_rate": 1.4435554455142614e-05, + "loss": 0.4792, + "step": 832 + }, + { + "epoch": 0.3772111057651487, + "grad_norm": 0.7115527391433716, + "learning_rate": 1.4434197817197558e-05, + "loss": 0.4253, + "step": 833 + }, + { + "epoch": 0.3776639402258512, + "grad_norm": 0.8027327656745911, + "learning_rate": 1.4432839614781268e-05, + "loss": 0.4349, + "step": 834 + }, + { + "epoch": 0.37811677468655364, + "grad_norm": 0.6858651041984558, + "learning_rate": 1.4431479848200174e-05, + "loss": 0.464, + "step": 835 + }, + { + "epoch": 0.3785696091472561, + "grad_norm": 0.780019998550415, + "learning_rate": 1.443011851776106e-05, + "loss": 0.4301, + "step": 836 + }, + { + "epoch": 0.37902244360795856, + "grad_norm": 0.8875818252563477, + "learning_rate": 1.4428755623771068e-05, + "loss": 0.4638, + "step": 837 + }, + { + "epoch": 0.37947527806866105, + "grad_norm": 0.7595096230506897, + "learning_rate": 1.442739116653769e-05, + "loss": 0.4761, + "step": 838 + }, + { + "epoch": 0.3799281125293635, + "grad_norm": 0.7235725522041321, + "learning_rate": 1.4426025146368768e-05, + "loss": 0.4596, + "step": 839 + }, + { + "epoch": 0.38038094699006597, + "grad_norm": 0.8074544072151184, + "learning_rate": 1.4424657563572499e-05, + "loss": 0.5049, + "step": 840 + }, + { + "epoch": 0.3808337814507684, + "grad_norm": 0.784801185131073, + "learning_rate": 1.4423288418457437e-05, + "loss": 0.4387, + "step": 841 + }, + { + "epoch": 0.38128661591147084, + "grad_norm": 0.6974912285804749, + "learning_rate": 1.442191771133248e-05, + "loss": 0.4004, + "step": 842 + }, + { + "epoch": 0.38173945037217333, + "grad_norm": 0.6632540822029114, + "learning_rate": 1.4420545442506885e-05, + "loss": 0.4515, + "step": 843 + }, + { + "epoch": 0.38219228483287576, + "grad_norm": 0.7197034358978271, + "learning_rate": 1.4419171612290256e-05, + "loss": 0.4033, + "step": 844 + }, + { + 
"epoch": 0.38264511929357825, + "grad_norm": 0.6470990180969238, + "learning_rate": 1.4417796220992553e-05, + "loss": 0.4453, + "step": 845 + }, + { + "epoch": 0.3830979537542807, + "grad_norm": 0.7469446659088135, + "learning_rate": 1.441641926892409e-05, + "loss": 0.4657, + "step": 846 + }, + { + "epoch": 0.3835507882149832, + "grad_norm": 0.7464520335197449, + "learning_rate": 1.4415040756395527e-05, + "loss": 0.4376, + "step": 847 + }, + { + "epoch": 0.3840036226756856, + "grad_norm": 0.800579309463501, + "learning_rate": 1.441366068371788e-05, + "loss": 0.4434, + "step": 848 + }, + { + "epoch": 0.3844564571363881, + "grad_norm": 0.6948139071464539, + "learning_rate": 1.4412279051202518e-05, + "loss": 0.3879, + "step": 849 + }, + { + "epoch": 0.38490929159709053, + "grad_norm": 0.6828701496124268, + "learning_rate": 1.441089585916116e-05, + "loss": 0.4162, + "step": 850 + }, + { + "epoch": 0.385362126057793, + "grad_norm": 0.9038822054862976, + "learning_rate": 1.4409511107905878e-05, + "loss": 0.457, + "step": 851 + }, + { + "epoch": 0.38581496051849545, + "grad_norm": 0.7000515460968018, + "learning_rate": 1.4408124797749093e-05, + "loss": 0.4054, + "step": 852 + }, + { + "epoch": 0.38626779497919794, + "grad_norm": 0.8235939145088196, + "learning_rate": 1.4406736929003582e-05, + "loss": 0.4283, + "step": 853 + }, + { + "epoch": 0.3867206294399004, + "grad_norm": 0.824226975440979, + "learning_rate": 1.4405347501982473e-05, + "loss": 0.4499, + "step": 854 + }, + { + "epoch": 0.3871734639006028, + "grad_norm": 0.7194985151290894, + "learning_rate": 1.4403956516999235e-05, + "loss": 0.4278, + "step": 855 + }, + { + "epoch": 0.3876262983613053, + "grad_norm": 0.7407351732254028, + "learning_rate": 1.4402563974367709e-05, + "loss": 0.4495, + "step": 856 + }, + { + "epoch": 0.38807913282200773, + "grad_norm": 0.7273187041282654, + "learning_rate": 1.4401169874402071e-05, + "loss": 0.389, + "step": 857 + }, + { + "epoch": 0.3885319672827102, + "grad_norm": 0.6498844623565674, + "learning_rate": 1.4399774217416852e-05, + "loss": 0.3919, + "step": 858 + }, + { + "epoch": 0.38898480174341266, + "grad_norm": 0.7037122845649719, + "learning_rate": 1.4398377003726936e-05, + "loss": 0.4601, + "step": 859 + }, + { + "epoch": 0.38943763620411515, + "grad_norm": 0.7716974020004272, + "learning_rate": 1.4396978233647561e-05, + "loss": 0.4147, + "step": 860 + }, + { + "epoch": 0.3898904706648176, + "grad_norm": 0.7838717699050903, + "learning_rate": 1.439557790749431e-05, + "loss": 0.4185, + "step": 861 + }, + { + "epoch": 0.39034330512552007, + "grad_norm": 0.7486699819564819, + "learning_rate": 1.439417602558312e-05, + "loss": 0.4397, + "step": 862 + }, + { + "epoch": 0.3907961395862225, + "grad_norm": 0.7644836902618408, + "learning_rate": 1.4392772588230281e-05, + "loss": 0.4814, + "step": 863 + }, + { + "epoch": 0.391248974046925, + "grad_norm": 0.8007204532623291, + "learning_rate": 1.439136759575243e-05, + "loss": 0.47, + "step": 864 + }, + { + "epoch": 0.3917018085076274, + "grad_norm": 0.7783535122871399, + "learning_rate": 1.4389961048466557e-05, + "loss": 0.3774, + "step": 865 + }, + { + "epoch": 0.3921546429683299, + "grad_norm": 0.7233867645263672, + "learning_rate": 1.4388552946690004e-05, + "loss": 0.4344, + "step": 866 + }, + { + "epoch": 0.39260747742903235, + "grad_norm": 0.6840167045593262, + "learning_rate": 1.4387143290740462e-05, + "loss": 0.4511, + "step": 867 + }, + { + "epoch": 0.3930603118897348, + "grad_norm": 0.8994132280349731, + "learning_rate": 1.4385732080935974e-05, + 
"loss": 0.4716, + "step": 868 + }, + { + "epoch": 0.39351314635043727, + "grad_norm": 0.8595420122146606, + "learning_rate": 1.4384319317594926e-05, + "loss": 0.4388, + "step": 869 + }, + { + "epoch": 0.3939659808111397, + "grad_norm": 0.675515353679657, + "learning_rate": 1.4382905001036073e-05, + "loss": 0.4134, + "step": 870 + }, + { + "epoch": 0.3944188152718422, + "grad_norm": 1.7675050497055054, + "learning_rate": 1.4381489131578499e-05, + "loss": 0.4454, + "step": 871 + }, + { + "epoch": 0.3948716497325446, + "grad_norm": 1.0099380016326904, + "learning_rate": 1.438007170954165e-05, + "loss": 0.4738, + "step": 872 + }, + { + "epoch": 0.3953244841932471, + "grad_norm": 0.7462349534034729, + "learning_rate": 1.437865273524532e-05, + "loss": 0.3707, + "step": 873 + }, + { + "epoch": 0.39577731865394955, + "grad_norm": 0.7054492235183716, + "learning_rate": 1.4377232209009657e-05, + "loss": 0.4068, + "step": 874 + }, + { + "epoch": 0.39623015311465204, + "grad_norm": 0.7035594582557678, + "learning_rate": 1.4375810131155152e-05, + "loss": 0.4456, + "step": 875 + }, + { + "epoch": 0.3966829875753545, + "grad_norm": 0.691320538520813, + "learning_rate": 1.4374386502002649e-05, + "loss": 0.4188, + "step": 876 + }, + { + "epoch": 0.39713582203605696, + "grad_norm": 0.7369733452796936, + "learning_rate": 1.4372961321873344e-05, + "loss": 0.4572, + "step": 877 + }, + { + "epoch": 0.3975886564967594, + "grad_norm": 0.687791109085083, + "learning_rate": 1.4371534591088785e-05, + "loss": 0.4136, + "step": 878 + }, + { + "epoch": 0.3980414909574619, + "grad_norm": 0.7917645573616028, + "learning_rate": 1.437010630997086e-05, + "loss": 0.4474, + "step": 879 + }, + { + "epoch": 0.3984943254181643, + "grad_norm": 0.7191473245620728, + "learning_rate": 1.4368676478841818e-05, + "loss": 0.4608, + "step": 880 + }, + { + "epoch": 0.3989471598788668, + "grad_norm": 0.7102738618850708, + "learning_rate": 1.4367245098024248e-05, + "loss": 0.4014, + "step": 881 + }, + { + "epoch": 0.39939999433956924, + "grad_norm": 0.8336265683174133, + "learning_rate": 1.4365812167841096e-05, + "loss": 0.4597, + "step": 882 + }, + { + "epoch": 0.3998528288002717, + "grad_norm": 0.6486582159996033, + "learning_rate": 1.436437768861566e-05, + "loss": 0.4273, + "step": 883 + }, + { + "epoch": 0.40030566326097416, + "grad_norm": 0.779009222984314, + "learning_rate": 1.4362941660671574e-05, + "loss": 0.4285, + "step": 884 + }, + { + "epoch": 0.4007584977216766, + "grad_norm": 0.8129340410232544, + "learning_rate": 1.4361504084332835e-05, + "loss": 0.4729, + "step": 885 + }, + { + "epoch": 0.4012113321823791, + "grad_norm": 0.765630841255188, + "learning_rate": 1.4360064959923784e-05, + "loss": 0.4146, + "step": 886 + }, + { + "epoch": 0.4016641666430815, + "grad_norm": 0.7774229049682617, + "learning_rate": 1.435862428776911e-05, + "loss": 0.442, + "step": 887 + }, + { + "epoch": 0.402117001103784, + "grad_norm": 0.7265660166740417, + "learning_rate": 1.4357182068193856e-05, + "loss": 0.4161, + "step": 888 + }, + { + "epoch": 0.40256983556448644, + "grad_norm": 0.6984989643096924, + "learning_rate": 1.4355738301523405e-05, + "loss": 0.4508, + "step": 889 + }, + { + "epoch": 0.40302267002518893, + "grad_norm": 0.8146981596946716, + "learning_rate": 1.4354292988083503e-05, + "loss": 0.3812, + "step": 890 + }, + { + "epoch": 0.40347550448589137, + "grad_norm": 0.7826554775238037, + "learning_rate": 1.435284612820023e-05, + "loss": 0.4889, + "step": 891 + }, + { + "epoch": 0.40392833894659386, + "grad_norm": 0.7819204926490784, + 
"learning_rate": 1.4351397722200023e-05, + "loss": 0.4392, + "step": 892 + }, + { + "epoch": 0.4043811734072963, + "grad_norm": 0.851521909236908, + "learning_rate": 1.4349947770409669e-05, + "loss": 0.457, + "step": 893 + }, + { + "epoch": 0.4048340078679988, + "grad_norm": 0.751393735408783, + "learning_rate": 1.4348496273156302e-05, + "loss": 0.4016, + "step": 894 + }, + { + "epoch": 0.4052868423287012, + "grad_norm": 0.7438313961029053, + "learning_rate": 1.4347043230767402e-05, + "loss": 0.4529, + "step": 895 + }, + { + "epoch": 0.40573967678940365, + "grad_norm": 0.8485840559005737, + "learning_rate": 1.43455886435708e-05, + "loss": 0.4588, + "step": 896 + }, + { + "epoch": 0.40619251125010614, + "grad_norm": 0.6871113777160645, + "learning_rate": 1.4344132511894673e-05, + "loss": 0.4313, + "step": 897 + }, + { + "epoch": 0.40664534571080857, + "grad_norm": 0.7333969473838806, + "learning_rate": 1.4342674836067553e-05, + "loss": 0.4039, + "step": 898 + }, + { + "epoch": 0.40709818017151106, + "grad_norm": 0.7326341867446899, + "learning_rate": 1.4341215616418316e-05, + "loss": 0.4024, + "step": 899 + }, + { + "epoch": 0.4075510146322135, + "grad_norm": 0.754438042640686, + "learning_rate": 1.4339754853276182e-05, + "loss": 0.4381, + "step": 900 + }, + { + "epoch": 0.408003849092916, + "grad_norm": 0.7537654638290405, + "learning_rate": 1.4338292546970729e-05, + "loss": 0.3864, + "step": 901 + }, + { + "epoch": 0.4084566835536184, + "grad_norm": 0.7983335852622986, + "learning_rate": 1.4336828697831873e-05, + "loss": 0.465, + "step": 902 + }, + { + "epoch": 0.4089095180143209, + "grad_norm": 0.6547751426696777, + "learning_rate": 1.4335363306189887e-05, + "loss": 0.4017, + "step": 903 + }, + { + "epoch": 0.40936235247502334, + "grad_norm": 0.8735558390617371, + "learning_rate": 1.4333896372375383e-05, + "loss": 0.4062, + "step": 904 + }, + { + "epoch": 0.40981518693572583, + "grad_norm": 0.6709791421890259, + "learning_rate": 1.4332427896719331e-05, + "loss": 0.4209, + "step": 905 + }, + { + "epoch": 0.41026802139642826, + "grad_norm": 1.0297253131866455, + "learning_rate": 1.433095787955304e-05, + "loss": 0.4074, + "step": 906 + }, + { + "epoch": 0.41072085585713075, + "grad_norm": 0.6717653274536133, + "learning_rate": 1.4329486321208176e-05, + "loss": 0.4291, + "step": 907 + }, + { + "epoch": 0.4111736903178332, + "grad_norm": 0.789406418800354, + "learning_rate": 1.4328013222016739e-05, + "loss": 0.4289, + "step": 908 + }, + { + "epoch": 0.4116265247785357, + "grad_norm": 0.7695425152778625, + "learning_rate": 1.432653858231109e-05, + "loss": 0.3966, + "step": 909 + }, + { + "epoch": 0.4120793592392381, + "grad_norm": 0.8856887817382812, + "learning_rate": 1.4325062402423932e-05, + "loss": 0.3951, + "step": 910 + }, + { + "epoch": 0.41253219369994054, + "grad_norm": 0.8140108585357666, + "learning_rate": 1.4323584682688317e-05, + "loss": 0.4092, + "step": 911 + }, + { + "epoch": 0.41298502816064303, + "grad_norm": 0.8315860033035278, + "learning_rate": 1.4322105423437639e-05, + "loss": 0.4223, + "step": 912 + }, + { + "epoch": 0.41343786262134546, + "grad_norm": 0.7078157067298889, + "learning_rate": 1.4320624625005648e-05, + "loss": 0.4425, + "step": 913 + }, + { + "epoch": 0.41389069708204795, + "grad_norm": 0.740716814994812, + "learning_rate": 1.4319142287726436e-05, + "loss": 0.417, + "step": 914 + }, + { + "epoch": 0.4143435315427504, + "grad_norm": 0.7005321979522705, + "learning_rate": 1.4317658411934442e-05, + "loss": 0.3627, + "step": 915 + }, + { + "epoch": 
0.4147963660034529, + "grad_norm": 0.6697372794151306, + "learning_rate": 1.4316172997964452e-05, + "loss": 0.4012, + "step": 916 + }, + { + "epoch": 0.4152492004641553, + "grad_norm": 0.9033950567245483, + "learning_rate": 1.4314686046151605e-05, + "loss": 0.4569, + "step": 917 + }, + { + "epoch": 0.4157020349248578, + "grad_norm": 0.7096682786941528, + "learning_rate": 1.4313197556831378e-05, + "loss": 0.4487, + "step": 918 + }, + { + "epoch": 0.41615486938556023, + "grad_norm": 0.7417932152748108, + "learning_rate": 1.4311707530339601e-05, + "loss": 0.4104, + "step": 919 + }, + { + "epoch": 0.4166077038462627, + "grad_norm": 0.6767085790634155, + "learning_rate": 1.4310215967012449e-05, + "loss": 0.4512, + "step": 920 + }, + { + "epoch": 0.41706053830696516, + "grad_norm": 0.9227702617645264, + "learning_rate": 1.4308722867186442e-05, + "loss": 0.3907, + "step": 921 + }, + { + "epoch": 0.41751337276766765, + "grad_norm": 0.7554175853729248, + "learning_rate": 1.430722823119845e-05, + "loss": 0.4507, + "step": 922 + }, + { + "epoch": 0.4179662072283701, + "grad_norm": 1.1104964017868042, + "learning_rate": 1.4305732059385687e-05, + "loss": 0.3809, + "step": 923 + }, + { + "epoch": 0.4184190416890725, + "grad_norm": 0.6951049566268921, + "learning_rate": 1.4304234352085715e-05, + "loss": 0.4229, + "step": 924 + }, + { + "epoch": 0.418871876149775, + "grad_norm": 0.9023833870887756, + "learning_rate": 1.4302735109636442e-05, + "loss": 0.4166, + "step": 925 + }, + { + "epoch": 0.41932471061047744, + "grad_norm": 0.6952568888664246, + "learning_rate": 1.430123433237612e-05, + "loss": 0.3956, + "step": 926 + }, + { + "epoch": 0.4197775450711799, + "grad_norm": 0.8569507002830505, + "learning_rate": 1.4299732020643354e-05, + "loss": 0.4372, + "step": 927 + }, + { + "epoch": 0.42023037953188236, + "grad_norm": 0.8564557433128357, + "learning_rate": 1.4298228174777086e-05, + "loss": 0.4311, + "step": 928 + }, + { + "epoch": 0.42068321399258485, + "grad_norm": 0.9847443699836731, + "learning_rate": 1.4296722795116614e-05, + "loss": 0.4198, + "step": 929 + }, + { + "epoch": 0.4211360484532873, + "grad_norm": 0.9145928621292114, + "learning_rate": 1.4295215882001572e-05, + "loss": 0.4092, + "step": 930 + }, + { + "epoch": 0.42158888291398977, + "grad_norm": 1.0060977935791016, + "learning_rate": 1.4293707435771947e-05, + "loss": 0.38, + "step": 931 + }, + { + "epoch": 0.4220417173746922, + "grad_norm": 0.8660867810249329, + "learning_rate": 1.4292197456768069e-05, + "loss": 0.4302, + "step": 932 + }, + { + "epoch": 0.4224945518353947, + "grad_norm": 0.7605267763137817, + "learning_rate": 1.4290685945330615e-05, + "loss": 0.4346, + "step": 933 + }, + { + "epoch": 0.4229473862960971, + "grad_norm": 0.9517015814781189, + "learning_rate": 1.4289172901800606e-05, + "loss": 0.4111, + "step": 934 + }, + { + "epoch": 0.4234002207567996, + "grad_norm": 0.7545219659805298, + "learning_rate": 1.4287658326519414e-05, + "loss": 0.4231, + "step": 935 + }, + { + "epoch": 0.42385305521750205, + "grad_norm": 0.7649204134941101, + "learning_rate": 1.4286142219828748e-05, + "loss": 0.4532, + "step": 936 + }, + { + "epoch": 0.4243058896782045, + "grad_norm": 0.8143602013587952, + "learning_rate": 1.428462458207067e-05, + "loss": 0.4212, + "step": 937 + }, + { + "epoch": 0.424758724138907, + "grad_norm": 0.9045494794845581, + "learning_rate": 1.4283105413587581e-05, + "loss": 0.4313, + "step": 938 + }, + { + "epoch": 0.4252115585996094, + "grad_norm": 0.9623934030532837, + "learning_rate": 1.4281584714722237e-05, + 
"loss": 0.4485, + "step": 939 + }, + { + "epoch": 0.4256643930603119, + "grad_norm": 0.7864290475845337, + "learning_rate": 1.4280062485817727e-05, + "loss": 0.4093, + "step": 940 + }, + { + "epoch": 0.42611722752101433, + "grad_norm": 0.6734517812728882, + "learning_rate": 1.4278538727217495e-05, + "loss": 0.4376, + "step": 941 + }, + { + "epoch": 0.4265700619817168, + "grad_norm": 0.7887140512466431, + "learning_rate": 1.4277013439265325e-05, + "loss": 0.3778, + "step": 942 + }, + { + "epoch": 0.42702289644241925, + "grad_norm": 0.8578718900680542, + "learning_rate": 1.4275486622305348e-05, + "loss": 0.4567, + "step": 943 + }, + { + "epoch": 0.42747573090312174, + "grad_norm": 0.775020956993103, + "learning_rate": 1.427395827668204e-05, + "loss": 0.4177, + "step": 944 + }, + { + "epoch": 0.4279285653638242, + "grad_norm": 0.7445123791694641, + "learning_rate": 1.427242840274022e-05, + "loss": 0.4109, + "step": 945 + }, + { + "epoch": 0.42838139982452667, + "grad_norm": 0.660199761390686, + "learning_rate": 1.4270897000825052e-05, + "loss": 0.3661, + "step": 946 + }, + { + "epoch": 0.4288342342852291, + "grad_norm": 0.960015594959259, + "learning_rate": 1.426936407128205e-05, + "loss": 0.4006, + "step": 947 + }, + { + "epoch": 0.4292870687459316, + "grad_norm": 0.7209600806236267, + "learning_rate": 1.4267829614457067e-05, + "loss": 0.4041, + "step": 948 + }, + { + "epoch": 0.429739903206634, + "grad_norm": 0.7578160762786865, + "learning_rate": 1.42662936306963e-05, + "loss": 0.3883, + "step": 949 + }, + { + "epoch": 0.4301927376673365, + "grad_norm": 0.7112842798233032, + "learning_rate": 1.4264756120346294e-05, + "loss": 0.4147, + "step": 950 + }, + { + "epoch": 0.43064557212803894, + "grad_norm": 0.8537875413894653, + "learning_rate": 1.4263217083753938e-05, + "loss": 0.4498, + "step": 951 + }, + { + "epoch": 0.4310984065887414, + "grad_norm": 0.6418316960334778, + "learning_rate": 1.4261676521266466e-05, + "loss": 0.3858, + "step": 952 + }, + { + "epoch": 0.43155124104944387, + "grad_norm": 0.8194690346717834, + "learning_rate": 1.4260134433231449e-05, + "loss": 0.3749, + "step": 953 + }, + { + "epoch": 0.4320040755101463, + "grad_norm": 1.0442348718643188, + "learning_rate": 1.4258590819996813e-05, + "loss": 0.3945, + "step": 954 + }, + { + "epoch": 0.4324569099708488, + "grad_norm": 0.778914213180542, + "learning_rate": 1.4257045681910822e-05, + "loss": 0.4199, + "step": 955 + }, + { + "epoch": 0.4329097444315512, + "grad_norm": 0.7745048999786377, + "learning_rate": 1.4255499019322084e-05, + "loss": 0.4514, + "step": 956 + }, + { + "epoch": 0.4333625788922537, + "grad_norm": 0.7798154354095459, + "learning_rate": 1.4253950832579552e-05, + "loss": 0.4377, + "step": 957 + }, + { + "epoch": 0.43381541335295615, + "grad_norm": 0.7294392585754395, + "learning_rate": 1.4252401122032525e-05, + "loss": 0.3638, + "step": 958 + }, + { + "epoch": 0.43426824781365864, + "grad_norm": 0.795789361000061, + "learning_rate": 1.4250849888030641e-05, + "loss": 0.4311, + "step": 959 + }, + { + "epoch": 0.43472108227436107, + "grad_norm": 1.5630682706832886, + "learning_rate": 1.4249297130923885e-05, + "loss": 0.396, + "step": 960 + }, + { + "epoch": 0.43517391673506356, + "grad_norm": 0.7337138056755066, + "learning_rate": 1.4247742851062587e-05, + "loss": 0.4561, + "step": 961 + }, + { + "epoch": 0.435626751195766, + "grad_norm": 0.7088420987129211, + "learning_rate": 1.4246187048797414e-05, + "loss": 0.4247, + "step": 962 + }, + { + "epoch": 0.4360795856564685, + "grad_norm": 0.9262889623641968, + 
"learning_rate": 1.4244629724479387e-05, + "loss": 0.4006, + "step": 963 + }, + { + "epoch": 0.4365324201171709, + "grad_norm": 0.8591921329498291, + "learning_rate": 1.4243070878459862e-05, + "loss": 0.4003, + "step": 964 + }, + { + "epoch": 0.43698525457787335, + "grad_norm": 1.0036776065826416, + "learning_rate": 1.4241510511090539e-05, + "loss": 0.4098, + "step": 965 + }, + { + "epoch": 0.43743808903857584, + "grad_norm": 0.6864471435546875, + "learning_rate": 1.4239948622723465e-05, + "loss": 0.4326, + "step": 966 + }, + { + "epoch": 0.4378909234992783, + "grad_norm": 1.0244603157043457, + "learning_rate": 1.4238385213711027e-05, + "loss": 0.4419, + "step": 967 + }, + { + "epoch": 0.43834375795998076, + "grad_norm": 0.7093055844306946, + "learning_rate": 1.4236820284405956e-05, + "loss": 0.3951, + "step": 968 + }, + { + "epoch": 0.4387965924206832, + "grad_norm": 0.8618746995925903, + "learning_rate": 1.4235253835161329e-05, + "loss": 0.4756, + "step": 969 + }, + { + "epoch": 0.4392494268813857, + "grad_norm": 0.7317725419998169, + "learning_rate": 1.4233685866330561e-05, + "loss": 0.3789, + "step": 970 + }, + { + "epoch": 0.4397022613420881, + "grad_norm": 0.8535686731338501, + "learning_rate": 1.4232116378267412e-05, + "loss": 0.4096, + "step": 971 + }, + { + "epoch": 0.4401550958027906, + "grad_norm": 0.7266016006469727, + "learning_rate": 1.4230545371325985e-05, + "loss": 0.3706, + "step": 972 + }, + { + "epoch": 0.44060793026349304, + "grad_norm": 0.7952896356582642, + "learning_rate": 1.4228972845860725e-05, + "loss": 0.3392, + "step": 973 + }, + { + "epoch": 0.44106076472419553, + "grad_norm": 0.8086087107658386, + "learning_rate": 1.422739880222642e-05, + "loss": 0.3972, + "step": 974 + }, + { + "epoch": 0.44151359918489796, + "grad_norm": 0.8605599403381348, + "learning_rate": 1.4225823240778207e-05, + "loss": 0.422, + "step": 975 + }, + { + "epoch": 0.44196643364560045, + "grad_norm": 0.752622663974762, + "learning_rate": 1.4224246161871548e-05, + "loss": 0.3965, + "step": 976 + }, + { + "epoch": 0.4424192681063029, + "grad_norm": 0.8178388476371765, + "learning_rate": 1.4222667565862264e-05, + "loss": 0.4438, + "step": 977 + }, + { + "epoch": 0.4428721025670054, + "grad_norm": 0.7991887927055359, + "learning_rate": 1.4221087453106515e-05, + "loss": 0.3748, + "step": 978 + }, + { + "epoch": 0.4433249370277078, + "grad_norm": 0.7863441705703735, + "learning_rate": 1.4219505823960798e-05, + "loss": 0.3352, + "step": 979 + }, + { + "epoch": 0.44377777148841024, + "grad_norm": 0.7784522175788879, + "learning_rate": 1.4217922678781955e-05, + "loss": 0.4319, + "step": 980 + }, + { + "epoch": 0.44423060594911273, + "grad_norm": 0.8807750344276428, + "learning_rate": 1.4216338017927171e-05, + "loss": 0.4165, + "step": 981 + }, + { + "epoch": 0.44468344040981517, + "grad_norm": 0.6990790367126465, + "learning_rate": 1.4214751841753971e-05, + "loss": 0.4141, + "step": 982 + }, + { + "epoch": 0.44513627487051766, + "grad_norm": 0.9423395991325378, + "learning_rate": 1.4213164150620227e-05, + "loss": 0.3669, + "step": 983 + }, + { + "epoch": 0.4455891093312201, + "grad_norm": 0.7699115872383118, + "learning_rate": 1.4211574944884143e-05, + "loss": 0.4115, + "step": 984 + }, + { + "epoch": 0.4460419437919226, + "grad_norm": 0.8870624899864197, + "learning_rate": 1.4209984224904272e-05, + "loss": 0.3953, + "step": 985 + }, + { + "epoch": 0.446494778252625, + "grad_norm": 0.8935746550559998, + "learning_rate": 1.4208391991039509e-05, + "loss": 0.4049, + "step": 986 + }, + { + "epoch": 
0.4469476127133275, + "grad_norm": 0.647849440574646, + "learning_rate": 1.4206798243649085e-05, + "loss": 0.3873, + "step": 987 + }, + { + "epoch": 0.44740044717402994, + "grad_norm": 0.7980324625968933, + "learning_rate": 1.4205202983092583e-05, + "loss": 0.4253, + "step": 988 + }, + { + "epoch": 0.4478532816347324, + "grad_norm": 0.6993192434310913, + "learning_rate": 1.4203606209729912e-05, + "loss": 0.409, + "step": 989 + }, + { + "epoch": 0.44830611609543486, + "grad_norm": 0.7574604153633118, + "learning_rate": 1.4202007923921338e-05, + "loss": 0.345, + "step": 990 + }, + { + "epoch": 0.44875895055613735, + "grad_norm": 0.8472474217414856, + "learning_rate": 1.4200408126027458e-05, + "loss": 0.3847, + "step": 991 + }, + { + "epoch": 0.4492117850168398, + "grad_norm": 0.7414373159408569, + "learning_rate": 1.4198806816409211e-05, + "loss": 0.3923, + "step": 992 + }, + { + "epoch": 0.4496646194775422, + "grad_norm": 0.6760424971580505, + "learning_rate": 1.4197203995427883e-05, + "loss": 0.3471, + "step": 993 + }, + { + "epoch": 0.4501174539382447, + "grad_norm": 0.6407291889190674, + "learning_rate": 1.4195599663445096e-05, + "loss": 0.4312, + "step": 994 + }, + { + "epoch": 0.45057028839894714, + "grad_norm": 0.6934922337532043, + "learning_rate": 1.4193993820822815e-05, + "loss": 0.4067, + "step": 995 + }, + { + "epoch": 0.45102312285964963, + "grad_norm": 0.7909060716629028, + "learning_rate": 1.4192386467923343e-05, + "loss": 0.4303, + "step": 996 + }, + { + "epoch": 0.45147595732035206, + "grad_norm": 0.7516681551933289, + "learning_rate": 1.4190777605109326e-05, + "loss": 0.4274, + "step": 997 + }, + { + "epoch": 0.45192879178105455, + "grad_norm": 0.7599420547485352, + "learning_rate": 1.4189167232743754e-05, + "loss": 0.3949, + "step": 998 + }, + { + "epoch": 0.452381626241757, + "grad_norm": 0.7314419150352478, + "learning_rate": 1.4187555351189948e-05, + "loss": 0.4176, + "step": 999 + }, + { + "epoch": 0.4528344607024595, + "grad_norm": 0.6400272846221924, + "learning_rate": 1.418594196081158e-05, + "loss": 0.3949, + "step": 1000 + }, + { + "epoch": 0.4532872951631619, + "grad_norm": 0.8566839098930359, + "learning_rate": 1.4184327061972657e-05, + "loss": 0.4034, + "step": 1001 + }, + { + "epoch": 0.4537401296238644, + "grad_norm": 0.7478398084640503, + "learning_rate": 1.4182710655037527e-05, + "loss": 0.4593, + "step": 1002 + }, + { + "epoch": 0.45419296408456683, + "grad_norm": 0.8677337169647217, + "learning_rate": 1.4181092740370878e-05, + "loss": 0.432, + "step": 1003 + }, + { + "epoch": 0.4546457985452693, + "grad_norm": 0.7963004112243652, + "learning_rate": 1.4179473318337738e-05, + "loss": 0.3899, + "step": 1004 + }, + { + "epoch": 0.45509863300597175, + "grad_norm": 0.7104138135910034, + "learning_rate": 1.4177852389303479e-05, + "loss": 0.3823, + "step": 1005 + }, + { + "epoch": 0.4555514674666742, + "grad_norm": 0.6873833537101746, + "learning_rate": 1.4176229953633806e-05, + "loss": 0.3799, + "step": 1006 + }, + { + "epoch": 0.4560043019273767, + "grad_norm": 0.8924839496612549, + "learning_rate": 1.4174606011694767e-05, + "loss": 0.4167, + "step": 1007 + }, + { + "epoch": 0.4564571363880791, + "grad_norm": 0.8046056032180786, + "learning_rate": 1.4172980563852757e-05, + "loss": 0.3823, + "step": 1008 + }, + { + "epoch": 0.4569099708487816, + "grad_norm": 0.7857614159584045, + "learning_rate": 1.4171353610474496e-05, + "loss": 0.4105, + "step": 1009 + }, + { + "epoch": 0.45736280530948403, + "grad_norm": 0.715185284614563, + "learning_rate": 
1.416972515192706e-05, + "loss": 0.3557, + "step": 1010 + }, + { + "epoch": 0.4578156397701865, + "grad_norm": 0.7968368530273438, + "learning_rate": 1.416809518857785e-05, + "loss": 0.3438, + "step": 1011 + }, + { + "epoch": 0.45826847423088896, + "grad_norm": 0.9046311974525452, + "learning_rate": 1.4166463720794615e-05, + "loss": 0.4533, + "step": 1012 + }, + { + "epoch": 0.45872130869159145, + "grad_norm": 0.6228207349777222, + "learning_rate": 1.4164830748945445e-05, + "loss": 0.3698, + "step": 1013 + }, + { + "epoch": 0.4591741431522939, + "grad_norm": 0.8371472954750061, + "learning_rate": 1.416319627339876e-05, + "loss": 0.4385, + "step": 1014 + }, + { + "epoch": 0.45962697761299637, + "grad_norm": 0.6872038841247559, + "learning_rate": 1.416156029452333e-05, + "loss": 0.3892, + "step": 1015 + }, + { + "epoch": 0.4600798120736988, + "grad_norm": 0.7857869863510132, + "learning_rate": 1.4159922812688255e-05, + "loss": 0.4179, + "step": 1016 + }, + { + "epoch": 0.4605326465344013, + "grad_norm": 0.8071677684783936, + "learning_rate": 1.4158283828262982e-05, + "loss": 0.4028, + "step": 1017 + }, + { + "epoch": 0.4609854809951037, + "grad_norm": 0.7820652723312378, + "learning_rate": 1.415664334161729e-05, + "loss": 0.379, + "step": 1018 + }, + { + "epoch": 0.4614383154558062, + "grad_norm": 0.7160912752151489, + "learning_rate": 1.4155001353121304e-05, + "loss": 0.393, + "step": 1019 + }, + { + "epoch": 0.46189114991650865, + "grad_norm": 0.7849195003509521, + "learning_rate": 1.415335786314548e-05, + "loss": 0.4273, + "step": 1020 + }, + { + "epoch": 0.4623439843772111, + "grad_norm": 0.7774779200553894, + "learning_rate": 1.4151712872060623e-05, + "loss": 0.356, + "step": 1021 + }, + { + "epoch": 0.46279681883791357, + "grad_norm": 0.7844209671020508, + "learning_rate": 1.4150066380237864e-05, + "loss": 0.4261, + "step": 1022 + }, + { + "epoch": 0.463249653298616, + "grad_norm": 0.840189516544342, + "learning_rate": 1.4148418388048683e-05, + "loss": 0.3801, + "step": 1023 + }, + { + "epoch": 0.4637024877593185, + "grad_norm": 0.8453295230865479, + "learning_rate": 1.4146768895864892e-05, + "loss": 0.4049, + "step": 1024 + }, + { + "epoch": 0.4641553222200209, + "grad_norm": 0.7747737169265747, + "learning_rate": 1.4145117904058645e-05, + "loss": 0.4002, + "step": 1025 + }, + { + "epoch": 0.4646081566807234, + "grad_norm": 0.7298173904418945, + "learning_rate": 1.4143465413002435e-05, + "loss": 0.3816, + "step": 1026 + }, + { + "epoch": 0.46506099114142585, + "grad_norm": 0.8087685704231262, + "learning_rate": 1.4141811423069092e-05, + "loss": 0.3819, + "step": 1027 + }, + { + "epoch": 0.46551382560212834, + "grad_norm": 0.6976639032363892, + "learning_rate": 1.4140155934631783e-05, + "loss": 0.3915, + "step": 1028 + }, + { + "epoch": 0.4659666600628308, + "grad_norm": 0.6582549214363098, + "learning_rate": 1.413849894806401e-05, + "loss": 0.3571, + "step": 1029 + }, + { + "epoch": 0.46641949452353326, + "grad_norm": 0.7097831964492798, + "learning_rate": 1.4136840463739623e-05, + "loss": 0.4004, + "step": 1030 + }, + { + "epoch": 0.4668723289842357, + "grad_norm": 0.8044737577438354, + "learning_rate": 1.4135180482032801e-05, + "loss": 0.4087, + "step": 1031 + }, + { + "epoch": 0.4673251634449382, + "grad_norm": 0.7350354194641113, + "learning_rate": 1.4133519003318063e-05, + "loss": 0.4101, + "step": 1032 + }, + { + "epoch": 0.4677779979056406, + "grad_norm": 0.7550399899482727, + "learning_rate": 1.4131856027970269e-05, + "loss": 0.3657, + "step": 1033 + }, + { + "epoch": 
0.46823083236634305, + "grad_norm": 0.65655118227005, + "learning_rate": 1.413019155636461e-05, + "loss": 0.373, + "step": 1034 + }, + { + "epoch": 0.46868366682704554, + "grad_norm": 0.7206447124481201, + "learning_rate": 1.4128525588876624e-05, + "loss": 0.353, + "step": 1035 + }, + { + "epoch": 0.469136501287748, + "grad_norm": 0.7007904052734375, + "learning_rate": 1.4126858125882176e-05, + "loss": 0.3668, + "step": 1036 + }, + { + "epoch": 0.46958933574845046, + "grad_norm": 0.9158049821853638, + "learning_rate": 1.4125189167757477e-05, + "loss": 0.3746, + "step": 1037 + }, + { + "epoch": 0.4700421702091529, + "grad_norm": 0.8141727447509766, + "learning_rate": 1.412351871487907e-05, + "loss": 0.3728, + "step": 1038 + }, + { + "epoch": 0.4704950046698554, + "grad_norm": 0.696324348449707, + "learning_rate": 1.4121846767623839e-05, + "loss": 0.3415, + "step": 1039 + }, + { + "epoch": 0.4709478391305578, + "grad_norm": 0.8677844405174255, + "learning_rate": 1.4120173326369003e-05, + "loss": 0.3584, + "step": 1040 + }, + { + "epoch": 0.4714006735912603, + "grad_norm": 0.8021166324615479, + "learning_rate": 1.4118498391492118e-05, + "loss": 0.3832, + "step": 1041 + }, + { + "epoch": 0.47185350805196274, + "grad_norm": 0.9213194847106934, + "learning_rate": 1.4116821963371075e-05, + "loss": 0.4097, + "step": 1042 + }, + { + "epoch": 0.47230634251266523, + "grad_norm": 0.8504592776298523, + "learning_rate": 1.4115144042384107e-05, + "loss": 0.4031, + "step": 1043 + }, + { + "epoch": 0.47275917697336767, + "grad_norm": 0.746096134185791, + "learning_rate": 1.411346462890978e-05, + "loss": 0.4064, + "step": 1044 + }, + { + "epoch": 0.47321201143407016, + "grad_norm": 0.9829161167144775, + "learning_rate": 1.4111783723327e-05, + "loss": 0.409, + "step": 1045 + }, + { + "epoch": 0.4736648458947726, + "grad_norm": 0.7933005690574646, + "learning_rate": 1.4110101326015007e-05, + "loss": 0.4209, + "step": 1046 + }, + { + "epoch": 0.474117680355475, + "grad_norm": 0.8225368857383728, + "learning_rate": 1.4108417437353374e-05, + "loss": 0.3688, + "step": 1047 + }, + { + "epoch": 0.4745705148161775, + "grad_norm": 0.6937693357467651, + "learning_rate": 1.4106732057722018e-05, + "loss": 0.4077, + "step": 1048 + }, + { + "epoch": 0.47502334927687995, + "grad_norm": 0.7931678891181946, + "learning_rate": 1.410504518750119e-05, + "loss": 0.3273, + "step": 1049 + }, + { + "epoch": 0.47547618373758244, + "grad_norm": 0.7295666933059692, + "learning_rate": 1.4103356827071473e-05, + "loss": 0.3798, + "step": 1050 + }, + { + "epoch": 0.47592901819828487, + "grad_norm": 0.7447585463523865, + "learning_rate": 1.410166697681379e-05, + "loss": 0.3923, + "step": 1051 + }, + { + "epoch": 0.47638185265898736, + "grad_norm": 0.732587993144989, + "learning_rate": 1.4099975637109401e-05, + "loss": 0.3618, + "step": 1052 + }, + { + "epoch": 0.4768346871196898, + "grad_norm": 0.6432980895042419, + "learning_rate": 1.4098282808339899e-05, + "loss": 0.3782, + "step": 1053 + }, + { + "epoch": 0.4772875215803923, + "grad_norm": 0.9398018717765808, + "learning_rate": 1.4096588490887218e-05, + "loss": 0.4002, + "step": 1054 + }, + { + "epoch": 0.4777403560410947, + "grad_norm": 0.845302164554596, + "learning_rate": 1.409489268513362e-05, + "loss": 0.3981, + "step": 1055 + }, + { + "epoch": 0.4781931905017972, + "grad_norm": 0.7976647615432739, + "learning_rate": 1.409319539146171e-05, + "loss": 0.3828, + "step": 1056 + }, + { + "epoch": 0.47864602496249964, + "grad_norm": 0.7224624156951904, + "learning_rate": 
1.4091496610254426e-05, + "loss": 0.4466, + "step": 1057 + }, + { + "epoch": 0.47909885942320213, + "grad_norm": 0.6982882618904114, + "learning_rate": 1.4089796341895036e-05, + "loss": 0.3732, + "step": 1058 + }, + { + "epoch": 0.47955169388390456, + "grad_norm": 0.6954671144485474, + "learning_rate": 1.408809458676716e-05, + "loss": 0.3646, + "step": 1059 + }, + { + "epoch": 0.48000452834460705, + "grad_norm": 0.6358652710914612, + "learning_rate": 1.4086391345254732e-05, + "loss": 0.382, + "step": 1060 + }, + { + "epoch": 0.4804573628053095, + "grad_norm": 0.7169029116630554, + "learning_rate": 1.4084686617742037e-05, + "loss": 0.384, + "step": 1061 + }, + { + "epoch": 0.4809101972660119, + "grad_norm": 0.7383198738098145, + "learning_rate": 1.4082980404613687e-05, + "loss": 0.3487, + "step": 1062 + }, + { + "epoch": 0.4813630317267144, + "grad_norm": 0.6351798176765442, + "learning_rate": 1.4081272706254637e-05, + "loss": 0.3988, + "step": 1063 + }, + { + "epoch": 0.48181586618741684, + "grad_norm": 0.7320924401283264, + "learning_rate": 1.4079563523050169e-05, + "loss": 0.3706, + "step": 1064 + }, + { + "epoch": 0.48226870064811933, + "grad_norm": 0.7490411996841431, + "learning_rate": 1.4077852855385902e-05, + "loss": 0.3777, + "step": 1065 + }, + { + "epoch": 0.48272153510882176, + "grad_norm": 0.8109067678451538, + "learning_rate": 1.4076140703647794e-05, + "loss": 0.391, + "step": 1066 + }, + { + "epoch": 0.48317436956952425, + "grad_norm": 0.7815183401107788, + "learning_rate": 1.4074427068222135e-05, + "loss": 0.3854, + "step": 1067 + }, + { + "epoch": 0.4836272040302267, + "grad_norm": 0.8142592906951904, + "learning_rate": 1.407271194949555e-05, + "loss": 0.3907, + "step": 1068 + }, + { + "epoch": 0.4840800384909292, + "grad_norm": 0.7322883009910583, + "learning_rate": 1.4070995347854997e-05, + "loss": 0.3879, + "step": 1069 + }, + { + "epoch": 0.4845328729516316, + "grad_norm": 0.7850584983825684, + "learning_rate": 1.4069277263687772e-05, + "loss": 0.3516, + "step": 1070 + }, + { + "epoch": 0.4849857074123341, + "grad_norm": 0.6609899401664734, + "learning_rate": 1.40675576973815e-05, + "loss": 0.3677, + "step": 1071 + }, + { + "epoch": 0.48543854187303653, + "grad_norm": 0.6708543300628662, + "learning_rate": 1.4065836649324148e-05, + "loss": 0.3708, + "step": 1072 + }, + { + "epoch": 0.485891376333739, + "grad_norm": 1.1243339776992798, + "learning_rate": 1.4064114119904012e-05, + "loss": 0.4169, + "step": 1073 + }, + { + "epoch": 0.48634421079444146, + "grad_norm": 0.7623947858810425, + "learning_rate": 1.4062390109509722e-05, + "loss": 0.3518, + "step": 1074 + }, + { + "epoch": 0.4867970452551439, + "grad_norm": 0.7470114827156067, + "learning_rate": 1.4060664618530247e-05, + "loss": 0.3892, + "step": 1075 + }, + { + "epoch": 0.4872498797158464, + "grad_norm": 0.8398137092590332, + "learning_rate": 1.4058937647354882e-05, + "loss": 0.4376, + "step": 1076 + }, + { + "epoch": 0.4877027141765488, + "grad_norm": 0.7564401626586914, + "learning_rate": 1.4057209196373265e-05, + "loss": 0.3588, + "step": 1077 + }, + { + "epoch": 0.4881555486372513, + "grad_norm": 0.7910377383232117, + "learning_rate": 1.4055479265975361e-05, + "loss": 0.3916, + "step": 1078 + }, + { + "epoch": 0.48860838309795374, + "grad_norm": 0.720329999923706, + "learning_rate": 1.405374785655147e-05, + "loss": 0.3732, + "step": 1079 + }, + { + "epoch": 0.4890612175586562, + "grad_norm": 0.7369146347045898, + "learning_rate": 1.4052014968492232e-05, + "loss": 0.397, + "step": 1080 + }, + { + "epoch": 
0.48951405201935866, + "grad_norm": 0.6554039120674133, + "learning_rate": 1.4050280602188613e-05, + "loss": 0.3219, + "step": 1081 + }, + { + "epoch": 0.48996688648006115, + "grad_norm": 0.7499859929084778, + "learning_rate": 1.4048544758031912e-05, + "loss": 0.3816, + "step": 1082 + }, + { + "epoch": 0.4904197209407636, + "grad_norm": 0.855495810508728, + "learning_rate": 1.4046807436413768e-05, + "loss": 0.3978, + "step": 1083 + }, + { + "epoch": 0.49087255540146607, + "grad_norm": 0.6773454546928406, + "learning_rate": 1.404506863772615e-05, + "loss": 0.389, + "step": 1084 + }, + { + "epoch": 0.4913253898621685, + "grad_norm": 0.7844047546386719, + "learning_rate": 1.4043328362361359e-05, + "loss": 0.3594, + "step": 1085 + }, + { + "epoch": 0.491778224322871, + "grad_norm": 0.7174739241600037, + "learning_rate": 1.404158661071203e-05, + "loss": 0.3667, + "step": 1086 + }, + { + "epoch": 0.4922310587835734, + "grad_norm": 0.6895620226860046, + "learning_rate": 1.4039843383171132e-05, + "loss": 0.4006, + "step": 1087 + }, + { + "epoch": 0.4926838932442759, + "grad_norm": 0.6506296992301941, + "learning_rate": 1.4038098680131966e-05, + "loss": 0.3277, + "step": 1088 + }, + { + "epoch": 0.49313672770497835, + "grad_norm": 0.7601318359375, + "learning_rate": 1.4036352501988168e-05, + "loss": 0.3736, + "step": 1089 + }, + { + "epoch": 0.4935895621656808, + "grad_norm": 0.6839517951011658, + "learning_rate": 1.4034604849133703e-05, + "loss": 0.3365, + "step": 1090 + }, + { + "epoch": 0.4940423966263833, + "grad_norm": 0.6496476531028748, + "learning_rate": 1.4032855721962869e-05, + "loss": 0.3697, + "step": 1091 + }, + { + "epoch": 0.4944952310870857, + "grad_norm": 0.7376840710639954, + "learning_rate": 1.4031105120870303e-05, + "loss": 0.4017, + "step": 1092 + }, + { + "epoch": 0.4949480655477882, + "grad_norm": 0.7958086133003235, + "learning_rate": 1.4029353046250965e-05, + "loss": 0.3657, + "step": 1093 + }, + { + "epoch": 0.49540090000849063, + "grad_norm": 0.6121701002120972, + "learning_rate": 1.4027599498500157e-05, + "loss": 0.3189, + "step": 1094 + }, + { + "epoch": 0.4958537344691931, + "grad_norm": 0.7224218249320984, + "learning_rate": 1.4025844478013505e-05, + "loss": 0.3443, + "step": 1095 + }, + { + "epoch": 0.49630656892989555, + "grad_norm": 0.8465177416801453, + "learning_rate": 1.4024087985186975e-05, + "loss": 0.3789, + "step": 1096 + }, + { + "epoch": 0.49675940339059804, + "grad_norm": 0.6270837783813477, + "learning_rate": 1.4022330020416858e-05, + "loss": 0.3388, + "step": 1097 + }, + { + "epoch": 0.4972122378513005, + "grad_norm": 0.6935149431228638, + "learning_rate": 1.4020570584099782e-05, + "loss": 0.336, + "step": 1098 + }, + { + "epoch": 0.49766507231200297, + "grad_norm": 0.7307567596435547, + "learning_rate": 1.4018809676632703e-05, + "loss": 0.4503, + "step": 1099 + }, + { + "epoch": 0.4981179067727054, + "grad_norm": 0.7322489023208618, + "learning_rate": 1.4017047298412914e-05, + "loss": 0.3521, + "step": 1100 + }, + { + "epoch": 0.4985707412334079, + "grad_norm": 0.7978783249855042, + "learning_rate": 1.4015283449838037e-05, + "loss": 0.3992, + "step": 1101 + }, + { + "epoch": 0.4990235756941103, + "grad_norm": 0.6273020505905151, + "learning_rate": 1.4013518131306024e-05, + "loss": 0.3647, + "step": 1102 + }, + { + "epoch": 0.49947641015481276, + "grad_norm": 0.6884061098098755, + "learning_rate": 1.401175134321516e-05, + "loss": 0.3903, + "step": 1103 + }, + { + "epoch": 0.49992924461551524, + "grad_norm": 0.7667953968048096, + "learning_rate": 
1.4009983085964064e-05, + "loss": 0.3562, + "step": 1104 + }, + { + "epoch": 0.49992924461551524, + "eval_loss": 0.3898637592792511, + "eval_runtime": 19.4358, + "eval_samples_per_second": 5.865, + "eval_steps_per_second": 0.772, + "step": 1104 + } + ], + "logging_steps": 1, + "max_steps": 6624, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 552, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 9.82217497538577e+19, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}