{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9952,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0096,
      "grad_norm": 5.443678855895996,
      "learning_rate": 3.125e-07,
      "loss": 0.9061,
      "step": 1
    },
    {
      "epoch": 0.0192,
      "grad_norm": 5.82219934463501,
      "learning_rate": 6.25e-07,
      "loss": 0.9105,
      "step": 2
    },
    {
      "epoch": 0.0288,
      "grad_norm": 5.593341827392578,
      "learning_rate": 9.375000000000001e-07,
      "loss": 0.9029,
      "step": 3
    },
    {
      "epoch": 0.0384,
      "grad_norm": 5.499074459075928,
      "learning_rate": 1.25e-06,
      "loss": 0.9133,
      "step": 4
    },
    {
      "epoch": 0.048,
      "grad_norm": 5.513024806976318,
      "learning_rate": 1.5625e-06,
      "loss": 0.8816,
      "step": 5
    },
    {
      "epoch": 0.0576,
      "grad_norm": 4.841058254241943,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.8616,
      "step": 6
    },
    {
      "epoch": 0.0672,
      "grad_norm": 4.233567714691162,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 0.8717,
      "step": 7
    },
    {
      "epoch": 0.0768,
      "grad_norm": 3.8331143856048584,
      "learning_rate": 2.5e-06,
      "loss": 0.8967,
      "step": 8
    },
    {
      "epoch": 0.0864,
      "grad_norm": 2.1296498775482178,
      "learning_rate": 2.8125e-06,
      "loss": 0.8247,
      "step": 9
    },
    {
      "epoch": 0.096,
      "grad_norm": 2.0691421031951904,
      "learning_rate": 3.125e-06,
      "loss": 0.8194,
      "step": 10
    },
    {
      "epoch": 0.1056,
      "grad_norm": 1.935678482055664,
      "learning_rate": 3.4375e-06,
      "loss": 0.8173,
      "step": 11
    },
    {
      "epoch": 0.1152,
      "grad_norm": 3.1325912475585938,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.8072,
      "step": 12
    },
    {
      "epoch": 0.1248,
      "grad_norm": 3.440077543258667,
      "learning_rate": 4.0625000000000005e-06,
      "loss": 0.7925,
      "step": 13
    },
    {
      "epoch": 0.1344,
      "grad_norm": 3.7032241821289062,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.7964,
      "step": 14
    },
    {
      "epoch": 0.144,
      "grad_norm": 3.4100658893585205,
      "learning_rate": 4.6875000000000004e-06,
      "loss": 0.7774,
      "step": 15
    },
    {
      "epoch": 0.1536,
      "grad_norm": 2.6250312328338623,
      "learning_rate": 5e-06,
      "loss": 0.757,
      "step": 16
    },
    {
      "epoch": 0.1632,
      "grad_norm": 2.285292387008667,
      "learning_rate": 5.3125e-06,
      "loss": 0.7718,
      "step": 17
    },
    {
      "epoch": 0.1728,
      "grad_norm": 1.9034889936447144,
      "learning_rate": 5.625e-06,
      "loss": 0.7582,
      "step": 18
    },
    {
      "epoch": 0.1824,
      "grad_norm": 1.5559135675430298,
      "learning_rate": 5.9375e-06,
      "loss": 0.7192,
      "step": 19
    },
    {
      "epoch": 0.192,
      "grad_norm": 1.4654297828674316,
      "learning_rate": 6.25e-06,
      "loss": 0.7072,
      "step": 20
    },
    {
      "epoch": 0.2016,
      "grad_norm": 1.4360804557800293,
      "learning_rate": 6.5625e-06,
      "loss": 0.7075,
      "step": 21
    },
    {
      "epoch": 0.2112,
      "grad_norm": 1.46616530418396,
      "learning_rate": 6.875e-06,
      "loss": 0.7336,
      "step": 22
    },
    {
      "epoch": 0.2208,
      "grad_norm": 1.2139849662780762,
      "learning_rate": 7.1875e-06,
      "loss": 0.6851,
      "step": 23
    },
    {
      "epoch": 0.2304,
      "grad_norm": 1.1717193126678467,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.6955,
      "step": 24
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.0088032484054565,
      "learning_rate": 7.8125e-06,
      "loss": 0.6746,
      "step": 25
    },
    {
      "epoch": 0.2496,
      "grad_norm": 0.9678511023521423,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.6963,
      "step": 26
    },
    {
      "epoch": 0.2592,
      "grad_norm": 0.8797613382339478,
      "learning_rate": 8.4375e-06,
      "loss": 0.6886,
      "step": 27
    },
    {
      "epoch": 0.2688,
      "grad_norm": 0.7748528718948364,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.6746,
      "step": 28
    },
    {
      "epoch": 0.2784,
      "grad_norm": 0.9056878089904785,
      "learning_rate": 9.0625e-06,
      "loss": 0.6852,
      "step": 29
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.7696974277496338,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.6524,
      "step": 30
    },
    {
      "epoch": 0.2976,
      "grad_norm": 0.7831515073776245,
      "learning_rate": 9.6875e-06,
      "loss": 0.6993,
      "step": 31
    },
    {
      "epoch": 0.3072,
      "grad_norm": 0.8536770343780518,
      "learning_rate": 1e-05,
      "loss": 0.6839,
      "step": 32
    },
    {
      "epoch": 0.3168,
      "grad_norm": 0.734736442565918,
      "learning_rate": 9.999685283773504e-06,
      "loss": 0.6732,
      "step": 33
    },
    {
      "epoch": 0.3264,
      "grad_norm": 0.7764032483100891,
      "learning_rate": 9.998741174712534e-06,
      "loss": 0.6645,
      "step": 34
    },
    {
      "epoch": 0.336,
      "grad_norm": 0.6298977732658386,
      "learning_rate": 9.997167791667668e-06,
      "loss": 0.6251,
      "step": 35
    },
    {
      "epoch": 0.3456,
      "grad_norm": 0.6373680830001831,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.6305,
      "step": 36
    },
    {
      "epoch": 0.3552,
      "grad_norm": 0.7237633466720581,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.6501,
      "step": 37
    },
    {
      "epoch": 0.3648,
      "grad_norm": 0.7214115262031555,
      "learning_rate": 9.98867437523228e-06,
      "loss": 0.6473,
      "step": 38
    },
    {
      "epoch": 0.3744,
      "grad_norm": 0.5279441475868225,
      "learning_rate": 9.984586668665641e-06,
      "loss": 0.6267,
      "step": 39
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.5429782271385193,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.6353,
      "step": 40
    },
    {
      "epoch": 0.3936,
      "grad_norm": 0.6703981161117554,
      "learning_rate": 9.974529372743762e-06,
      "loss": 0.6697,
      "step": 41
    },
    {
      "epoch": 0.4032,
      "grad_norm": 0.576714813709259,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.6229,
      "step": 42
    },
    {
      "epoch": 0.4128,
      "grad_norm": 0.539608895778656,
      "learning_rate": 9.961967251474823e-06,
      "loss": 0.6199,
      "step": 43
    },
    {
      "epoch": 0.4224,
      "grad_norm": 0.5225086212158203,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.6642,
      "step": 44
    },
    {
      "epoch": 0.432,
      "grad_norm": 0.5761452913284302,
      "learning_rate": 9.946906630265184e-06,
      "loss": 0.6415,
      "step": 45
    },
    {
      "epoch": 0.4416,
      "grad_norm": 0.5128948092460632,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.6498,
      "step": 46
    },
    {
      "epoch": 0.4512,
      "grad_norm": 0.4713912606239319,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.6146,
      "step": 47
    },
    {
      "epoch": 0.4608,
      "grad_norm": 0.5027300715446472,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.6323,
      "step": 48
    },
    {
      "epoch": 0.4704,
      "grad_norm": 0.4705498516559601,
      "learning_rate": 9.909321476180594e-06,
      "loss": 0.6343,
      "step": 49
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.45382463932037354,
      "learning_rate": 9.898376992116179e-06,
      "loss": 0.6362,
      "step": 50
    },
    {
      "epoch": 0.4896,
      "grad_norm": 0.5028925538063049,
      "learning_rate": 9.886815868562596e-06,
      "loss": 0.6336,
      "step": 51
    },
    {
      "epoch": 0.4992,
      "grad_norm": 0.48574110865592957,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.6389,
      "step": 52
    },
    {
      "epoch": 0.5088,
      "grad_norm": 0.5203267931938171,
      "learning_rate": 9.861849601988384e-06,
      "loss": 0.6277,
      "step": 53
    },
    {
      "epoch": 0.5184,
      "grad_norm": 0.5508487224578857,
      "learning_rate": 9.848447601883436e-06,
      "loss": 0.6166,
      "step": 54
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.46763473749160767,
      "learning_rate": 9.834435247725032e-06,
      "loss": 0.6193,
      "step": 55
    },
    {
      "epoch": 0.5376,
      "grad_norm": 0.524610698223114,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.6277,
      "step": 56
    },
    {
      "epoch": 0.5472,
      "grad_norm": 0.50461345911026,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.5966,
      "step": 57
    },
    {
      "epoch": 0.5568,
      "grad_norm": 0.499636173248291,
      "learning_rate": 9.788754083424654e-06,
      "loss": 0.6435,
      "step": 58
    },
    {
      "epoch": 0.5664,
      "grad_norm": 0.4767281711101532,
      "learning_rate": 9.772318717677905e-06,
      "loss": 0.653,
      "step": 59
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.5102624297142029,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.621,
      "step": 60
    },
    {
      "epoch": 0.5856,
      "grad_norm": 0.44457393884658813,
      "learning_rate": 9.737647819437645e-06,
      "loss": 0.6077,
      "step": 61
    },
    {
      "epoch": 0.5952,
      "grad_norm": 0.48511043190956116,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.6279,
      "step": 62
    },
    {
      "epoch": 0.6048,
      "grad_norm": 0.5544952154159546,
      "learning_rate": 9.700591372846096e-06,
      "loss": 0.6158,
      "step": 63
    },
    {
      "epoch": 0.6144,
      "grad_norm": 0.48609480261802673,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.5928,
      "step": 64
    },
    {
      "epoch": 0.624,
      "grad_norm": 0.4879539906978607,
      "learning_rate": 9.661168036940071e-06,
      "loss": 0.6023,
      "step": 65
    },
    {
      "epoch": 0.6336,
      "grad_norm": 0.5057247281074524,
      "learning_rate": 9.640574942595195e-06,
      "loss": 0.6342,
      "step": 66
    },
    {
      "epoch": 0.6432,
      "grad_norm": 0.4872565269470215,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.5781,
      "step": 67
    },
    {
      "epoch": 0.6528,
      "grad_norm": 0.519126296043396,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.5852,
      "step": 68
    },
    {
      "epoch": 0.6624,
      "grad_norm": 0.43009886145591736,
      "learning_rate": 9.5753012823366e-06,
      "loss": 0.5965,
      "step": 69
    },
    {
      "epoch": 0.672,
      "grad_norm": 0.640880286693573,
      "learning_rate": 9.552387733294081e-06,
      "loss": 0.5982,
      "step": 70
    },
    {
      "epoch": 0.6816,
      "grad_norm": 0.4887712001800537,
      "learning_rate": 9.528901100135971e-06,
      "loss": 0.6152,
      "step": 71
    },
    {
      "epoch": 0.6912,
      "grad_norm": 0.5316668152809143,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.5856,
      "step": 72
    },
    {
      "epoch": 0.7008,
      "grad_norm": 0.4966796338558197,
      "learning_rate": 9.480220479843627e-06,
      "loss": 0.6151,
      "step": 73
    },
    {
      "epoch": 0.7104,
      "grad_norm": 0.5114216804504395,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.6251,
      "step": 74
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.5646020770072937,
      "learning_rate": 9.4292839336179e-06,
      "loss": 0.6015,
      "step": 75
    },
    {
      "epoch": 0.7296,
      "grad_norm": 0.46756139397621155,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.5881,
      "step": 76
    },
    {
      "epoch": 0.7392,
      "grad_norm": 0.47471338510513306,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.5908,
      "step": 77
    },
    {
      "epoch": 0.7488,
      "grad_norm": 0.5130011439323425,
      "learning_rate": 9.348705665778479e-06,
      "loss": 0.6226,
      "step": 78
    },
    {
      "epoch": 0.7584,
      "grad_norm": 0.4613938629627228,
      "learning_rate": 9.320746778718274e-06,
      "loss": 0.6182,
      "step": 79
    },
    {
      "epoch": 0.768,
      "grad_norm": 0.5221911072731018,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.6044,
      "step": 80
    },
    {
      "epoch": 0.7776,
      "grad_norm": 0.4437088370323181,
      "learning_rate": 9.263200821770462e-06,
      "loss": 0.6141,
      "step": 81
    },
    {
      "epoch": 0.7872,
      "grad_norm": 0.5470189452171326,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.6298,
      "step": 82
    },
    {
      "epoch": 0.7968,
      "grad_norm": 0.4281877279281616,
      "learning_rate": 9.203508214822652e-06,
      "loss": 0.5925,
      "step": 83
    },
    {
      "epoch": 0.8064,
      "grad_norm": 0.45089638233184814,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.5883,
      "step": 84
    },
    {
      "epoch": 0.816,
      "grad_norm": 0.5195220112800598,
      "learning_rate": 9.141699014900084e-06,
      "loss": 0.6336,
      "step": 85
    },
    {
      "epoch": 0.8256,
      "grad_norm": 0.5485154390335083,
      "learning_rate": 9.110010377239552e-06,
      "loss": 0.6144,
      "step": 86
    },
    {
      "epoch": 0.8352,
      "grad_norm": 0.5088512897491455,
      "learning_rate": 9.077804344796302e-06,
      "loss": 0.5838,
      "step": 87
    },
    {
      "epoch": 0.8448,
      "grad_norm": 0.6507291197776794,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.6099,
      "step": 88
    },
    {
      "epoch": 0.8544,
      "grad_norm": 0.5225642323493958,
      "learning_rate": 9.011856377401891e-06,
      "loss": 0.6068,
      "step": 89
    },
    {
      "epoch": 0.864,
      "grad_norm": 0.4308846592903137,
      "learning_rate": 8.978122744408905e-06,
      "loss": 0.5969,
      "step": 90
    },
    {
      "epoch": 0.8736,
      "grad_norm": 0.6871822476387024,
      "learning_rate": 8.943888319504456e-06,
      "loss": 0.6134,
      "step": 91
    },
    {
      "epoch": 0.8832,
      "grad_norm": 0.435263067483902,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.6013,
      "step": 92
    },
    {
      "epoch": 0.8928,
      "grad_norm": 0.4600350856781006,
      "learning_rate": 8.873934395068006e-06,
      "loss": 0.5696,
      "step": 93
    },
    {
      "epoch": 0.9024,
      "grad_norm": 0.492987722158432,
      "learning_rate": 8.838223701790057e-06,
      "loss": 0.5992,
      "step": 94
    },
    {
      "epoch": 0.912,
      "grad_norm": 0.4614384174346924,
      "learning_rate": 8.802029828000157e-06,
      "loss": 0.5747,
      "step": 95
    },
    {
      "epoch": 0.9216,
      "grad_norm": 0.487991601228714,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.5639,
      "step": 96
    },
    {
      "epoch": 0.9312,
      "grad_norm": 0.45146673917770386,
      "learning_rate": 8.728210824415829e-06,
      "loss": 0.5801,
      "step": 97
    },
    {
      "epoch": 0.9408,
      "grad_norm": 0.5142231583595276,
      "learning_rate": 8.690594987436705e-06,
      "loss": 0.6,
      "step": 98
    },
    {
      "epoch": 0.9504,
      "grad_norm": 0.3947697579860687,
      "learning_rate": 8.652514554406388e-06,
      "loss": 0.5812,
      "step": 99
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.4276128113269806,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.5749,
      "step": 100
    },
    {
      "epoch": 0.9696,
      "grad_norm": 0.5193365216255188,
      "learning_rate": 8.574979133323378e-06,
      "loss": 0.583,
      "step": 101
    },
    {
      "epoch": 0.9792,
      "grad_norm": 0.4325678050518036,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.6007,
      "step": 102
    },
    {
      "epoch": 0.9888,
      "grad_norm": 0.4741419851779938,
      "learning_rate": 8.495643602586287e-06,
      "loss": 0.5838,
      "step": 103
    },
    {
      "epoch": 0.9984,
      "grad_norm": 0.43793222308158875,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.6134,
      "step": 104
    },
    {
      "epoch": 1.008,
      "grad_norm": 0.9734936952590942,
      "learning_rate": 8.414547910024035e-06,
      "loss": 1.0593,
      "step": 105
    },
    {
      "epoch": 1.0176,
      "grad_norm": 0.44788506627082825,
      "learning_rate": 8.373352729660373e-06,
      "loss": 0.5288,
      "step": 106
    },
    {
      "epoch": 1.0272,
      "grad_norm": 0.4729693531990051,
      "learning_rate": 8.331732889760021e-06,
      "loss": 0.5346,
      "step": 107
    },
    {
      "epoch": 1.0368,
      "grad_norm": 0.5325874090194702,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.6252,
      "step": 108
    },
    {
      "epoch": 1.0464,
      "grad_norm": 0.5393266081809998,
      "learning_rate": 8.247240241650918e-06,
      "loss": 0.5493,
      "step": 109
    },
    {
      "epoch": 1.056,
      "grad_norm": 0.5109856128692627,
      "learning_rate": 8.204378069925121e-06,
      "loss": 0.5092,
      "step": 110
    },
    {
      "epoch": 1.0656,
      "grad_norm": 0.4956968426704407,
      "learning_rate": 8.16111251028955e-06,
      "loss": 0.5695,
      "step": 111
    },
    {
      "epoch": 1.0752,
      "grad_norm": 0.4617892801761627,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.5158,
      "step": 112
    },
    {
      "epoch": 1.0848,
      "grad_norm": 0.470152884721756,
      "learning_rate": 8.073393063582386e-06,
      "loss": 0.56,
      "step": 113
    },
    {
      "epoch": 1.0944,
      "grad_norm": 0.40656527876853943,
      "learning_rate": 8.0289502192041e-06,
      "loss": 0.516,
      "step": 114
    },
    {
      "epoch": 1.104,
      "grad_norm": 0.5289188027381897,
      "learning_rate": 7.984126070912519e-06,
      "loss": 0.6162,
      "step": 115
    },
    {
      "epoch": 1.1136,
      "grad_norm": 0.4350636601448059,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.5425,
      "step": 116
    },
    {
      "epoch": 1.1232,
      "grad_norm": 0.4797382056713104,
      "learning_rate": 7.89335648089903e-06,
      "loss": 0.5956,
      "step": 117
    },
    {
      "epoch": 1.1328,
      "grad_norm": 0.44591331481933594,
      "learning_rate": 7.84742246584226e-06,
      "loss": 0.5188,
      "step": 118
    },
    {
      "epoch": 1.1424,
      "grad_norm": 0.3946237564086914,
      "learning_rate": 7.801129998764014e-06,
      "loss": 0.5597,
      "step": 119
    },
    {
      "epoch": 1.152,
      "grad_norm": 0.4344748258590698,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.5617,
      "step": 120
    },
    {
      "epoch": 1.1616,
      "grad_norm": 0.39545729756355286,
      "learning_rate": 7.70749306331863e-06,
      "loss": 0.5186,
      "step": 121
    },
    {
      "epoch": 1.1712,
      "grad_norm": 0.4443671405315399,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.5828,
      "step": 122
    },
    {
      "epoch": 1.1808,
      "grad_norm": 0.3955932855606079,
      "learning_rate": 7.612492823579744e-06,
      "loss": 0.5833,
      "step": 123
    },
    {
      "epoch": 1.1904,
      "grad_norm": 0.3952138423919678,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.5227,
      "step": 124
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.3857017457485199,
      "learning_rate": 7.516177115029002e-06,
      "loss": 0.5335,
      "step": 125
    },
    {
      "epoch": 1.2096,
      "grad_norm": 0.3564269244670868,
      "learning_rate": 7.467541090321735e-06,
      "loss": 0.5236,
      "step": 126
    },
    {
      "epoch": 1.2192,
      "grad_norm": 0.39288026094436646,
      "learning_rate": 7.4185944355261996e-06,
      "loss": 0.5508,
      "step": 127
    },
    {
      "epoch": 1.2288000000000001,
      "grad_norm": 0.4181830883026123,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.5738,
      "step": 128
    },
    {
      "epoch": 1.2384,
      "grad_norm": 0.38415423035621643,
      "learning_rate": 7.319793920889171e-06,
      "loss": 0.5319,
      "step": 129
    },
    {
      "epoch": 1.248,
      "grad_norm": 0.39088174700737,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.5163,
      "step": 130
    },
    {
      "epoch": 1.2576,
      "grad_norm": 0.4108724892139435,
      "learning_rate": 7.219825320152411e-06,
      "loss": 0.5885,
      "step": 131
    },
    {
      "epoch": 1.2671999999999999,
      "grad_norm": 0.3562670350074768,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.5172,
      "step": 132
    },
    {
      "epoch": 1.2768,
      "grad_norm": 0.4148976802825928,
      "learning_rate": 7.118738970516944e-06,
      "loss": 0.5517,
      "step": 133
    },
    {
      "epoch": 1.2864,
      "grad_norm": 0.4055224061012268,
      "learning_rate": 7.067792524832604e-06,
      "loss": 0.5682,
      "step": 134
    },
    {
      "epoch": 1.296,
      "grad_norm": 0.41268885135650635,
      "learning_rate": 7.016585772004026e-06,
      "loss": 0.5578,
      "step": 135
    },
    {
      "epoch": 1.3056,
      "grad_norm": 0.3958832621574402,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.5493,
      "step": 136
    },
    {
      "epoch": 1.3152,
      "grad_norm": 0.3835631012916565,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.5248,
      "step": 137
    },
    {
      "epoch": 1.3248,
      "grad_norm": 0.4725865125656128,
      "learning_rate": 6.8614682920097265e-06,
      "loss": 0.5647,
      "step": 138
    },
    {
      "epoch": 1.3344,
      "grad_norm": 0.41323113441467285,
      "learning_rate": 6.809285088483361e-06,
      "loss": 0.5551,
      "step": 139
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 0.41332513093948364,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.5003,
      "step": 140
    },
    {
      "epoch": 1.3536000000000001,
      "grad_norm": 0.4922088384628296,
      "learning_rate": 6.704241985612625e-06,
      "loss": 0.5717,
      "step": 141
    },
    {
      "epoch": 1.3632,
      "grad_norm": 0.3990492820739746,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.5622,
      "step": 142
    },
    {
      "epoch": 1.3728,
      "grad_norm": 0.4874456822872162,
      "learning_rate": 6.598340745578908e-06,
      "loss": 0.5472,
      "step": 143
    },
    {
      "epoch": 1.3824,
      "grad_norm": 0.44028475880622864,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.5737,
      "step": 144
    },
    {
      "epoch": 1.392,
      "grad_norm": 0.4180607497692108,
      "learning_rate": 6.491634692845781e-06,
      "loss": 0.5355,
      "step": 145
    },
    {
      "epoch": 1.4016,
      "grad_norm": 0.36899641156196594,
      "learning_rate": 6.437996637160086e-06,
      "loss": 0.4625,
      "step": 146
    },
    {
      "epoch": 1.4112,
      "grad_norm": 0.5210156440734863,
      "learning_rate": 6.384177557124247e-06,
      "loss": 0.6048,
      "step": 147
    },
    {
      "epoch": 1.4208,
      "grad_norm": 0.46662116050720215,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.5512,
      "step": 148
    },
    {
      "epoch": 1.4304000000000001,
      "grad_norm": 0.447454035282135,
      "learning_rate": 6.276023446318214e-06,
      "loss": 0.598,
      "step": 149
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.5043928027153015,
      "learning_rate": 6.2217020306894705e-06,
      "loss": 0.5772,
      "step": 150
    },
    {
      "epoch": 1.4496,
      "grad_norm": 0.39012083411216736,
      "learning_rate": 6.1672268192795285e-06,
      "loss": 0.5113,
      "step": 151
    },
    {
      "epoch": 1.4592,
      "grad_norm": 0.4258650243282318,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.5898,
      "step": 152
    },
    {
      "epoch": 1.4687999999999999,
      "grad_norm": 0.4456270635128021,
      "learning_rate": 6.057842458386315e-06,
      "loss": 0.5674,
      "step": 153
    },
    {
      "epoch": 1.4784,
      "grad_norm": 0.3997485041618347,
      "learning_rate": 6.002947078916365e-06,
      "loss": 0.5014,
      "step": 154
    },
    {
      "epoch": 1.488,
      "grad_norm": 0.4093655049800873,
      "learning_rate": 5.947925441958393e-06,
      "loss": 0.5621,
      "step": 155
    },
    {
      "epoch": 1.4976,
      "grad_norm": 0.3858829736709595,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.5435,
      "step": 156
    },
    {
      "epoch": 1.5072,
      "grad_norm": 0.44281816482543945,
      "learning_rate": 5.837531116523683e-06,
      "loss": 0.561,
      "step": 157
    },
    {
      "epoch": 1.5168,
      "grad_norm": 0.4816691279411316,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.5567,
      "step": 158
    },
    {
      "epoch": 1.5264,
      "grad_norm": 0.47301939129829407,
      "learning_rate": 5.726715068949564e-06,
      "loss": 0.5652,
      "step": 159
    },
    {
      "epoch": 1.536,
      "grad_norm": 0.3875133693218231,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.5504,
      "step": 160
    },
    {
      "epoch": 1.5455999999999999,
      "grad_norm": 0.46024438738822937,
      "learning_rate": 5.615533098453215e-06,
      "loss": 0.5634,
      "step": 161
    },
    {
      "epoch": 1.5552000000000001,
      "grad_norm": 0.42805129289627075,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.5231,
      "step": 162
    },
    {
      "epoch": 1.5648,
      "grad_norm": 0.3890428841114044,
      "learning_rate": 5.504041188505022e-06,
      "loss": 0.5603,
      "step": 163
    },
    {
      "epoch": 1.5744,
      "grad_norm": 0.4287274181842804,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.5752,
      "step": 164
    },
    {
      "epoch": 1.584,
      "grad_norm": 0.39753690361976624,
      "learning_rate": 5.392295478639226e-06,
      "loss": 0.505,
      "step": 165
    },
    {
      "epoch": 1.5936,
      "grad_norm": 0.39962896704673767,
      "learning_rate": 5.336345028060199e-06,
      "loss": 0.5811,
      "step": 166
    },
    {
      "epoch": 1.6032,
      "grad_norm": 0.35847893357276917,
      "learning_rate": 5.2803522361859596e-06,
      "loss": 0.4964,
      "step": 167
    },
    {
      "epoch": 1.6128,
      "grad_norm": 0.46846652030944824,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.5461,
      "step": 168
    },
    {
      "epoch": 1.6223999999999998,
      "grad_norm": 0.37737685441970825,
      "learning_rate": 5.168267827938971e-06,
      "loss": 0.499,
      "step": 169
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 0.3535378575325012,
      "learning_rate": 5.112190321479026e-06,
      "loss": 0.5219,
      "step": 170
    },
    {
      "epoch": 1.6416,
      "grad_norm": 0.35122019052505493,
      "learning_rate": 5.05609869177323e-06,
      "loss": 0.5257,
      "step": 171
    },
    {
      "epoch": 1.6512,
      "grad_norm": 0.3954353630542755,
      "learning_rate": 5e-06,
      "loss": 0.5665,
      "step": 172
    },
    {
      "epoch": 1.6608,
      "grad_norm": 0.3725343346595764,
      "learning_rate": 4.943901308226771e-06,
      "loss": 0.5784,
      "step": 173
    },
    {
      "epoch": 1.6703999999999999,
      "grad_norm": 0.3784255087375641,
      "learning_rate": 4.887809678520976e-06,
      "loss": 0.5383,
      "step": 174
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.4283500015735626,
      "learning_rate": 4.831732172061032e-06,
      "loss": 0.5505,
      "step": 175
    },
    {
      "epoch": 1.6896,
      "grad_norm": 0.375383198261261,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.5287,
      "step": 176
    },
    {
      "epoch": 1.6992,
      "grad_norm": 0.36719298362731934,
      "learning_rate": 4.719647763814041e-06,
      "loss": 0.5617,
      "step": 177
    },
    {
      "epoch": 1.7088,
      "grad_norm": 0.32004213333129883,
      "learning_rate": 4.663654971939802e-06,
      "loss": 0.543,
      "step": 178
    },
    {
      "epoch": 1.7184,
      "grad_norm": 0.3863466680049896,
      "learning_rate": 4.6077045213607765e-06,
      "loss": 0.5558,
      "step": 179
    },
    {
      "epoch": 1.728,
      "grad_norm": 0.3306654989719391,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.52,
      "step": 180
    },
    {
      "epoch": 1.7376,
      "grad_norm": 0.3595239222049713,
      "learning_rate": 4.4959588114949785e-06,
      "loss": 0.5619,
      "step": 181
    },
    {
      "epoch": 1.7471999999999999,
      "grad_norm": 0.37461021542549133,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.5339,
      "step": 182
    },
    {
      "epoch": 1.7568000000000001,
      "grad_norm": 0.3879006803035736,
      "learning_rate": 4.384466901546786e-06,
      "loss": 0.5487,
      "step": 183
    },
    {
      "epoch": 1.7664,
      "grad_norm": 0.38633430004119873,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.5803,
      "step": 184
    },
    {
      "epoch": 1.776,
      "grad_norm": 0.3449064791202545,
      "learning_rate": 4.273284931050438e-06,
      "loss": 0.5511,
      "step": 185
    },
    {
      "epoch": 1.7856,
      "grad_norm": 0.3583766222000122,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.5569,
      "step": 186
    },
    {
      "epoch": 1.7952,
      "grad_norm": 0.36931082606315613,
      "learning_rate": 4.162468883476319e-06,
      "loss": 0.5681,
      "step": 187
    },
    {
      "epoch": 1.8048,
      "grad_norm": 0.3926275372505188,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.5502,
      "step": 188
    },
    {
      "epoch": 1.8144,
      "grad_norm": 0.3721172511577606,
      "learning_rate": 4.052074558041608e-06,
      "loss": 0.5423,
      "step": 189
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 0.34798112511634827,
      "learning_rate": 3.997052921083637e-06,
      "loss": 0.5215,
      "step": 190
    },
    {
      "epoch": 1.8336000000000001,
      "grad_norm": 0.4220713675022125,
      "learning_rate": 3.9421575416136866e-06,
      "loss": 0.5386,
      "step": 191
    },
    {
      "epoch": 1.8432,
      "grad_norm": 0.37677109241485596,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.555,
      "step": 192
    },
    {
      "epoch": 1.8528,
      "grad_norm": 0.3716229796409607,
      "learning_rate": 3.832773180720475e-06,
      "loss": 0.5494,
      "step": 193
    },
    {
      "epoch": 1.8624,
      "grad_norm": 0.36740225553512573,
      "learning_rate": 3.778297969310529e-06,
      "loss": 0.5344,
      "step": 194
    },
    {
      "epoch": 1.8719999999999999,
      "grad_norm": 0.35373231768608093,
      "learning_rate": 3.723976553681787e-06,
      "loss": 0.5116,
      "step": 195
    },
    {
      "epoch": 1.8816000000000002,
      "grad_norm": 0.4049929678440094,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.553,
      "step": 196
    },
    {
      "epoch": 1.8912,
      "grad_norm": 0.36175477504730225,
      "learning_rate": 3.6158224428757538e-06,
      "loss": 0.5179,
      "step": 197
    },
    {
      "epoch": 1.9008,
      "grad_norm": 0.35334157943725586,
      "learning_rate": 3.562003362839914e-06,
      "loss": 0.5621,
      "step": 198
    },
    {
      "epoch": 1.9104,
      "grad_norm": 0.331590861082077,
      "learning_rate": 3.50836530715422e-06,
      "loss": 0.5135,
      "step": 199
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.33945655822753906,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.5618,
      "step": 200
    },
    {
      "epoch": 1.9296,
      "grad_norm": 0.37211182713508606,
      "learning_rate": 3.4016592544210937e-06,
      "loss": 0.5485,
      "step": 201
    },
    {
      "epoch": 1.9392,
      "grad_norm": 0.3631335496902466,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.5486,
      "step": 202
    },
    {
      "epoch": 1.9487999999999999,
      "grad_norm": 0.3692123591899872,
      "learning_rate": 3.295758014387375e-06,
      "loss": 0.539,
      "step": 203
    },
    {
      "epoch": 1.9584000000000001,
      "grad_norm": 0.3526400625705719,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.5622,
      "step": 204
    },
    {
      "epoch": 1.968,
      "grad_norm": 0.3384586572647095,
      "learning_rate": 3.1907149115166403e-06,
      "loss": 0.5608,
      "step": 205
    },
    {
      "epoch": 1.9776,
      "grad_norm": 0.3885630667209625,
      "learning_rate": 3.1385317079902743e-06,
      "loss": 0.5608,
      "step": 206
    },
    {
      "epoch": 1.9872,
      "grad_norm": 0.3339373767375946,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.5353,
      "step": 207
    },
    {
      "epoch": 1.9968,
      "grad_norm": 0.35258418321609497,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.5135,
      "step": 208
    },
    {
      "epoch": 2.0064,
      "grad_norm": 0.9603828191757202,
      "learning_rate": 2.9834142279959754e-06,
      "loss": 1.0023,
      "step": 209
    },
    {
      "epoch": 2.016,
      "grad_norm": 0.41805851459503174,
      "learning_rate": 2.932207475167398e-06,
      "loss": 0.5273,
      "step": 210
    },
    {
      "epoch": 2.0256,
      "grad_norm": 0.4096415936946869,
      "learning_rate": 2.8812610294830568e-06,
      "loss": 0.5329,
      "step": 211
    },
    {
      "epoch": 2.0352,
      "grad_norm": 0.3491603136062622,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.4646,
      "step": 212
    },
    {
      "epoch": 2.0448,
      "grad_norm": 0.37214240431785583,
      "learning_rate": 2.7801746798475905e-06,
      "loss": 0.5397,
      "step": 213
    },
    {
      "epoch": 2.0544,
      "grad_norm": 0.3790673315525055,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.5099,
      "step": 214
    },
    {
      "epoch": 2.064,
      "grad_norm": 0.36157622933387756,
      "learning_rate": 2.6802060791108304e-06,
      "loss": 0.4986,
      "step": 215
    },
    {
      "epoch": 2.0736,
      "grad_norm": 0.36601653695106506,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.4959,
      "step": 216
    },
    {
      "epoch": 2.0832,
      "grad_norm": 0.3199480473995209,
      "learning_rate": 2.5814055644738013e-06,
      "loss": 0.5041,
      "step": 217
    },
    {
      "epoch": 2.0928,
      "grad_norm": 0.38439592719078064,
      "learning_rate": 2.532458909678266e-06,
      "loss": 0.5489,
      "step": 218
    },
    {
      "epoch": 2.1024,
      "grad_norm": 0.35765281319618225,
      "learning_rate": 2.483822884971e-06,
      "loss": 0.4804,
      "step": 219
    },
    {
      "epoch": 2.112,
      "grad_norm": 0.36830228567123413,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.5071,
      "step": 220
    },
    {
      "epoch": 2.1216,
      "grad_norm": 0.3654811680316925,
      "learning_rate": 2.387507176420256e-06,
      "loss": 0.5151,
      "step": 221
    },
    {
      "epoch": 2.1312,
      "grad_norm": 0.3462638258934021,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.4932,
      "step": 222
    },
    {
      "epoch": 2.1408,
      "grad_norm": 0.3995060622692108,
      "learning_rate": 2.2925069366813718e-06,
      "loss": 0.5102,
      "step": 223
    },
    {
      "epoch": 2.1504,
      "grad_norm": 0.3391073942184448,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.4859,
      "step": 224
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.3244698941707611,
      "learning_rate": 2.1988700012359865e-06,
      "loss": 0.4726,
      "step": 225
    },
    {
      "epoch": 2.1696,
      "grad_norm": 0.37653449177742004,
      "learning_rate": 2.1525775341577404e-06,
      "loss": 0.5547,
      "step": 226
    },
    {
      "epoch": 2.1792,
      "grad_norm": 0.350153386592865,
      "learning_rate": 2.1066435191009717e-06,
      "loss": 0.4398,
      "step": 227
    },
    {
      "epoch": 2.1888,
      "grad_norm": 0.4024869501590729,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.5674,
      "step": 228
    },
    {
      "epoch": 2.1984,
      "grad_norm": 0.3475300669670105,
      "learning_rate": 2.0158739290874822e-06,
      "loss": 0.5286,
      "step": 229
    },
    {
      "epoch": 2.208,
      "grad_norm": 0.3343484401702881,
      "learning_rate": 1.971049780795901e-06,
      "loss": 0.5165,
      "step": 230
    },
    {
      "epoch": 2.2176,
      "grad_norm": 0.3347722291946411,
      "learning_rate": 1.9266069364176144e-06,
      "loss": 0.5018,
      "step": 231
    },
    {
      "epoch": 2.2272,
      "grad_norm": 0.32682865858078003,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.4595,
      "step": 232
    },
    {
      "epoch": 2.2368,
      "grad_norm": 0.39053240418434143,
      "learning_rate": 1.838887489710452e-06,
      "loss": 0.5387,
      "step": 233
    },
    {
      "epoch": 2.2464,
      "grad_norm": 0.36222565174102783,
      "learning_rate": 1.7956219300748796e-06,
      "loss": 0.4928,
      "step": 234
    },
    {
      "epoch": 2.2560000000000002,
      "grad_norm": 0.35628780722618103,
      "learning_rate": 1.7527597583490825e-06,
      "loss": 0.4951,
      "step": 235
    },
    {
      "epoch": 2.2656,
      "grad_norm": 0.32255733013153076,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.507,
      "step": 236
    },
    {
      "epoch": 2.2752,
      "grad_norm": 0.3425965905189514,
      "learning_rate": 1.6682671102399806e-06,
      "loss": 0.5368,
      "step": 237
    },
    {
      "epoch": 2.2848,
      "grad_norm": 0.3360115885734558,
      "learning_rate": 1.6266472703396286e-06,
      "loss": 0.5216,
      "step": 238
    },
    {
      "epoch": 2.2944,
      "grad_norm": 0.31881076097488403,
      "learning_rate": 1.5854520899759656e-06,
      "loss": 0.5189,
      "step": 239
    },
    {
      "epoch": 2.304,
      "grad_norm": 0.31721314787864685,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.4975,
      "step": 240
    },
    {
      "epoch": 2.3136,
      "grad_norm": 0.3104105591773987,
      "learning_rate": 1.5043563974137132e-06,
      "loss": 0.4797,
      "step": 241
    },
    {
      "epoch": 2.3232,
      "grad_norm": 0.33582985401153564,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5312,
      "step": 242
    },
    {
      "epoch": 2.3327999999999998,
      "grad_norm": 0.3309701085090637,
      "learning_rate": 1.4250208666766235e-06,
      "loss": 0.4946,
      "step": 243
    },
    {
      "epoch": 2.3424,
      "grad_norm": 0.30556604266166687,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.4906,
      "step": 244
    },
    {
      "epoch": 2.352,
      "grad_norm": 0.34476539492607117,
      "learning_rate": 1.3474854455936126e-06,
      "loss": 0.5575,
      "step": 245
    },
    {
      "epoch": 2.3616,
      "grad_norm": 0.32860761880874634,
      "learning_rate": 1.3094050125632973e-06,
      "loss": 0.4896,
      "step": 246
    },
    {
      "epoch": 2.3712,
      "grad_norm": 0.3144643008708954,
      "learning_rate": 1.2717891755841722e-06,
      "loss": 0.4995,
      "step": 247
    },
    {
      "epoch": 2.3808,
      "grad_norm": 0.34862715005874634,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.5136,
      "step": 248
    },
    {
      "epoch": 2.3904,
      "grad_norm": 0.33808404207229614,
      "learning_rate": 1.1979701719998454e-06,
      "loss": 0.5136,
      "step": 249
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.31220996379852295,
      "learning_rate": 1.1617762982099446e-06,
      "loss": 0.4955,
      "step": 250
    },
    {
      "epoch": 2.4096,
      "grad_norm": 0.3128294348716736,
      "learning_rate": 1.1260656049319957e-06,
      "loss": 0.4899,
      "step": 251
    },
    {
      "epoch": 2.4192,
      "grad_norm": 0.32542213797569275,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5572,
      "step": 252
    },
    {
      "epoch": 2.4288,
      "grad_norm": 0.32575318217277527,
      "learning_rate": 1.0561116804955451e-06,
      "loss": 0.5349,
      "step": 253
    },
    {
      "epoch": 2.4384,
      "grad_norm": 0.31433993577957153,
      "learning_rate": 1.0218772555910955e-06,
      "loss": 0.4838,
      "step": 254
    },
    {
      "epoch": 2.448,
      "grad_norm": 0.33171674609184265,
      "learning_rate": 9.881436225981107e-07,
      "loss": 0.5294,
      "step": 255
    },
    {
      "epoch": 2.4576000000000002,
      "grad_norm": 0.30407729744911194,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.4757,
      "step": 256
    },
    {
      "epoch": 2.4672,
      "grad_norm": 0.36378738284111023,
      "learning_rate": 9.221956552036992e-07,
      "loss": 0.5431,
      "step": 257
    },
    {
      "epoch": 2.4768,
      "grad_norm": 0.30705177783966064,
      "learning_rate": 8.899896227604509e-07,
      "loss": 0.4695,
      "step": 258
    },
    {
      "epoch": 2.4864,
      "grad_norm": 0.3292585015296936,
      "learning_rate": 8.58300985099918e-07,
      "loss": 0.5697,
      "step": 259
    },
    {
      "epoch": 2.496,
      "grad_norm": 0.28501492738723755,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.4864,
      "step": 260
    },
    {
      "epoch": 2.5056000000000003,
      "grad_norm": 0.3187987208366394,
      "learning_rate": 7.964917851773496e-07,
      "loss": 0.5343,
      "step": 261
    },
    {
      "epoch": 2.5152,
      "grad_norm": 0.3403649628162384,
      "learning_rate": 7.663790038585794e-07,
      "loss": 0.5249,
      "step": 262
    },
    {
      "epoch": 2.5248,
      "grad_norm": 0.30438101291656494,
      "learning_rate": 7.367991782295392e-07,
      "loss": 0.4767,
      "step": 263
    },
    {
      "epoch": 2.5343999999999998,
      "grad_norm": 0.3251408338546753,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.5451,
      "step": 264
    },
    {
      "epoch": 2.544,
      "grad_norm": 0.3420161008834839,
      "learning_rate": 6.792532212817271e-07,
      "loss": 0.5298,
      "step": 265
    },
    {
      "epoch": 2.5536,
      "grad_norm": 0.3219169080257416,
      "learning_rate": 6.512943342215234e-07,
      "loss": 0.5286,
      "step": 266
    },
    {
      "epoch": 2.5632,
      "grad_norm": 0.3166907727718353,
      "learning_rate": 6.238828904562316e-07,
      "loss": 0.5298,
      "step": 267
    },
    {
      "epoch": 2.5728,
      "grad_norm": 0.3281399607658386,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.5225,
      "step": 268
    },
    {
      "epoch": 2.5824,
      "grad_norm": 0.29590266942977905,
      "learning_rate": 5.707160663821009e-07,
      "loss": 0.4878,
      "step": 269
    },
    {
      "epoch": 2.592,
      "grad_norm": 0.2966673672199249,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.5033,
      "step": 270
    },
    {
      "epoch": 2.6016,
      "grad_norm": 0.29974713921546936,
      "learning_rate": 5.197795201563744e-07,
      "loss": 0.4901,
      "step": 271
    },
    {
      "epoch": 2.6112,
      "grad_norm": 0.3066723942756653,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.4788,
      "step": 272
    },
    {
      "epoch": 2.6208,
      "grad_norm": 0.3056885004043579,
      "learning_rate": 4.710988998640298e-07,
      "loss": 0.5197,
      "step": 273
    },
    {
      "epoch": 2.6304,
      "grad_norm": 0.323575884103775,
      "learning_rate": 4.4761226670592074e-07,
      "loss": 0.5127,
      "step": 274
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.3323626220226288,
      "learning_rate": 4.2469871766340096e-07,
      "loss": 0.4795,
      "step": 275
    },
    {
      "epoch": 2.6496,
      "grad_norm": 0.3106381595134735,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.5181,
      "step": 276
    },
    {
      "epoch": 2.6592000000000002,
      "grad_norm": 0.32390648126602173,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 0.5662,
      "step": 277
    },
    {
      "epoch": 2.6688,
      "grad_norm": 0.27066949009895325,
      "learning_rate": 3.5942505740480583e-07,
      "loss": 0.4112,
      "step": 278
    },
    {
      "epoch": 2.6784,
      "grad_norm": 0.31843921542167664,
      "learning_rate": 3.3883196305992906e-07,
      "loss": 0.5574,
      "step": 279
    },
    {
      "epoch": 2.6879999999999997,
      "grad_norm": 0.30422160029411316,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.5385,
      "step": 280
    },
    {
      "epoch": 2.6976,
      "grad_norm": 0.2964281439781189,
      "learning_rate": 2.9940862715390483e-07,
      "loss": 0.4983,
      "step": 281
    },
    {
      "epoch": 2.7072000000000003,
      "grad_norm": 0.31731534004211426,
      "learning_rate": 2.8058334845816214e-07,
      "loss": 0.5389,
      "step": 282
    },
    {
      "epoch": 2.7168,
      "grad_norm": 0.29196590185165405,
      "learning_rate": 2.6235218056235633e-07,
      "loss": 0.496,
      "step": 283
    },
    {
      "epoch": 2.7264,
      "grad_norm": 0.3442181646823883,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.5581,
      "step": 284
    },
    {
      "epoch": 2.7359999999999998,
      "grad_norm": 0.3149075508117676,
      "learning_rate": 2.276812823220964e-07,
      "loss": 0.512,
      "step": 285
    },
    {
      "epoch": 2.7456,
      "grad_norm": 0.3001596927642822,
      "learning_rate": 2.1124591657534776e-07,
      "loss": 0.4941,
      "step": 286
    },
    {
      "epoch": 2.7552,
      "grad_norm": 0.28749582171440125,
      "learning_rate": 1.9541339027450256e-07,
      "loss": 0.4924,
      "step": 287
    },
    {
      "epoch": 2.7648,
      "grad_norm": 0.3162883520126343,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.4948,
      "step": 288
    },
    {
      "epoch": 2.7744,
      "grad_norm": 0.2985377311706543,
      "learning_rate": 1.6556475227496816e-07,
      "loss": 0.5109,
      "step": 289
    },
    {
      "epoch": 2.784,
      "grad_norm": 0.28927546739578247,
      "learning_rate": 1.5155239811656562e-07,
      "loss": 0.4975,
      "step": 290
    },
    {
      "epoch": 2.7936,
      "grad_norm": 0.30359500646591187,
      "learning_rate": 1.3815039801161723e-07,
      "loss": 0.4968,
      "step": 291
    },
    {
      "epoch": 2.8032,
      "grad_norm": 0.33133822679519653,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.5401,
      "step": 292
    },
    {
      "epoch": 2.8128,
      "grad_norm": 0.30433112382888794,
      "learning_rate": 1.1318413143740436e-07,
      "loss": 0.5028,
      "step": 293
    },
    {
      "epoch": 2.8224,
      "grad_norm": 0.3015415072441101,
      "learning_rate": 1.0162300788382263e-07,
      "loss": 0.4919,
      "step": 294
    },
    {
      "epoch": 2.832,
      "grad_norm": 0.2975231111049652,
      "learning_rate": 9.0678523819408e-08,
      "loss": 0.4839,
      "step": 295
    },
    {
      "epoch": 2.8416,
      "grad_norm": 0.32519227266311646,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.5256,
      "step": 296
    },
    {
      "epoch": 2.8512,
      "grad_norm": 0.2897155284881592,
      "learning_rate": 7.064490740882057e-08,
      "loss": 0.4884,
      "step": 297
    },
    {
      "epoch": 2.8608000000000002,
      "grad_norm": 0.3230243921279907,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.5589,
      "step": 298
    },
    {
      "epoch": 2.8704,
      "grad_norm": 0.30294710397720337,
      "learning_rate": 5.3093369734816824e-08,
      "loss": 0.4802,
      "step": 299
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.2914966642856598,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.497,
      "step": 300
    },
    {
      "epoch": 2.8895999999999997,
      "grad_norm": 0.27703312039375305,
      "learning_rate": 3.8032748525179684e-08,
      "loss": 0.4772,
      "step": 301
    },
    {
      "epoch": 2.8992,
      "grad_norm": 0.29439136385917664,
      "learning_rate": 3.143895053378698e-08,
      "loss": 0.5001,
      "step": 302
    },
    {
      "epoch": 2.9088000000000003,
      "grad_norm": 0.31278231739997864,
      "learning_rate": 2.547062725623828e-08,
      "loss": 0.523,
      "step": 303
    },
    {
      "epoch": 2.9184,
      "grad_norm": 0.2997344136238098,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.4714,
      "step": 304
    },
    {
      "epoch": 2.928,
      "grad_norm": 0.3155505657196045,
      "learning_rate": 1.541333133436018e-08,
      "loss": 0.5405,
      "step": 305
    },
    {
      "epoch": 2.9375999999999998,
      "grad_norm": 0.33159124851226807,
      "learning_rate": 1.132562476771959e-08,
      "loss": 0.5023,
      "step": 306
    },
    {
      "epoch": 2.9472,
      "grad_norm": 0.2960556745529175,
      "learning_rate": 7.865924910916977e-09,
      "loss": 0.4542,
      "step": 307
    },
    {
      "epoch": 2.9568,
      "grad_norm": 0.2954247295856476,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.4865,
      "step": 308
    },
    {
      "epoch": 2.9664,
      "grad_norm": 0.2954687178134918,
      "learning_rate": 2.8322083323334417e-09,
      "loss": 0.5101,
      "step": 309
    },
    {
      "epoch": 2.976,
      "grad_norm": 0.2963257133960724,
      "learning_rate": 1.2588252874673469e-09,
      "loss": 0.5357,
      "step": 310
    },
    {
      "epoch": 2.9856,
      "grad_norm": 0.2885425388813019,
      "learning_rate": 3.147162264971471e-10,
      "loss": 0.4818,
      "step": 311
    },
    {
      "epoch": 2.9952,
      "grad_norm": 0.29874712228775024,
      "learning_rate": 0.0,
      "loss": 0.5586,
      "step": 312
    },
    {
      "epoch": 2.9952,
      "step": 312,
      "total_flos": 7.00883763227263e+17,
      "train_loss": 0.5766575982173284,
      "train_runtime": 33757.7333,
      "train_samples_per_second": 0.889,
      "train_steps_per_second": 0.009
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 312,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.00883763227263e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}