{
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.8152173913043478, | |
| "eval_steps": 500, | |
| "global_step": 3000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0002717391304347826, | |
      "grad_norm": null,
| "learning_rate": 0.0, | |
| "loss": 3.8119, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0005434782608695652, | |
      "grad_norm": null,
| "learning_rate": 0.0, | |
| "loss": 5.6312, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.0008152173913043478, | |
| "grad_norm": 3.606865644454956, | |
| "learning_rate": 1.0000000000000001e-07, | |
| "loss": 3.6085, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.0010869565217391304, | |
| "grad_norm": 5.723954677581787, | |
| "learning_rate": 2.0000000000000002e-07, | |
| "loss": 4.5497, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.001358695652173913, | |
| "grad_norm": 8.396666526794434, | |
| "learning_rate": 3.0000000000000004e-07, | |
| "loss": 5.6655, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.0016304347826086956, | |
| "grad_norm": 6.602017402648926, | |
| "learning_rate": 4.0000000000000003e-07, | |
| "loss": 4.9113, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.0019021739130434783, | |
| "grad_norm": 2.8019049167633057, | |
| "learning_rate": 5e-07, | |
| "loss": 3.0843, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.002173913043478261, | |
| "grad_norm": 5.959436893463135, | |
| "learning_rate": 6.000000000000001e-07, | |
| "loss": 4.4235, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.0024456521739130437, | |
| "grad_norm": 5.623807430267334, | |
| "learning_rate": 7.000000000000001e-07, | |
| "loss": 4.5667, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.002717391304347826, | |
| "grad_norm": 5.9944844245910645, | |
| "learning_rate": 8.000000000000001e-07, | |
| "loss": 4.6548, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.002989130434782609, | |
| "grad_norm": 7.308701038360596, | |
| "learning_rate": 9e-07, | |
| "loss": 5.1422, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.003260869565217391, | |
| "grad_norm": 5.805826187133789, | |
| "learning_rate": 1e-06, | |
| "loss": 4.3458, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.003532608695652174, | |
| "grad_norm": 5.096138000488281, | |
| "learning_rate": 1.1e-06, | |
| "loss": 4.0761, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.0038043478260869567, | |
| "grad_norm": 4.222947120666504, | |
| "learning_rate": 1.2000000000000002e-06, | |
| "loss": 3.7963, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.004076086956521739, | |
| "grad_norm": 8.304177284240723, | |
| "learning_rate": 1.3e-06, | |
| "loss": 5.4697, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.004347826086956522, | |
| "grad_norm": 6.131305694580078, | |
| "learning_rate": 1.4000000000000001e-06, | |
| "loss": 4.5078, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.004619565217391305, | |
| "grad_norm": 5.1205315589904785, | |
| "learning_rate": 1.5e-06, | |
| "loss": 4.1935, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.004891304347826087, | |
| "grad_norm": 3.348874568939209, | |
| "learning_rate": 1.6000000000000001e-06, | |
| "loss": 3.165, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.005163043478260869, | |
| "grad_norm": 11.620071411132812, | |
| "learning_rate": 1.7e-06, | |
| "loss": 6.6692, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.005434782608695652, | |
| "grad_norm": 5.148247241973877, | |
| "learning_rate": 1.8e-06, | |
| "loss": 3.7967, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.005706521739130435, | |
| "grad_norm": 6.738089561462402, | |
| "learning_rate": 1.9000000000000002e-06, | |
| "loss": 4.6053, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.005978260869565218, | |
| "grad_norm": 7.383264541625977, | |
| "learning_rate": 2e-06, | |
| "loss": 4.7862, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.00625, | |
| "grad_norm": 9.380542755126953, | |
| "learning_rate": 2.1000000000000002e-06, | |
| "loss": 5.5961, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.006521739130434782, | |
| "grad_norm": 5.481351852416992, | |
| "learning_rate": 2.2e-06, | |
| "loss": 3.9489, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.006793478260869565, | |
| "grad_norm": 3.769026756286621, | |
| "learning_rate": 2.3e-06, | |
| "loss": 3.3576, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.007065217391304348, | |
| "grad_norm": 6.088125705718994, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 4.3102, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.007336956521739131, | |
| "grad_norm": 3.7228612899780273, | |
| "learning_rate": 2.4999999999999998e-06, | |
| "loss": 3.2898, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.007608695652173913, | |
| "grad_norm": 5.163967609405518, | |
| "learning_rate": 2.6e-06, | |
| "loss": 3.9394, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.007880434782608695, | |
| "grad_norm": 3.577744960784912, | |
| "learning_rate": 2.7e-06, | |
| "loss": 3.4044, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.008152173913043478, | |
| "grad_norm": 5.421440124511719, | |
| "learning_rate": 2.8000000000000003e-06, | |
| "loss": 4.2156, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.00842391304347826, | |
| "grad_norm": 4.284877777099609, | |
| "learning_rate": 2.9e-06, | |
| "loss": 3.519, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.008695652173913044, | |
| "grad_norm": 6.597541809082031, | |
| "learning_rate": 3e-06, | |
| "loss": 4.5028, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.008967391304347826, | |
| "grad_norm": 7.851593494415283, | |
| "learning_rate": 3.1e-06, | |
| "loss": 5.1247, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.00923913043478261, | |
| "grad_norm": 2.644531726837158, | |
| "learning_rate": 3.2000000000000003e-06, | |
| "loss": 2.9878, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.009510869565217392, | |
| "grad_norm": 3.316633462905884, | |
| "learning_rate": 3.3e-06, | |
| "loss": 3.4112, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.009782608695652175, | |
| "grad_norm": 3.0373375415802, | |
| "learning_rate": 3.4e-06, | |
| "loss": 3.301, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.010054347826086956, | |
| "grad_norm": 5.997847080230713, | |
| "learning_rate": 3.5e-06, | |
| "loss": 4.9763, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.010326086956521738, | |
| "grad_norm": 5.2729268074035645, | |
| "learning_rate": 3.6e-06, | |
| "loss": 4.7279, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.010597826086956521, | |
| "grad_norm": 2.7055575847625732, | |
| "learning_rate": 3.7e-06, | |
| "loss": 3.4656, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.010869565217391304, | |
| "grad_norm": 3.5352964401245117, | |
| "learning_rate": 3.8000000000000005e-06, | |
| "loss": 3.9642, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.011141304347826087, | |
| "grad_norm": 2.8491668701171875, | |
| "learning_rate": 3.9e-06, | |
| "loss": 3.8092, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.01141304347826087, | |
| "grad_norm": 1.8910856246948242, | |
| "learning_rate": 4e-06, | |
| "loss": 3.0861, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.011684782608695652, | |
| "grad_norm": 2.873784303665161, | |
| "learning_rate": 4.1e-06, | |
| "loss": 3.788, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.011956521739130435, | |
| "grad_norm": 3.121163845062256, | |
| "learning_rate": 4.2000000000000004e-06, | |
| "loss": 4.0837, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.012228260869565218, | |
| "grad_norm": 2.977182388305664, | |
| "learning_rate": 4.3e-06, | |
| "loss": 4.0382, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.0125, | |
| "grad_norm": 3.935685396194458, | |
| "learning_rate": 4.4e-06, | |
| "loss": 5.0603, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.012771739130434783, | |
| "grad_norm": 1.5644341707229614, | |
| "learning_rate": 4.5e-06, | |
| "loss": 3.0085, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.013043478260869565, | |
| "grad_norm": 2.2639412879943848, | |
| "learning_rate": 4.6e-06, | |
| "loss": 3.5606, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.013315217391304347, | |
| "grad_norm": 3.8248438835144043, | |
| "learning_rate": 4.700000000000001e-06, | |
| "loss": 5.1055, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.01358695652173913, | |
| "grad_norm": 2.0189621448516846, | |
| "learning_rate": 4.800000000000001e-06, | |
| "loss": 3.6357, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.013858695652173913, | |
| "grad_norm": 1.7053027153015137, | |
| "learning_rate": 4.9e-06, | |
| "loss": 3.3706, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.014130434782608696, | |
| "grad_norm": 2.996800184249878, | |
| "learning_rate": 4.9999999999999996e-06, | |
| "loss": 4.2643, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.014402173913043478, | |
| "grad_norm": 2.2045717239379883, | |
| "learning_rate": 5.1e-06, | |
| "loss": 3.4676, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.014673913043478261, | |
| "grad_norm": 1.9880719184875488, | |
| "learning_rate": 5.2e-06, | |
| "loss": 3.7901, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.014945652173913044, | |
| "grad_norm": 1.4993855953216553, | |
| "learning_rate": 5.3e-06, | |
| "loss": 3.3175, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.015217391304347827, | |
| "grad_norm": 1.3958892822265625, | |
| "learning_rate": 5.4e-06, | |
| "loss": 3.0166, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.01548913043478261, | |
| "grad_norm": 1.5511572360992432, | |
| "learning_rate": 5.5e-06, | |
| "loss": 3.4503, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.01576086956521739, | |
| "grad_norm": 2.274249792098999, | |
| "learning_rate": 5.600000000000001e-06, | |
| "loss": 4.1636, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.016032608695652175, | |
| "grad_norm": 1.7447724342346191, | |
| "learning_rate": 5.7000000000000005e-06, | |
| "loss": 3.7056, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.016304347826086956, | |
| "grad_norm": 2.4315755367279053, | |
| "learning_rate": 5.8e-06, | |
| "loss": 3.9477, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.01657608695652174, | |
| "grad_norm": 1.5694116353988647, | |
| "learning_rate": 5.899999999999999e-06, | |
| "loss": 3.321, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.01684782608695652, | |
| "grad_norm": 1.856467366218567, | |
| "learning_rate": 6e-06, | |
| "loss": 3.8384, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.017119565217391303, | |
| "grad_norm": 1.3913215398788452, | |
| "learning_rate": 6.1e-06, | |
| "loss": 3.1717, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.017391304347826087, | |
| "grad_norm": 1.1280888319015503, | |
| "learning_rate": 6.2e-06, | |
| "loss": 3.0487, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.017663043478260868, | |
| "grad_norm": 1.3553597927093506, | |
| "learning_rate": 6.3e-06, | |
| "loss": 3.4274, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.017934782608695653, | |
| "grad_norm": 1.5680698156356812, | |
| "learning_rate": 6.4000000000000006e-06, | |
| "loss": 3.2402, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.018206521739130434, | |
| "grad_norm": 1.2775722742080688, | |
| "learning_rate": 6.5000000000000004e-06, | |
| "loss": 3.1252, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.01847826086956522, | |
| "grad_norm": 1.547057867050171, | |
| "learning_rate": 6.6e-06, | |
| "loss": 3.6654, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.01875, | |
| "grad_norm": 1.6398184299468994, | |
| "learning_rate": 6.7e-06, | |
| "loss": 3.4637, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.019021739130434784, | |
| "grad_norm": 0.8923302292823792, | |
| "learning_rate": 6.8e-06, | |
| "loss": 2.6619, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.019293478260869565, | |
| "grad_norm": 1.1862719058990479, | |
| "learning_rate": 6.900000000000001e-06, | |
| "loss": 3.1319, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.01956521739130435, | |
| "grad_norm": 1.0350522994995117, | |
| "learning_rate": 7e-06, | |
| "loss": 2.8874, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.01983695652173913, | |
| "grad_norm": 1.7756403684616089, | |
| "learning_rate": 7.1e-06, | |
| "loss": 3.7356, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.02010869565217391, | |
| "grad_norm": 1.2788705825805664, | |
| "learning_rate": 7.2e-06, | |
| "loss": 3.2409, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.020380434782608696, | |
| "grad_norm": 1.4206314086914062, | |
| "learning_rate": 7.3e-06, | |
| "loss": 3.5884, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.020652173913043477, | |
| "grad_norm": 2.0677270889282227, | |
| "learning_rate": 7.4e-06, | |
| "loss": 4.3218, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.02092391304347826, | |
| "grad_norm": 0.700586199760437, | |
| "learning_rate": 7.5e-06, | |
| "loss": 2.4596, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.021195652173913043, | |
| "grad_norm": 2.1819796562194824, | |
| "learning_rate": 7.600000000000001e-06, | |
| "loss": 3.9885, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.021467391304347827, | |
| "grad_norm": 1.0155264139175415, | |
| "learning_rate": 7.699999999999999e-06, | |
| "loss": 2.7548, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.021739130434782608, | |
| "grad_norm": 1.2022877931594849, | |
| "learning_rate": 7.8e-06, | |
| "loss": 3.1796, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.022010869565217393, | |
| "grad_norm": 1.1962783336639404, | |
| "learning_rate": 7.899999999999999e-06, | |
| "loss": 2.907, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.022282608695652174, | |
| "grad_norm": 1.0273329019546509, | |
| "learning_rate": 8e-06, | |
| "loss": 2.7207, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.022554347826086958, | |
| "grad_norm": 2.3930468559265137, | |
| "learning_rate": 8.1e-06, | |
| "loss": 3.9644, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.02282608695652174, | |
| "grad_norm": 2.244040012359619, | |
| "learning_rate": 8.2e-06, | |
| "loss": 3.5003, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.02309782608695652, | |
| "grad_norm": 1.685024380683899, | |
| "learning_rate": 8.3e-06, | |
| "loss": 3.0757, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.023369565217391305, | |
| "grad_norm": 3.1130995750427246, | |
| "learning_rate": 8.400000000000001e-06, | |
| "loss": 3.9588, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.023641304347826086, | |
| "grad_norm": 2.6688392162323, | |
| "learning_rate": 8.5e-06, | |
| "loss": 3.6427, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.02391304347826087, | |
| "grad_norm": 1.3578976392745972, | |
| "learning_rate": 8.6e-06, | |
| "loss": 2.9824, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.02418478260869565, | |
| "grad_norm": 1.2437134981155396, | |
| "learning_rate": 8.7e-06, | |
| "loss": 3.1183, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.024456521739130436, | |
| "grad_norm": 2.143594741821289, | |
| "learning_rate": 8.8e-06, | |
| "loss": 3.6833, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.024728260869565217, | |
| "grad_norm": 1.2158911228179932, | |
| "learning_rate": 8.900000000000001e-06, | |
| "loss": 2.7814, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.025, | |
| "grad_norm": 2.4209673404693604, | |
| "learning_rate": 9e-06, | |
| "loss": 4.3185, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.025271739130434782, | |
| "grad_norm": 3.090715169906616, | |
| "learning_rate": 9.100000000000001e-06, | |
| "loss": 4.0217, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.025543478260869567, | |
| "grad_norm": 3.1287500858306885, | |
| "learning_rate": 9.2e-06, | |
| "loss": 3.8665, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.025815217391304348, | |
| "grad_norm": 2.6370797157287598, | |
| "learning_rate": 9.3e-06, | |
| "loss": 3.7443, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.02608695652173913, | |
| "grad_norm": 4.120901107788086, | |
| "learning_rate": 9.400000000000001e-06, | |
| "loss": 4.3482, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.026358695652173914, | |
      "grad_norm": null,
| "learning_rate": 9.400000000000001e-06, | |
| "loss": 3.7633, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.026630434782608695, | |
| "grad_norm": 2.1682493686676025, | |
| "learning_rate": 9.5e-06, | |
| "loss": 3.135, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.02690217391304348, | |
| "grad_norm": 9.943031311035156, | |
| "learning_rate": 9.600000000000001e-06, | |
| "loss": 4.4992, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.02717391304347826, | |
| "grad_norm": 9.851699829101562, | |
| "learning_rate": 9.699999999999999e-06, | |
| "loss": 3.3346, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.027445652173913045, | |
| "grad_norm": 9.068572044372559, | |
| "learning_rate": 9.8e-06, | |
| "loss": 3.4352, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.027717391304347826, | |
| "grad_norm": 16.46036148071289, | |
| "learning_rate": 9.9e-06, | |
| "loss": 4.2803, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.02798913043478261, | |
| "grad_norm": 4.916048049926758, | |
| "learning_rate": 9.999999999999999e-06, | |
| "loss": 5.2283, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.02826086956521739, | |
| "grad_norm": 3.9112675189971924, | |
| "learning_rate": 1.01e-05, | |
| "loss": 3.9085, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.028532608695652172, | |
| "grad_norm": 2.9353702068328857, | |
| "learning_rate": 1.02e-05, | |
| "loss": 3.436, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.028804347826086957, | |
| "grad_norm": 2.1756107807159424, | |
| "learning_rate": 1.03e-05, | |
| "loss": 4.2118, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.029076086956521738, | |
| "grad_norm": 2.4335989952087402, | |
| "learning_rate": 1.04e-05, | |
| "loss": 4.3144, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.029347826086956522, | |
| "grad_norm": 2.6821694374084473, | |
| "learning_rate": 1.05e-05, | |
| "loss": 4.461, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.029619565217391303, | |
| "grad_norm": 1.9325740337371826, | |
| "learning_rate": 1.06e-05, | |
| "loss": 4.1079, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.029891304347826088, | |
| "grad_norm": 3.598806381225586, | |
| "learning_rate": 1.0700000000000001e-05, | |
| "loss": 3.6908, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.03016304347826087, | |
| "grad_norm": 1.7592302560806274, | |
| "learning_rate": 1.08e-05, | |
| "loss": 3.2482, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.030434782608695653, | |
| "grad_norm": 2.4772744178771973, | |
| "learning_rate": 1.09e-05, | |
| "loss": 4.3941, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.030706521739130434, | |
| "grad_norm": 1.8405948877334595, | |
| "learning_rate": 1.1e-05, | |
| "loss": 3.6201, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.03097826086956522, | |
| "grad_norm": 4.303748607635498, | |
| "learning_rate": 1.11e-05, | |
| "loss": 4.6995, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.03125, | |
| "grad_norm": 2.199484348297119, | |
| "learning_rate": 1.1200000000000001e-05, | |
| "loss": 3.7969, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.03152173913043478, | |
| "grad_norm": 1.0349135398864746, | |
| "learning_rate": 1.13e-05, | |
| "loss": 2.8461, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.03179347826086956, | |
| "grad_norm": 2.374260425567627, | |
| "learning_rate": 1.1400000000000001e-05, | |
| "loss": 4.2862, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.03206521739130435, | |
| "grad_norm": 1.283584713935852, | |
| "learning_rate": 1.1500000000000002e-05, | |
| "loss": 3.1288, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.03233695652173913, | |
| "grad_norm": 3.549565076828003, | |
| "learning_rate": 1.16e-05, | |
| "loss": 4.1137, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.03260869565217391, | |
| "grad_norm": 1.3312267065048218, | |
| "learning_rate": 1.1700000000000001e-05, | |
| "loss": 3.6003, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.03288043478260869, | |
| "grad_norm": 1.655480146408081, | |
| "learning_rate": 1.1799999999999999e-05, | |
| "loss": 3.7798, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.03315217391304348, | |
| "grad_norm": 1.6580840349197388, | |
| "learning_rate": 1.19e-05, | |
| "loss": 3.5654, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.03342391304347826, | |
| "grad_norm": 2.8110294342041016, | |
| "learning_rate": 1.2e-05, | |
| "loss": 3.9673, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.03369565217391304, | |
| "grad_norm": 1.2965394258499146, | |
| "learning_rate": 1.21e-05, | |
| "loss": 3.26, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.033967391304347824, | |
| "grad_norm": 1.914302945137024, | |
| "learning_rate": 1.22e-05, | |
| "loss": 3.95, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.034239130434782605, | |
| "grad_norm": 1.3706483840942383, | |
| "learning_rate": 1.2299999999999999e-05, | |
| "loss": 3.5317, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.03451086956521739, | |
| "grad_norm": 2.016113042831421, | |
| "learning_rate": 1.24e-05, | |
| "loss": 3.2582, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.034782608695652174, | |
| "grad_norm": 2.5129711627960205, | |
| "learning_rate": 1.25e-05, | |
| "loss": 3.3608, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.035054347826086955, | |
      "grad_norm": null,
| "learning_rate": 1.25e-05, | |
| "loss": 4.3213, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.035326086956521736, | |
| "grad_norm": 3.073050022125244, | |
| "learning_rate": 1.26e-05, | |
| "loss": 4.4668, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.035597826086956524, | |
| "grad_norm": 1.8033742904663086, | |
| "learning_rate": 1.27e-05, | |
| "loss": 3.5871, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.035869565217391305, | |
| "grad_norm": 3.148634195327759, | |
| "learning_rate": 1.2800000000000001e-05, | |
| "loss": 3.7611, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.036141304347826086, | |
| "grad_norm": 2.898167610168457, | |
| "learning_rate": 1.29e-05, | |
| "loss": 3.6928, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.03641304347826087, | |
| "grad_norm": 1.9879661798477173, | |
| "learning_rate": 1.3000000000000001e-05, | |
| "loss": 3.607, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.036684782608695655, | |
| "grad_norm": 2.0479657649993896, | |
| "learning_rate": 1.31e-05, | |
| "loss": 3.2456, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.03695652173913044, | |
| "grad_norm": 1.5060527324676514, | |
| "learning_rate": 1.32e-05, | |
| "loss": 3.4124, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.03722826086956522, | |
| "grad_norm": 2.2012810707092285, | |
| "learning_rate": 1.3300000000000001e-05, | |
| "loss": 4.1008, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.0375, | |
| "grad_norm": 2.2929439544677734, | |
| "learning_rate": 1.34e-05, | |
| "loss": 4.5483, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.03777173913043478, | |
| "grad_norm": 1.777136206626892, | |
| "learning_rate": 1.3500000000000001e-05, | |
| "loss": 3.9796, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.03804347826086957, | |
| "grad_norm": 3.1867823600769043, | |
| "learning_rate": 1.36e-05, | |
| "loss": 4.13, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.03831521739130435, | |
| "grad_norm": 2.070611000061035, | |
| "learning_rate": 1.3700000000000001e-05, | |
| "loss": 3.3474, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.03858695652173913, | |
| "grad_norm": 1.9961167573928833, | |
| "learning_rate": 1.3800000000000002e-05, | |
| "loss": 4.0889, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.03885869565217391, | |
| "grad_norm": 1.709995985031128, | |
| "learning_rate": 1.39e-05, | |
| "loss": 3.9576, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.0391304347826087, | |
| "grad_norm": 1.793494462966919, | |
| "learning_rate": 1.4e-05, | |
| "loss": 4.419, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.03940217391304348, | |
| "grad_norm": 2.1228878498077393, | |
| "learning_rate": 1.4099999999999999e-05, | |
| "loss": 3.7677, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.03967391304347826, | |
| "grad_norm": 1.5662310123443604, | |
| "learning_rate": 1.42e-05, | |
| "loss": 3.5038, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.03994565217391304, | |
| "grad_norm": 1.1859204769134521, | |
| "learning_rate": 1.43e-05, | |
| "loss": 3.0831, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.04021739130434782, | |
| "grad_norm": 1.998694896697998, | |
| "learning_rate": 1.44e-05, | |
| "loss": 4.06, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.04048913043478261, | |
| "grad_norm": 2.2709462642669678, | |
| "learning_rate": 1.45e-05, | |
| "loss": 3.4234, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.04076086956521739, | |
| "grad_norm": 3.572169065475464, | |
| "learning_rate": 1.46e-05, | |
| "loss": 4.1884, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.04103260869565217, | |
| "grad_norm": 2.4503207206726074, | |
| "learning_rate": 1.47e-05, | |
| "loss": 4.0506, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.041304347826086954, | |
| "grad_norm": 2.66367244720459, | |
| "learning_rate": 1.48e-05, | |
| "loss": 4.6858, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.04157608695652174, | |
| "grad_norm": 2.1236956119537354, | |
| "learning_rate": 1.49e-05, | |
| "loss": 3.2571, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.04184782608695652, | |
| "grad_norm": 4.9067206382751465, | |
| "learning_rate": 1.5e-05, | |
| "loss": 5.32, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.042119565217391304, | |
| "grad_norm": 2.0733673572540283, | |
| "learning_rate": 1.51e-05, | |
| "loss": 3.7548, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.042391304347826085, | |
| "grad_norm": 1.186569333076477, | |
| "learning_rate": 1.5200000000000002e-05, | |
| "loss": 3.3953, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.042663043478260866, | |
| "grad_norm": 2.5303616523742676, | |
| "learning_rate": 1.53e-05, | |
| "loss": 4.0742, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.042934782608695654, | |
| "grad_norm": 2.3837692737579346, | |
| "learning_rate": 1.5399999999999998e-05, | |
| "loss": 4.3684, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.043206521739130435, | |
| "grad_norm": 2.3704850673675537, | |
| "learning_rate": 1.55e-05, | |
| "loss": 4.1658, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.043478260869565216, | |
| "grad_norm": 3.8034274578094482, | |
| "learning_rate": 1.56e-05, | |
| "loss": 4.0919, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.04375, | |
| "grad_norm": 1.2763932943344116, | |
| "learning_rate": 1.57e-05, | |
| "loss": 3.8752, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.044021739130434785, | |
| "grad_norm": 1.7155462503433228, | |
| "learning_rate": 1.5799999999999998e-05, | |
| "loss": 4.317, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.044293478260869566, | |
| "grad_norm": 2.481580972671509, | |
| "learning_rate": 1.59e-05, | |
| "loss": 4.1314, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.04456521739130435, | |
| "grad_norm": 2.3853437900543213, | |
| "learning_rate": 1.6e-05, | |
| "loss": 3.6408, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.04483695652173913, | |
| "grad_norm": 1.7092500925064087, | |
| "learning_rate": 1.61e-05, | |
| "loss": 3.833, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.045108695652173916, | |
| "grad_norm": 2.1334292888641357, | |
| "learning_rate": 1.62e-05, | |
| "loss": 4.5951, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.0453804347826087, | |
| "grad_norm": 1.352221965789795, | |
| "learning_rate": 1.63e-05, | |
| "loss": 3.804, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.04565217391304348, | |
| "grad_norm": 2.178356409072876, | |
| "learning_rate": 1.64e-05, | |
| "loss": 3.7594, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.04592391304347826, | |
| "grad_norm": 2.9925286769866943, | |
| "learning_rate": 1.65e-05, | |
| "loss": 3.8819, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.04619565217391304, | |
| "grad_norm": 3.0277819633483887, | |
| "learning_rate": 1.66e-05, | |
| "loss": 4.4896, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.04646739130434783, | |
| "grad_norm": 1.459825873374939, | |
| "learning_rate": 1.67e-05, | |
| "loss": 4.1701, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.04673913043478261, | |
| "grad_norm": 1.6663670539855957, | |
| "learning_rate": 1.6800000000000002e-05, | |
| "loss": 3.6651, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.04701086956521739, | |
| "grad_norm": 1.7755922079086304, | |
| "learning_rate": 1.69e-05, | |
| "loss": 3.3178, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.04728260869565217, | |
| "grad_norm": 1.4884556531906128, | |
| "learning_rate": 1.7e-05, | |
| "loss": 3.6467, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.04755434782608696, | |
| "grad_norm": 2.701298713684082, | |
| "learning_rate": 1.71e-05, | |
| "loss": 4.773, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.04782608695652174, | |
| "grad_norm": 3.169405937194824, | |
| "learning_rate": 1.72e-05, | |
| "loss": 4.1844, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.04809782608695652, | |
| "grad_norm": 2.4385275840759277, | |
| "learning_rate": 1.73e-05, | |
| "loss": 3.8691, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.0483695652173913, | |
| "grad_norm": 3.446741819381714, | |
| "learning_rate": 1.74e-05, | |
| "loss": 3.6733, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.048641304347826084, | |
| "grad_norm": 4.095767974853516, | |
| "learning_rate": 1.7500000000000002e-05, | |
| "loss": 3.927, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.04891304347826087, | |
| "grad_norm": 3.42309308052063, | |
| "learning_rate": 1.76e-05, | |
| "loss": 4.152, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.04918478260869565, | |
| "grad_norm": 1.8505162000656128, | |
| "learning_rate": 1.77e-05, | |
| "loss": 3.5489, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.049456521739130434, | |
| "grad_norm": 3.3934953212738037, | |
| "learning_rate": 1.7800000000000002e-05, | |
| "loss": 3.9161, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.049728260869565215, | |
| "grad_norm": 3.2479214668273926, | |
| "learning_rate": 1.79e-05, | |
| "loss": 3.3177, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 4.14975643157959, | |
| "learning_rate": 1.8e-05, | |
| "loss": 4.0072, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.050271739130434784, | |
| "grad_norm": 4.540154457092285, | |
| "learning_rate": 1.8100000000000003e-05, | |
| "loss": 3.878, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.050543478260869565, | |
| "grad_norm": 2.0964109897613525, | |
| "learning_rate": 1.8200000000000002e-05, | |
| "loss": 3.6273, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.050815217391304346, | |
| "grad_norm": 2.119013547897339, | |
| "learning_rate": 1.83e-05, | |
| "loss": 3.5527, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.051086956521739134, | |
| "grad_norm": 4.674685478210449, | |
| "learning_rate": 1.84e-05, | |
| "loss": 4.1131, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.051358695652173915, | |
| "grad_norm": 7.127579689025879, | |
| "learning_rate": 1.8500000000000002e-05, | |
| "loss": 4.0545, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.051630434782608696, | |
| "grad_norm": 3.285233497619629, | |
| "learning_rate": 1.86e-05, | |
| "loss": 3.6065, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.05190217391304348, | |
| "grad_norm": 1.405059576034546, | |
| "learning_rate": 1.87e-05, | |
| "loss": 3.4746, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.05217391304347826, | |
| "grad_norm": 3.002393960952759, | |
| "learning_rate": 1.8800000000000003e-05, | |
| "loss": 3.9168, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.052445652173913046, | |
| "grad_norm": 2.253774404525757, | |
| "learning_rate": 1.8900000000000002e-05, | |
| "loss": 3.0605, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.05271739130434783, | |
| "grad_norm": 3.4486026763916016, | |
| "learning_rate": 1.9e-05, | |
| "loss": 3.6955, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.05298913043478261, | |
| "grad_norm": 2.6076107025146484, | |
| "learning_rate": 1.9100000000000003e-05, | |
| "loss": 4.7168, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.05326086956521739, | |
| "grad_norm": 2.173379421234131, | |
| "learning_rate": 1.9200000000000003e-05, | |
| "loss": 3.9563, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.05353260869565218, | |
| "grad_norm": 1.8571799993515015, | |
| "learning_rate": 1.9299999999999998e-05, | |
| "loss": 3.531, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.05380434782608696, | |
| "grad_norm": 2.940171957015991, | |
| "learning_rate": 1.9399999999999997e-05, | |
| "loss": 3.9534, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.05407608695652174, | |
| "grad_norm": 1.6403425931930542, | |
| "learning_rate": 1.95e-05, | |
| "loss": 3.7979, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.05434782608695652, | |
| "grad_norm": 1.9086182117462158, | |
| "learning_rate": 1.96e-05, | |
| "loss": 3.3714, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.0546195652173913, | |
| "grad_norm": 2.42441463470459, | |
| "learning_rate": 1.9699999999999998e-05, | |
| "loss": 3.6953, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.05489130434782609, | |
| "grad_norm": 1.7879725694656372, | |
| "learning_rate": 1.98e-05, | |
| "loss": 3.424, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.05516304347826087, | |
| "grad_norm": 1.951125144958496, | |
| "learning_rate": 1.99e-05, | |
| "loss": 4.0087, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.05543478260869565, | |
| "grad_norm": 1.8883895874023438, | |
| "learning_rate": 1.9999999999999998e-05, | |
| "loss": 3.4857, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.05570652173913043, | |
| "grad_norm": 2.1979916095733643, | |
| "learning_rate": 2.01e-05, | |
| "loss": 3.8898, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.05597826086956522, | |
| "grad_norm": 1.6748876571655273, | |
| "learning_rate": 2.02e-05, | |
| "loss": 3.5928, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.05625, | |
| "grad_norm": 4.129452705383301, | |
| "learning_rate": 2.03e-05, | |
| "loss": 4.2804, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.05652173913043478, | |
| "grad_norm": 2.1873629093170166, | |
| "learning_rate": 2.04e-05, | |
| "loss": 3.5242, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.05679347826086956, | |
| "grad_norm": 3.064663887023926, | |
| "learning_rate": 2.05e-05, | |
| "loss": 3.379, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.057065217391304345, | |
| "grad_norm": 2.8077127933502197, | |
| "learning_rate": 2.06e-05, | |
| "loss": 3.4032, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.05733695652173913, | |
| "grad_norm": 2.902247905731201, | |
| "learning_rate": 2.07e-05, | |
| "loss": 3.9496, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.057608695652173914, | |
| "grad_norm": 2.3877575397491455, | |
| "learning_rate": 2.08e-05, | |
| "loss": 3.5266, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.057880434782608695, | |
| "grad_norm": 4.157260417938232, | |
| "learning_rate": 2.09e-05, | |
| "loss": 4.0795, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.058152173913043476, | |
| "grad_norm": 4.117393970489502, | |
| "learning_rate": 2.1e-05, | |
| "loss": 3.3584, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.058423913043478264, | |
| "grad_norm": 3.1118719577789307, | |
| "learning_rate": 2.11e-05, | |
| "loss": 3.5, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.058695652173913045, | |
| "grad_norm": 3.3953042030334473, | |
| "learning_rate": 2.12e-05, | |
| "loss": 3.6948, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.058967391304347826, | |
| "grad_norm": 4.036997318267822, | |
| "learning_rate": 2.13e-05, | |
| "loss": 3.6871, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.05923913043478261, | |
| "grad_norm": 4.334664821624756, | |
| "learning_rate": 2.1400000000000002e-05, | |
| "loss": 4.0403, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.059510869565217395, | |
| "grad_norm": 3.2473344802856445, | |
| "learning_rate": 2.15e-05, | |
| "loss": 2.8208, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.059782608695652176, | |
| "grad_norm": 5.333037376403809, | |
| "learning_rate": 2.16e-05, | |
| "loss": 3.4915, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.06005434782608696, | |
| "grad_norm": 4.594522953033447, | |
| "learning_rate": 2.1700000000000002e-05, | |
| "loss": 3.773, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.06032608695652174, | |
| "grad_norm": 2.8913493156433105, | |
| "learning_rate": 2.18e-05, | |
| "loss": 3.5039, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.06059782608695652, | |
| "grad_norm": 3.1354737281799316, | |
| "learning_rate": 2.19e-05, | |
| "loss": 3.5823, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.06086956521739131, | |
| "grad_norm": 4.392479419708252, | |
| "learning_rate": 2.2e-05, | |
| "loss": 3.8093, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.06114130434782609, | |
| "grad_norm": 3.4441139698028564, | |
| "learning_rate": 2.2100000000000002e-05, | |
| "loss": 3.9232, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.06141304347826087, | |
| "grad_norm": 2.799862861633301, | |
| "learning_rate": 2.22e-05, | |
| "loss": 3.9281, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.06168478260869565, | |
| "grad_norm": 2.910454750061035, | |
| "learning_rate": 2.23e-05, | |
| "loss": 3.635, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.06195652173913044, | |
| "grad_norm": 3.9778056144714355, | |
| "learning_rate": 2.2400000000000002e-05, | |
| "loss": 3.5988, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.06222826086956522, | |
| "grad_norm": 3.500877857208252, | |
| "learning_rate": 2.25e-05, | |
| "loss": 3.2595, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.0625, | |
| "grad_norm": 3.3998258113861084, | |
| "learning_rate": 2.26e-05, | |
| "loss": 3.9116, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.06277173913043478, | |
| "grad_norm": 2.585266351699829, | |
| "learning_rate": 2.2700000000000003e-05, | |
| "loss": 3.6087, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.06304347826086956, | |
| "grad_norm": 2.313265085220337, | |
| "learning_rate": 2.2800000000000002e-05, | |
| "loss": 3.8507, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.06331521739130434, | |
| "grad_norm": 6.233187198638916, | |
| "learning_rate": 2.29e-05, | |
| "loss": 4.6271, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.06358695652173912, | |
| "grad_norm": 4.387447357177734, | |
| "learning_rate": 2.3000000000000003e-05, | |
| "loss": 3.7921, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.06385869565217392, | |
| "grad_norm": 3.573700189590454, | |
| "learning_rate": 2.3100000000000002e-05, | |
| "loss": 3.6567, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.0641304347826087, | |
| "grad_norm": 1.9834128618240356, | |
| "learning_rate": 2.32e-05, | |
| "loss": 3.715, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.06440217391304348, | |
| "grad_norm": 3.0220143795013428, | |
| "learning_rate": 2.33e-05, | |
| "loss": 4.302, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.06467391304347826, | |
| "grad_norm": 5.85452938079834, | |
| "learning_rate": 2.3400000000000003e-05, | |
| "loss": 4.3755, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.06494565217391304, | |
| "grad_norm": 6.263114929199219, | |
| "learning_rate": 2.3500000000000002e-05, | |
| "loss": 4.0045, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.06521739130434782, | |
| "grad_norm": 4.6512322425842285, | |
| "learning_rate": 2.3599999999999998e-05, | |
| "loss": 4.202, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.0654891304347826, | |
| "grad_norm": 1.589857578277588, | |
| "learning_rate": 2.37e-05, | |
| "loss": 3.0368, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.06576086956521739, | |
| "grad_norm": 3.0660102367401123, | |
| "learning_rate": 2.38e-05, | |
| "loss": 3.959, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.06603260869565217, | |
| "grad_norm": 3.7164723873138428, | |
| "learning_rate": 2.3899999999999998e-05, | |
| "loss": 3.5374, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.06630434782608696, | |
| "grad_norm": 3.772892713546753, | |
| "learning_rate": 2.4e-05, | |
| "loss": 3.3464, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.06657608695652174, | |
| "grad_norm": 3.2034425735473633, | |
| "learning_rate": 2.41e-05, | |
| "loss": 3.3077, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.06684782608695652, | |
| "grad_norm": 4.294682502746582, | |
| "learning_rate": 2.42e-05, | |
| "loss": 3.813, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.0671195652173913, | |
| "grad_norm": 3.805955171585083, | |
| "learning_rate": 2.43e-05, | |
| "loss": 3.6854, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.06739130434782609, | |
| "grad_norm": 4.587597370147705, | |
| "learning_rate": 2.44e-05, | |
| "loss": 3.3058, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.06766304347826087, | |
| "grad_norm": 5.513325214385986, | |
| "learning_rate": 2.45e-05, | |
| "loss": 3.6714, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.06793478260869565, | |
| "grad_norm": 5.820981025695801, | |
| "learning_rate": 2.4599999999999998e-05, | |
| "loss": 3.6253, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.06820652173913043, | |
| "grad_norm": 4.236602306365967, | |
| "learning_rate": 2.47e-05, | |
| "loss": 3.605, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.06847826086956521, | |
| "grad_norm": 3.524596929550171, | |
| "learning_rate": 2.48e-05, | |
| "loss": 3.6746, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.06875, | |
| "grad_norm": 4.912044525146484, | |
| "learning_rate": 2.49e-05, | |
| "loss": 3.6485, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.06902173913043479, | |
| "grad_norm": 5.730941295623779, | |
| "learning_rate": 2.5e-05, | |
| "loss": 3.7662, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.06929347826086957, | |
| "grad_norm": 7.801654815673828, | |
| "learning_rate": 2.51e-05, | |
| "loss": 4.8335, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.06956521739130435, | |
| "grad_norm": 4.116402626037598, | |
| "learning_rate": 2.52e-05, | |
| "loss": 3.9456, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.06983695652173913, | |
| "grad_norm": 6.192750930786133, | |
| "learning_rate": 2.5300000000000002e-05, | |
| "loss": 4.0458, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.07010869565217391, | |
| "grad_norm": 4.565896511077881, | |
| "learning_rate": 2.54e-05, | |
| "loss": 3.3915, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.07038043478260869, | |
| "grad_norm": 4.502013206481934, | |
| "learning_rate": 2.55e-05, | |
| "loss": 3.4612, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.07065217391304347, | |
| "grad_norm": 3.8033788204193115, | |
| "learning_rate": 2.5600000000000002e-05, | |
| "loss": 2.9393, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.07092391304347827, | |
| "grad_norm": 4.34993839263916, | |
| "learning_rate": 2.57e-05, | |
| "loss": 3.6575, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.07119565217391305, | |
| "grad_norm": 3.9064600467681885, | |
| "learning_rate": 2.58e-05, | |
| "loss": 3.6109, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.07146739130434783, | |
| "grad_norm": 3.737865447998047, | |
| "learning_rate": 2.59e-05, | |
| "loss": 3.7551, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.07173913043478261, | |
| "grad_norm": 4.92514181137085, | |
| "learning_rate": 2.6000000000000002e-05, | |
| "loss": 3.9133, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.07201086956521739, | |
| "grad_norm": 4.838226318359375, | |
| "learning_rate": 2.61e-05, | |
| "loss": 3.4457, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.07228260869565217, | |
| "grad_norm": 5.465858459472656, | |
| "learning_rate": 2.62e-05, | |
| "loss": 3.3492, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.07255434782608695, | |
| "grad_norm": 4.452864646911621, | |
| "learning_rate": 2.6300000000000002e-05, | |
| "loss": 4.0503, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.07282608695652174, | |
| "grad_norm": 2.657294750213623, | |
| "learning_rate": 2.64e-05, | |
| "loss": 3.5952, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.07309782608695652, | |
| "grad_norm": 3.738781213760376, | |
| "learning_rate": 2.65e-05, | |
| "loss": 3.2922, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.07336956521739131, | |
| "grad_norm": 4.5520524978637695, | |
| "learning_rate": 2.6600000000000003e-05, | |
| "loss": 3.4737, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.07364130434782609, | |
| "grad_norm": 6.709564685821533, | |
| "learning_rate": 2.6700000000000002e-05, | |
| "loss": 3.8822, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.07391304347826087, | |
| "grad_norm": 5.555806636810303, | |
| "learning_rate": 2.68e-05, | |
| "loss": 4.1245, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.07418478260869565, | |
| "grad_norm": 3.99271821975708, | |
| "learning_rate": 2.69e-05, | |
| "loss": 3.535, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.07445652173913044, | |
| "grad_norm": 3.0163462162017822, | |
| "learning_rate": 2.7000000000000002e-05, | |
| "loss": 3.4727, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.07472826086956522, | |
| "grad_norm": 5.363269329071045, | |
| "learning_rate": 2.71e-05, | |
| "loss": 4.1682, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.075, | |
| "grad_norm": 4.282686710357666, | |
| "learning_rate": 2.72e-05, | |
| "loss": 3.1355, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.07527173913043478, | |
| "grad_norm": 5.983767509460449, | |
| "learning_rate": 2.7300000000000003e-05, | |
| "loss": 3.8695, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.07554347826086956, | |
| "grad_norm": 5.345359802246094, | |
| "learning_rate": 2.7400000000000002e-05, | |
| "loss": 4.1882, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.07581521739130435, | |
| "grad_norm": 2.5548171997070312, | |
| "learning_rate": 2.75e-05, | |
| "loss": 3.2401, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.07608695652173914, | |
| "grad_norm": 3.790065050125122, | |
| "learning_rate": 2.7600000000000003e-05, | |
| "loss": 3.8057, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.07635869565217392, | |
| "grad_norm": 4.037810325622559, | |
| "learning_rate": 2.7700000000000002e-05, | |
| "loss": 3.8397, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.0766304347826087, | |
| "grad_norm": 4.571537017822266, | |
| "learning_rate": 2.78e-05, | |
| "loss": 3.1902, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.07690217391304348, | |
| "grad_norm": 4.4513702392578125, | |
| "learning_rate": 2.79e-05, | |
| "loss": 3.6552, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.07717391304347826, | |
| "grad_norm": 2.919421911239624, | |
| "learning_rate": 2.8e-05, | |
| "loss": 3.0449, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.07744565217391304, | |
| "grad_norm": 3.9303693771362305, | |
| "learning_rate": 2.81e-05, | |
| "loss": 3.8186, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.07771739130434782, | |
| "grad_norm": 3.8404746055603027, | |
| "learning_rate": 2.8199999999999998e-05, | |
| "loss": 3.2603, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.0779891304347826, | |
| "grad_norm": 3.2469253540039062, | |
| "learning_rate": 2.83e-05, | |
| "loss": 2.917, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.0782608695652174, | |
| "grad_norm": 2.8970375061035156, | |
| "learning_rate": 2.84e-05, | |
| "loss": 3.6446, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.07853260869565218, | |
| "grad_norm": 4.029104232788086, | |
| "learning_rate": 2.8499999999999998e-05, | |
| "loss": 4.1754, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.07880434782608696, | |
| "grad_norm": 3.967705726623535, | |
| "learning_rate": 2.86e-05, | |
| "loss": 3.3913, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.07907608695652174, | |
| "grad_norm": 3.084944009780884, | |
| "learning_rate": 2.87e-05, | |
| "loss": 3.4598, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.07934782608695652, | |
| "grad_norm": 3.2562334537506104, | |
| "learning_rate": 2.88e-05, | |
| "loss": 3.7326, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.0796195652173913, | |
| "grad_norm": 3.9668397903442383, | |
| "learning_rate": 2.89e-05, | |
| "loss": 3.8312, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.07989130434782608, | |
| "grad_norm": 3.7607946395874023, | |
| "learning_rate": 2.9e-05, | |
| "loss": 3.9309, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.08016304347826086, | |
| "grad_norm": 4.704547882080078, | |
| "learning_rate": 2.91e-05, | |
| "loss": 3.9181, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.08043478260869565, | |
| "grad_norm": 5.68735408782959, | |
| "learning_rate": 2.92e-05, | |
| "loss": 3.6405, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.08070652173913044, | |
| "grad_norm": 4.839725971221924, | |
| "learning_rate": 2.93e-05, | |
| "loss": 3.6735, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.08097826086956522, | |
| "grad_norm": 3.6157710552215576, | |
| "learning_rate": 2.94e-05, | |
| "loss": 4.1132, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.08125, | |
| "grad_norm": 3.6725997924804688, | |
| "learning_rate": 2.95e-05, | |
| "loss": 3.9299, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.08152173913043478, | |
| "grad_norm": 4.408240795135498, | |
| "learning_rate": 2.96e-05, | |
| "loss": 3.4141, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.08179347826086956, | |
| "grad_norm": 3.8871631622314453, | |
| "learning_rate": 2.97e-05, | |
| "loss": 2.7057, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.08206521739130435, | |
| "grad_norm": 5.148252010345459, | |
| "learning_rate": 2.98e-05, | |
| "loss": 3.0604, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.08233695652173913, | |
| "grad_norm": 3.8631784915924072, | |
| "learning_rate": 2.9900000000000002e-05, | |
| "loss": 3.4581, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.08260869565217391, | |
| "grad_norm": 2.67698073387146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1465, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.08288043478260869, | |
| "grad_norm": 2.6791391372680664, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8593, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.08315217391304348, | |
| "grad_norm": 3.844003677368164, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4371, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.08342391304347826, | |
| "grad_norm": 5.624002933502197, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8372, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.08369565217391305, | |
| "grad_norm": 4.992491245269775, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6231, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.08396739130434783, | |
| "grad_norm": 4.717808246612549, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9652, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.08423913043478261, | |
| "grad_norm": 3.6492786407470703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7065, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.08451086956521739, | |
| "grad_norm": 4.453199863433838, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7317, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.08478260869565217, | |
| "grad_norm": 5.009824275970459, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7651, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.08505434782608695, | |
| "grad_norm": 6.800715446472168, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0778, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.08532608695652173, | |
| "grad_norm": 6.225503444671631, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3679, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.08559782608695653, | |
| "grad_norm": 2.9776594638824463, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1434, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.08586956521739131, | |
| "grad_norm": 3.1179118156433105, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5896, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.08614130434782609, | |
| "grad_norm": 2.81669282913208, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9054, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.08641304347826087, | |
| "grad_norm": 3.894665002822876, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2122, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.08668478260869565, | |
| "grad_norm": 5.43796443939209, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8039, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.08695652173913043, | |
| "grad_norm": 3.9177141189575195, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6393, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.08722826086956521, | |
| "grad_norm": 3.0576508045196533, | |
| "learning_rate": 3e-05, | |
| "loss": 3.67, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.0875, | |
| "grad_norm": 5.208736896514893, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9719, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.08777173913043479, | |
| "grad_norm": 4.839489936828613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7197, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.08804347826086957, | |
| "grad_norm": 4.195175647735596, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3963, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.08831521739130435, | |
| "grad_norm": 3.565690279006958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.177, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.08858695652173913, | |
| "grad_norm": 2.6973624229431152, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0416, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.08885869565217391, | |
| "grad_norm": 2.620365619659424, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3155, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.0891304347826087, | |
| "grad_norm": 3.7461090087890625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6553, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.08940217391304348, | |
| "grad_norm": 6.702042102813721, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2574, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.08967391304347826, | |
| "grad_norm": 5.888139247894287, | |
| "learning_rate": 3e-05, | |
| "loss": 4.148, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.08994565217391304, | |
| "grad_norm": 4.737085342407227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8761, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.09021739130434783, | |
| "grad_norm": 3.9981937408447266, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4605, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.09048913043478261, | |
| "grad_norm": 5.0993266105651855, | |
| "learning_rate": 3e-05, | |
| "loss": 4.095, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.0907608695652174, | |
| "grad_norm": 4.450422763824463, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8801, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.09103260869565218, | |
| "grad_norm": 6.846070766448975, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5946, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.09130434782608696, | |
| "grad_norm": 5.887484550476074, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4395, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.09157608695652174, | |
| "grad_norm": 3.8128914833068848, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0863, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.09184782608695652, | |
| "grad_norm": 2.493584394454956, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0949, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.0921195652173913, | |
| "grad_norm": 3.304899215698242, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3348, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.09239130434782608, | |
| "grad_norm": 4.633394241333008, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5233, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.09266304347826088, | |
| "grad_norm": 5.405637264251709, | |
| "learning_rate": 3e-05, | |
| "loss": 3.776, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.09293478260869566, | |
| "grad_norm": 5.505592346191406, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9894, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.09320652173913044, | |
| "grad_norm": 3.901547431945801, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1865, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.09347826086956522, | |
| "grad_norm": 3.3303725719451904, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3656, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.09375, | |
| "grad_norm": 5.3323493003845215, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2369, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.09402173913043478, | |
| "grad_norm": 5.352675914764404, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7106, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.09429347826086956, | |
| "grad_norm": 5.6517839431762695, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4388, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.09456521739130434, | |
| "grad_norm": 4.7645111083984375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4332, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.09483695652173912, | |
| "grad_norm": 3.140958309173584, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2139, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.09510869565217392, | |
| "grad_norm": 2.692319393157959, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3015, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.0953804347826087, | |
| "grad_norm": 4.1542134284973145, | |
| "learning_rate": 3e-05, | |
| "loss": 3.29, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.09565217391304348, | |
| "grad_norm": 4.946714401245117, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2509, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.09592391304347826, | |
| "grad_norm": 5.529204368591309, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0578, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.09619565217391304, | |
| "grad_norm": 3.837975025177002, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7921, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.09646739130434782, | |
| "grad_norm": 3.0146334171295166, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0369, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.0967391304347826, | |
| "grad_norm": 4.283068656921387, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9506, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.09701086956521739, | |
| "grad_norm": 4.30133581161499, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4218, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.09728260869565217, | |
| "grad_norm": 6.228667736053467, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0054, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.09755434782608696, | |
| "grad_norm": 3.927131414413452, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9401, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.09782608695652174, | |
| "grad_norm": 4.374425888061523, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5279, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.09809782608695652, | |
| "grad_norm": 2.559403419494629, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8306, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.0983695652173913, | |
| "grad_norm": 2.9556596279144287, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4532, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.09864130434782609, | |
| "grad_norm": 4.841480731964111, | |
| "learning_rate": 3e-05, | |
| "loss": 3.474, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.09891304347826087, | |
| "grad_norm": 5.785233497619629, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6698, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.09918478260869565, | |
| "grad_norm": 5.180464267730713, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4199, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.09945652173913043, | |
| "grad_norm": 3.5438666343688965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5603, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.09972826086956521, | |
| "grad_norm": 3.1015937328338623, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9937, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 4.174754619598389, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5994, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.10027173913043479, | |
| "grad_norm": 4.678737163543701, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0816, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.10054347826086957, | |
| "grad_norm": 3.884875774383545, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8268, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.10081521739130435, | |
| "grad_norm": 4.060309886932373, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5038, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.10108695652173913, | |
| "grad_norm": 4.1803812980651855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8122, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.10135869565217391, | |
| "grad_norm": 3.1725456714630127, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7945, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.10163043478260869, | |
| "grad_norm": 3.1784756183624268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0676, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.10190217391304347, | |
| "grad_norm": 4.812568187713623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.593, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.10217391304347827, | |
| "grad_norm": 5.944811820983887, | |
| "learning_rate": 3e-05, | |
| "loss": 3.843, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.10244565217391305, | |
| "grad_norm": 4.13525915145874, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9103, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.10271739130434783, | |
| "grad_norm": 5.860477447509766, | |
| "learning_rate": 3e-05, | |
| "loss": 4.9384, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.10298913043478261, | |
| "grad_norm": 3.2515783309936523, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0586, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.10326086956521739, | |
| "grad_norm": 4.5452494621276855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7046, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.10353260869565217, | |
| "grad_norm": 3.8891849517822266, | |
| "learning_rate": 3e-05, | |
| "loss": 3.34, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.10380434782608695, | |
| "grad_norm": 4.083732604980469, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3112, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.10407608695652174, | |
| "grad_norm": 3.438572406768799, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9908, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.10434782608695652, | |
| "grad_norm": 3.0750012397766113, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3847, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.10461956521739131, | |
| "grad_norm": 3.0833847522735596, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2274, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.10489130434782609, | |
| "grad_norm": 4.356601238250732, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9247, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.10516304347826087, | |
| "grad_norm": 4.894799709320068, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6005, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.10543478260869565, | |
| "grad_norm": 4.887790203094482, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3258, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.10570652173913044, | |
| "grad_norm": 4.861374855041504, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6857, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.10597826086956522, | |
| "grad_norm": 4.775892734527588, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3695, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.10625, | |
| "grad_norm": 4.2715582847595215, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7657, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.10652173913043478, | |
| "grad_norm": 3.8440310955047607, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4856, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.10679347826086956, | |
| "grad_norm": 4.285994529724121, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4081, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.10706521739130435, | |
| "grad_norm": 2.1318747997283936, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9345, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.10733695652173914, | |
| "grad_norm": 2.4292728900909424, | |
| "learning_rate": 3e-05, | |
| "loss": 3.091, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.10760869565217392, | |
| "grad_norm": 2.3496248722076416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1994, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.1078804347826087, | |
| "grad_norm": 5.604576110839844, | |
| "learning_rate": 3e-05, | |
| "loss": 4.6392, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.10815217391304348, | |
| "grad_norm": 5.448110580444336, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4596, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.10842391304347826, | |
| "grad_norm": 3.2921388149261475, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5248, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.10869565217391304, | |
| "grad_norm": 3.1395716667175293, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1658, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.10896739130434782, | |
| "grad_norm": 2.3313381671905518, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7592, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.1092391304347826, | |
| "grad_norm": 2.9619717597961426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5212, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.1095108695652174, | |
| "grad_norm": 3.2616071701049805, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2052, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.10978260869565218, | |
| "grad_norm": 4.842064380645752, | |
| "learning_rate": 3e-05, | |
| "loss": 3.907, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.11005434782608696, | |
| "grad_norm": 3.6140100955963135, | |
| "learning_rate": 3e-05, | |
| "loss": 4.061, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.11032608695652174, | |
| "grad_norm": 1.4534454345703125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0512, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.11059782608695652, | |
| "grad_norm": 2.4747161865234375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7008, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.1108695652173913, | |
| "grad_norm": 3.0130977630615234, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9765, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.11114130434782608, | |
| "grad_norm": 2.256558656692505, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2158, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.11141304347826086, | |
| "grad_norm": 4.283812522888184, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4811, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.11168478260869565, | |
| "grad_norm": 2.566425323486328, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9403, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.11195652173913044, | |
| "grad_norm": 2.4623820781707764, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2361, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.11222826086956522, | |
| "grad_norm": 2.934722661972046, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0554, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.1125, | |
| "grad_norm": 5.524335861206055, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0103, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.11277173913043478, | |
| "grad_norm": 3.285830020904541, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6652, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.11304347826086956, | |
| "grad_norm": 2.5553982257843018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8115, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.11331521739130435, | |
| "grad_norm": 1.8953039646148682, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6769, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.11358695652173913, | |
| "grad_norm": 2.4014382362365723, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6343, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.11385869565217391, | |
| "grad_norm": 3.2362847328186035, | |
| "learning_rate": 3e-05, | |
| "loss": 3.722, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.11413043478260869, | |
| "grad_norm": 2.525406837463379, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2201, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.11440217391304348, | |
| "grad_norm": 2.492753028869629, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3864, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.11467391304347826, | |
| "grad_norm": 1.6389284133911133, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1505, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.11494565217391305, | |
| "grad_norm": 2.721895933151245, | |
| "learning_rate": 3e-05, | |
| "loss": 4.02, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.11521739130434783, | |
| "grad_norm": 2.1239542961120605, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3395, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.11548913043478261, | |
| "grad_norm": 2.3277628421783447, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3061, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.11576086956521739, | |
| "grad_norm": 1.7965201139450073, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8778, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.11603260869565217, | |
| "grad_norm": 2.9034996032714844, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8348, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.11630434782608695, | |
| "grad_norm": 2.0713295936584473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6901, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.11657608695652173, | |
| "grad_norm": 2.612128257751465, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1308, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.11684782608695653, | |
| "grad_norm": 3.535771369934082, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7629, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.11711956521739131, | |
| "grad_norm": 2.6418893337249756, | |
| "learning_rate": 3e-05, | |
| "loss": 3.351, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.11739130434782609, | |
| "grad_norm": 1.9122227430343628, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9162, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.11766304347826087, | |
| "grad_norm": 3.12231707572937, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6215, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.11793478260869565, | |
| "grad_norm": 2.5034008026123047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6675, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.11820652173913043, | |
| "grad_norm": 2.69254732131958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8847, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.11847826086956521, | |
| "grad_norm": 2.026611566543579, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9927, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.11875, | |
| "grad_norm": 3.05465030670166, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4363, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.11902173913043479, | |
| "grad_norm": 2.800114631652832, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0161, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.11929347826086957, | |
| "grad_norm": 2.452852725982666, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8825, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.11956521739130435, | |
| "grad_norm": 2.425046920776367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8342, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.11983695652173913, | |
| "grad_norm": 1.6898943185806274, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4458, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.12010869565217391, | |
| "grad_norm": 2.265521764755249, | |
| "learning_rate": 3e-05, | |
| "loss": 3.498, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.1203804347826087, | |
| "grad_norm": 3.3182766437530518, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0802, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.12065217391304348, | |
| "grad_norm": 3.441931962966919, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7694, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.12092391304347826, | |
| "grad_norm": 2.1982293128967285, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3313, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.12119565217391304, | |
| "grad_norm": 1.986126184463501, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3758, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.12146739130434783, | |
| "grad_norm": 1.6694939136505127, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1291, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.12173913043478261, | |
| "grad_norm": 3.418254852294922, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9384, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.1220108695652174, | |
| "grad_norm": 2.4987125396728516, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0415, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.12228260869565218, | |
| "grad_norm": 2.4183948040008545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4816, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.12255434782608696, | |
| "grad_norm": 2.753169059753418, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3146, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.12282608695652174, | |
| "grad_norm": 2.2018933296203613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5041, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.12309782608695652, | |
| "grad_norm": 1.910427451133728, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1185, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.1233695652173913, | |
| "grad_norm": 2.2100231647491455, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1726, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.12364130434782608, | |
| "grad_norm": 2.6990854740142822, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7998, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.12391304347826088, | |
| "grad_norm": 3.1403119564056396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3689, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.12418478260869566, | |
| "grad_norm": 1.9951990842819214, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0753, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.12445652173913044, | |
| "grad_norm": 2.348395347595215, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8548, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.12472826086956522, | |
| "grad_norm": 2.324969530105591, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3703, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.125, | |
| "grad_norm": 2.897873640060425, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4589, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.1252717391304348, | |
| "grad_norm": 2.81925892829895, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4818, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.12554347826086956, | |
| "grad_norm": 4.320977687835693, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3366, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.12581521739130436, | |
| "grad_norm": 3.2348084449768066, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1263, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.12608695652173912, | |
| "grad_norm": 2.6263833045959473, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9206, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.12635869565217392, | |
| "grad_norm": 4.796882629394531, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8013, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.1266304347826087, | |
| "grad_norm": 5.0804619789123535, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8332, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.12690217391304348, | |
| "grad_norm": 3.0131075382232666, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5667, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.12717391304347825, | |
| "grad_norm": 1.6071690320968628, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8863, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.12744565217391304, | |
| "grad_norm": 2.211411476135254, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2423, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.12771739130434784, | |
| "grad_norm": 2.4000439643859863, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3725, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.1279891304347826, | |
| "grad_norm": 2.421668529510498, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6009, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.1282608695652174, | |
| "grad_norm": 1.906244158744812, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4958, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.12853260869565217, | |
| "grad_norm": 1.7820132970809937, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0321, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.12880434782608696, | |
| "grad_norm": 2.929561138153076, | |
| "learning_rate": 3e-05, | |
| "loss": 3.788, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.12907608695652173, | |
| "grad_norm": 2.740144968032837, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6582, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.12934782608695652, | |
| "grad_norm": 2.153079032897949, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6739, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.1296195652173913, | |
| "grad_norm": 1.366892695426941, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5163, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.1298913043478261, | |
| "grad_norm": 2.3881099224090576, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7265, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.13016304347826088, | |
| "grad_norm": 1.72128164768219, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9679, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.13043478260869565, | |
| "grad_norm": 1.8024574518203735, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6759, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.13070652173913044, | |
| "grad_norm": 2.3349437713623047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9231, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.1309782608695652, | |
| "grad_norm": 2.60579776763916, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5484, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.13125, | |
| "grad_norm": 1.823648452758789, | |
| "learning_rate": 3e-05, | |
| "loss": 3.409, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.13152173913043477, | |
| "grad_norm": 1.939772367477417, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7185, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.13179347826086957, | |
| "grad_norm": 1.7059251070022583, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2223, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.13206521739130433, | |
| "grad_norm": 2.1309432983398438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5143, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.13233695652173913, | |
| "grad_norm": 2.5169553756713867, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9375, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.13260869565217392, | |
| "grad_norm": 1.6783947944641113, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0069, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.1328804347826087, | |
| "grad_norm": 2.5426528453826904, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4032, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.1331521739130435, | |
| "grad_norm": 2.8521511554718018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1236, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.13342391304347825, | |
| "grad_norm": 2.7477147579193115, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0802, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.13369565217391305, | |
| "grad_norm": 2.785457134246826, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8152, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.13396739130434782, | |
| "grad_norm": 3.6600492000579834, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6564, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.1342391304347826, | |
| "grad_norm": 4.777431488037109, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7449, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.13451086956521738, | |
| "grad_norm": 4.060612201690674, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1282, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.13478260869565217, | |
| "grad_norm": 4.2701592445373535, | |
| "learning_rate": 3e-05, | |
| "loss": 4.6185, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.13505434782608697, | |
| "grad_norm": 3.9511125087738037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2463, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.13532608695652174, | |
| "grad_norm": 3.8475091457366943, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0689, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.13559782608695653, | |
| "grad_norm": 5.655093669891357, | |
| "learning_rate": 3e-05, | |
| "loss": 4.024, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.1358695652173913, | |
| "grad_norm": 3.6835744380950928, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1939, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.1361413043478261, | |
| "grad_norm": 3.2142796516418457, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0483, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.13641304347826086, | |
| "grad_norm": 3.1059064865112305, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8777, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.13668478260869565, | |
| "grad_norm": 2.3298041820526123, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2728, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.13695652173913042, | |
| "grad_norm": 3.318361759185791, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0421, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.13722826086956522, | |
| "grad_norm": 2.8279385566711426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.49, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.1375, | |
| "grad_norm": 2.233304023742676, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3737, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.13777173913043478, | |
| "grad_norm": 2.943725824356079, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5774, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.13804347826086957, | |
| "grad_norm": 2.764371871948242, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7041, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.13831521739130434, | |
| "grad_norm": 2.647728443145752, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0545, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.13858695652173914, | |
| "grad_norm": 3.1184072494506836, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2394, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.1388586956521739, | |
| "grad_norm": 1.9611910581588745, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4171, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.1391304347826087, | |
| "grad_norm": 1.5122188329696655, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1593, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.13940217391304346, | |
| "grad_norm": 1.9166432619094849, | |
| "learning_rate": 3e-05, | |
| "loss": 3.21, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.13967391304347826, | |
| "grad_norm": 2.1466691493988037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4333, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.13994565217391305, | |
| "grad_norm": 2.217186689376831, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3885, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.14021739130434782, | |
| "grad_norm": 2.03560733795166, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9298, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.14048913043478262, | |
| "grad_norm": 2.214279890060425, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2791, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.14076086956521738, | |
| "grad_norm": 3.428405523300171, | |
| "learning_rate": 3e-05, | |
| "loss": 3.668, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.14103260869565218, | |
| "grad_norm": 3.077150344848633, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7187, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.14130434782608695, | |
| "grad_norm": 1.3983957767486572, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3129, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.14157608695652174, | |
| "grad_norm": 2.0241615772247314, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6655, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.14184782608695654, | |
| "grad_norm": 2.20249080657959, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2375, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.1421195652173913, | |
| "grad_norm": 2.2980117797851562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7075, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.1423913043478261, | |
| "grad_norm": 1.2177653312683105, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2919, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.14266304347826086, | |
| "grad_norm": 1.0807304382324219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2761, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.14293478260869566, | |
| "grad_norm": 1.4918198585510254, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6412, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.14320652173913043, | |
| "grad_norm": 1.3037655353546143, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4596, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.14347826086956522, | |
| "grad_norm": 1.1929515600204468, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4554, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.14375, | |
| "grad_norm": 1.7983393669128418, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0941, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.14402173913043478, | |
| "grad_norm": 1.4893770217895508, | |
| "learning_rate": 3e-05, | |
| "loss": 3.688, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.14429347826086958, | |
| "grad_norm": 1.4558385610580444, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6866, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.14456521739130435, | |
| "grad_norm": 1.4489972591400146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1238, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.14483695652173914, | |
| "grad_norm": 1.603597640991211, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9605, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.1451086956521739, | |
| "grad_norm": 1.1347497701644897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2066, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.1453804347826087, | |
| "grad_norm": 1.9727925062179565, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6686, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.14565217391304347, | |
| "grad_norm": 2.5670254230499268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3345, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.14592391304347826, | |
| "grad_norm": 2.017517328262329, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7092, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.14619565217391303, | |
| "grad_norm": 2.1940996646881104, | |
| "learning_rate": 3e-05, | |
| "loss": 3.807, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.14646739130434783, | |
| "grad_norm": 1.7565933465957642, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6645, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.14673913043478262, | |
| "grad_norm": 2.248276948928833, | |
| "learning_rate": 3e-05, | |
| "loss": 3.979, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.1470108695652174, | |
| "grad_norm": 2.3259811401367188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3435, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.14728260869565218, | |
| "grad_norm": 2.7300784587860107, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7696, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.14755434782608695, | |
| "grad_norm": 1.406214952468872, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6787, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.14782608695652175, | |
| "grad_norm": 2.6304099559783936, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7273, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.1480978260869565, | |
| "grad_norm": 4.7364912033081055, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4759, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.1483695652173913, | |
| "grad_norm": 5.363766193389893, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1787, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.14864130434782608, | |
| "grad_norm": 2.660940408706665, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2534, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.14891304347826087, | |
| "grad_norm": 3.186602830886841, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5762, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.14918478260869567, | |
| "grad_norm": 3.962216377258301, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5002, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.14945652173913043, | |
| "grad_norm": 5.419135093688965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6202, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.14972826086956523, | |
| "grad_norm": 4.870014667510986, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5467, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 3.175389289855957, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9385, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.1502717391304348, | |
| "grad_norm": 2.783637046813965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8554, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.15054347826086956, | |
| "grad_norm": 2.6941978931427, | |
| "learning_rate": 3e-05, | |
| "loss": 3.211, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.15081521739130435, | |
| "grad_norm": 3.185209274291992, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3476, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.15108695652173912, | |
| "grad_norm": 2.922362804412842, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0941, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.1513586956521739, | |
| "grad_norm": 2.7833430767059326, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3851, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.1516304347826087, | |
| "grad_norm": 1.4858049154281616, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7326, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.15190217391304348, | |
| "grad_norm": 2.9602174758911133, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2037, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.15217391304347827, | |
| "grad_norm": 3.0462303161621094, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2449, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.15244565217391304, | |
| "grad_norm": 3.038954019546509, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3796, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.15271739130434783, | |
| "grad_norm": 1.572611689567566, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1521, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.1529891304347826, | |
| "grad_norm": 2.4640631675720215, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9771, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.1532608695652174, | |
| "grad_norm": 1.757712483406067, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9893, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.15353260869565216, | |
| "grad_norm": 2.606873035430908, | |
| "learning_rate": 3e-05, | |
| "loss": 3.702, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.15380434782608696, | |
| "grad_norm": 1.677304983139038, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2607, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.15407608695652175, | |
| "grad_norm": 2.4729526042938232, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9865, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.15434782608695652, | |
| "grad_norm": 2.162191867828369, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6349, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.1546195652173913, | |
| "grad_norm": 3.027984380722046, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8996, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.15489130434782608, | |
| "grad_norm": 1.9820539951324463, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3207, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.15516304347826088, | |
| "grad_norm": 1.7880254983901978, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6082, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.15543478260869564, | |
| "grad_norm": 1.9113103151321411, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2639, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.15570652173913044, | |
| "grad_norm": 1.7418012619018555, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4052, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.1559782608695652, | |
| "grad_norm": 1.6183768510818481, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8061, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.15625, | |
| "grad_norm": 1.33986496925354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3393, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.1565217391304348, | |
| "grad_norm": 1.4486945867538452, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7729, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.15679347826086956, | |
| "grad_norm": 1.7886120080947876, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8346, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.15706521739130436, | |
| "grad_norm": 1.2352441549301147, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2718, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.15733695652173912, | |
| "grad_norm": 1.8679944276809692, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4247, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.15760869565217392, | |
| "grad_norm": 1.689501404762268, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0971, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.1578804347826087, | |
| "grad_norm": 1.6779184341430664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8803, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.15815217391304348, | |
| "grad_norm": 2.0760598182678223, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5429, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.15842391304347825, | |
| "grad_norm": 1.8308002948760986, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4797, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.15869565217391304, | |
| "grad_norm": 2.4416396617889404, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2491, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.15896739130434784, | |
| "grad_norm": 2.365509271621704, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4862, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.1592391304347826, | |
| "grad_norm": 4.06919002532959, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0302, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.1595108695652174, | |
| "grad_norm": 2.430508613586426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8575, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.15978260869565217, | |
| "grad_norm": 2.2944397926330566, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3022, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.16005434782608696, | |
| "grad_norm": 4.096346855163574, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0663, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.16032608695652173, | |
| "grad_norm": 3.8741347789764404, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3398, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.16059782608695652, | |
| "grad_norm": 3.7039456367492676, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1813, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.1608695652173913, | |
| "grad_norm": 2.129674196243286, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0778, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.1611413043478261, | |
| "grad_norm": 2.404423952102661, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7275, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.16141304347826088, | |
| "grad_norm": 2.341529607772827, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9763, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.16168478260869565, | |
| "grad_norm": 4.927234649658203, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1287, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.16195652173913044, | |
| "grad_norm": 4.529046535491943, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7712, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.1622282608695652, | |
| "grad_norm": 4.280128479003906, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7557, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.1625, | |
| "grad_norm": 2.845654249191284, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4817, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.16277173913043477, | |
| "grad_norm": 2.5575075149536133, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0366, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.16304347826086957, | |
| "grad_norm": 3.475111722946167, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2081, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.16331521739130433, | |
| "grad_norm": 3.926103353500366, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8455, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.16358695652173913, | |
| "grad_norm": 3.796124219894409, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4857, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.16385869565217392, | |
| "grad_norm": 2.974625825881958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3609, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.1641304347826087, | |
| "grad_norm": 1.9188250303268433, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0467, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.1644021739130435, | |
| "grad_norm": 2.922881841659546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.969, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.16467391304347825, | |
| "grad_norm": 2.9243340492248535, | |
| "learning_rate": 3e-05, | |
| "loss": 3.418, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.16494565217391305, | |
| "grad_norm": 2.3607518672943115, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3568, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.16521739130434782, | |
| "grad_norm": 1.4109339714050293, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1942, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.1654891304347826, | |
| "grad_norm": 2.339900016784668, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6621, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.16576086956521738, | |
| "grad_norm": 3.0392491817474365, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6538, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.16603260869565217, | |
| "grad_norm": 2.5297353267669678, | |
| "learning_rate": 3e-05, | |
| "loss": 3.72, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.16630434782608697, | |
| "grad_norm": 2.1570205688476562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1765, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.16657608695652174, | |
| "grad_norm": 1.5125519037246704, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5076, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.16684782608695653, | |
| "grad_norm": 1.5017513036727905, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5962, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.1671195652173913, | |
| "grad_norm": 2.1280760765075684, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4612, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.1673913043478261, | |
| "grad_norm": 1.6511037349700928, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3281, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.16766304347826086, | |
| "grad_norm": 1.842786431312561, | |
| "learning_rate": 3e-05, | |
| "loss": 2.876, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.16793478260869565, | |
| "grad_norm": 2.008168935775757, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7421, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.16820652173913042, | |
| "grad_norm": 2.965365409851074, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9864, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.16847826086956522, | |
| "grad_norm": 3.619001626968384, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1181, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.16875, | |
| "grad_norm": 3.3966426849365234, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2355, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.16902173913043478, | |
| "grad_norm": 2.4561333656311035, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0077, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.16929347826086957, | |
| "grad_norm": 3.7762365341186523, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8245, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.16956521739130434, | |
| "grad_norm": 4.286952495574951, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4895, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.16983695652173914, | |
| "grad_norm": 3.7213828563690186, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9799, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.1701086956521739, | |
| "grad_norm": 3.109027862548828, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8767, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.1703804347826087, | |
| "grad_norm": 2.4418489933013916, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7539, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.17065217391304346, | |
| "grad_norm": 2.3365840911865234, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0382, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.17092391304347826, | |
| "grad_norm": 2.649116039276123, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0011, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.17119565217391305, | |
| "grad_norm": 4.0966105461120605, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5889, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.17146739130434782, | |
| "grad_norm": 4.70590877532959, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4292, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.17173913043478262, | |
| "grad_norm": 4.246743202209473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3927, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.17201086956521738, | |
| "grad_norm": 3.4085919857025146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5786, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.17228260869565218, | |
| "grad_norm": 3.164703607559204, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4236, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.17255434782608695, | |
| "grad_norm": 4.914327144622803, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2673, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.17282608695652174, | |
| "grad_norm": 4.016328811645508, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5927, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.17309782608695654, | |
| "grad_norm": 3.3188321590423584, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9687, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.1733695652173913, | |
| "grad_norm": 3.4671499729156494, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3144, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.1736413043478261, | |
| "grad_norm": 2.273728609085083, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2453, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.17391304347826086, | |
| "grad_norm": 2.1210081577301025, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0778, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.17418478260869566, | |
| "grad_norm": 3.6642115116119385, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7503, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.17445652173913043, | |
| "grad_norm": 3.753002166748047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9504, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.17472826086956522, | |
| "grad_norm": 2.683023691177368, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5103, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.175, | |
| "grad_norm": 2.7764129638671875, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8473, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.17527173913043478, | |
| "grad_norm": 2.340895652770996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3381, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.17554347826086958, | |
| "grad_norm": 3.224985122680664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7218, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.17581521739130435, | |
| "grad_norm": 4.798543930053711, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2621, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.17608695652173914, | |
| "grad_norm": 3.2494759559631348, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4175, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.1763586956521739, | |
| "grad_norm": 1.9543368816375732, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9133, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.1766304347826087, | |
| "grad_norm": 1.7534743547439575, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2838, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.17690217391304347, | |
| "grad_norm": 1.8143278360366821, | |
| "learning_rate": 3e-05, | |
| "loss": 3.015, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.17717391304347826, | |
| "grad_norm": 3.4449610710144043, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7868, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.17744565217391303, | |
| "grad_norm": 2.250290632247925, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6572, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.17771739130434783, | |
| "grad_norm": 2.3306338787078857, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6463, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.17798913043478262, | |
| "grad_norm": 1.6108341217041016, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3977, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.1782608695652174, | |
| "grad_norm": 1.7349133491516113, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5826, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.17853260869565218, | |
| "grad_norm": 2.5024263858795166, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2271, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.17880434782608695, | |
| "grad_norm": 2.2973721027374268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8238, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.17907608695652175, | |
| "grad_norm": 2.167056083679199, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5719, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.1793478260869565, | |
| "grad_norm": 1.8604989051818848, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9091, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.1796195652173913, | |
| "grad_norm": 1.7601895332336426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8582, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.17989130434782608, | |
| "grad_norm": 1.5462661981582642, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9604, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.18016304347826087, | |
| "grad_norm": 1.9906418323516846, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8877, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.18043478260869567, | |
| "grad_norm": 2.1721136569976807, | |
| "learning_rate": 3e-05, | |
| "loss": 4.241, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.18070652173913043, | |
| "grad_norm": 1.7870231866836548, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0484, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.18097826086956523, | |
| "grad_norm": 1.543925166130066, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1115, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.18125, | |
| "grad_norm": 1.9159563779830933, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3634, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.1815217391304348, | |
| "grad_norm": 2.8006904125213623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7725, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.18179347826086956, | |
| "grad_norm": 2.8094773292541504, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5421, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.18206521739130435, | |
| "grad_norm": 2.0162885189056396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4805, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.18233695652173912, | |
| "grad_norm": 2.175633430480957, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3937, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.1826086956521739, | |
| "grad_norm": 2.4267466068267822, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1429, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.1828804347826087, | |
| "grad_norm": 4.546562194824219, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1168, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.18315217391304348, | |
| "grad_norm": 3.3911025524139404, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7966, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.18342391304347827, | |
| "grad_norm": 1.7894442081451416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8492, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.18369565217391304, | |
| "grad_norm": 2.7028391361236572, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5555, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.18396739130434783, | |
| "grad_norm": 3.052639961242676, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9254, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.1842391304347826, | |
| "grad_norm": 2.4930381774902344, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4053, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.1845108695652174, | |
| "grad_norm": 1.9123059511184692, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5101, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.18478260869565216, | |
| "grad_norm": 1.5995222330093384, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1762, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.18505434782608696, | |
| "grad_norm": 4.272251129150391, | |
| "learning_rate": 3e-05, | |
| "loss": 4.397, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.18532608695652175, | |
| "grad_norm": 3.0316965579986572, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8721, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.18559782608695652, | |
| "grad_norm": 2.0125246047973633, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3782, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.1858695652173913, | |
| "grad_norm": 1.8079962730407715, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4851, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.18614130434782608, | |
| "grad_norm": 1.8277610540390015, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8756, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.18641304347826088, | |
| "grad_norm": 2.4962170124053955, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2237, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.18668478260869564, | |
| "grad_norm": 1.3268901109695435, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1177, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.18695652173913044, | |
| "grad_norm": 2.3978965282440186, | |
| "learning_rate": 3e-05, | |
| "loss": 3.583, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.1872282608695652, | |
| "grad_norm": 1.2693482637405396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3973, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.1875, | |
| "grad_norm": 2.0299911499023438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6627, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.1877717391304348, | |
| "grad_norm": 1.2860682010650635, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2345, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.18804347826086956, | |
| "grad_norm": 2.264256477355957, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2411, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.18831521739130436, | |
| "grad_norm": 1.5267597436904907, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1942, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.18858695652173912, | |
| "grad_norm": 1.2684136629104614, | |
| "learning_rate": 3e-05, | |
| "loss": 3.192, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.18885869565217392, | |
| "grad_norm": 1.6705724000930786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6082, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.1891304347826087, | |
| "grad_norm": 2.0569725036621094, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9019, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.18940217391304348, | |
| "grad_norm": 2.03778076171875, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0118, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.18967391304347825, | |
| "grad_norm": 1.7906421422958374, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7659, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.18994565217391304, | |
| "grad_norm": 1.6202226877212524, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3228, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.19021739130434784, | |
| "grad_norm": 1.568548560142517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3992, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.1904891304347826, | |
| "grad_norm": 1.3486888408660889, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7886, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.1907608695652174, | |
| "grad_norm": 2.013223171234131, | |
| "learning_rate": 3e-05, | |
| "loss": 3.365, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.19103260869565217, | |
| "grad_norm": 1.7130358219146729, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7516, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.19130434782608696, | |
| "grad_norm": 1.7324903011322021, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1275, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.19157608695652173, | |
| "grad_norm": 1.7227182388305664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1415, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.19184782608695652, | |
| "grad_norm": 1.3472079038619995, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1949, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.1921195652173913, | |
| "grad_norm": 1.1573243141174316, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8314, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.1923913043478261, | |
| "grad_norm": 1.6137231588363647, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6169, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.19266304347826088, | |
| "grad_norm": 1.4710439443588257, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4097, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.19293478260869565, | |
| "grad_norm": 2.347607374191284, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1638, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.19320652173913044, | |
| "grad_norm": 1.4299064874649048, | |
| "learning_rate": 3e-05, | |
| "loss": 3.811, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.1934782608695652, | |
| "grad_norm": 1.5702636241912842, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5396, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.19375, | |
| "grad_norm": 1.8417882919311523, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5613, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.19402173913043477, | |
| "grad_norm": 1.4125804901123047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1808, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.19429347826086957, | |
| "grad_norm": 1.3733165264129639, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3127, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.19456521739130433, | |
| "grad_norm": 2.1651833057403564, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1732, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.19483695652173913, | |
| "grad_norm": 1.4096370935440063, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5565, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.19510869565217392, | |
| "grad_norm": 1.7251945734024048, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7342, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.1953804347826087, | |
| "grad_norm": 1.643470287322998, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9678, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.1956521739130435, | |
| "grad_norm": 1.5827432870864868, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1044, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.19592391304347825, | |
| "grad_norm": 1.4647548198699951, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8413, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.19619565217391305, | |
| "grad_norm": 2.1561598777770996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4492, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.19646739130434782, | |
| "grad_norm": 1.6970568895339966, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1848, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.1967391304347826, | |
| "grad_norm": 2.248117685317993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6296, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.19701086956521738, | |
| "grad_norm": 3.168339252471924, | |
| "learning_rate": 3e-05, | |
| "loss": 4.291, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.19728260869565217, | |
| "grad_norm": 3.802591323852539, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9495, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.19755434782608697, | |
| "grad_norm": 3.848099708557129, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6353, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.19782608695652174, | |
| "grad_norm": 3.534794569015503, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9035, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.19809782608695653, | |
| "grad_norm": 1.7165888547897339, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3266, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.1983695652173913, | |
| "grad_norm": 2.5262253284454346, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6359, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.1986413043478261, | |
| "grad_norm": 2.5349607467651367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.129, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.19891304347826086, | |
| "grad_norm": 5.680181503295898, | |
| "learning_rate": 3e-05, | |
| "loss": 4.8488, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.19918478260869565, | |
| "grad_norm": 3.0405654907226562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3313, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.19945652173913042, | |
| "grad_norm": 1.6890621185302734, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2893, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.19972826086956522, | |
| "grad_norm": 2.204172372817993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5067, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 2.858398914337158, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4036, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.20027173913043478, | |
| "grad_norm": 2.4763529300689697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5171, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.20054347826086957, | |
| "grad_norm": 2.957118034362793, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1728, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.20081521739130434, | |
| "grad_norm": 1.8309071063995361, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9663, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.20108695652173914, | |
| "grad_norm": 2.270033121109009, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6115, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.2013586956521739, | |
| "grad_norm": 2.1604442596435547, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2511, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.2016304347826087, | |
| "grad_norm": 3.5561349391937256, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8886, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.20190217391304346, | |
| "grad_norm": 2.510963201522827, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4812, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.20217391304347826, | |
| "grad_norm": 1.5206657648086548, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0231, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.20244565217391305, | |
| "grad_norm": 1.645155906677246, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0951, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.20271739130434782, | |
| "grad_norm": 2.075399160385132, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4057, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.20298913043478262, | |
| "grad_norm": 2.36306095123291, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6449, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.20326086956521738, | |
| "grad_norm": 1.9054253101348877, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3983, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.20353260869565218, | |
| "grad_norm": 1.7470569610595703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7903, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.20380434782608695, | |
| "grad_norm": 1.6297615766525269, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0304, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.20407608695652174, | |
| "grad_norm": 3.2830233573913574, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2781, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.20434782608695654, | |
| "grad_norm": 1.361449122428894, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2224, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.2046195652173913, | |
| "grad_norm": 1.4894417524337769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.262, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.2048913043478261, | |
| "grad_norm": 1.7271361351013184, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3017, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.20516304347826086, | |
| "grad_norm": 2.033215284347534, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9264, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.20543478260869566, | |
| "grad_norm": 2.808903455734253, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7223, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.20570652173913043, | |
| "grad_norm": 1.450661540031433, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0346, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.20597826086956522, | |
| "grad_norm": 1.9796857833862305, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2978, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.20625, | |
| "grad_norm": 2.9408633708953857, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7247, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.20652173913043478, | |
| "grad_norm": 1.4558497667312622, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0258, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.20679347826086958, | |
| "grad_norm": 1.1229232549667358, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9328, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.20706521739130435, | |
| "grad_norm": 1.663891077041626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2935, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.20733695652173914, | |
| "grad_norm": 1.8489412069320679, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8728, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.2076086956521739, | |
| "grad_norm": 1.359864592552185, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6552, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.2078804347826087, | |
| "grad_norm": 1.5933263301849365, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2384, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.20815217391304347, | |
| "grad_norm": 3.0126466751098633, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4475, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.20842391304347826, | |
| "grad_norm": 2.608242988586426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2701, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.20869565217391303, | |
| "grad_norm": 2.7247979640960693, | |
| "learning_rate": 3e-05, | |
| "loss": 3.96, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.20896739130434783, | |
| "grad_norm": 1.8706130981445312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0889, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.20923913043478262, | |
| "grad_norm": 2.668093681335449, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4351, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.2095108695652174, | |
| "grad_norm": 3.017181396484375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2463, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.20978260869565218, | |
| "grad_norm": 3.0812008380889893, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3998, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.21005434782608695, | |
| "grad_norm": 2.7510030269622803, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2353, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.21032608695652175, | |
| "grad_norm": 2.4355452060699463, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8853, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.2105978260869565, | |
| "grad_norm": 4.041244983673096, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0771, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.2108695652173913, | |
| "grad_norm": 3.403510332107544, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6963, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.21114130434782608, | |
| "grad_norm": 3.216482400894165, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6608, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.21141304347826087, | |
| "grad_norm": 2.5989348888397217, | |
| "learning_rate": 3e-05, | |
| "loss": 3.689, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.21168478260869567, | |
| "grad_norm": 2.0239574909210205, | |
| "learning_rate": 3e-05, | |
| "loss": 3.681, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.21195652173913043, | |
| "grad_norm": 3.3878705501556396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9301, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.21222826086956523, | |
| "grad_norm": 4.106480598449707, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2929, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.2125, | |
| "grad_norm": 3.4818196296691895, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8562, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.2127717391304348, | |
| "grad_norm": 3.4106242656707764, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6078, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.21304347826086956, | |
| "grad_norm": 2.401315450668335, | |
| "learning_rate": 3e-05, | |
| "loss": 3.693, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.21331521739130435, | |
| "grad_norm": 3.092789649963379, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6907, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.21358695652173912, | |
| "grad_norm": 2.8016254901885986, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7602, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.2138586956521739, | |
| "grad_norm": 2.281668186187744, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9451, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.2141304347826087, | |
| "grad_norm": 2.3392529487609863, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1969, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.21440217391304348, | |
| "grad_norm": 1.7437257766723633, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2468, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.21467391304347827, | |
| "grad_norm": 2.128056049346924, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5476, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.21494565217391304, | |
| "grad_norm": 2.1912710666656494, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7689, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.21521739130434783, | |
| "grad_norm": 2.5414609909057617, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4014, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.2154891304347826, | |
| "grad_norm": 2.6355223655700684, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3787, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.2157608695652174, | |
| "grad_norm": 2.1927592754364014, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2381, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.21603260869565216, | |
| "grad_norm": 1.7231847047805786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4627, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.21630434782608696, | |
| "grad_norm": 1.8754600286483765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7409, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.21657608695652175, | |
| "grad_norm": 1.7413159608840942, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6012, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.21684782608695652, | |
| "grad_norm": 1.9197379350662231, | |
| "learning_rate": 3e-05, | |
| "loss": 3.15, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.2171195652173913, | |
| "grad_norm": 2.100706100463867, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8748, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.21739130434782608, | |
| "grad_norm": 2.7185757160186768, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7859, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.21766304347826088, | |
| "grad_norm": 3.241926908493042, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7198, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.21793478260869564, | |
| "grad_norm": 2.365151882171631, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7607, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.21820652173913044, | |
| "grad_norm": 2.0234482288360596, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1843, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.2184782608695652, | |
| "grad_norm": 3.084533929824829, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1945, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.21875, | |
| "grad_norm": 2.927222490310669, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3531, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.2190217391304348, | |
| "grad_norm": 2.6623260974884033, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5994, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.21929347826086956, | |
| "grad_norm": 2.8344593048095703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6001, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.21956521739130436, | |
| "grad_norm": 1.8181391954421997, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9434, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.21983695652173912, | |
| "grad_norm": 2.103512763977051, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8034, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.22010869565217392, | |
| "grad_norm": 1.961503505706787, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0883, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.2203804347826087, | |
| "grad_norm": 2.3394432067871094, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8568, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.22065217391304348, | |
| "grad_norm": 2.6501801013946533, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2469, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.22092391304347825, | |
| "grad_norm": 2.7663819789886475, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3066, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.22119565217391304, | |
| "grad_norm": 2.4567878246307373, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0416, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.22146739130434784, | |
| "grad_norm": 2.378188371658325, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6954, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.2217391304347826, | |
| "grad_norm": 2.611842632293701, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3243, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.2220108695652174, | |
| "grad_norm": 2.1804463863372803, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9858, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.22228260869565217, | |
| "grad_norm": 2.03049373626709, | |
| "learning_rate": 3e-05, | |
| "loss": 2.787, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.22255434782608696, | |
| "grad_norm": 1.8013122081756592, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9299, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.22282608695652173, | |
| "grad_norm": 2.039745569229126, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2247, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.22309782608695652, | |
| "grad_norm": 1.6249157190322876, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8942, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.2233695652173913, | |
| "grad_norm": 1.8277668952941895, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5634, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.2236413043478261, | |
| "grad_norm": 2.2749369144439697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6253, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.22391304347826088, | |
| "grad_norm": 2.3847649097442627, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3725, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.22418478260869565, | |
| "grad_norm": 1.9465465545654297, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4556, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.22445652173913044, | |
| "grad_norm": 2.793731927871704, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0801, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.2247282608695652, | |
| "grad_norm": 2.605842113494873, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0396, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.225, | |
| "grad_norm": 2.200025796890259, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7514, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.22527173913043477, | |
| "grad_norm": 1.8318731784820557, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3619, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.22554347826086957, | |
| "grad_norm": 1.7559170722961426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3115, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.22581521739130433, | |
| "grad_norm": 1.7032020092010498, | |
| "learning_rate": 3e-05, | |
| "loss": 3.113, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.22608695652173913, | |
| "grad_norm": 1.8972278833389282, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1965, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.22635869565217392, | |
| "grad_norm": 1.9449539184570312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2422, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.2266304347826087, | |
| "grad_norm": 1.6739164590835571, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2255, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.2269021739130435, | |
| "grad_norm": 1.7071807384490967, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4032, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.22717391304347825, | |
| "grad_norm": 3.5912764072418213, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1757, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.22744565217391305, | |
| "grad_norm": 1.969997763633728, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1138, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.22771739130434782, | |
| "grad_norm": 1.9781277179718018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5066, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.2279891304347826, | |
| "grad_norm": 1.6115202903747559, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9759, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.22826086956521738, | |
| "grad_norm": 1.3473846912384033, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0617, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.22853260869565217, | |
| "grad_norm": 1.6643739938735962, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4268, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.22880434782608697, | |
| "grad_norm": 1.9249311685562134, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6207, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.22907608695652174, | |
| "grad_norm": 2.079634666442871, | |
| "learning_rate": 3e-05, | |
| "loss": 3.534, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.22934782608695653, | |
| "grad_norm": 1.8367491960525513, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9664, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.2296195652173913, | |
| "grad_norm": 1.7043336629867554, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4275, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.2298913043478261, | |
| "grad_norm": 1.6460905075073242, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1771, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.23016304347826086, | |
| "grad_norm": 2.0371439456939697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3044, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.23043478260869565, | |
| "grad_norm": 1.5710325241088867, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0044, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.23070652173913042, | |
| "grad_norm": 1.6866766214370728, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9916, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.23097826086956522, | |
| "grad_norm": 2.127194404602051, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5753, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.23125, | |
| "grad_norm": 2.0354788303375244, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8992, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.23152173913043478, | |
| "grad_norm": 1.5567448139190674, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4486, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.23179347826086957, | |
| "grad_norm": 1.3115332126617432, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1354, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.23206521739130434, | |
| "grad_norm": 1.3727189302444458, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0521, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.23233695652173914, | |
| "grad_norm": 1.3423751592636108, | |
| "learning_rate": 3e-05, | |
| "loss": 2.908, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.2326086956521739, | |
| "grad_norm": 1.61993408203125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2873, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.2328804347826087, | |
| "grad_norm": 1.8327138423919678, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6789, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.23315217391304346, | |
| "grad_norm": 1.626006007194519, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0027, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.23342391304347826, | |
| "grad_norm": 2.236844539642334, | |
| "learning_rate": 3e-05, | |
| "loss": 4.083, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.23369565217391305, | |
| "grad_norm": 2.1321239471435547, | |
| "learning_rate": 3e-05, | |
| "loss": 3.455, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.23396739130434782, | |
| "grad_norm": 2.1679024696350098, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8644, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.23423913043478262, | |
| "grad_norm": 1.9821666479110718, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5024, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.23451086956521738, | |
| "grad_norm": 2.8352460861206055, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0082, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.23478260869565218, | |
| "grad_norm": 2.2241246700286865, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8031, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.23505434782608695, | |
| "grad_norm": 2.1706037521362305, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0421, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.23532608695652174, | |
| "grad_norm": 1.695703148841858, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9672, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.23559782608695654, | |
| "grad_norm": 1.994674563407898, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3449, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.2358695652173913, | |
| "grad_norm": 2.692945957183838, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1761, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.2361413043478261, | |
| "grad_norm": 2.5395121574401855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.921, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.23641304347826086, | |
| "grad_norm": 1.991248607635498, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.23668478260869566, | |
| "grad_norm": 2.2419328689575195, | |
| "learning_rate": 3e-05, | |
| "loss": 3.501, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.23695652173913043, | |
| "grad_norm": 1.9551500082015991, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8966, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.23722826086956522, | |
| "grad_norm": 1.9755557775497437, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7852, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.2375, | |
| "grad_norm": 1.964316487312317, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4841, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.23777173913043478, | |
| "grad_norm": 1.667006015777588, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3479, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.23804347826086958, | |
| "grad_norm": 1.3252580165863037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1133, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.23831521739130435, | |
| "grad_norm": 1.904585361480713, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7604, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.23858695652173914, | |
| "grad_norm": 1.581666350364685, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3015, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.2388586956521739, | |
| "grad_norm": 1.2517045736312866, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2687, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.2391304347826087, | |
| "grad_norm": 1.5604926347732544, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0638, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.23940217391304347, | |
| "grad_norm": 1.9475339651107788, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8968, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.23967391304347826, | |
| "grad_norm": 1.528891921043396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0379, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.23994565217391303, | |
| "grad_norm": 1.838294506072998, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7645, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.24021739130434783, | |
| "grad_norm": 1.5220407247543335, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9417, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.24048913043478262, | |
| "grad_norm": 1.4615991115570068, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4286, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.2407608695652174, | |
| "grad_norm": 2.569244623184204, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9001, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.24103260869565218, | |
| "grad_norm": 2.3717269897460938, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2076, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.24130434782608695, | |
| "grad_norm": 1.8509140014648438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2101, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.24157608695652175, | |
| "grad_norm": 2.0046916007995605, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9609, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.2418478260869565, | |
| "grad_norm": 2.2359912395477295, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6416, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.2421195652173913, | |
| "grad_norm": 1.8625085353851318, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3007, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.24239130434782608, | |
| "grad_norm": 2.0958662033081055, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7472, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.24266304347826087, | |
| "grad_norm": 2.5501413345336914, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0367, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.24293478260869567, | |
| "grad_norm": 3.020587921142578, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7357, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.24320652173913043, | |
| "grad_norm": 2.0165812969207764, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6235, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.24347826086956523, | |
| "grad_norm": 3.1172547340393066, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1981, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.24375, | |
| "grad_norm": 2.381237268447876, | |
| "learning_rate": 3e-05, | |
| "loss": 3.104, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.2440217391304348, | |
| "grad_norm": 3.345118522644043, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1412, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.24429347826086956, | |
| "grad_norm": 2.8453972339630127, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3134, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.24456521739130435, | |
| "grad_norm": 1.9727962017059326, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1409, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.24483695652173912, | |
| "grad_norm": 1.3924411535263062, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2174, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.2451086956521739, | |
| "grad_norm": 1.8885339498519897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4122, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.2453804347826087, | |
| "grad_norm": 2.3274450302124023, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2905, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.24565217391304348, | |
| "grad_norm": 1.877142310142517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4118, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.24592391304347827, | |
| "grad_norm": 1.8108350038528442, | |
| "learning_rate": 3e-05, | |
| "loss": 3.206, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.24619565217391304, | |
| "grad_norm": 1.6787278652191162, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2236, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.24646739130434783, | |
| "grad_norm": 2.007154703140259, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1134, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.2467391304347826, | |
| "grad_norm": 1.9029121398925781, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2066, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.2470108695652174, | |
| "grad_norm": 1.5358937978744507, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3406, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.24728260869565216, | |
| "grad_norm": 1.706233263015747, | |
| "learning_rate": 3e-05, | |
| "loss": 3.368, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.24755434782608696, | |
| "grad_norm": 2.283039093017578, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1815, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.24782608695652175, | |
| "grad_norm": 1.6811976432800293, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8339, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.24809782608695652, | |
| "grad_norm": 1.6838414669036865, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6558, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.2483695652173913, | |
| "grad_norm": 1.451900839805603, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2279, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.24864130434782608, | |
| "grad_norm": 2.3503222465515137, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9484, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.24891304347826088, | |
| "grad_norm": 2.001974582672119, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3028, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.24918478260869564, | |
| "grad_norm": 1.5036247968673706, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2459, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.24945652173913044, | |
| "grad_norm": 2.033236026763916, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4323, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.2497282608695652, | |
| "grad_norm": 1.4043093919754028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0965, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.203482985496521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1118, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.25027173913043477, | |
| "grad_norm": 1.4571930170059204, | |
| "learning_rate": 3e-05, | |
| "loss": 3.351, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.2505434782608696, | |
| "grad_norm": 0.9827572703361511, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8122, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.25081521739130436, | |
| "grad_norm": 1.520359754562378, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6678, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.2510869565217391, | |
| "grad_norm": 1.277106761932373, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9916, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.2513586956521739, | |
| "grad_norm": 1.7534111738204956, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9959, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.2516304347826087, | |
| "grad_norm": 1.031008005142212, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9768, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.2519021739130435, | |
| "grad_norm": 1.1608198881149292, | |
| "learning_rate": 3e-05, | |
| "loss": 3.456, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.25217391304347825, | |
| "grad_norm": 1.4016661643981934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4423, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.25244565217391307, | |
| "grad_norm": 1.1200820207595825, | |
| "learning_rate": 3e-05, | |
| "loss": 2.897, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.25271739130434784, | |
| "grad_norm": 1.4193557500839233, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7882, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.2529891304347826, | |
| "grad_norm": 1.2631326913833618, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8642, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.2532608695652174, | |
| "grad_norm": 1.6645845174789429, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7794, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.2535326086956522, | |
| "grad_norm": 1.7016172409057617, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3343, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.25380434782608696, | |
| "grad_norm": 1.1917381286621094, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3563, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.25407608695652173, | |
| "grad_norm": 1.4421312808990479, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6694, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.2543478260869565, | |
| "grad_norm": 1.2393507957458496, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8572, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.2546195652173913, | |
| "grad_norm": 1.4665427207946777, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5944, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.2548913043478261, | |
| "grad_norm": 1.3151808977127075, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1136, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.25516304347826085, | |
| "grad_norm": 1.3733018636703491, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1559, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.2554347826086957, | |
| "grad_norm": 1.638494610786438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5616, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.25570652173913044, | |
| "grad_norm": 1.5186861753463745, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7529, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.2559782608695652, | |
| "grad_norm": 1.7179397344589233, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8634, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.25625, | |
| "grad_norm": 1.9366356134414673, | |
| "learning_rate": 3e-05, | |
| "loss": 3.785, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.2565217391304348, | |
| "grad_norm": 2.2738037109375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7442, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.25679347826086957, | |
| "grad_norm": 2.67051100730896, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8186, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.25706521739130433, | |
| "grad_norm": 3.3741190433502197, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2118, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.25733695652173916, | |
| "grad_norm": 1.6262043714523315, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0208, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.2576086956521739, | |
| "grad_norm": 1.7342244386672974, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1897, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.2578804347826087, | |
| "grad_norm": 2.0924177169799805, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7324, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.25815217391304346, | |
| "grad_norm": 2.21530818939209, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1098, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.2584239130434783, | |
| "grad_norm": 1.642945647239685, | |
| "learning_rate": 3e-05, | |
| "loss": 3.58, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.25869565217391305, | |
| "grad_norm": 1.4641486406326294, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4771, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.2589673913043478, | |
| "grad_norm": 1.4831671714782715, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0967, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.2592391304347826, | |
| "grad_norm": 1.605242133140564, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3319, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.2595108695652174, | |
| "grad_norm": 2.312819480895996, | |
| "learning_rate": 3e-05, | |
| "loss": 4.214, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.2597826086956522, | |
| "grad_norm": 1.751943588256836, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3116, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.26005434782608694, | |
| "grad_norm": 1.951372504234314, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4478, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.26032608695652176, | |
| "grad_norm": 2.3981149196624756, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4107, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.26059782608695653, | |
| "grad_norm": 2.448025703430176, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0686, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.2608695652173913, | |
| "grad_norm": 2.1298911571502686, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4676, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.26114130434782606, | |
| "grad_norm": 1.9788066148757935, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4239, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.2614130434782609, | |
| "grad_norm": 1.6203289031982422, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2623, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.26168478260869565, | |
| "grad_norm": 1.7709441184997559, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5855, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.2619565217391304, | |
| "grad_norm": 1.4671415090560913, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2081, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.26222826086956524, | |
| "grad_norm": 1.2123912572860718, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0287, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.2625, | |
| "grad_norm": 1.9638895988464355, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4101, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.2627717391304348, | |
| "grad_norm": 1.6470272541046143, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1761, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.26304347826086955, | |
| "grad_norm": 1.7481547594070435, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7102, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.26331521739130437, | |
| "grad_norm": 1.589220404624939, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9376, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.26358695652173914, | |
| "grad_norm": 2.317216634750366, | |
| "learning_rate": 3e-05, | |
| "loss": 3.888, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.2638586956521739, | |
| "grad_norm": 1.9831353425979614, | |
| "learning_rate": 3e-05, | |
| "loss": 3.831, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.26413043478260867, | |
| "grad_norm": 2.201885223388672, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5633, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.2644021739130435, | |
| "grad_norm": 1.755304217338562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9027, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.26467391304347826, | |
| "grad_norm": 1.7514090538024902, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3256, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.264945652173913, | |
| "grad_norm": 2.1359901428222656, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9904, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.26521739130434785, | |
| "grad_norm": 1.5768872499465942, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3242, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.2654891304347826, | |
| "grad_norm": 1.8486355543136597, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7133, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.2657608695652174, | |
| "grad_norm": 1.5333871841430664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4191, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.26603260869565215, | |
| "grad_norm": 1.6924866437911987, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3763, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.266304347826087, | |
| "grad_norm": 2.1424450874328613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8962, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.26657608695652174, | |
| "grad_norm": 1.5438109636306763, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2176, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.2668478260869565, | |
| "grad_norm": 1.5958982706069946, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8708, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.26711956521739133, | |
| "grad_norm": 1.129310965538025, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8674, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.2673913043478261, | |
| "grad_norm": 1.6280345916748047, | |
| "learning_rate": 3e-05, | |
| "loss": 4.165, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.26766304347826086, | |
| "grad_norm": 1.499475121498108, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9953, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.26793478260869563, | |
| "grad_norm": 1.253732442855835, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4718, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.26820652173913045, | |
| "grad_norm": 1.113315463066101, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3101, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.2684782608695652, | |
| "grad_norm": 1.4947681427001953, | |
| "learning_rate": 3e-05, | |
| "loss": 3.217, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.26875, | |
| "grad_norm": 1.9959630966186523, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0953, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.26902173913043476, | |
| "grad_norm": 1.6297112703323364, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4108, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.2692934782608696, | |
| "grad_norm": 1.9028682708740234, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1301, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.26956521739130435, | |
| "grad_norm": 1.6655460596084595, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5511, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.2698369565217391, | |
| "grad_norm": 1.8980700969696045, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5143, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.27010869565217394, | |
| "grad_norm": 1.5208625793457031, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5514, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.2703804347826087, | |
| "grad_norm": 1.1632534265518188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2167, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.27065217391304347, | |
| "grad_norm": 2.094667673110962, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0362, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.27092391304347824, | |
| "grad_norm": 1.4889837503433228, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4279, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.27119565217391306, | |
| "grad_norm": 1.9965155124664307, | |
| "learning_rate": 3e-05, | |
| "loss": 3.774, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.2714673913043478, | |
| "grad_norm": 1.5410970449447632, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2647, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.2717391304347826, | |
| "grad_norm": 1.9614113569259644, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5648, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.2720108695652174, | |
| "grad_norm": 1.3656377792358398, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3263, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.2722826086956522, | |
| "grad_norm": 2.057823419570923, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1952, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.27255434782608695, | |
| "grad_norm": 1.9812930822372437, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5128, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.2728260869565217, | |
| "grad_norm": 1.443973183631897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5108, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.27309782608695654, | |
| "grad_norm": 1.1789878606796265, | |
| "learning_rate": 3e-05, | |
| "loss": 3.142, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.2733695652173913, | |
| "grad_norm": 1.5677335262298584, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0946, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.2736413043478261, | |
| "grad_norm": 1.6985656023025513, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8017, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.27391304347826084, | |
| "grad_norm": 0.9420762658119202, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8754, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.27418478260869567, | |
| "grad_norm": 1.1210711002349854, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1734, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.27445652173913043, | |
| "grad_norm": 0.9131266474723816, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5223, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.2747282608695652, | |
| "grad_norm": 1.4077900648117065, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2054, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.275, | |
| "grad_norm": 1.4042190313339233, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3916, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.2752717391304348, | |
| "grad_norm": 1.1815489530563354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2108, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.27554347826086956, | |
| "grad_norm": 1.007114052772522, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8035, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.2758152173913043, | |
| "grad_norm": 1.6630133390426636, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5364, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.27608695652173915, | |
| "grad_norm": 1.3151155710220337, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8982, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.2763586956521739, | |
| "grad_norm": 1.3191099166870117, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3812, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.2766304347826087, | |
| "grad_norm": 0.9553658366203308, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2285, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.2769021739130435, | |
| "grad_norm": 1.2714715003967285, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1331, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.27717391304347827, | |
| "grad_norm": 1.4878464937210083, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5607, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.27744565217391304, | |
| "grad_norm": 1.3467168807983398, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4943, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.2777173913043478, | |
| "grad_norm": 1.052966833114624, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9399, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.2779891304347826, | |
| "grad_norm": 1.1948199272155762, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0152, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.2782608695652174, | |
| "grad_norm": 1.1960163116455078, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9338, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.27853260869565216, | |
| "grad_norm": 1.2721116542816162, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2096, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.27880434782608693, | |
| "grad_norm": 1.620215654373169, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3828, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.27907608695652175, | |
| "grad_norm": 1.4918383359909058, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5746, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.2793478260869565, | |
| "grad_norm": 1.9811660051345825, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0559, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.2796195652173913, | |
| "grad_norm": 1.3812586069107056, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1452, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.2798913043478261, | |
| "grad_norm": 1.53925359249115, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9887, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.2801630434782609, | |
| "grad_norm": 2.2425036430358887, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2516, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.28043478260869564, | |
| "grad_norm": 1.2638776302337646, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5207, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.2807065217391304, | |
| "grad_norm": 1.1238954067230225, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4072, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.28097826086956523, | |
| "grad_norm": 1.2834945917129517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0585, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.28125, | |
| "grad_norm": 1.5284252166748047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1547, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.28152173913043477, | |
| "grad_norm": 1.8825403451919556, | |
| "learning_rate": 3e-05, | |
| "loss": 3.892, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 0.2817934782608696, | |
| "grad_norm": 1.620376467704773, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9044, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 0.28206521739130436, | |
| "grad_norm": 2.1728601455688477, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1266, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 0.2823369565217391, | |
| "grad_norm": 2.090522050857544, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4403, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 0.2826086956521739, | |
| "grad_norm": 2.0362656116485596, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6416, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.2828804347826087, | |
| "grad_norm": 1.493579387664795, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2111, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 0.2831521739130435, | |
| "grad_norm": 1.2144675254821777, | |
| "learning_rate": 3e-05, | |
| "loss": 2.911, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 0.28342391304347825, | |
| "grad_norm": 1.833510398864746, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0022, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 0.28369565217391307, | |
| "grad_norm": 2.0041656494140625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1412, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 0.28396739130434784, | |
| "grad_norm": 1.2218122482299805, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4581, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.2842391304347826, | |
| "grad_norm": 1.6633751392364502, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6047, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 0.2845108695652174, | |
| "grad_norm": 2.4305849075317383, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3797, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 0.2847826086956522, | |
| "grad_norm": 2.309483289718628, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6304, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 0.28505434782608696, | |
| "grad_norm": 1.678916335105896, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4073, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 0.28532608695652173, | |
| "grad_norm": 1.9873173236846924, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9298, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.2855978260869565, | |
| "grad_norm": 2.085996389389038, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4263, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 0.2858695652173913, | |
| "grad_norm": 3.049558639526367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6507, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 0.2861413043478261, | |
| "grad_norm": 1.4257125854492188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3861, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 0.28641304347826085, | |
| "grad_norm": 1.5436605215072632, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1414, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 0.2866847826086957, | |
| "grad_norm": 3.0967252254486084, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9067, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.28695652173913044, | |
| "grad_norm": 1.6564278602600098, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5354, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 0.2872282608695652, | |
| "grad_norm": 2.498901605606079, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3153, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 0.2875, | |
| "grad_norm": 2.2996490001678467, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4023, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 0.2877717391304348, | |
| "grad_norm": 2.0972297191619873, | |
| "learning_rate": 3e-05, | |
| "loss": 3.038, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 0.28804347826086957, | |
| "grad_norm": 1.221664547920227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0761, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.28831521739130433, | |
| "grad_norm": 2.2344908714294434, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8314, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 0.28858695652173916, | |
| "grad_norm": 1.9931285381317139, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3899, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 0.2888586956521739, | |
| "grad_norm": 1.924612045288086, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4548, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 0.2891304347826087, | |
| "grad_norm": 1.9507229328155518, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9302, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 0.28940217391304346, | |
| "grad_norm": 1.8761188983917236, | |
| "learning_rate": 3e-05, | |
| "loss": 3.195, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.2896739130434783, | |
| "grad_norm": 2.233293294906616, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1672, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 0.28994565217391305, | |
| "grad_norm": 1.7154324054718018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1413, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 0.2902173913043478, | |
| "grad_norm": 1.8050798177719116, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4341, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 0.2904891304347826, | |
| "grad_norm": 1.838165044784546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3827, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 0.2907608695652174, | |
| "grad_norm": 2.7229554653167725, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3938, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.2910326086956522, | |
| "grad_norm": 3.150635242462158, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8396, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 0.29130434782608694, | |
| "grad_norm": 2.2429847717285156, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9437, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 0.29157608695652176, | |
| "grad_norm": 1.305802345275879, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5962, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 0.29184782608695653, | |
| "grad_norm": 1.4509013891220093, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2975, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 0.2921195652173913, | |
| "grad_norm": 2.059382200241089, | |
| "learning_rate": 3e-05, | |
| "loss": 4.21, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.29239130434782606, | |
| "grad_norm": 0.9824801087379456, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6315, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 0.2926630434782609, | |
| "grad_norm": 1.0867946147918701, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6838, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 0.29293478260869565, | |
| "grad_norm": 1.992459774017334, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2482, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 0.2932065217391304, | |
| "grad_norm": 1.6963497400283813, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5684, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 0.29347826086956524, | |
| "grad_norm": 1.2850168943405151, | |
| "learning_rate": 3e-05, | |
| "loss": 3.399, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.29375, | |
| "grad_norm": 1.652712106704712, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3213, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 0.2940217391304348, | |
| "grad_norm": 1.733959436416626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1942, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 0.29429347826086955, | |
| "grad_norm": 1.8719747066497803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8789, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 0.29456521739130437, | |
| "grad_norm": 1.5426567792892456, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1319, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 0.29483695652173914, | |
| "grad_norm": 2.0286598205566406, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9002, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.2951086956521739, | |
| "grad_norm": 1.5031648874282837, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3979, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 0.29538043478260867, | |
| "grad_norm": 1.556738257408142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9843, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 0.2956521739130435, | |
| "grad_norm": 1.8954527378082275, | |
| "learning_rate": 3e-05, | |
| "loss": 4.6858, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 0.29592391304347826, | |
| "grad_norm": 1.7330039739608765, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8431, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 0.296195652173913, | |
| "grad_norm": 2.0785205364227295, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6168, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.29646739130434785, | |
| "grad_norm": 1.708628535270691, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6387, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 0.2967391304347826, | |
| "grad_norm": 2.5519232749938965, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0809, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 0.2970108695652174, | |
| "grad_norm": 1.5967369079589844, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5908, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 0.29728260869565215, | |
| "grad_norm": 1.345123052597046, | |
| "learning_rate": 3e-05, | |
| "loss": 3.297, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 0.297554347826087, | |
| "grad_norm": 2.123605251312256, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1267, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.29782608695652174, | |
| "grad_norm": 1.6863003969192505, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4321, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 0.2980978260869565, | |
| "grad_norm": 2.0726163387298584, | |
| "learning_rate": 3e-05, | |
| "loss": 3.386, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 0.29836956521739133, | |
| "grad_norm": 1.5322779417037964, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9161, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 0.2986413043478261, | |
| "grad_norm": 1.1131296157836914, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7584, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 0.29891304347826086, | |
| "grad_norm": 1.9430760145187378, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4661, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.29918478260869563, | |
| "grad_norm": 2.6745107173919678, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0976, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 0.29945652173913045, | |
| "grad_norm": 3.127723217010498, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7013, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 0.2997282608695652, | |
| "grad_norm": 1.894990086555481, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4841, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.9465398788452148, | |
| "learning_rate": 3e-05, | |
| "loss": 3.48, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 0.30027173913043476, | |
| "grad_norm": 2.5459132194519043, | |
| "learning_rate": 3e-05, | |
| "loss": 3.239, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.3005434782608696, | |
| "grad_norm": 2.382801055908203, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2947, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 0.30081521739130435, | |
| "grad_norm": 1.913734793663025, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0162, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 0.3010869565217391, | |
| "grad_norm": 1.1874632835388184, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6723, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 0.30135869565217394, | |
| "grad_norm": 1.6929433345794678, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1864, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 0.3016304347826087, | |
| "grad_norm": 2.1046361923217773, | |
| "learning_rate": 3e-05, | |
| "loss": 3.442, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.30190217391304347, | |
| "grad_norm": 2.196091413497925, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2791, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 0.30217391304347824, | |
| "grad_norm": 2.6548757553100586, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0965, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 0.30244565217391306, | |
| "grad_norm": 1.7033164501190186, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5031, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 0.3027173913043478, | |
| "grad_norm": 1.5866395235061646, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9015, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 0.3029891304347826, | |
| "grad_norm": 2.0246646404266357, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1981, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.3032608695652174, | |
| "grad_norm": 2.0733206272125244, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3524, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 0.3035326086956522, | |
| "grad_norm": 1.6320792436599731, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6667, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 0.30380434782608695, | |
| "grad_norm": 1.4059102535247803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4399, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 0.3040760869565217, | |
| "grad_norm": 1.6570987701416016, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2745, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 0.30434782608695654, | |
| "grad_norm": 1.9712703227996826, | |
| "learning_rate": 3e-05, | |
| "loss": 3.751, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.3046195652173913, | |
| "grad_norm": 1.4697049856185913, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4402, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 0.3048913043478261, | |
| "grad_norm": 1.8021398782730103, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8907, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 0.30516304347826084, | |
| "grad_norm": 1.5529664754867554, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7741, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 0.30543478260869567, | |
| "grad_norm": 1.5865482091903687, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7243, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 0.30570652173913043, | |
| "grad_norm": 2.221069097518921, | |
| "learning_rate": 3e-05, | |
| "loss": 3.65, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.3059782608695652, | |
| "grad_norm": 2.1728017330169678, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0357, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 0.30625, | |
| "grad_norm": 1.6755625009536743, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2292, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 0.3065217391304348, | |
| "grad_norm": 2.135910987854004, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0064, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 0.30679347826086956, | |
| "grad_norm": 1.7224137783050537, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6076, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 0.3070652173913043, | |
| "grad_norm": 1.5196242332458496, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2132, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.30733695652173915, | |
| "grad_norm": 2.016763210296631, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1373, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 0.3076086956521739, | |
| "grad_norm": 1.3778748512268066, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4815, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 0.3078804347826087, | |
| "grad_norm": 1.5161099433898926, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2094, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 0.3081521739130435, | |
| "grad_norm": 1.2191746234893799, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0117, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 0.30842391304347827, | |
| "grad_norm": 1.6270561218261719, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8738, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.30869565217391304, | |
| "grad_norm": 1.3209682703018188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1873, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 0.3089673913043478, | |
| "grad_norm": 1.7960654497146606, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9211, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 0.3092391304347826, | |
| "grad_norm": 1.5645345449447632, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8176, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 0.3095108695652174, | |
| "grad_norm": 1.2274796962738037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0552, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 0.30978260869565216, | |
| "grad_norm": 3.1637630462646484, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0908, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.31005434782608693, | |
| "grad_norm": 1.7452456951141357, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2379, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 0.31032608695652175, | |
| "grad_norm": 1.3035019636154175, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1619, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 0.3105978260869565, | |
| "grad_norm": 1.5721486806869507, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3092, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 0.3108695652173913, | |
| "grad_norm": 1.5714532136917114, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7343, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 0.3111413043478261, | |
| "grad_norm": 1.4483362436294556, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5843, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.3114130434782609, | |
| "grad_norm": 1.2886680364608765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4743, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 0.31168478260869564, | |
| "grad_norm": 1.5609358549118042, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7947, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 0.3119565217391304, | |
| "grad_norm": 1.7903861999511719, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3165, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 0.31222826086956523, | |
| "grad_norm": 1.917932152748108, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3985, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 0.3125, | |
| "grad_norm": 1.5185085535049438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.571, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.31277173913043477, | |
| "grad_norm": 1.3498207330703735, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3921, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 0.3130434782608696, | |
| "grad_norm": 1.6622629165649414, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4101, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 0.31331521739130436, | |
| "grad_norm": 1.516265630722046, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1228, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 0.3135869565217391, | |
| "grad_norm": 1.5660970211029053, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3751, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 0.3138586956521739, | |
| "grad_norm": 1.8406776189804077, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3181, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.3141304347826087, | |
| "grad_norm": 1.8162765502929688, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0117, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 0.3144021739130435, | |
| "grad_norm": 1.2983214855194092, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2412, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 0.31467391304347825, | |
| "grad_norm": 1.1409294605255127, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0329, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 0.31494565217391307, | |
| "grad_norm": 1.754345178604126, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6711, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 0.31521739130434784, | |
| "grad_norm": 1.868580937385559, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6684, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.3154891304347826, | |
| "grad_norm": 1.8012367486953735, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2884, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 0.3157608695652174, | |
| "grad_norm": 1.7688064575195312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3512, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 0.3160326086956522, | |
| "grad_norm": 1.5674185752868652, | |
| "learning_rate": 3e-05, | |
| "loss": 3.443, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 0.31630434782608696, | |
| "grad_norm": 1.5843913555145264, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8519, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 0.31657608695652173, | |
| "grad_norm": 1.4522455930709839, | |
| "learning_rate": 3e-05, | |
| "loss": 3.123, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.3168478260869565, | |
| "grad_norm": 1.5610785484313965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1963, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 0.3171195652173913, | |
| "grad_norm": 1.4327051639556885, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9382, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 0.3173913043478261, | |
| "grad_norm": 1.6065129041671753, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4928, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 0.31766304347826085, | |
| "grad_norm": 1.9382498264312744, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3174, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 0.3179347826086957, | |
| "grad_norm": 1.164689064025879, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0678, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.31820652173913044, | |
| "grad_norm": 1.084055781364441, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0878, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 0.3184782608695652, | |
| "grad_norm": 1.4881168603897095, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5084, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 0.31875, | |
| "grad_norm": 1.5508403778076172, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6359, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 0.3190217391304348, | |
| "grad_norm": 1.7135038375854492, | |
| "learning_rate": 3e-05, | |
| "loss": 2.819, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 0.31929347826086957, | |
| "grad_norm": 1.9360867738723755, | |
| "learning_rate": 3e-05, | |
| "loss": 3.711, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.31956521739130433, | |
| "grad_norm": 1.3898394107818604, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0356, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 0.31983695652173916, | |
| "grad_norm": 1.37100088596344, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9441, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 0.3201086956521739, | |
| "grad_norm": 1.7066446542739868, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8696, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 0.3203804347826087, | |
| "grad_norm": 1.4085134267807007, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4616, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 0.32065217391304346, | |
| "grad_norm": 1.5345913171768188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6033, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.3209239130434783, | |
| "grad_norm": 1.5240819454193115, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1863, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 0.32119565217391305, | |
| "grad_norm": 1.147559404373169, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2533, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 0.3214673913043478, | |
| "grad_norm": 1.3267796039581299, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3342, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 0.3217391304347826, | |
| "grad_norm": 1.567126989364624, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5055, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 0.3220108695652174, | |
| "grad_norm": 2.085658073425293, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1383, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.3222826086956522, | |
| "grad_norm": 1.7624791860580444, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2409, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 0.32255434782608694, | |
| "grad_norm": 1.4368826150894165, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7424, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 0.32282608695652176, | |
| "grad_norm": 1.5510128736495972, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3895, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 0.32309782608695653, | |
| "grad_norm": 1.471490740776062, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6772, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 0.3233695652173913, | |
| "grad_norm": 1.2040995359420776, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3302, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.32364130434782606, | |
| "grad_norm": 1.4731234312057495, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3842, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 0.3239130434782609, | |
| "grad_norm": 1.3904873132705688, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8537, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 0.32418478260869565, | |
| "grad_norm": 1.187960147857666, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0656, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 0.3244565217391304, | |
| "grad_norm": 1.1819937229156494, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7234, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 0.32472826086956524, | |
| "grad_norm": 1.6428884267807007, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4446, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.325, | |
| "grad_norm": 1.6877864599227905, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2674, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 0.3252717391304348, | |
| "grad_norm": 1.596166729927063, | |
| "learning_rate": 3e-05, | |
| "loss": 3.371, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 0.32554347826086955, | |
| "grad_norm": 1.3886607885360718, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3149, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 0.32581521739130437, | |
| "grad_norm": 1.4295148849487305, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0519, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 0.32608695652173914, | |
| "grad_norm": 1.5317387580871582, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3816, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.3263586956521739, | |
| "grad_norm": 1.1524282693862915, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2564, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 0.32663043478260867, | |
| "grad_norm": 1.3997209072113037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3173, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 0.3269021739130435, | |
| "grad_norm": 1.357279896736145, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7316, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 0.32717391304347826, | |
| "grad_norm": 1.1962755918502808, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5203, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 0.327445652173913, | |
| "grad_norm": 1.4180536270141602, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7679, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.32771739130434785, | |
| "grad_norm": 1.4997798204421997, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9842, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 0.3279891304347826, | |
| "grad_norm": 1.5224746465682983, | |
| "learning_rate": 3e-05, | |
| "loss": 3.723, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 0.3282608695652174, | |
| "grad_norm": 1.4814677238464355, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3444, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 0.32853260869565215, | |
| "grad_norm": 1.8869359493255615, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8663, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 0.328804347826087, | |
| "grad_norm": 1.2305731773376465, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1855, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.32907608695652174, | |
| "grad_norm": 1.3822002410888672, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9625, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 0.3293478260869565, | |
| "grad_norm": 1.2895426750183105, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9574, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 0.32961956521739133, | |
| "grad_norm": 1.2724965810775757, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2984, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 0.3298913043478261, | |
| "grad_norm": 1.569366455078125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4928, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 0.33016304347826086, | |
| "grad_norm": 1.538090467453003, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8567, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.33043478260869563, | |
| "grad_norm": 1.3484816551208496, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1096, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 0.33070652173913045, | |
| "grad_norm": 1.1450049877166748, | |
| "learning_rate": 3e-05, | |
| "loss": 2.773, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 0.3309782608695652, | |
| "grad_norm": 1.5292000770568848, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3972, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 0.33125, | |
| "grad_norm": 1.3615137338638306, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2462, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 0.33152173913043476, | |
| "grad_norm": 1.7036170959472656, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7338, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.3317934782608696, | |
| "grad_norm": 1.7132785320281982, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3378, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 0.33206521739130435, | |
| "grad_norm": 1.9641152620315552, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8121, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 0.3323369565217391, | |
| "grad_norm": 1.3222547769546509, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2911, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 0.33260869565217394, | |
| "grad_norm": 1.3695377111434937, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3004, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 0.3328804347826087, | |
| "grad_norm": 1.8578277826309204, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1831, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.33315217391304347, | |
| "grad_norm": 1.7893558740615845, | |
| "learning_rate": 3e-05, | |
| "loss": 3.359, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 0.33342391304347824, | |
| "grad_norm": 1.474753499031067, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9094, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 0.33369565217391306, | |
| "grad_norm": 1.973402738571167, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0505, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 0.3339673913043478, | |
| "grad_norm": 1.7613177299499512, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3559, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 0.3342391304347826, | |
| "grad_norm": 2.031470775604248, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3066, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.3345108695652174, | |
| "grad_norm": 2.1547582149505615, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6043, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 0.3347826086956522, | |
| "grad_norm": 2.2331910133361816, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9871, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 0.33505434782608695, | |
| "grad_norm": 2.156770944595337, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6777, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 0.3353260869565217, | |
| "grad_norm": 1.717960000038147, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6268, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 0.33559782608695654, | |
| "grad_norm": 1.649511694908142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3226, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.3358695652173913, | |
| "grad_norm": 1.8623225688934326, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9329, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 0.3361413043478261, | |
| "grad_norm": 2.167483329772949, | |
| "learning_rate": 3e-05, | |
| "loss": 3.445, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 0.33641304347826084, | |
| "grad_norm": 1.7240395545959473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.493, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 0.33668478260869567, | |
| "grad_norm": 1.3711011409759521, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7254, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 0.33695652173913043, | |
| "grad_norm": 2.091745615005493, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0388, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.3372282608695652, | |
| "grad_norm": 1.5960465669631958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5036, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 0.3375, | |
| "grad_norm": 1.1491891145706177, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2282, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 0.3377717391304348, | |
| "grad_norm": 1.1655139923095703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3578, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 0.33804347826086956, | |
| "grad_norm": 1.4522464275360107, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4827, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 0.3383152173913043, | |
| "grad_norm": 1.494310736656189, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7874, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.33858695652173915, | |
| "grad_norm": 1.98121976852417, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2323, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 0.3388586956521739, | |
| "grad_norm": 1.772510290145874, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9783, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 0.3391304347826087, | |
| "grad_norm": 1.3050928115844727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3902, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 0.3394021739130435, | |
| "grad_norm": 1.590279221534729, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0477, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 0.33967391304347827, | |
| "grad_norm": 2.1541528701782227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5042, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.33994565217391304, | |
| "grad_norm": 2.138265609741211, | |
| "learning_rate": 3e-05, | |
| "loss": 3.704, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 0.3402173913043478, | |
| "grad_norm": 1.5365831851959229, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3562, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 0.3404891304347826, | |
| "grad_norm": 1.3827742338180542, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2202, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 0.3407608695652174, | |
| "grad_norm": 1.4450342655181885, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4887, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 0.34103260869565216, | |
| "grad_norm": 1.2322391271591187, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7866, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.34130434782608693, | |
| "grad_norm": 1.3767555952072144, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8691, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 0.34157608695652175, | |
| "grad_norm": 1.9902418851852417, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5811, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 0.3418478260869565, | |
| "grad_norm": 1.7038296461105347, | |
| "learning_rate": 3e-05, | |
| "loss": 3.363, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 0.3421195652173913, | |
| "grad_norm": 1.82673978805542, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0324, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 0.3423913043478261, | |
| "grad_norm": 1.4924451112747192, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5862, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.3426630434782609, | |
| "grad_norm": 1.1290408372879028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0983, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 0.34293478260869564, | |
| "grad_norm": 1.3652050495147705, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2806, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 0.3432065217391304, | |
| "grad_norm": 1.3618534803390503, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0708, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 0.34347826086956523, | |
| "grad_norm": 1.3616082668304443, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8319, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 0.34375, | |
| "grad_norm": 1.3398898839950562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1065, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.34402173913043477, | |
| "grad_norm": 1.6687146425247192, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3584, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 0.3442934782608696, | |
| "grad_norm": 1.2948143482208252, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2489, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 0.34456521739130436, | |
| "grad_norm": 1.7158524990081787, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3558, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 0.3448369565217391, | |
| "grad_norm": 1.6835163831710815, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9967, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 0.3451086956521739, | |
| "grad_norm": 1.971177101135254, | |
| "learning_rate": 3e-05, | |
| "loss": 3.829, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.3453804347826087, | |
| "grad_norm": 1.660911202430725, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9975, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 0.3456521739130435, | |
| "grad_norm": 1.8289529085159302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4466, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 0.34592391304347825, | |
| "grad_norm": 1.9580225944519043, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4431, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 0.34619565217391307, | |
| "grad_norm": 1.9081965684890747, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1295, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 0.34646739130434784, | |
| "grad_norm": 2.8098397254943848, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9265, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.3467391304347826, | |
| "grad_norm": 1.8996559381484985, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9118, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 0.3470108695652174, | |
| "grad_norm": 1.8447978496551514, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8908, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 0.3472826086956522, | |
| "grad_norm": 1.450376033782959, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1262, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 0.34755434782608696, | |
| "grad_norm": 2.3735806941986084, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4959, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 0.34782608695652173, | |
| "grad_norm": 2.2202954292297363, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2127, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.3480978260869565, | |
| "grad_norm": 1.6313713788986206, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7593, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 0.3483695652173913, | |
| "grad_norm": 1.8261613845825195, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5207, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 0.3486413043478261, | |
| "grad_norm": 1.6101502180099487, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1187, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 0.34891304347826085, | |
| "grad_norm": 1.4293962717056274, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3783, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 0.3491847826086957, | |
| "grad_norm": 1.335721731185913, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8365, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.34945652173913044, | |
| "grad_norm": 1.5614105463027954, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7661, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 0.3497282608695652, | |
| "grad_norm": 1.38046133518219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1717, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.3673638105392456, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7002, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 0.3502717391304348, | |
| "grad_norm": 1.2321789264678955, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8849, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 0.35054347826086957, | |
| "grad_norm": 1.3166979551315308, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1094, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.35081521739130433, | |
| "grad_norm": 1.5346661806106567, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2696, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 0.35108695652173916, | |
| "grad_norm": 1.550195336341858, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3044, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 0.3513586956521739, | |
| "grad_norm": 1.7930152416229248, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7782, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 0.3516304347826087, | |
| "grad_norm": 1.9929698705673218, | |
| "learning_rate": 3e-05, | |
| "loss": 3.12, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 0.35190217391304346, | |
| "grad_norm": 1.244729995727539, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9368, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.3521739130434783, | |
| "grad_norm": 1.3026173114776611, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1947, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 0.35244565217391305, | |
| "grad_norm": 1.297813892364502, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5978, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 0.3527173913043478, | |
| "grad_norm": 1.4547146558761597, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4649, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 0.3529891304347826, | |
| "grad_norm": 1.2365776300430298, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1037, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 0.3532608695652174, | |
| "grad_norm": 1.2477210760116577, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1045, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.3535326086956522, | |
| "grad_norm": 1.4197901487350464, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2411, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 0.35380434782608694, | |
| "grad_norm": 1.1791913509368896, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4775, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 0.35407608695652176, | |
| "grad_norm": 1.1935291290283203, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9038, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 0.35434782608695653, | |
| "grad_norm": 1.515292763710022, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6569, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 0.3546195652173913, | |
| "grad_norm": 1.6459676027297974, | |
| "learning_rate": 3e-05, | |
| "loss": 3.638, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.35489130434782606, | |
| "grad_norm": 1.2799147367477417, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 0.3551630434782609, | |
| "grad_norm": 1.1415566205978394, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7568, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 0.35543478260869565, | |
| "grad_norm": 1.4160751104354858, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5389, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 0.3557065217391304, | |
| "grad_norm": 1.5589027404785156, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5721, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 0.35597826086956524, | |
| "grad_norm": 1.4486863613128662, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5145, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.35625, | |
| "grad_norm": 1.1204112768173218, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2109, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 0.3565217391304348, | |
| "grad_norm": 1.4160523414611816, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9734, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 0.35679347826086955, | |
| "grad_norm": 1.359877347946167, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1197, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 0.35706521739130437, | |
| "grad_norm": 1.3469575643539429, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7031, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 0.35733695652173914, | |
| "grad_norm": 1.877429485321045, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6433, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.3576086956521739, | |
| "grad_norm": 1.6881115436553955, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8429, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 0.35788043478260867, | |
| "grad_norm": 0.9885056018829346, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9943, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 0.3581521739130435, | |
| "grad_norm": 1.1146260499954224, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0583, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 0.35842391304347826, | |
| "grad_norm": 1.0930460691452026, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1491, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 0.358695652173913, | |
| "grad_norm": 1.7721790075302124, | |
| "learning_rate": 3e-05, | |
| "loss": 3.682, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.35896739130434785, | |
| "grad_norm": 1.8233963251113892, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0548, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 0.3592391304347826, | |
| "grad_norm": 2.3196828365325928, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8065, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 0.3595108695652174, | |
| "grad_norm": 2.0975351333618164, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3098, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 0.35978260869565215, | |
| "grad_norm": 1.4240121841430664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.327, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 0.360054347826087, | |
| "grad_norm": 1.4401570558547974, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9099, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.36032608695652174, | |
| "grad_norm": 1.7879765033721924, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9776, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 0.3605978260869565, | |
| "grad_norm": 1.8819643259048462, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3526, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 0.36086956521739133, | |
| "grad_norm": 1.8750115633010864, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7411, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 0.3611413043478261, | |
| "grad_norm": 1.802832007408142, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8997, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 0.36141304347826086, | |
| "grad_norm": 1.9432928562164307, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9711, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.36168478260869563, | |
| "grad_norm": 2.222259998321533, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5956, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 0.36195652173913045, | |
| "grad_norm": 1.0173529386520386, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1368, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 0.3622282608695652, | |
| "grad_norm": 1.3840457201004028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3336, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 0.3625, | |
| "grad_norm": 2.199570417404175, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1846, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 0.36277173913043476, | |
| "grad_norm": 2.3063576221466064, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1996, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.3630434782608696, | |
| "grad_norm": 2.0329015254974365, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6631, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 0.36331521739130435, | |
| "grad_norm": 0.9080404043197632, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8746, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 0.3635869565217391, | |
| "grad_norm": 2.173773765563965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8486, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 0.36385869565217394, | |
| "grad_norm": 2.493887186050415, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5707, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 0.3641304347826087, | |
| "grad_norm": 1.8862491846084595, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8585, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.36440217391304347, | |
| "grad_norm": 1.23379647731781, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5806, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 0.36467391304347824, | |
| "grad_norm": 1.3684219121932983, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1318, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 0.36494565217391306, | |
| "grad_norm": 1.2184761762619019, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4761, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 0.3652173913043478, | |
| "grad_norm": 1.230040431022644, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1053, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 0.3654891304347826, | |
| "grad_norm": 1.4862629175186157, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4211, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.3657608695652174, | |
| "grad_norm": 1.2832411527633667, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7821, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 0.3660326086956522, | |
| "grad_norm": 1.1333979368209839, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4129, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 0.36630434782608695, | |
| "grad_norm": 1.9462121725082397, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8102, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 0.3665760869565217, | |
| "grad_norm": 1.314512848854065, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5802, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 0.36684782608695654, | |
| "grad_norm": 0.9899564981460571, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2397, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.3671195652173913, | |
| "grad_norm": 0.9129654765129089, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0704, | |
| "step": 1351 | |
| }, | |
| { | |
| "epoch": 0.3673913043478261, | |
| "grad_norm": 1.2658374309539795, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9416, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 0.36766304347826084, | |
| "grad_norm": 1.156530499458313, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2254, | |
| "step": 1353 | |
| }, | |
| { | |
| "epoch": 0.36793478260869567, | |
| "grad_norm": 1.4215689897537231, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5928, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 0.36820652173913043, | |
| "grad_norm": 1.4695452451705933, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5298, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.3684782608695652, | |
| "grad_norm": 1.185398817062378, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9786, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 0.36875, | |
| "grad_norm": 1.6181942224502563, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8328, | |
| "step": 1357 | |
| }, | |
| { | |
| "epoch": 0.3690217391304348, | |
| "grad_norm": 1.7642178535461426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6084, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 0.36929347826086956, | |
| "grad_norm": 1.3343443870544434, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0491, | |
| "step": 1359 | |
| }, | |
| { | |
| "epoch": 0.3695652173913043, | |
| "grad_norm": 2.167877435684204, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6741, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.36983695652173915, | |
| "grad_norm": 2.0322484970092773, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1298, | |
| "step": 1361 | |
| }, | |
| { | |
| "epoch": 0.3701086956521739, | |
| "grad_norm": 1.1205507516860962, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2478, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 0.3703804347826087, | |
| "grad_norm": 1.4173895120620728, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2024, | |
| "step": 1363 | |
| }, | |
| { | |
| "epoch": 0.3706521739130435, | |
| "grad_norm": 1.4186296463012695, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9463, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 0.37092391304347827, | |
| "grad_norm": 1.4897836446762085, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4138, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.37119565217391304, | |
| "grad_norm": 0.9213505983352661, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7726, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 0.3714673913043478, | |
| "grad_norm": 1.4468334913253784, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3552, | |
| "step": 1367 | |
| }, | |
| { | |
| "epoch": 0.3717391304347826, | |
| "grad_norm": 1.6020596027374268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3098, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 0.3720108695652174, | |
| "grad_norm": 1.2568944692611694, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9896, | |
| "step": 1369 | |
| }, | |
| { | |
| "epoch": 0.37228260869565216, | |
| "grad_norm": 1.2092691659927368, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2886, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.37255434782608693, | |
| "grad_norm": 1.0227844715118408, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9555, | |
| "step": 1371 | |
| }, | |
| { | |
| "epoch": 0.37282608695652175, | |
| "grad_norm": 1.400106430053711, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2697, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 0.3730978260869565, | |
| "grad_norm": 1.3000365495681763, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1748, | |
| "step": 1373 | |
| }, | |
| { | |
| "epoch": 0.3733695652173913, | |
| "grad_norm": 1.197072148323059, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3346, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 0.3736413043478261, | |
| "grad_norm": 1.1778289079666138, | |
| "learning_rate": 3e-05, | |
| "loss": 3.089, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.3739130434782609, | |
| "grad_norm": 1.23442542552948, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0972, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 0.37418478260869564, | |
| "grad_norm": 1.135533094406128, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9638, | |
| "step": 1377 | |
| }, | |
| { | |
| "epoch": 0.3744565217391304, | |
| "grad_norm": 1.4869073629379272, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7439, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 0.37472826086956523, | |
| "grad_norm": 1.5528923273086548, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8187, | |
| "step": 1379 | |
| }, | |
| { | |
| "epoch": 0.375, | |
| "grad_norm": 1.0053822994232178, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2647, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.37527173913043477, | |
| "grad_norm": 1.2217366695404053, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5844, | |
| "step": 1381 | |
| }, | |
| { | |
| "epoch": 0.3755434782608696, | |
| "grad_norm": 1.245322823524475, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9917, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 0.37581521739130436, | |
| "grad_norm": 1.3003109693527222, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7163, | |
| "step": 1383 | |
| }, | |
| { | |
| "epoch": 0.3760869565217391, | |
| "grad_norm": 2.398679733276367, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2985, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 0.3763586956521739, | |
| "grad_norm": 1.2174817323684692, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3311, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.3766304347826087, | |
| "grad_norm": 1.2073396444320679, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4313, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 0.3769021739130435, | |
| "grad_norm": 1.2685997486114502, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1358, | |
| "step": 1387 | |
| }, | |
| { | |
| "epoch": 0.37717391304347825, | |
| "grad_norm": 1.3566850423812866, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0956, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 0.37744565217391307, | |
| "grad_norm": 1.6564099788665771, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7791, | |
| "step": 1389 | |
| }, | |
| { | |
| "epoch": 0.37771739130434784, | |
| "grad_norm": 1.3698561191558838, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3446, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.3779891304347826, | |
| "grad_norm": 1.2702010869979858, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0951, | |
| "step": 1391 | |
| }, | |
| { | |
| "epoch": 0.3782608695652174, | |
| "grad_norm": 1.4307619333267212, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3821, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 0.3785326086956522, | |
| "grad_norm": 1.207396149635315, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6105, | |
| "step": 1393 | |
| }, | |
| { | |
| "epoch": 0.37880434782608696, | |
| "grad_norm": 1.1929562091827393, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8538, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 0.37907608695652173, | |
| "grad_norm": 1.2328208684921265, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0622, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.3793478260869565, | |
| "grad_norm": 1.4275541305541992, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4482, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 0.3796195652173913, | |
| "grad_norm": 0.9579852223396301, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8646, | |
| "step": 1397 | |
| }, | |
| { | |
| "epoch": 0.3798913043478261, | |
| "grad_norm": 1.6573379039764404, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4417, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 0.38016304347826085, | |
| "grad_norm": 1.5388156175613403, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2041, | |
| "step": 1399 | |
| }, | |
| { | |
| "epoch": 0.3804347826086957, | |
| "grad_norm": 1.4631450176239014, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8626, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.38070652173913044, | |
| "grad_norm": 1.3570584058761597, | |
| "learning_rate": 3e-05, | |
| "loss": 3.441, | |
| "step": 1401 | |
| }, | |
| { | |
| "epoch": 0.3809782608695652, | |
| "grad_norm": 1.8016875982284546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1616, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 0.38125, | |
| "grad_norm": 1.4279495477676392, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6129, | |
| "step": 1403 | |
| }, | |
| { | |
| "epoch": 0.3815217391304348, | |
| "grad_norm": 1.3305083513259888, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3956, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 0.38179347826086957, | |
| "grad_norm": 1.7289437055587769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6499, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.38206521739130433, | |
| "grad_norm": 1.75969398021698, | |
| "learning_rate": 3e-05, | |
| "loss": 3.104, | |
| "step": 1406 | |
| }, | |
| { | |
| "epoch": 0.38233695652173916, | |
| "grad_norm": 1.545464277267456, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1157, | |
| "step": 1407 | |
| }, | |
| { | |
| "epoch": 0.3826086956521739, | |
| "grad_norm": 1.5299899578094482, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9975, | |
| "step": 1408 | |
| }, | |
| { | |
| "epoch": 0.3828804347826087, | |
| "grad_norm": 1.7116879224777222, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0605, | |
| "step": 1409 | |
| }, | |
| { | |
| "epoch": 0.38315217391304346, | |
| "grad_norm": 1.4728469848632812, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2079, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.3834239130434783, | |
| "grad_norm": 2.216775894165039, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8937, | |
| "step": 1411 | |
| }, | |
| { | |
| "epoch": 0.38369565217391305, | |
| "grad_norm": 1.6279774904251099, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7957, | |
| "step": 1412 | |
| }, | |
| { | |
| "epoch": 0.3839673913043478, | |
| "grad_norm": 0.9390795230865479, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8752, | |
| "step": 1413 | |
| }, | |
| { | |
| "epoch": 0.3842391304347826, | |
| "grad_norm": 1.3048573732376099, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0132, | |
| "step": 1414 | |
| }, | |
| { | |
| "epoch": 0.3845108695652174, | |
| "grad_norm": 1.132607340812683, | |
| "learning_rate": 3e-05, | |
| "loss": 2.965, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.3847826086956522, | |
| "grad_norm": 1.6570241451263428, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1474, | |
| "step": 1416 | |
| }, | |
| { | |
| "epoch": 0.38505434782608694, | |
| "grad_norm": 1.5501550436019897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4049, | |
| "step": 1417 | |
| }, | |
| { | |
| "epoch": 0.38532608695652176, | |
| "grad_norm": 1.1150649785995483, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4114, | |
| "step": 1418 | |
| }, | |
| { | |
| "epoch": 0.38559782608695653, | |
| "grad_norm": 1.3886046409606934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7872, | |
| "step": 1419 | |
| }, | |
| { | |
| "epoch": 0.3858695652173913, | |
| "grad_norm": 1.178165078163147, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1258, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.38614130434782606, | |
| "grad_norm": 1.2557275295257568, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4292, | |
| "step": 1421 | |
| }, | |
| { | |
| "epoch": 0.3864130434782609, | |
| "grad_norm": 0.9806421995162964, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8931, | |
| "step": 1422 | |
| }, | |
| { | |
| "epoch": 0.38668478260869565, | |
| "grad_norm": 1.6238960027694702, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5711, | |
| "step": 1423 | |
| }, | |
| { | |
| "epoch": 0.3869565217391304, | |
| "grad_norm": 1.3578873872756958, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9015, | |
| "step": 1424 | |
| }, | |
| { | |
| "epoch": 0.38722826086956524, | |
| "grad_norm": 1.2328130006790161, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2193, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.3875, | |
| "grad_norm": 2.146702289581299, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0931, | |
| "step": 1426 | |
| }, | |
| { | |
| "epoch": 0.3877717391304348, | |
| "grad_norm": 1.9415775537490845, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0855, | |
| "step": 1427 | |
| }, | |
| { | |
| "epoch": 0.38804347826086955, | |
| "grad_norm": 1.7980471849441528, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3307, | |
| "step": 1428 | |
| }, | |
| { | |
| "epoch": 0.38831521739130437, | |
| "grad_norm": 1.429101824760437, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1628, | |
| "step": 1429 | |
| }, | |
| { | |
| "epoch": 0.38858695652173914, | |
| "grad_norm": 2.225107431411743, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5771, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.3888586956521739, | |
| "grad_norm": 2.46109938621521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7408, | |
| "step": 1431 | |
| }, | |
| { | |
| "epoch": 0.38913043478260867, | |
| "grad_norm": 2.695655345916748, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4776, | |
| "step": 1432 | |
| }, | |
| { | |
| "epoch": 0.3894021739130435, | |
| "grad_norm": 2.3022725582122803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4633, | |
| "step": 1433 | |
| }, | |
| { | |
| "epoch": 0.38967391304347826, | |
| "grad_norm": 1.5871728658676147, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8911, | |
| "step": 1434 | |
| }, | |
| { | |
| "epoch": 0.389945652173913, | |
| "grad_norm": 1.3922131061553955, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7701, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.39021739130434785, | |
| "grad_norm": 2.131774663925171, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6811, | |
| "step": 1436 | |
| }, | |
| { | |
| "epoch": 0.3904891304347826, | |
| "grad_norm": 1.8943536281585693, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6957, | |
| "step": 1437 | |
| }, | |
| { | |
| "epoch": 0.3907608695652174, | |
| "grad_norm": 1.7963645458221436, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4574, | |
| "step": 1438 | |
| }, | |
| { | |
| "epoch": 0.39103260869565215, | |
| "grad_norm": 1.613253116607666, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5328, | |
| "step": 1439 | |
| }, | |
| { | |
| "epoch": 0.391304347826087, | |
| "grad_norm": 1.8886051177978516, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7514, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.39157608695652174, | |
| "grad_norm": 2.6512999534606934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9306, | |
| "step": 1441 | |
| }, | |
| { | |
| "epoch": 0.3918478260869565, | |
| "grad_norm": 1.6623197793960571, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4983, | |
| "step": 1442 | |
| }, | |
| { | |
| "epoch": 0.39211956521739133, | |
| "grad_norm": 1.2289677858352661, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1967, | |
| "step": 1443 | |
| }, | |
| { | |
| "epoch": 0.3923913043478261, | |
| "grad_norm": 0.9220126867294312, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8288, | |
| "step": 1444 | |
| }, | |
| { | |
| "epoch": 0.39266304347826086, | |
| "grad_norm": 1.853948950767517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2876, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.39293478260869563, | |
| "grad_norm": 2.040001392364502, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6805, | |
| "step": 1446 | |
| }, | |
| { | |
| "epoch": 0.39320652173913045, | |
| "grad_norm": 1.4386035203933716, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1173, | |
| "step": 1447 | |
| }, | |
| { | |
| "epoch": 0.3934782608695652, | |
| "grad_norm": 1.48452889919281, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3276, | |
| "step": 1448 | |
| }, | |
| { | |
| "epoch": 0.39375, | |
| "grad_norm": 1.058858871459961, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5619, | |
| "step": 1449 | |
| }, | |
| { | |
| "epoch": 0.39402173913043476, | |
| "grad_norm": 2.131944179534912, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7079, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.3942934782608696, | |
| "grad_norm": 1.5783636569976807, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5856, | |
| "step": 1451 | |
| }, | |
| { | |
| "epoch": 0.39456521739130435, | |
| "grad_norm": 1.249036192893982, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0789, | |
| "step": 1452 | |
| }, | |
| { | |
| "epoch": 0.3948369565217391, | |
| "grad_norm": 1.6562423706054688, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9025, | |
| "step": 1453 | |
| }, | |
| { | |
| "epoch": 0.39510869565217394, | |
| "grad_norm": 1.6861488819122314, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5286, | |
| "step": 1454 | |
| }, | |
| { | |
| "epoch": 0.3953804347826087, | |
| "grad_norm": 1.503812313079834, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0909, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.39565217391304347, | |
| "grad_norm": 1.8524895906448364, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4152, | |
| "step": 1456 | |
| }, | |
| { | |
| "epoch": 0.39592391304347824, | |
| "grad_norm": 1.3550132513046265, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5444, | |
| "step": 1457 | |
| }, | |
| { | |
| "epoch": 0.39619565217391306, | |
| "grad_norm": 1.454416036605835, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6969, | |
| "step": 1458 | |
| }, | |
| { | |
| "epoch": 0.3964673913043478, | |
| "grad_norm": 1.5491242408752441, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4632, | |
| "step": 1459 | |
| }, | |
| { | |
| "epoch": 0.3967391304347826, | |
| "grad_norm": 1.177980661392212, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2844, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.3970108695652174, | |
| "grad_norm": 1.2985824346542358, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6743, | |
| "step": 1461 | |
| }, | |
| { | |
| "epoch": 0.3972826086956522, | |
| "grad_norm": 1.545882225036621, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4353, | |
| "step": 1462 | |
| }, | |
| { | |
| "epoch": 0.39755434782608695, | |
| "grad_norm": 1.3580163717269897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7926, | |
| "step": 1463 | |
| }, | |
| { | |
| "epoch": 0.3978260869565217, | |
| "grad_norm": 1.4254475831985474, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7358, | |
| "step": 1464 | |
| }, | |
| { | |
| "epoch": 0.39809782608695654, | |
| "grad_norm": 1.2237005233764648, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3169, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.3983695652173913, | |
| "grad_norm": 1.1622488498687744, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1261, | |
| "step": 1466 | |
| }, | |
| { | |
| "epoch": 0.3986413043478261, | |
| "grad_norm": 0.9447959065437317, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8757, | |
| "step": 1467 | |
| }, | |
| { | |
| "epoch": 0.39891304347826084, | |
| "grad_norm": 1.5178827047348022, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7686, | |
| "step": 1468 | |
| }, | |
| { | |
| "epoch": 0.39918478260869567, | |
| "grad_norm": 1.2081400156021118, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7978, | |
| "step": 1469 | |
| }, | |
| { | |
| "epoch": 0.39945652173913043, | |
| "grad_norm": 1.3333414793014526, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8005, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.3997282608695652, | |
| "grad_norm": 1.2284449338912964, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2384, | |
| "step": 1471 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.6261060237884521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6663, | |
| "step": 1472 | |
| }, | |
| { | |
| "epoch": 0.4002717391304348, | |
| "grad_norm": 1.2063846588134766, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2808, | |
| "step": 1473 | |
| }, | |
| { | |
| "epoch": 0.40054347826086956, | |
| "grad_norm": 1.1668663024902344, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6527, | |
| "step": 1474 | |
| }, | |
| { | |
| "epoch": 0.4008152173913043, | |
| "grad_norm": 1.1682161092758179, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0487, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.40108695652173915, | |
| "grad_norm": 1.2479826211929321, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8231, | |
| "step": 1476 | |
| }, | |
| { | |
| "epoch": 0.4013586956521739, | |
| "grad_norm": 1.3903083801269531, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3589, | |
| "step": 1477 | |
| }, | |
| { | |
| "epoch": 0.4016304347826087, | |
| "grad_norm": 2.756995677947998, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5956, | |
| "step": 1478 | |
| }, | |
| { | |
| "epoch": 0.4019021739130435, | |
| "grad_norm": 1.1125861406326294, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9482, | |
| "step": 1479 | |
| }, | |
| { | |
| "epoch": 0.40217391304347827, | |
| "grad_norm": 1.7494776248931885, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6373, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.40244565217391304, | |
| "grad_norm": 1.7383618354797363, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5196, | |
| "step": 1481 | |
| }, | |
| { | |
| "epoch": 0.4027173913043478, | |
| "grad_norm": 2.0332143306732178, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4659, | |
| "step": 1482 | |
| }, | |
| { | |
| "epoch": 0.4029891304347826, | |
| "grad_norm": 1.7549830675125122, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6027, | |
| "step": 1483 | |
| }, | |
| { | |
| "epoch": 0.4032608695652174, | |
| "grad_norm": 1.4020575284957886, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1451, | |
| "step": 1484 | |
| }, | |
| { | |
| "epoch": 0.40353260869565216, | |
| "grad_norm": 1.134787917137146, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6124, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.40380434782608693, | |
| "grad_norm": 1.6569262742996216, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8979, | |
| "step": 1486 | |
| }, | |
| { | |
| "epoch": 0.40407608695652175, | |
| "grad_norm": 1.7637388706207275, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8109, | |
| "step": 1487 | |
| }, | |
| { | |
| "epoch": 0.4043478260869565, | |
| "grad_norm": 1.2442704439163208, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6988, | |
| "step": 1488 | |
| }, | |
| { | |
| "epoch": 0.4046195652173913, | |
| "grad_norm": 1.0942063331604004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0464, | |
| "step": 1489 | |
| }, | |
| { | |
| "epoch": 0.4048913043478261, | |
| "grad_norm": 1.337386965751648, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8127, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.4051630434782609, | |
| "grad_norm": 1.7307548522949219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3854, | |
| "step": 1491 | |
| }, | |
| { | |
| "epoch": 0.40543478260869564, | |
| "grad_norm": 3.1193931102752686, | |
| "learning_rate": 3e-05, | |
| "loss": 4.9053, | |
| "step": 1492 | |
| }, | |
| { | |
| "epoch": 0.4057065217391304, | |
| "grad_norm": 1.5950829982757568, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6687, | |
| "step": 1493 | |
| }, | |
| { | |
| "epoch": 0.40597826086956523, | |
| "grad_norm": 1.6609991788864136, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4985, | |
| "step": 1494 | |
| }, | |
| { | |
| "epoch": 0.40625, | |
| "grad_norm": 1.824634075164795, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7576, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.40652173913043477, | |
| "grad_norm": 1.8175928592681885, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7615, | |
| "step": 1496 | |
| }, | |
| { | |
| "epoch": 0.4067934782608696, | |
| "grad_norm": 1.3160499334335327, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1647, | |
| "step": 1497 | |
| }, | |
| { | |
| "epoch": 0.40706521739130436, | |
| "grad_norm": 1.7744853496551514, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0396, | |
| "step": 1498 | |
| }, | |
| { | |
| "epoch": 0.4073369565217391, | |
| "grad_norm": 2.0241026878356934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8311, | |
| "step": 1499 | |
| }, | |
| { | |
| "epoch": 0.4076086956521739, | |
| "grad_norm": 1.7789679765701294, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9538, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.4078804347826087, | |
| "grad_norm": 1.4469610452651978, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3083, | |
| "step": 1501 | |
| }, | |
| { | |
| "epoch": 0.4081521739130435, | |
| "grad_norm": 1.570556879043579, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0793, | |
| "step": 1502 | |
| }, | |
| { | |
| "epoch": 0.40842391304347825, | |
| "grad_norm": 1.3964000940322876, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9603, | |
| "step": 1503 | |
| }, | |
| { | |
| "epoch": 0.40869565217391307, | |
| "grad_norm": 1.1800280809402466, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2305, | |
| "step": 1504 | |
| }, | |
| { | |
| "epoch": 0.40896739130434784, | |
| "grad_norm": 1.762847661972046, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7564, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.4092391304347826, | |
| "grad_norm": 1.5233442783355713, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2744, | |
| "step": 1506 | |
| }, | |
| { | |
| "epoch": 0.4095108695652174, | |
| "grad_norm": 1.9167475700378418, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6962, | |
| "step": 1507 | |
| }, | |
| { | |
| "epoch": 0.4097826086956522, | |
| "grad_norm": 1.949634075164795, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1084, | |
| "step": 1508 | |
| }, | |
| { | |
| "epoch": 0.41005434782608696, | |
| "grad_norm": 1.9483072757720947, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1895, | |
| "step": 1509 | |
| }, | |
| { | |
| "epoch": 0.41032608695652173, | |
| "grad_norm": 1.368509292602539, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6804, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.4105978260869565, | |
| "grad_norm": 1.6476247310638428, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1565, | |
| "step": 1511 | |
| }, | |
| { | |
| "epoch": 0.4108695652173913, | |
| "grad_norm": 2.14841890335083, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5723, | |
| "step": 1512 | |
| }, | |
| { | |
| "epoch": 0.4111413043478261, | |
| "grad_norm": 2.1899356842041016, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9324, | |
| "step": 1513 | |
| }, | |
| { | |
| "epoch": 0.41141304347826085, | |
| "grad_norm": 1.971421480178833, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3302, | |
| "step": 1514 | |
| }, | |
| { | |
| "epoch": 0.4116847826086957, | |
| "grad_norm": 1.555216908454895, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8559, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.41195652173913044, | |
| "grad_norm": 2.025975227355957, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8605, | |
| "step": 1516 | |
| }, | |
| { | |
| "epoch": 0.4122282608695652, | |
| "grad_norm": 1.609168291091919, | |
| "learning_rate": 3e-05, | |
| "loss": 3.521, | |
| "step": 1517 | |
| }, | |
| { | |
| "epoch": 0.4125, | |
| "grad_norm": 1.4011493921279907, | |
| "learning_rate": 3e-05, | |
| "loss": 2.905, | |
| "step": 1518 | |
| }, | |
| { | |
| "epoch": 0.4127717391304348, | |
| "grad_norm": 1.813499927520752, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9861, | |
| "step": 1519 | |
| }, | |
| { | |
| "epoch": 0.41304347826086957, | |
| "grad_norm": 2.023961067199707, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3814, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.41331521739130433, | |
| "grad_norm": 1.9356021881103516, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9743, | |
| "step": 1521 | |
| }, | |
| { | |
| "epoch": 0.41358695652173916, | |
| "grad_norm": 1.6765419244766235, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9504, | |
| "step": 1522 | |
| }, | |
| { | |
| "epoch": 0.4138586956521739, | |
| "grad_norm": 1.954119324684143, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7254, | |
| "step": 1523 | |
| }, | |
| { | |
| "epoch": 0.4141304347826087, | |
| "grad_norm": 1.5637805461883545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8543, | |
| "step": 1524 | |
| }, | |
| { | |
| "epoch": 0.41440217391304346, | |
| "grad_norm": 1.175187587738037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2871, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.4146739130434783, | |
| "grad_norm": 1.3228040933609009, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9662, | |
| "step": 1526 | |
| }, | |
| { | |
| "epoch": 0.41494565217391305, | |
| "grad_norm": 1.4776461124420166, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9617, | |
| "step": 1527 | |
| }, | |
| { | |
| "epoch": 0.4152173913043478, | |
| "grad_norm": 1.5169886350631714, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6288, | |
| "step": 1528 | |
| }, | |
| { | |
| "epoch": 0.4154891304347826, | |
| "grad_norm": 1.4785473346710205, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0807, | |
| "step": 1529 | |
| }, | |
| { | |
| "epoch": 0.4157608695652174, | |
| "grad_norm": 1.4565258026123047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3099, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.4160326086956522, | |
| "grad_norm": 1.2732715606689453, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2948, | |
| "step": 1531 | |
| }, | |
| { | |
| "epoch": 0.41630434782608694, | |
| "grad_norm": 1.0217174291610718, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0084, | |
| "step": 1532 | |
| }, | |
| { | |
| "epoch": 0.41657608695652176, | |
| "grad_norm": 1.2822792530059814, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9284, | |
| "step": 1533 | |
| }, | |
| { | |
| "epoch": 0.41684782608695653, | |
| "grad_norm": 1.641827940940857, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4684, | |
| "step": 1534 | |
| }, | |
| { | |
| "epoch": 0.4171195652173913, | |
| "grad_norm": 1.5296562910079956, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5836, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.41739130434782606, | |
| "grad_norm": 1.3570102453231812, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1323, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.4176630434782609, | |
| "grad_norm": 0.9901162981987, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7749, | |
| "step": 1537 | |
| }, | |
| { | |
| "epoch": 0.41793478260869565, | |
| "grad_norm": 1.2664462327957153, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1213, | |
| "step": 1538 | |
| }, | |
| { | |
| "epoch": 0.4182065217391304, | |
| "grad_norm": 1.682426929473877, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8292, | |
| "step": 1539 | |
| }, | |
| { | |
| "epoch": 0.41847826086956524, | |
| "grad_norm": 1.3441531658172607, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8989, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.41875, | |
| "grad_norm": 1.6413555145263672, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4333, | |
| "step": 1541 | |
| }, | |
| { | |
| "epoch": 0.4190217391304348, | |
| "grad_norm": 1.2601666450500488, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2815, | |
| "step": 1542 | |
| }, | |
| { | |
| "epoch": 0.41929347826086955, | |
| "grad_norm": 1.5582549571990967, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5814, | |
| "step": 1543 | |
| }, | |
| { | |
| "epoch": 0.41956521739130437, | |
| "grad_norm": 1.0220012664794922, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7242, | |
| "step": 1544 | |
| }, | |
| { | |
| "epoch": 0.41983695652173914, | |
| "grad_norm": 1.710802674293518, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5271, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.4201086956521739, | |
| "grad_norm": 2.3148810863494873, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0931, | |
| "step": 1546 | |
| }, | |
| { | |
| "epoch": 0.42038043478260867, | |
| "grad_norm": 1.4409066438674927, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4806, | |
| "step": 1547 | |
| }, | |
| { | |
| "epoch": 0.4206521739130435, | |
| "grad_norm": 1.4869251251220703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8039, | |
| "step": 1548 | |
| }, | |
| { | |
| "epoch": 0.42092391304347826, | |
| "grad_norm": 2.2359511852264404, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1842, | |
| "step": 1549 | |
| }, | |
| { | |
| "epoch": 0.421195652173913, | |
| "grad_norm": 2.2962846755981445, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8559, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.42146739130434785, | |
| "grad_norm": 1.5050580501556396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6678, | |
| "step": 1551 | |
| }, | |
| { | |
| "epoch": 0.4217391304347826, | |
| "grad_norm": 1.2851321697235107, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4175, | |
| "step": 1552 | |
| }, | |
| { | |
| "epoch": 0.4220108695652174, | |
| "grad_norm": 1.2671363353729248, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0412, | |
| "step": 1553 | |
| }, | |
| { | |
| "epoch": 0.42228260869565215, | |
| "grad_norm": 1.2936915159225464, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0918, | |
| "step": 1554 | |
| }, | |
| { | |
| "epoch": 0.422554347826087, | |
| "grad_norm": 1.4296749830245972, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3257, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.42282608695652174, | |
| "grad_norm": 1.2652101516723633, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5451, | |
| "step": 1556 | |
| }, | |
| { | |
| "epoch": 0.4230978260869565, | |
| "grad_norm": 1.4265552759170532, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4145, | |
| "step": 1557 | |
| }, | |
| { | |
| "epoch": 0.42336956521739133, | |
| "grad_norm": 1.3775297403335571, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3248, | |
| "step": 1558 | |
| }, | |
| { | |
| "epoch": 0.4236413043478261, | |
| "grad_norm": 1.8461849689483643, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9212, | |
| "step": 1559 | |
| }, | |
| { | |
| "epoch": 0.42391304347826086, | |
| "grad_norm": 1.412374496459961, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4737, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.42418478260869563, | |
| "grad_norm": 1.6272127628326416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4379, | |
| "step": 1561 | |
| }, | |
| { | |
| "epoch": 0.42445652173913045, | |
| "grad_norm": 1.754745364189148, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4164, | |
| "step": 1562 | |
| }, | |
| { | |
| "epoch": 0.4247282608695652, | |
| "grad_norm": 1.2676855325698853, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3106, | |
| "step": 1563 | |
| }, | |
| { | |
| "epoch": 0.425, | |
| "grad_norm": 1.7627284526824951, | |
| "learning_rate": 3e-05, | |
| "loss": 3.841, | |
| "step": 1564 | |
| }, | |
| { | |
| "epoch": 0.42527173913043476, | |
| "grad_norm": 1.143531084060669, | |
| "learning_rate": 3e-05, | |
| "loss": 2.82, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.4255434782608696, | |
| "grad_norm": 1.0254485607147217, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9556, | |
| "step": 1566 | |
| }, | |
| { | |
| "epoch": 0.42581521739130435, | |
| "grad_norm": 1.3668675422668457, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7105, | |
| "step": 1567 | |
| }, | |
| { | |
| "epoch": 0.4260869565217391, | |
| "grad_norm": 2.208592176437378, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4152, | |
| "step": 1568 | |
| }, | |
| { | |
| "epoch": 0.42635869565217394, | |
| "grad_norm": 1.1941901445388794, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3036, | |
| "step": 1569 | |
| }, | |
| { | |
| "epoch": 0.4266304347826087, | |
| "grad_norm": 1.536787748336792, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5194, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.42690217391304347, | |
| "grad_norm": 1.4054354429244995, | |
| "learning_rate": 3e-05, | |
| "loss": 3.263, | |
| "step": 1571 | |
| }, | |
| { | |
| "epoch": 0.42717391304347824, | |
| "grad_norm": 1.6471023559570312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4855, | |
| "step": 1572 | |
| }, | |
| { | |
| "epoch": 0.42744565217391306, | |
| "grad_norm": 0.9407289028167725, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1382, | |
| "step": 1573 | |
| }, | |
| { | |
| "epoch": 0.4277173913043478, | |
| "grad_norm": 1.244971513748169, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4904, | |
| "step": 1574 | |
| }, | |
| { | |
| "epoch": 0.4279891304347826, | |
| "grad_norm": 1.305245041847229, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3049, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.4282608695652174, | |
| "grad_norm": 1.2542614936828613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4931, | |
| "step": 1576 | |
| }, | |
| { | |
| "epoch": 0.4285326086956522, | |
| "grad_norm": 0.9759632349014282, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4818, | |
| "step": 1577 | |
| }, | |
| { | |
| "epoch": 0.42880434782608695, | |
| "grad_norm": 1.4484260082244873, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2934, | |
| "step": 1578 | |
| }, | |
| { | |
| "epoch": 0.4290760869565217, | |
| "grad_norm": 1.1898603439331055, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4601, | |
| "step": 1579 | |
| }, | |
| { | |
| "epoch": 0.42934782608695654, | |
| "grad_norm": 1.486471176147461, | |
| "learning_rate": 3e-05, | |
| "loss": 3.762, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.4296195652173913, | |
| "grad_norm": 1.2455788850784302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.721, | |
| "step": 1581 | |
| }, | |
| { | |
| "epoch": 0.4298913043478261, | |
| "grad_norm": 1.1194945573806763, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3573, | |
| "step": 1582 | |
| }, | |
| { | |
| "epoch": 0.43016304347826084, | |
| "grad_norm": 1.666195273399353, | |
| "learning_rate": 3e-05, | |
| "loss": 3.531, | |
| "step": 1583 | |
| }, | |
| { | |
| "epoch": 0.43043478260869567, | |
| "grad_norm": 1.192292332649231, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1313, | |
| "step": 1584 | |
| }, | |
| { | |
| "epoch": 0.43070652173913043, | |
| "grad_norm": 1.1080567836761475, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2448, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.4309782608695652, | |
| "grad_norm": 1.7732088565826416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4527, | |
| "step": 1586 | |
| }, | |
| { | |
| "epoch": 0.43125, | |
| "grad_norm": 1.2465471029281616, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8915, | |
| "step": 1587 | |
| }, | |
| { | |
| "epoch": 0.4315217391304348, | |
| "grad_norm": 1.43854820728302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2366, | |
| "step": 1588 | |
| }, | |
| { | |
| "epoch": 0.43179347826086956, | |
| "grad_norm": 1.3088682889938354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3654, | |
| "step": 1589 | |
| }, | |
| { | |
| "epoch": 0.4320652173913043, | |
| "grad_norm": 1.0148249864578247, | |
| "learning_rate": 3e-05, | |
| "loss": 3.083, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.43233695652173915, | |
| "grad_norm": 1.2219303846359253, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0298, | |
| "step": 1591 | |
| }, | |
| { | |
| "epoch": 0.4326086956521739, | |
| "grad_norm": 1.081207275390625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2116, | |
| "step": 1592 | |
| }, | |
| { | |
| "epoch": 0.4328804347826087, | |
| "grad_norm": 1.515478491783142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4368, | |
| "step": 1593 | |
| }, | |
| { | |
| "epoch": 0.4331521739130435, | |
| "grad_norm": 1.073887586593628, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1832, | |
| "step": 1594 | |
| }, | |
| { | |
| "epoch": 0.43342391304347827, | |
| "grad_norm": 1.1452287435531616, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5617, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.43369565217391304, | |
| "grad_norm": 1.512242078781128, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4468, | |
| "step": 1596 | |
| }, | |
| { | |
| "epoch": 0.4339673913043478, | |
| "grad_norm": 1.1862598657608032, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3795, | |
| "step": 1597 | |
| }, | |
| { | |
| "epoch": 0.4342391304347826, | |
| "grad_norm": 1.4696996212005615, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5239, | |
| "step": 1598 | |
| }, | |
| { | |
| "epoch": 0.4345108695652174, | |
| "grad_norm": 1.5985097885131836, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8709, | |
| "step": 1599 | |
| }, | |
| { | |
| "epoch": 0.43478260869565216, | |
| "grad_norm": 1.3134711980819702, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4582, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.43505434782608693, | |
| "grad_norm": 1.2656311988830566, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7025, | |
| "step": 1601 | |
| }, | |
| { | |
| "epoch": 0.43532608695652175, | |
| "grad_norm": 1.1780740022659302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3486, | |
| "step": 1602 | |
| }, | |
| { | |
| "epoch": 0.4355978260869565, | |
| "grad_norm": 1.2048178911209106, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3115, | |
| "step": 1603 | |
| }, | |
| { | |
| "epoch": 0.4358695652173913, | |
| "grad_norm": 1.3589476346969604, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2494, | |
| "step": 1604 | |
| }, | |
| { | |
| "epoch": 0.4361413043478261, | |
| "grad_norm": 1.3171757459640503, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8635, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.4364130434782609, | |
| "grad_norm": 1.1358094215393066, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9426, | |
| "step": 1606 | |
| }, | |
| { | |
| "epoch": 0.43668478260869564, | |
| "grad_norm": 1.376583456993103, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2611, | |
| "step": 1607 | |
| }, | |
| { | |
| "epoch": 0.4369565217391304, | |
| "grad_norm": 1.7452890872955322, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7445, | |
| "step": 1608 | |
| }, | |
| { | |
| "epoch": 0.43722826086956523, | |
| "grad_norm": 1.715166687965393, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4322, | |
| "step": 1609 | |
| }, | |
| { | |
| "epoch": 0.4375, | |
| "grad_norm": 1.3614475727081299, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5131, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.43777173913043477, | |
| "grad_norm": 1.4811841249465942, | |
| "learning_rate": 3e-05, | |
| "loss": 3.714, | |
| "step": 1611 | |
| }, | |
| { | |
| "epoch": 0.4380434782608696, | |
| "grad_norm": 1.071926236152649, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0734, | |
| "step": 1612 | |
| }, | |
| { | |
| "epoch": 0.43831521739130436, | |
| "grad_norm": 1.5618953704833984, | |
| "learning_rate": 3e-05, | |
| "loss": 3.876, | |
| "step": 1613 | |
| }, | |
| { | |
| "epoch": 0.4385869565217391, | |
| "grad_norm": 1.1414976119995117, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5318, | |
| "step": 1614 | |
| }, | |
| { | |
| "epoch": 0.4388586956521739, | |
| "grad_norm": 1.0475927591323853, | |
| "learning_rate": 3e-05, | |
| "loss": 3.293, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.4391304347826087, | |
| "grad_norm": 1.392609715461731, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9974, | |
| "step": 1616 | |
| }, | |
| { | |
| "epoch": 0.4394021739130435, | |
| "grad_norm": 0.906802237033844, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9825, | |
| "step": 1617 | |
| }, | |
| { | |
| "epoch": 0.43967391304347825, | |
| "grad_norm": 1.4019746780395508, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8698, | |
| "step": 1618 | |
| }, | |
| { | |
| "epoch": 0.43994565217391307, | |
| "grad_norm": 1.321829915046692, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7109, | |
| "step": 1619 | |
| }, | |
| { | |
| "epoch": 0.44021739130434784, | |
| "grad_norm": 1.0838751792907715, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6205, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.4404891304347826, | |
| "grad_norm": 2.383323907852173, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2267, | |
| "step": 1621 | |
| }, | |
| { | |
| "epoch": 0.4407608695652174, | |
| "grad_norm": 1.0130791664123535, | |
| "learning_rate": 3e-05, | |
| "loss": 2.788, | |
| "step": 1622 | |
| }, | |
| { | |
| "epoch": 0.4410326086956522, | |
| "grad_norm": 1.3985093832015991, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1002, | |
| "step": 1623 | |
| }, | |
| { | |
| "epoch": 0.44130434782608696, | |
| "grad_norm": 1.3819383382797241, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2219, | |
| "step": 1624 | |
| }, | |
| { | |
| "epoch": 0.44157608695652173, | |
| "grad_norm": 1.4096386432647705, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6886, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.4418478260869565, | |
| "grad_norm": 1.4819231033325195, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2191, | |
| "step": 1626 | |
| }, | |
| { | |
| "epoch": 0.4421195652173913, | |
| "grad_norm": 1.3344846963882446, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3927, | |
| "step": 1627 | |
| }, | |
| { | |
| "epoch": 0.4423913043478261, | |
| "grad_norm": 1.469277024269104, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4069, | |
| "step": 1628 | |
| }, | |
| { | |
| "epoch": 0.44266304347826085, | |
| "grad_norm": 1.0877147912979126, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9891, | |
| "step": 1629 | |
| }, | |
| { | |
| "epoch": 0.4429347826086957, | |
| "grad_norm": 1.1775012016296387, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3463, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.44320652173913044, | |
| "grad_norm": 1.240936517715454, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5751, | |
| "step": 1631 | |
| }, | |
| { | |
| "epoch": 0.4434782608695652, | |
| "grad_norm": 0.877865195274353, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7317, | |
| "step": 1632 | |
| }, | |
| { | |
| "epoch": 0.44375, | |
| "grad_norm": 1.1812602281570435, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0307, | |
| "step": 1633 | |
| }, | |
| { | |
| "epoch": 0.4440217391304348, | |
| "grad_norm": 1.7113842964172363, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1471, | |
| "step": 1634 | |
| }, | |
| { | |
| "epoch": 0.44429347826086957, | |
| "grad_norm": 1.1820313930511475, | |
| "learning_rate": 3e-05, | |
| "loss": 3.362, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.44456521739130433, | |
| "grad_norm": 1.3378487825393677, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5815, | |
| "step": 1636 | |
| }, | |
| { | |
| "epoch": 0.44483695652173916, | |
| "grad_norm": 0.8756458163261414, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6882, | |
| "step": 1637 | |
| }, | |
| { | |
| "epoch": 0.4451086956521739, | |
| "grad_norm": 1.020820140838623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1006, | |
| "step": 1638 | |
| }, | |
| { | |
| "epoch": 0.4453804347826087, | |
| "grad_norm": 1.5075011253356934, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0147, | |
| "step": 1639 | |
| }, | |
| { | |
| "epoch": 0.44565217391304346, | |
| "grad_norm": 1.1874289512634277, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0811, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.4459239130434783, | |
| "grad_norm": 1.5765689611434937, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4697, | |
| "step": 1641 | |
| }, | |
| { | |
| "epoch": 0.44619565217391305, | |
| "grad_norm": 1.3812084197998047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6468, | |
| "step": 1642 | |
| }, | |
| { | |
| "epoch": 0.4464673913043478, | |
| "grad_norm": 1.1477484703063965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2347, | |
| "step": 1643 | |
| }, | |
| { | |
| "epoch": 0.4467391304347826, | |
| "grad_norm": 1.431220531463623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8073, | |
| "step": 1644 | |
| }, | |
| { | |
| "epoch": 0.4470108695652174, | |
| "grad_norm": 1.0271748304367065, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2165, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.4472826086956522, | |
| "grad_norm": 1.2913216352462769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2766, | |
| "step": 1646 | |
| }, | |
| { | |
| "epoch": 0.44755434782608694, | |
| "grad_norm": 1.341670274734497, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8426, | |
| "step": 1647 | |
| }, | |
| { | |
| "epoch": 0.44782608695652176, | |
| "grad_norm": 1.1871728897094727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1251, | |
| "step": 1648 | |
| }, | |
| { | |
| "epoch": 0.44809782608695653, | |
| "grad_norm": 1.2151528596878052, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3811, | |
| "step": 1649 | |
| }, | |
| { | |
| "epoch": 0.4483695652173913, | |
| "grad_norm": 1.7120990753173828, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7093, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.44864130434782606, | |
| "grad_norm": 1.1526025533676147, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1806, | |
| "step": 1651 | |
| }, | |
| { | |
| "epoch": 0.4489130434782609, | |
| "grad_norm": 1.5061256885528564, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7246, | |
| "step": 1652 | |
| }, | |
| { | |
| "epoch": 0.44918478260869565, | |
| "grad_norm": 1.442798376083374, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8975, | |
| "step": 1653 | |
| }, | |
| { | |
| "epoch": 0.4494565217391304, | |
| "grad_norm": 1.5733429193496704, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7047, | |
| "step": 1654 | |
| }, | |
| { | |
| "epoch": 0.44972826086956524, | |
| "grad_norm": 1.5891181230545044, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5587, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.2529733180999756, | |
| "learning_rate": 3e-05, | |
| "loss": 3.079, | |
| "step": 1656 | |
| }, | |
| { | |
| "epoch": 0.4502717391304348, | |
| "grad_norm": 2.397771120071411, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2517, | |
| "step": 1657 | |
| }, | |
| { | |
| "epoch": 0.45054347826086955, | |
| "grad_norm": 1.5895618200302124, | |
| "learning_rate": 3e-05, | |
| "loss": 3.289, | |
| "step": 1658 | |
| }, | |
| { | |
| "epoch": 0.45081521739130437, | |
| "grad_norm": 1.3885931968688965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8401, | |
| "step": 1659 | |
| }, | |
| { | |
| "epoch": 0.45108695652173914, | |
| "grad_norm": 1.848649263381958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2375, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.4513586956521739, | |
| "grad_norm": 2.288536787033081, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0737, | |
| "step": 1661 | |
| }, | |
| { | |
| "epoch": 0.45163043478260867, | |
| "grad_norm": 1.5626355409622192, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6127, | |
| "step": 1662 | |
| }, | |
| { | |
| "epoch": 0.4519021739130435, | |
| "grad_norm": 1.2159314155578613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1681, | |
| "step": 1663 | |
| }, | |
| { | |
| "epoch": 0.45217391304347826, | |
| "grad_norm": 2.7480335235595703, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3753, | |
| "step": 1664 | |
| }, | |
| { | |
| "epoch": 0.452445652173913, | |
| "grad_norm": 1.5772864818572998, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4648, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.45271739130434785, | |
| "grad_norm": 2.1393795013427734, | |
| "learning_rate": 3e-05, | |
| "loss": 4.067, | |
| "step": 1666 | |
| }, | |
| { | |
| "epoch": 0.4529891304347826, | |
| "grad_norm": 1.48007071018219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0416, | |
| "step": 1667 | |
| }, | |
| { | |
| "epoch": 0.4532608695652174, | |
| "grad_norm": 1.3958046436309814, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9871, | |
| "step": 1668 | |
| }, | |
| { | |
| "epoch": 0.45353260869565215, | |
| "grad_norm": 1.6530804634094238, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1665, | |
| "step": 1669 | |
| }, | |
| { | |
| "epoch": 0.453804347826087, | |
| "grad_norm": 1.5180555582046509, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1545, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.45407608695652174, | |
| "grad_norm": 1.3946945667266846, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0606, | |
| "step": 1671 | |
| }, | |
| { | |
| "epoch": 0.4543478260869565, | |
| "grad_norm": 1.655504584312439, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5141, | |
| "step": 1672 | |
| }, | |
| { | |
| "epoch": 0.45461956521739133, | |
| "grad_norm": 1.8746256828308105, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2537, | |
| "step": 1673 | |
| }, | |
| { | |
| "epoch": 0.4548913043478261, | |
| "grad_norm": 1.6152852773666382, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1725, | |
| "step": 1674 | |
| }, | |
| { | |
| "epoch": 0.45516304347826086, | |
| "grad_norm": 2.030758857727051, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1911, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.45543478260869563, | |
| "grad_norm": 1.573011040687561, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7004, | |
| "step": 1676 | |
| }, | |
| { | |
| "epoch": 0.45570652173913045, | |
| "grad_norm": 1.7782936096191406, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4129, | |
| "step": 1677 | |
| }, | |
| { | |
| "epoch": 0.4559782608695652, | |
| "grad_norm": 1.6364409923553467, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7828, | |
| "step": 1678 | |
| }, | |
| { | |
| "epoch": 0.45625, | |
| "grad_norm": 1.095005750656128, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9575, | |
| "step": 1679 | |
| }, | |
| { | |
| "epoch": 0.45652173913043476, | |
| "grad_norm": 1.4323863983154297, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9855, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.4567934782608696, | |
| "grad_norm": 1.8281795978546143, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2274, | |
| "step": 1681 | |
| }, | |
| { | |
| "epoch": 0.45706521739130435, | |
| "grad_norm": 1.553568959236145, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4245, | |
| "step": 1682 | |
| }, | |
| { | |
| "epoch": 0.4573369565217391, | |
| "grad_norm": 1.229791522026062, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8749, | |
| "step": 1683 | |
| }, | |
| { | |
| "epoch": 0.45760869565217394, | |
| "grad_norm": 1.4264166355133057, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0987, | |
| "step": 1684 | |
| }, | |
| { | |
| "epoch": 0.4578804347826087, | |
| "grad_norm": 1.4009737968444824, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2024, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.45815217391304347, | |
| "grad_norm": 1.2461766004562378, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1414, | |
| "step": 1686 | |
| }, | |
| { | |
| "epoch": 0.45842391304347824, | |
| "grad_norm": 1.1765496730804443, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7237, | |
| "step": 1687 | |
| }, | |
| { | |
| "epoch": 0.45869565217391306, | |
| "grad_norm": 1.2187762260437012, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5997, | |
| "step": 1688 | |
| }, | |
| { | |
| "epoch": 0.4589673913043478, | |
| "grad_norm": 2.117086410522461, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5497, | |
| "step": 1689 | |
| }, | |
| { | |
| "epoch": 0.4592391304347826, | |
| "grad_norm": 1.2980027198791504, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6001, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.4595108695652174, | |
| "grad_norm": 1.1031181812286377, | |
| "learning_rate": 3e-05, | |
| "loss": 2.814, | |
| "step": 1691 | |
| }, | |
| { | |
| "epoch": 0.4597826086956522, | |
| "grad_norm": 1.201127052307129, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4627, | |
| "step": 1692 | |
| }, | |
| { | |
| "epoch": 0.46005434782608695, | |
| "grad_norm": 1.9975850582122803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8157, | |
| "step": 1693 | |
| }, | |
| { | |
| "epoch": 0.4603260869565217, | |
| "grad_norm": 1.4874250888824463, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5901, | |
| "step": 1694 | |
| }, | |
| { | |
| "epoch": 0.46059782608695654, | |
| "grad_norm": 1.9747381210327148, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8642, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.4608695652173913, | |
| "grad_norm": 1.405049443244934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3367, | |
| "step": 1696 | |
| }, | |
| { | |
| "epoch": 0.4611413043478261, | |
| "grad_norm": 1.4599591493606567, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6499, | |
| "step": 1697 | |
| }, | |
| { | |
| "epoch": 0.46141304347826084, | |
| "grad_norm": 1.6565916538238525, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5368, | |
| "step": 1698 | |
| }, | |
| { | |
| "epoch": 0.46168478260869567, | |
| "grad_norm": 1.327285647392273, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7457, | |
| "step": 1699 | |
| }, | |
| { | |
| "epoch": 0.46195652173913043, | |
| "grad_norm": 1.317681074142456, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3931, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.4622282608695652, | |
| "grad_norm": 1.7210628986358643, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0996, | |
| "step": 1701 | |
| }, | |
| { | |
| "epoch": 0.4625, | |
| "grad_norm": 1.4575893878936768, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5176, | |
| "step": 1702 | |
| }, | |
| { | |
| "epoch": 0.4627717391304348, | |
| "grad_norm": 0.8817341327667236, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9138, | |
| "step": 1703 | |
| }, | |
| { | |
| "epoch": 0.46304347826086956, | |
| "grad_norm": 1.6268318891525269, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3346, | |
| "step": 1704 | |
| }, | |
| { | |
| "epoch": 0.4633152173913043, | |
| "grad_norm": 1.281497836112976, | |
| "learning_rate": 3e-05, | |
| "loss": 2.776, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.46358695652173915, | |
| "grad_norm": 1.6672241687774658, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5205, | |
| "step": 1706 | |
| }, | |
| { | |
| "epoch": 0.4638586956521739, | |
| "grad_norm": 1.4224839210510254, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7986, | |
| "step": 1707 | |
| }, | |
| { | |
| "epoch": 0.4641304347826087, | |
| "grad_norm": 1.1987775564193726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2364, | |
| "step": 1708 | |
| }, | |
| { | |
| "epoch": 0.4644021739130435, | |
| "grad_norm": 1.4384889602661133, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6339, | |
| "step": 1709 | |
| }, | |
| { | |
| "epoch": 0.46467391304347827, | |
| "grad_norm": 1.1359198093414307, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2827, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.46494565217391304, | |
| "grad_norm": 1.3200854063034058, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2209, | |
| "step": 1711 | |
| }, | |
| { | |
| "epoch": 0.4652173913043478, | |
| "grad_norm": 1.613688588142395, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7578, | |
| "step": 1712 | |
| }, | |
| { | |
| "epoch": 0.4654891304347826, | |
| "grad_norm": 1.7866336107254028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9267, | |
| "step": 1713 | |
| }, | |
| { | |
| "epoch": 0.4657608695652174, | |
| "grad_norm": 2.0430612564086914, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6663, | |
| "step": 1714 | |
| }, | |
| { | |
| "epoch": 0.46603260869565216, | |
| "grad_norm": 1.848181128501892, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3196, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.46630434782608693, | |
| "grad_norm": 1.4686863422393799, | |
| "learning_rate": 3e-05, | |
| "loss": 3.352, | |
| "step": 1716 | |
| }, | |
| { | |
| "epoch": 0.46657608695652175, | |
| "grad_norm": 1.1621712446212769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1954, | |
| "step": 1717 | |
| }, | |
| { | |
| "epoch": 0.4668478260869565, | |
| "grad_norm": 1.2042642831802368, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7059, | |
| "step": 1718 | |
| }, | |
| { | |
| "epoch": 0.4671195652173913, | |
| "grad_norm": 2.4052507877349854, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6323, | |
| "step": 1719 | |
| }, | |
| { | |
| "epoch": 0.4673913043478261, | |
| "grad_norm": 0.8568060994148254, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5241, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.4676630434782609, | |
| "grad_norm": 0.9677852988243103, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5949, | |
| "step": 1721 | |
| }, | |
| { | |
| "epoch": 0.46793478260869564, | |
| "grad_norm": 1.2894710302352905, | |
| "learning_rate": 3e-05, | |
| "loss": 2.989, | |
| "step": 1722 | |
| }, | |
| { | |
| "epoch": 0.4682065217391304, | |
| "grad_norm": 1.5006341934204102, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6919, | |
| "step": 1723 | |
| }, | |
| { | |
| "epoch": 0.46847826086956523, | |
| "grad_norm": 1.2537362575531006, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3816, | |
| "step": 1724 | |
| }, | |
| { | |
| "epoch": 0.46875, | |
| "grad_norm": 1.333892822265625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3992, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.46902173913043477, | |
| "grad_norm": 1.1863936185836792, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1168, | |
| "step": 1726 | |
| }, | |
| { | |
| "epoch": 0.4692934782608696, | |
| "grad_norm": 1.2577567100524902, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4444, | |
| "step": 1727 | |
| }, | |
| { | |
| "epoch": 0.46956521739130436, | |
| "grad_norm": 1.2792398929595947, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8464, | |
| "step": 1728 | |
| }, | |
| { | |
| "epoch": 0.4698369565217391, | |
| "grad_norm": 2.50295090675354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2523, | |
| "step": 1729 | |
| }, | |
| { | |
| "epoch": 0.4701086956521739, | |
| "grad_norm": 1.842974066734314, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3193, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.4703804347826087, | |
| "grad_norm": 1.1899735927581787, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4586, | |
| "step": 1731 | |
| }, | |
| { | |
| "epoch": 0.4706521739130435, | |
| "grad_norm": 1.7522422075271606, | |
| "learning_rate": 3e-05, | |
| "loss": 3.44, | |
| "step": 1732 | |
| }, | |
| { | |
| "epoch": 0.47092391304347825, | |
| "grad_norm": 2.0558695793151855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3303, | |
| "step": 1733 | |
| }, | |
| { | |
| "epoch": 0.47119565217391307, | |
| "grad_norm": 1.5954774618148804, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3366, | |
| "step": 1734 | |
| }, | |
| { | |
| "epoch": 0.47146739130434784, | |
| "grad_norm": 1.4976911544799805, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7435, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.4717391304347826, | |
| "grad_norm": 1.4843627214431763, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0997, | |
| "step": 1736 | |
| }, | |
| { | |
| "epoch": 0.4720108695652174, | |
| "grad_norm": 1.7294570207595825, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4683, | |
| "step": 1737 | |
| }, | |
| { | |
| "epoch": 0.4722826086956522, | |
| "grad_norm": 1.7918407917022705, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5634, | |
| "step": 1738 | |
| }, | |
| { | |
| "epoch": 0.47255434782608696, | |
| "grad_norm": 1.458388090133667, | |
| "learning_rate": 3e-05, | |
| "loss": 3.724, | |
| "step": 1739 | |
| }, | |
| { | |
| "epoch": 0.47282608695652173, | |
| "grad_norm": 1.7183854579925537, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7768, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.4730978260869565, | |
| "grad_norm": 1.6272084712982178, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1361, | |
| "step": 1741 | |
| }, | |
| { | |
| "epoch": 0.4733695652173913, | |
| "grad_norm": 1.2989143133163452, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1645, | |
| "step": 1742 | |
| }, | |
| { | |
| "epoch": 0.4736413043478261, | |
| "grad_norm": 1.1352357864379883, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9178, | |
| "step": 1743 | |
| }, | |
| { | |
| "epoch": 0.47391304347826085, | |
| "grad_norm": 1.6457839012145996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8608, | |
| "step": 1744 | |
| }, | |
| { | |
| "epoch": 0.4741847826086957, | |
| "grad_norm": 1.3702744245529175, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8434, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.47445652173913044, | |
| "grad_norm": 1.2821577787399292, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0827, | |
| "step": 1746 | |
| }, | |
| { | |
| "epoch": 0.4747282608695652, | |
| "grad_norm": 1.4484645128250122, | |
| "learning_rate": 3e-05, | |
| "loss": 3.222, | |
| "step": 1747 | |
| }, | |
| { | |
| "epoch": 0.475, | |
| "grad_norm": 1.7668582201004028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2441, | |
| "step": 1748 | |
| }, | |
| { | |
| "epoch": 0.4752717391304348, | |
| "grad_norm": 1.764919400215149, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7584, | |
| "step": 1749 | |
| }, | |
| { | |
| "epoch": 0.47554347826086957, | |
| "grad_norm": 1.640993356704712, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3807, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.47581521739130433, | |
| "grad_norm": 1.6005651950836182, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2452, | |
| "step": 1751 | |
| }, | |
| { | |
| "epoch": 0.47608695652173916, | |
| "grad_norm": 1.421935796737671, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0033, | |
| "step": 1752 | |
| }, | |
| { | |
| "epoch": 0.4763586956521739, | |
| "grad_norm": 1.7603181600570679, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2585, | |
| "step": 1753 | |
| }, | |
| { | |
| "epoch": 0.4766304347826087, | |
| "grad_norm": 1.378020167350769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0198, | |
| "step": 1754 | |
| }, | |
| { | |
| "epoch": 0.47690217391304346, | |
| "grad_norm": 1.5630027055740356, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0409, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.4771739130434783, | |
| "grad_norm": 1.1110471487045288, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2521, | |
| "step": 1756 | |
| }, | |
| { | |
| "epoch": 0.47744565217391305, | |
| "grad_norm": 1.403874397277832, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9834, | |
| "step": 1757 | |
| }, | |
| { | |
| "epoch": 0.4777173913043478, | |
| "grad_norm": 1.2942219972610474, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1724, | |
| "step": 1758 | |
| }, | |
| { | |
| "epoch": 0.4779891304347826, | |
| "grad_norm": 1.152891993522644, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2869, | |
| "step": 1759 | |
| }, | |
| { | |
| "epoch": 0.4782608695652174, | |
| "grad_norm": 1.1676504611968994, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1582, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.4785326086956522, | |
| "grad_norm": 1.151185154914856, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3322, | |
| "step": 1761 | |
| }, | |
| { | |
| "epoch": 0.47880434782608694, | |
| "grad_norm": 1.4192887544631958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3659, | |
| "step": 1762 | |
| }, | |
| { | |
| "epoch": 0.47907608695652176, | |
| "grad_norm": 1.5391749143600464, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2556, | |
| "step": 1763 | |
| }, | |
| { | |
| "epoch": 0.47934782608695653, | |
| "grad_norm": 1.4667121171951294, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8051, | |
| "step": 1764 | |
| }, | |
| { | |
| "epoch": 0.4796195652173913, | |
| "grad_norm": 1.3416023254394531, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4739, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.47989130434782606, | |
| "grad_norm": 1.7379714250564575, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2991, | |
| "step": 1766 | |
| }, | |
| { | |
| "epoch": 0.4801630434782609, | |
| "grad_norm": 1.6660994291305542, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1885, | |
| "step": 1767 | |
| }, | |
| { | |
| "epoch": 0.48043478260869565, | |
| "grad_norm": 1.9886304140090942, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3295, | |
| "step": 1768 | |
| }, | |
| { | |
| "epoch": 0.4807065217391304, | |
| "grad_norm": 2.345217227935791, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6796, | |
| "step": 1769 | |
| }, | |
| { | |
| "epoch": 0.48097826086956524, | |
| "grad_norm": 1.6064239740371704, | |
| "learning_rate": 3e-05, | |
| "loss": 2.977, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.48125, | |
| "grad_norm": 1.7928065061569214, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5848, | |
| "step": 1771 | |
| }, | |
| { | |
| "epoch": 0.4815217391304348, | |
| "grad_norm": 1.5981751680374146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1049, | |
| "step": 1772 | |
| }, | |
| { | |
| "epoch": 0.48179347826086955, | |
| "grad_norm": 1.6446112394332886, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9694, | |
| "step": 1773 | |
| }, | |
| { | |
| "epoch": 0.48206521739130437, | |
| "grad_norm": 1.8991427421569824, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3139, | |
| "step": 1774 | |
| }, | |
| { | |
| "epoch": 0.48233695652173914, | |
| "grad_norm": 2.3716299533843994, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9361, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.4826086956521739, | |
| "grad_norm": 1.8573355674743652, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1394, | |
| "step": 1776 | |
| }, | |
| { | |
| "epoch": 0.48288043478260867, | |
| "grad_norm": 1.8625982999801636, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8017, | |
| "step": 1777 | |
| }, | |
| { | |
| "epoch": 0.4831521739130435, | |
| "grad_norm": 2.2275497913360596, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4565, | |
| "step": 1778 | |
| }, | |
| { | |
| "epoch": 0.48342391304347826, | |
| "grad_norm": 1.9856013059616089, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5517, | |
| "step": 1779 | |
| }, | |
| { | |
| "epoch": 0.483695652173913, | |
| "grad_norm": 1.3510781526565552, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1976, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.48396739130434785, | |
| "grad_norm": 1.0482627153396606, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0025, | |
| "step": 1781 | |
| }, | |
| { | |
| "epoch": 0.4842391304347826, | |
| "grad_norm": 1.3749173879623413, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2774, | |
| "step": 1782 | |
| }, | |
| { | |
| "epoch": 0.4845108695652174, | |
| "grad_norm": 1.405247449874878, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4099, | |
| "step": 1783 | |
| }, | |
| { | |
| "epoch": 0.48478260869565215, | |
| "grad_norm": 1.2630794048309326, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7092, | |
| "step": 1784 | |
| }, | |
| { | |
| "epoch": 0.485054347826087, | |
| "grad_norm": 1.1290115118026733, | |
| "learning_rate": 3e-05, | |
| "loss": 3.432, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.48532608695652174, | |
| "grad_norm": 1.2523903846740723, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1431, | |
| "step": 1786 | |
| }, | |
| { | |
| "epoch": 0.4855978260869565, | |
| "grad_norm": 1.218867540359497, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4881, | |
| "step": 1787 | |
| }, | |
| { | |
| "epoch": 0.48586956521739133, | |
| "grad_norm": 1.6144737005233765, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0674, | |
| "step": 1788 | |
| }, | |
| { | |
| "epoch": 0.4861413043478261, | |
| "grad_norm": 1.4506441354751587, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5498, | |
| "step": 1789 | |
| }, | |
| { | |
| "epoch": 0.48641304347826086, | |
| "grad_norm": 1.5907642841339111, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1341, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.48668478260869563, | |
| "grad_norm": 1.18953275680542, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0503, | |
| "step": 1791 | |
| }, | |
| { | |
| "epoch": 0.48695652173913045, | |
| "grad_norm": 1.2186987400054932, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7508, | |
| "step": 1792 | |
| }, | |
| { | |
| "epoch": 0.4872282608695652, | |
| "grad_norm": 1.111574649810791, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8275, | |
| "step": 1793 | |
| }, | |
| { | |
| "epoch": 0.4875, | |
| "grad_norm": 1.275896430015564, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4576, | |
| "step": 1794 | |
| }, | |
| { | |
| "epoch": 0.48777173913043476, | |
| "grad_norm": 1.182626485824585, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9678, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.4880434782608696, | |
| "grad_norm": 1.098120927810669, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1544, | |
| "step": 1796 | |
| }, | |
| { | |
| "epoch": 0.48831521739130435, | |
| "grad_norm": 0.9524686932563782, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0739, | |
| "step": 1797 | |
| }, | |
| { | |
| "epoch": 0.4885869565217391, | |
| "grad_norm": 1.302868366241455, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3036, | |
| "step": 1798 | |
| }, | |
| { | |
| "epoch": 0.48885869565217394, | |
| "grad_norm": 1.124603271484375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2132, | |
| "step": 1799 | |
| }, | |
| { | |
| "epoch": 0.4891304347826087, | |
| "grad_norm": 1.3126977682113647, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6832, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.48940217391304347, | |
| "grad_norm": 1.436482310295105, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9607, | |
| "step": 1801 | |
| }, | |
| { | |
| "epoch": 0.48967391304347824, | |
| "grad_norm": 1.6329675912857056, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6751, | |
| "step": 1802 | |
| }, | |
| { | |
| "epoch": 0.48994565217391306, | |
| "grad_norm": 1.2873584032058716, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3793, | |
| "step": 1803 | |
| }, | |
| { | |
| "epoch": 0.4902173913043478, | |
| "grad_norm": 1.0539244413375854, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0761, | |
| "step": 1804 | |
| }, | |
| { | |
| "epoch": 0.4904891304347826, | |
| "grad_norm": 1.7557803392410278, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0488, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.4907608695652174, | |
| "grad_norm": 1.8407940864562988, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1567, | |
| "step": 1806 | |
| }, | |
| { | |
| "epoch": 0.4910326086956522, | |
| "grad_norm": 1.3472580909729004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2331, | |
| "step": 1807 | |
| }, | |
| { | |
| "epoch": 0.49130434782608695, | |
| "grad_norm": 1.093360424041748, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1883, | |
| "step": 1808 | |
| }, | |
| { | |
| "epoch": 0.4915760869565217, | |
| "grad_norm": 1.650109887123108, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7661, | |
| "step": 1809 | |
| }, | |
| { | |
| "epoch": 0.49184782608695654, | |
| "grad_norm": 1.454372525215149, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1205, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.4921195652173913, | |
| "grad_norm": 1.4370765686035156, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8767, | |
| "step": 1811 | |
| }, | |
| { | |
| "epoch": 0.4923913043478261, | |
| "grad_norm": 2.6512532234191895, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0366, | |
| "step": 1812 | |
| }, | |
| { | |
| "epoch": 0.49266304347826084, | |
| "grad_norm": 2.1347718238830566, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5459, | |
| "step": 1813 | |
| }, | |
| { | |
| "epoch": 0.49293478260869567, | |
| "grad_norm": 1.5831135511398315, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4828, | |
| "step": 1814 | |
| }, | |
| { | |
| "epoch": 0.49320652173913043, | |
| "grad_norm": 1.0648609399795532, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0868, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.4934782608695652, | |
| "grad_norm": 1.467032551765442, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6537, | |
| "step": 1816 | |
| }, | |
| { | |
| "epoch": 0.49375, | |
| "grad_norm": 1.2471240758895874, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2671, | |
| "step": 1817 | |
| }, | |
| { | |
| "epoch": 0.4940217391304348, | |
| "grad_norm": 0.9745358228683472, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4916, | |
| "step": 1818 | |
| }, | |
| { | |
| "epoch": 0.49429347826086956, | |
| "grad_norm": 1.3291053771972656, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7441, | |
| "step": 1819 | |
| }, | |
| { | |
| "epoch": 0.4945652173913043, | |
| "grad_norm": 1.3420668840408325, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3106, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.49483695652173915, | |
| "grad_norm": 1.0097370147705078, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1215, | |
| "step": 1821 | |
| }, | |
| { | |
| "epoch": 0.4951086956521739, | |
| "grad_norm": 1.1169931888580322, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9595, | |
| "step": 1822 | |
| }, | |
| { | |
| "epoch": 0.4953804347826087, | |
| "grad_norm": 1.1370187997817993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5224, | |
| "step": 1823 | |
| }, | |
| { | |
| "epoch": 0.4956521739130435, | |
| "grad_norm": 1.5470448732376099, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9269, | |
| "step": 1824 | |
| }, | |
| { | |
| "epoch": 0.49592391304347827, | |
| "grad_norm": 1.000895380973816, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9649, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.49619565217391304, | |
| "grad_norm": 1.2407017946243286, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5568, | |
| "step": 1826 | |
| }, | |
| { | |
| "epoch": 0.4964673913043478, | |
| "grad_norm": 1.226485013961792, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5796, | |
| "step": 1827 | |
| }, | |
| { | |
| "epoch": 0.4967391304347826, | |
| "grad_norm": 1.1970787048339844, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8772, | |
| "step": 1828 | |
| }, | |
| { | |
| "epoch": 0.4970108695652174, | |
| "grad_norm": 1.2727575302124023, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6501, | |
| "step": 1829 | |
| }, | |
| { | |
| "epoch": 0.49728260869565216, | |
| "grad_norm": 1.2288799285888672, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0078, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.49755434782608693, | |
| "grad_norm": 1.688232660293579, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2542, | |
| "step": 1831 | |
| }, | |
| { | |
| "epoch": 0.49782608695652175, | |
| "grad_norm": 1.6907511949539185, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6193, | |
| "step": 1832 | |
| }, | |
| { | |
| "epoch": 0.4980978260869565, | |
| "grad_norm": 1.1391464471817017, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3688, | |
| "step": 1833 | |
| }, | |
| { | |
| "epoch": 0.4983695652173913, | |
| "grad_norm": 1.2691822052001953, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2763, | |
| "step": 1834 | |
| }, | |
| { | |
| "epoch": 0.4986413043478261, | |
| "grad_norm": 1.1176376342773438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0038, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.4989130434782609, | |
| "grad_norm": 0.9173082709312439, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7976, | |
| "step": 1836 | |
| }, | |
| { | |
| "epoch": 0.49918478260869564, | |
| "grad_norm": 1.0989121198654175, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0254, | |
| "step": 1837 | |
| }, | |
| { | |
| "epoch": 0.4994565217391304, | |
| "grad_norm": 1.2885278463363647, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2596, | |
| "step": 1838 | |
| }, | |
| { | |
| "epoch": 0.49972826086956523, | |
| "grad_norm": 1.1294320821762085, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0833, | |
| "step": 1839 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.9139266014099121, | |
| "learning_rate": 3e-05, | |
| "loss": 3.043, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.5002717391304348, | |
| "grad_norm": 0.9999741315841675, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6746, | |
| "step": 1841 | |
| }, | |
| { | |
| "epoch": 0.5005434782608695, | |
| "grad_norm": 2.3439218997955322, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0552, | |
| "step": 1842 | |
| }, | |
| { | |
| "epoch": 0.5008152173913043, | |
| "grad_norm": 1.3185632228851318, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4046, | |
| "step": 1843 | |
| }, | |
| { | |
| "epoch": 0.5010869565217392, | |
| "grad_norm": 1.4254107475280762, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5323, | |
| "step": 1844 | |
| }, | |
| { | |
| "epoch": 0.501358695652174, | |
| "grad_norm": 1.0706696510314941, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0672, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.5016304347826087, | |
| "grad_norm": 1.316158413887024, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3709, | |
| "step": 1846 | |
| }, | |
| { | |
| "epoch": 0.5019021739130435, | |
| "grad_norm": 1.2480392456054688, | |
| "learning_rate": 3e-05, | |
| "loss": 3.673, | |
| "step": 1847 | |
| }, | |
| { | |
| "epoch": 0.5021739130434782, | |
| "grad_norm": 1.596869707107544, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3651, | |
| "step": 1848 | |
| }, | |
| { | |
| "epoch": 0.502445652173913, | |
| "grad_norm": 1.6036261320114136, | |
| "learning_rate": 3e-05, | |
| "loss": 3.68, | |
| "step": 1849 | |
| }, | |
| { | |
| "epoch": 0.5027173913043478, | |
| "grad_norm": 1.2420178651809692, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9173, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.5029891304347827, | |
| "grad_norm": 1.588339924812317, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7931, | |
| "step": 1851 | |
| }, | |
| { | |
| "epoch": 0.5032608695652174, | |
| "grad_norm": 1.2808369398117065, | |
| "learning_rate": 3e-05, | |
| "loss": 3.05, | |
| "step": 1852 | |
| }, | |
| { | |
| "epoch": 0.5035326086956522, | |
| "grad_norm": 1.234026551246643, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3401, | |
| "step": 1853 | |
| }, | |
| { | |
| "epoch": 0.503804347826087, | |
| "grad_norm": 1.6384221315383911, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5597, | |
| "step": 1854 | |
| }, | |
| { | |
| "epoch": 0.5040760869565217, | |
| "grad_norm": 1.8604109287261963, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6333, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.5043478260869565, | |
| "grad_norm": 1.4895154237747192, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4255, | |
| "step": 1856 | |
| }, | |
| { | |
| "epoch": 0.5046195652173913, | |
| "grad_norm": 1.3410334587097168, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7753, | |
| "step": 1857 | |
| }, | |
| { | |
| "epoch": 0.5048913043478261, | |
| "grad_norm": 1.446807861328125, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1229, | |
| "step": 1858 | |
| }, | |
| { | |
| "epoch": 0.5051630434782609, | |
| "grad_norm": 1.188097596168518, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2846, | |
| "step": 1859 | |
| }, | |
| { | |
| "epoch": 0.5054347826086957, | |
| "grad_norm": 1.2120987176895142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.147, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.5057065217391304, | |
| "grad_norm": 1.5114277601242065, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0952, | |
| "step": 1861 | |
| }, | |
| { | |
| "epoch": 0.5059782608695652, | |
| "grad_norm": 1.360628366470337, | |
| "learning_rate": 3e-05, | |
| "loss": 2.907, | |
| "step": 1862 | |
| }, | |
| { | |
| "epoch": 0.50625, | |
| "grad_norm": 1.4083906412124634, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4222, | |
| "step": 1863 | |
| }, | |
| { | |
| "epoch": 0.5065217391304347, | |
| "grad_norm": 1.245968222618103, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5063, | |
| "step": 1864 | |
| }, | |
| { | |
| "epoch": 0.5067934782608695, | |
| "grad_norm": 1.3542323112487793, | |
| "learning_rate": 3e-05, | |
| "loss": 3.981, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.5070652173913044, | |
| "grad_norm": 2.4021403789520264, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9367, | |
| "step": 1866 | |
| }, | |
| { | |
| "epoch": 0.5073369565217392, | |
| "grad_norm": 1.4897315502166748, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7825, | |
| "step": 1867 | |
| }, | |
| { | |
| "epoch": 0.5076086956521739, | |
| "grad_norm": 1.7353469133377075, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5709, | |
| "step": 1868 | |
| }, | |
| { | |
| "epoch": 0.5078804347826087, | |
| "grad_norm": 1.216436505317688, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1314, | |
| "step": 1869 | |
| }, | |
| { | |
| "epoch": 0.5081521739130435, | |
| "grad_norm": 1.2940754890441895, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3858, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.5084239130434782, | |
| "grad_norm": 0.9794735312461853, | |
| "learning_rate": 3e-05, | |
| "loss": 2.823, | |
| "step": 1871 | |
| }, | |
| { | |
| "epoch": 0.508695652173913, | |
| "grad_norm": 1.4511076211929321, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0647, | |
| "step": 1872 | |
| }, | |
| { | |
| "epoch": 0.5089673913043479, | |
| "grad_norm": 1.559597373008728, | |
| "learning_rate": 3e-05, | |
| "loss": 3.093, | |
| "step": 1873 | |
| }, | |
| { | |
| "epoch": 0.5092391304347826, | |
| "grad_norm": 1.7877241373062134, | |
| "learning_rate": 3e-05, | |
| "loss": 3.742, | |
| "step": 1874 | |
| }, | |
| { | |
| "epoch": 0.5095108695652174, | |
| "grad_norm": 1.585822582244873, | |
| "learning_rate": 3e-05, | |
| "loss": 3.631, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.5097826086956522, | |
| "grad_norm": 1.284661054611206, | |
| "learning_rate": 3e-05, | |
| "loss": 3.228, | |
| "step": 1876 | |
| }, | |
| { | |
| "epoch": 0.5100543478260869, | |
| "grad_norm": 1.295775294303894, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7922, | |
| "step": 1877 | |
| }, | |
| { | |
| "epoch": 0.5103260869565217, | |
| "grad_norm": 1.2768347263336182, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0369, | |
| "step": 1878 | |
| }, | |
| { | |
| "epoch": 0.5105978260869565, | |
| "grad_norm": 1.4773716926574707, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3407, | |
| "step": 1879 | |
| }, | |
| { | |
| "epoch": 0.5108695652173914, | |
| "grad_norm": 1.2089797258377075, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0556, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.5111413043478261, | |
| "grad_norm": 1.4668608903884888, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4665, | |
| "step": 1881 | |
| }, | |
| { | |
| "epoch": 0.5114130434782609, | |
| "grad_norm": 1.998399257659912, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8042, | |
| "step": 1882 | |
| }, | |
| { | |
| "epoch": 0.5116847826086957, | |
| "grad_norm": 1.448439598083496, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7303, | |
| "step": 1883 | |
| }, | |
| { | |
| "epoch": 0.5119565217391304, | |
| "grad_norm": 0.9362940192222595, | |
| "learning_rate": 3e-05, | |
| "loss": 2.665, | |
| "step": 1884 | |
| }, | |
| { | |
| "epoch": 0.5122282608695652, | |
| "grad_norm": 1.6077910661697388, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4826, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.5125, | |
| "grad_norm": 1.5588127374649048, | |
| "learning_rate": 3e-05, | |
| "loss": 2.837, | |
| "step": 1886 | |
| }, | |
| { | |
| "epoch": 0.5127717391304348, | |
| "grad_norm": 1.473461389541626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2511, | |
| "step": 1887 | |
| }, | |
| { | |
| "epoch": 0.5130434782608696, | |
| "grad_norm": 1.6276698112487793, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5821, | |
| "step": 1888 | |
| }, | |
| { | |
| "epoch": 0.5133152173913044, | |
| "grad_norm": 1.6201423406600952, | |
| "learning_rate": 3e-05, | |
| "loss": 3.195, | |
| "step": 1889 | |
| }, | |
| { | |
| "epoch": 0.5135869565217391, | |
| "grad_norm": 1.6411088705062866, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1111, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.5138586956521739, | |
| "grad_norm": 1.2973185777664185, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6324, | |
| "step": 1891 | |
| }, | |
| { | |
| "epoch": 0.5141304347826087, | |
| "grad_norm": 2.3508846759796143, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7621, | |
| "step": 1892 | |
| }, | |
| { | |
| "epoch": 0.5144021739130434, | |
| "grad_norm": 1.718246340751648, | |
| "learning_rate": 3e-05, | |
| "loss": 3.759, | |
| "step": 1893 | |
| }, | |
| { | |
| "epoch": 0.5146739130434783, | |
| "grad_norm": 1.5278515815734863, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2879, | |
| "step": 1894 | |
| }, | |
| { | |
| "epoch": 0.5149456521739131, | |
| "grad_norm": 1.308929681777954, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1075, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.5152173913043478, | |
| "grad_norm": 1.40326988697052, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5211, | |
| "step": 1896 | |
| }, | |
| { | |
| "epoch": 0.5154891304347826, | |
| "grad_norm": 1.5129557847976685, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2549, | |
| "step": 1897 | |
| }, | |
| { | |
| "epoch": 0.5157608695652174, | |
| "grad_norm": 1.1121063232421875, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9461, | |
| "step": 1898 | |
| }, | |
| { | |
| "epoch": 0.5160326086956522, | |
| "grad_norm": 1.4302382469177246, | |
| "learning_rate": 3e-05, | |
| "loss": 3.627, | |
| "step": 1899 | |
| }, | |
| { | |
| "epoch": 0.5163043478260869, | |
| "grad_norm": 1.6184173822402954, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9157, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.5165760869565217, | |
| "grad_norm": 1.1715192794799805, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9425, | |
| "step": 1901 | |
| }, | |
| { | |
| "epoch": 0.5168478260869566, | |
| "grad_norm": 2.065067768096924, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3198, | |
| "step": 1902 | |
| }, | |
| { | |
| "epoch": 0.5171195652173913, | |
| "grad_norm": 1.6942062377929688, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4998, | |
| "step": 1903 | |
| }, | |
| { | |
| "epoch": 0.5173913043478261, | |
| "grad_norm": 1.4067577123641968, | |
| "learning_rate": 3e-05, | |
| "loss": 3.501, | |
| "step": 1904 | |
| }, | |
| { | |
| "epoch": 0.5176630434782609, | |
| "grad_norm": 1.5794352293014526, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1307, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.5179347826086956, | |
| "grad_norm": 1.5043370723724365, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8714, | |
| "step": 1906 | |
| }, | |
| { | |
| "epoch": 0.5182065217391304, | |
| "grad_norm": 1.2884905338287354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4459, | |
| "step": 1907 | |
| }, | |
| { | |
| "epoch": 0.5184782608695652, | |
| "grad_norm": 1.3515671491622925, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5685, | |
| "step": 1908 | |
| }, | |
| { | |
| "epoch": 0.51875, | |
| "grad_norm": 1.2115263938903809, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2234, | |
| "step": 1909 | |
| }, | |
| { | |
| "epoch": 0.5190217391304348, | |
| "grad_norm": 1.3323924541473389, | |
| "learning_rate": 3e-05, | |
| "loss": 3.42, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.5192934782608696, | |
| "grad_norm": 1.2837159633636475, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0186, | |
| "step": 1911 | |
| }, | |
| { | |
| "epoch": 0.5195652173913043, | |
| "grad_norm": 1.1012318134307861, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9993, | |
| "step": 1912 | |
| }, | |
| { | |
| "epoch": 0.5198369565217391, | |
| "grad_norm": 1.253250002861023, | |
| "learning_rate": 3e-05, | |
| "loss": 3.371, | |
| "step": 1913 | |
| }, | |
| { | |
| "epoch": 0.5201086956521739, | |
| "grad_norm": 0.9867514967918396, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7761, | |
| "step": 1914 | |
| }, | |
| { | |
| "epoch": 0.5203804347826086, | |
| "grad_norm": 1.0671703815460205, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5288, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.5206521739130435, | |
| "grad_norm": 0.9514213800430298, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7831, | |
| "step": 1916 | |
| }, | |
| { | |
| "epoch": 0.5209239130434783, | |
| "grad_norm": 1.3238492012023926, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1874, | |
| "step": 1917 | |
| }, | |
| { | |
| "epoch": 0.5211956521739131, | |
| "grad_norm": 1.5408774614334106, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1997, | |
| "step": 1918 | |
| }, | |
| { | |
| "epoch": 0.5214673913043478, | |
| "grad_norm": 1.1520451307296753, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1287, | |
| "step": 1919 | |
| }, | |
| { | |
| "epoch": 0.5217391304347826, | |
| "grad_norm": 1.5356744527816772, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5281, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.5220108695652174, | |
| "grad_norm": 1.0727752447128296, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0281, | |
| "step": 1921 | |
| }, | |
| { | |
| "epoch": 0.5222826086956521, | |
| "grad_norm": 1.4737282991409302, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0763, | |
| "step": 1922 | |
| }, | |
| { | |
| "epoch": 0.522554347826087, | |
| "grad_norm": 1.2212142944335938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6583, | |
| "step": 1923 | |
| }, | |
| { | |
| "epoch": 0.5228260869565218, | |
| "grad_norm": 1.42307710647583, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9908, | |
| "step": 1924 | |
| }, | |
| { | |
| "epoch": 0.5230978260869565, | |
| "grad_norm": 1.2455291748046875, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7686, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.5233695652173913, | |
| "grad_norm": 1.1880292892456055, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8165, | |
| "step": 1926 | |
| }, | |
| { | |
| "epoch": 0.5236413043478261, | |
| "grad_norm": 1.329270362854004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.421, | |
| "step": 1927 | |
| }, | |
| { | |
| "epoch": 0.5239130434782608, | |
| "grad_norm": 1.8960901498794556, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0437, | |
| "step": 1928 | |
| }, | |
| { | |
| "epoch": 0.5241847826086956, | |
| "grad_norm": 1.223769187927246, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3202, | |
| "step": 1929 | |
| }, | |
| { | |
| "epoch": 0.5244565217391305, | |
| "grad_norm": 1.5670809745788574, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6805, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.5247282608695653, | |
| "grad_norm": 1.9276920557022095, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4752, | |
| "step": 1931 | |
| }, | |
| { | |
| "epoch": 0.525, | |
| "grad_norm": 1.7917790412902832, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4745, | |
| "step": 1932 | |
| }, | |
| { | |
| "epoch": 0.5252717391304348, | |
| "grad_norm": 1.1239231824874878, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9622, | |
| "step": 1933 | |
| }, | |
| { | |
| "epoch": 0.5255434782608696, | |
| "grad_norm": 1.2177050113677979, | |
| "learning_rate": 3e-05, | |
| "loss": 2.691, | |
| "step": 1934 | |
| }, | |
| { | |
| "epoch": 0.5258152173913043, | |
| "grad_norm": 1.1526474952697754, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7313, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.5260869565217391, | |
| "grad_norm": 1.3403104543685913, | |
| "learning_rate": 3e-05, | |
| "loss": 3.215, | |
| "step": 1936 | |
| }, | |
| { | |
| "epoch": 0.5263586956521739, | |
| "grad_norm": 1.1823958158493042, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0382, | |
| "step": 1937 | |
| }, | |
| { | |
| "epoch": 0.5266304347826087, | |
| "grad_norm": 1.2578941583633423, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9962, | |
| "step": 1938 | |
| }, | |
| { | |
| "epoch": 0.5269021739130435, | |
| "grad_norm": 1.2342561483383179, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6362, | |
| "step": 1939 | |
| }, | |
| { | |
| "epoch": 0.5271739130434783, | |
| "grad_norm": 1.0608114004135132, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2318, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.527445652173913, | |
| "grad_norm": 1.2502753734588623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1968, | |
| "step": 1941 | |
| }, | |
| { | |
| "epoch": 0.5277173913043478, | |
| "grad_norm": 0.9138715267181396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0353, | |
| "step": 1942 | |
| }, | |
| { | |
| "epoch": 0.5279891304347826, | |
| "grad_norm": 1.257987380027771, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5984, | |
| "step": 1943 | |
| }, | |
| { | |
| "epoch": 0.5282608695652173, | |
| "grad_norm": 1.5247408151626587, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4343, | |
| "step": 1944 | |
| }, | |
| { | |
| "epoch": 0.5285326086956522, | |
| "grad_norm": 1.1517291069030762, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7655, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.528804347826087, | |
| "grad_norm": 1.5516606569290161, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9976, | |
| "step": 1946 | |
| }, | |
| { | |
| "epoch": 0.5290760869565218, | |
| "grad_norm": 1.6808713674545288, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6858, | |
| "step": 1947 | |
| }, | |
| { | |
| "epoch": 0.5293478260869565, | |
| "grad_norm": 1.0476493835449219, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8933, | |
| "step": 1948 | |
| }, | |
| { | |
| "epoch": 0.5296195652173913, | |
| "grad_norm": 1.7297437191009521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1754, | |
| "step": 1949 | |
| }, | |
| { | |
| "epoch": 0.529891304347826, | |
| "grad_norm": 1.4908910989761353, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9229, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.5301630434782608, | |
| "grad_norm": 1.3128184080123901, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5215, | |
| "step": 1951 | |
| }, | |
| { | |
| "epoch": 0.5304347826086957, | |
| "grad_norm": 1.7025582790374756, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5113, | |
| "step": 1952 | |
| }, | |
| { | |
| "epoch": 0.5307065217391305, | |
| "grad_norm": 1.4770921468734741, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0171, | |
| "step": 1953 | |
| }, | |
| { | |
| "epoch": 0.5309782608695652, | |
| "grad_norm": 1.6477012634277344, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5914, | |
| "step": 1954 | |
| }, | |
| { | |
| "epoch": 0.53125, | |
| "grad_norm": 1.5680129528045654, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6162, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.5315217391304348, | |
| "grad_norm": 2.2198116779327393, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2198, | |
| "step": 1956 | |
| }, | |
| { | |
| "epoch": 0.5317934782608695, | |
| "grad_norm": 1.9690414667129517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0914, | |
| "step": 1957 | |
| }, | |
| { | |
| "epoch": 0.5320652173913043, | |
| "grad_norm": 1.4856075048446655, | |
| "learning_rate": 3e-05, | |
| "loss": 3.206, | |
| "step": 1958 | |
| }, | |
| { | |
| "epoch": 0.5323369565217392, | |
| "grad_norm": 1.1297136545181274, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8479, | |
| "step": 1959 | |
| }, | |
| { | |
| "epoch": 0.532608695652174, | |
| "grad_norm": 1.859328269958496, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3152, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.5328804347826087, | |
| "grad_norm": 1.939894199371338, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2557, | |
| "step": 1961 | |
| }, | |
| { | |
| "epoch": 0.5331521739130435, | |
| "grad_norm": 2.148735523223877, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6686, | |
| "step": 1962 | |
| }, | |
| { | |
| "epoch": 0.5334239130434782, | |
| "grad_norm": 1.4653475284576416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8618, | |
| "step": 1963 | |
| }, | |
| { | |
| "epoch": 0.533695652173913, | |
| "grad_norm": 2.1480512619018555, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4809, | |
| "step": 1964 | |
| }, | |
| { | |
| "epoch": 0.5339673913043478, | |
| "grad_norm": 2.2585768699645996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1685, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.5342391304347827, | |
| "grad_norm": 2.143669605255127, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8206, | |
| "step": 1966 | |
| }, | |
| { | |
| "epoch": 0.5345108695652174, | |
| "grad_norm": 1.5735530853271484, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5483, | |
| "step": 1967 | |
| }, | |
| { | |
| "epoch": 0.5347826086956522, | |
| "grad_norm": 1.5427213907241821, | |
| "learning_rate": 3e-05, | |
| "loss": 3.469, | |
| "step": 1968 | |
| }, | |
| { | |
| "epoch": 0.535054347826087, | |
| "grad_norm": 1.7961771488189697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2229, | |
| "step": 1969 | |
| }, | |
| { | |
| "epoch": 0.5353260869565217, | |
| "grad_norm": 1.5997658967971802, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6575, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.5355978260869565, | |
| "grad_norm": 1.2979601621627808, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7012, | |
| "step": 1971 | |
| }, | |
| { | |
| "epoch": 0.5358695652173913, | |
| "grad_norm": 1.5162733793258667, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1471, | |
| "step": 1972 | |
| }, | |
| { | |
| "epoch": 0.5361413043478261, | |
| "grad_norm": 1.5043525695800781, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8777, | |
| "step": 1973 | |
| }, | |
| { | |
| "epoch": 0.5364130434782609, | |
| "grad_norm": 1.1250759363174438, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9937, | |
| "step": 1974 | |
| }, | |
| { | |
| "epoch": 0.5366847826086957, | |
| "grad_norm": 1.2006231546401978, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5737, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.5369565217391304, | |
| "grad_norm": 0.9950990080833435, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9763, | |
| "step": 1976 | |
| }, | |
| { | |
| "epoch": 0.5372282608695652, | |
| "grad_norm": 1.2111644744873047, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5871, | |
| "step": 1977 | |
| }, | |
| { | |
| "epoch": 0.5375, | |
| "grad_norm": 1.4334638118743896, | |
| "learning_rate": 3e-05, | |
| "loss": 3.645, | |
| "step": 1978 | |
| }, | |
| { | |
| "epoch": 0.5377717391304347, | |
| "grad_norm": 1.376451849937439, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5722, | |
| "step": 1979 | |
| }, | |
| { | |
| "epoch": 0.5380434782608695, | |
| "grad_norm": 1.1852439641952515, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2733, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.5383152173913044, | |
| "grad_norm": 1.208321452140808, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4223, | |
| "step": 1981 | |
| }, | |
| { | |
| "epoch": 0.5385869565217392, | |
| "grad_norm": 1.2566808462142944, | |
| "learning_rate": 3e-05, | |
| "loss": 3.227, | |
| "step": 1982 | |
| }, | |
| { | |
| "epoch": 0.5388586956521739, | |
| "grad_norm": 1.1662538051605225, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9731, | |
| "step": 1983 | |
| }, | |
| { | |
| "epoch": 0.5391304347826087, | |
| "grad_norm": 0.9991174340248108, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2856, | |
| "step": 1984 | |
| }, | |
| { | |
| "epoch": 0.5394021739130435, | |
| "grad_norm": 1.6792123317718506, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0315, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.5396739130434782, | |
| "grad_norm": 1.3554564714431763, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7884, | |
| "step": 1986 | |
| }, | |
| { | |
| "epoch": 0.539945652173913, | |
| "grad_norm": 2.0495409965515137, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0834, | |
| "step": 1987 | |
| }, | |
| { | |
| "epoch": 0.5402173913043479, | |
| "grad_norm": 1.3076777458190918, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9288, | |
| "step": 1988 | |
| }, | |
| { | |
| "epoch": 0.5404891304347826, | |
| "grad_norm": 1.2214155197143555, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9785, | |
| "step": 1989 | |
| }, | |
| { | |
| "epoch": 0.5407608695652174, | |
| "grad_norm": 1.6654658317565918, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6795, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.5410326086956522, | |
| "grad_norm": 1.224678874015808, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9736, | |
| "step": 1991 | |
| }, | |
| { | |
| "epoch": 0.5413043478260869, | |
| "grad_norm": 1.128450870513916, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0371, | |
| "step": 1992 | |
| }, | |
| { | |
| "epoch": 0.5415760869565217, | |
| "grad_norm": 1.1452875137329102, | |
| "learning_rate": 3e-05, | |
| "loss": 2.989, | |
| "step": 1993 | |
| }, | |
| { | |
| "epoch": 0.5418478260869565, | |
| "grad_norm": 1.2154463529586792, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9404, | |
| "step": 1994 | |
| }, | |
| { | |
| "epoch": 0.5421195652173914, | |
| "grad_norm": 1.1099878549575806, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1758, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.5423913043478261, | |
| "grad_norm": 0.9395751953125, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9722, | |
| "step": 1996 | |
| }, | |
| { | |
| "epoch": 0.5426630434782609, | |
| "grad_norm": 1.1049753427505493, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0526, | |
| "step": 1997 | |
| }, | |
| { | |
| "epoch": 0.5429347826086957, | |
| "grad_norm": 1.45954430103302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7409, | |
| "step": 1998 | |
| }, | |
| { | |
| "epoch": 0.5432065217391304, | |
| "grad_norm": 1.2864134311676025, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4088, | |
| "step": 1999 | |
| }, | |
| { | |
| "epoch": 0.5434782608695652, | |
| "grad_norm": 1.2165127992630005, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2604, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.54375, | |
| "grad_norm": 1.272796869277954, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6574, | |
| "step": 2001 | |
| }, | |
| { | |
| "epoch": 0.5440217391304348, | |
| "grad_norm": 1.3245208263397217, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1406, | |
| "step": 2002 | |
| }, | |
| { | |
| "epoch": 0.5442934782608696, | |
| "grad_norm": 1.4110616445541382, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8426, | |
| "step": 2003 | |
| }, | |
| { | |
| "epoch": 0.5445652173913044, | |
| "grad_norm": 1.7510126829147339, | |
| "learning_rate": 3e-05, | |
| "loss": 3.926, | |
| "step": 2004 | |
| }, | |
| { | |
| "epoch": 0.5448369565217391, | |
| "grad_norm": 1.226434588432312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5155, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.5451086956521739, | |
| "grad_norm": 1.5343637466430664, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7504, | |
| "step": 2006 | |
| }, | |
| { | |
| "epoch": 0.5453804347826087, | |
| "grad_norm": 1.3316307067871094, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0848, | |
| "step": 2007 | |
| }, | |
| { | |
| "epoch": 0.5456521739130434, | |
| "grad_norm": 1.2327113151550293, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0306, | |
| "step": 2008 | |
| }, | |
| { | |
| "epoch": 0.5459239130434783, | |
| "grad_norm": 1.180682897567749, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0591, | |
| "step": 2009 | |
| }, | |
| { | |
| "epoch": 0.5461956521739131, | |
| "grad_norm": 1.3480994701385498, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3818, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.5464673913043478, | |
| "grad_norm": 1.246237874031067, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3416, | |
| "step": 2011 | |
| }, | |
| { | |
| "epoch": 0.5467391304347826, | |
| "grad_norm": 0.8937397003173828, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0593, | |
| "step": 2012 | |
| }, | |
| { | |
| "epoch": 0.5470108695652174, | |
| "grad_norm": 1.3019452095031738, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2755, | |
| "step": 2013 | |
| }, | |
| { | |
| "epoch": 0.5472826086956522, | |
| "grad_norm": 1.408234715461731, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4283, | |
| "step": 2014 | |
| }, | |
| { | |
| "epoch": 0.5475543478260869, | |
| "grad_norm": 1.5709846019744873, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2933, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.5478260869565217, | |
| "grad_norm": 1.6085715293884277, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2472, | |
| "step": 2016 | |
| }, | |
| { | |
| "epoch": 0.5480978260869566, | |
| "grad_norm": 1.2559678554534912, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8599, | |
| "step": 2017 | |
| }, | |
| { | |
| "epoch": 0.5483695652173913, | |
| "grad_norm": 1.272606372833252, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7284, | |
| "step": 2018 | |
| }, | |
| { | |
| "epoch": 0.5486413043478261, | |
| "grad_norm": 1.098386287689209, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0488, | |
| "step": 2019 | |
| }, | |
| { | |
| "epoch": 0.5489130434782609, | |
| "grad_norm": 1.707297444343567, | |
| "learning_rate": 3e-05, | |
| "loss": 3.122, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.5491847826086956, | |
| "grad_norm": 1.7398796081542969, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9982, | |
| "step": 2021 | |
| }, | |
| { | |
| "epoch": 0.5494565217391304, | |
| "grad_norm": 1.1505324840545654, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6328, | |
| "step": 2022 | |
| }, | |
| { | |
| "epoch": 0.5497282608695652, | |
| "grad_norm": 0.978933572769165, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8985, | |
| "step": 2023 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.2565099000930786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1667, | |
| "step": 2024 | |
| }, | |
| { | |
| "epoch": 0.5502717391304348, | |
| "grad_norm": 1.2417691946029663, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5036, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.5505434782608696, | |
| "grad_norm": 1.2185314893722534, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1685, | |
| "step": 2026 | |
| }, | |
| { | |
| "epoch": 0.5508152173913043, | |
| "grad_norm": 1.4619181156158447, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2336, | |
| "step": 2027 | |
| }, | |
| { | |
| "epoch": 0.5510869565217391, | |
| "grad_norm": 1.2455370426177979, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6007, | |
| "step": 2028 | |
| }, | |
| { | |
| "epoch": 0.5513586956521739, | |
| "grad_norm": 1.4226831197738647, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7129, | |
| "step": 2029 | |
| }, | |
| { | |
| "epoch": 0.5516304347826086, | |
| "grad_norm": 1.2961459159851074, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2041, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.5519021739130435, | |
| "grad_norm": 1.259477138519287, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1745, | |
| "step": 2031 | |
| }, | |
| { | |
| "epoch": 0.5521739130434783, | |
| "grad_norm": 1.691286325454712, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5502, | |
| "step": 2032 | |
| }, | |
| { | |
| "epoch": 0.5524456521739131, | |
| "grad_norm": 1.3523718118667603, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4055, | |
| "step": 2033 | |
| }, | |
| { | |
| "epoch": 0.5527173913043478, | |
| "grad_norm": 1.2605832815170288, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7262, | |
| "step": 2034 | |
| }, | |
| { | |
| "epoch": 0.5529891304347826, | |
| "grad_norm": 1.440855860710144, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9385, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.5532608695652174, | |
| "grad_norm": 2.026885986328125, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3039, | |
| "step": 2036 | |
| }, | |
| { | |
| "epoch": 0.5535326086956521, | |
| "grad_norm": 1.1887251138687134, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8882, | |
| "step": 2037 | |
| }, | |
| { | |
| "epoch": 0.553804347826087, | |
| "grad_norm": 1.322061538696289, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2703, | |
| "step": 2038 | |
| }, | |
| { | |
| "epoch": 0.5540760869565218, | |
| "grad_norm": 1.3264415264129639, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2199, | |
| "step": 2039 | |
| }, | |
| { | |
| "epoch": 0.5543478260869565, | |
| "grad_norm": 1.6366621255874634, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6402, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.5546195652173913, | |
| "grad_norm": 1.592647910118103, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2358, | |
| "step": 2041 | |
| }, | |
| { | |
| "epoch": 0.5548913043478261, | |
| "grad_norm": 1.4537502527236938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7269, | |
| "step": 2042 | |
| }, | |
| { | |
| "epoch": 0.5551630434782608, | |
| "grad_norm": 1.3483185768127441, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2456, | |
| "step": 2043 | |
| }, | |
| { | |
| "epoch": 0.5554347826086956, | |
| "grad_norm": 1.271835207939148, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5684, | |
| "step": 2044 | |
| }, | |
| { | |
| "epoch": 0.5557065217391305, | |
| "grad_norm": 1.2505134344100952, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2272, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.5559782608695653, | |
| "grad_norm": 1.3669381141662598, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8729, | |
| "step": 2046 | |
| }, | |
| { | |
| "epoch": 0.55625, | |
| "grad_norm": 1.1156485080718994, | |
| "learning_rate": 3e-05, | |
| "loss": 3.003, | |
| "step": 2047 | |
| }, | |
| { | |
| "epoch": 0.5565217391304348, | |
| "grad_norm": 1.458754301071167, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0448, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.5567934782608696, | |
| "grad_norm": 1.1323773860931396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1986, | |
| "step": 2049 | |
| }, | |
| { | |
| "epoch": 0.5570652173913043, | |
| "grad_norm": 1.1786192655563354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6913, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.5573369565217391, | |
| "grad_norm": 1.2700916528701782, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9963, | |
| "step": 2051 | |
| }, | |
| { | |
| "epoch": 0.5576086956521739, | |
| "grad_norm": 1.21451735496521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0128, | |
| "step": 2052 | |
| }, | |
| { | |
| "epoch": 0.5578804347826087, | |
| "grad_norm": 1.5701782703399658, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8858, | |
| "step": 2053 | |
| }, | |
| { | |
| "epoch": 0.5581521739130435, | |
| "grad_norm": 1.7219531536102295, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8876, | |
| "step": 2054 | |
| }, | |
| { | |
| "epoch": 0.5584239130434783, | |
| "grad_norm": 1.694474220275879, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1169, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.558695652173913, | |
| "grad_norm": 1.684638500213623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9718, | |
| "step": 2056 | |
| }, | |
| { | |
| "epoch": 0.5589673913043478, | |
| "grad_norm": 1.8228230476379395, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0664, | |
| "step": 2057 | |
| }, | |
| { | |
| "epoch": 0.5592391304347826, | |
| "grad_norm": 1.8484151363372803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.148, | |
| "step": 2058 | |
| }, | |
| { | |
| "epoch": 0.5595108695652173, | |
| "grad_norm": 1.6520676612854004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6549, | |
| "step": 2059 | |
| }, | |
| { | |
| "epoch": 0.5597826086956522, | |
| "grad_norm": 1.4072345495224, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7737, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.560054347826087, | |
| "grad_norm": 1.459381341934204, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2269, | |
| "step": 2061 | |
| }, | |
| { | |
| "epoch": 0.5603260869565218, | |
| "grad_norm": 1.6240458488464355, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2216, | |
| "step": 2062 | |
| }, | |
| { | |
| "epoch": 0.5605978260869565, | |
| "grad_norm": 1.3474336862564087, | |
| "learning_rate": 3e-05, | |
| "loss": 3.191, | |
| "step": 2063 | |
| }, | |
| { | |
| "epoch": 0.5608695652173913, | |
| "grad_norm": 1.2936657667160034, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1124, | |
| "step": 2064 | |
| }, | |
| { | |
| "epoch": 0.561141304347826, | |
| "grad_norm": 1.4614254236221313, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5676, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.5614130434782608, | |
| "grad_norm": 1.4121429920196533, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9918, | |
| "step": 2066 | |
| }, | |
| { | |
| "epoch": 0.5616847826086957, | |
| "grad_norm": 1.8185652494430542, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2873, | |
| "step": 2067 | |
| }, | |
| { | |
| "epoch": 0.5619565217391305, | |
| "grad_norm": 1.7184209823608398, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3629, | |
| "step": 2068 | |
| }, | |
| { | |
| "epoch": 0.5622282608695652, | |
| "grad_norm": 1.2696248292922974, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8962, | |
| "step": 2069 | |
| }, | |
| { | |
| "epoch": 0.5625, | |
| "grad_norm": 1.2504596710205078, | |
| "learning_rate": 3e-05, | |
| "loss": 3.111, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.5627717391304348, | |
| "grad_norm": 1.2336483001708984, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1268, | |
| "step": 2071 | |
| }, | |
| { | |
| "epoch": 0.5630434782608695, | |
| "grad_norm": 1.420301914215088, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0844, | |
| "step": 2072 | |
| }, | |
| { | |
| "epoch": 0.5633152173913043, | |
| "grad_norm": 1.4836974143981934, | |
| "learning_rate": 3e-05, | |
| "loss": 3.305, | |
| "step": 2073 | |
| }, | |
| { | |
| "epoch": 0.5635869565217392, | |
| "grad_norm": 1.4846380949020386, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9409, | |
| "step": 2074 | |
| }, | |
| { | |
| "epoch": 0.563858695652174, | |
| "grad_norm": 1.5037497282028198, | |
| "learning_rate": 3e-05, | |
| "loss": 3.691, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.5641304347826087, | |
| "grad_norm": 1.1565651893615723, | |
| "learning_rate": 3e-05, | |
| "loss": 3.678, | |
| "step": 2076 | |
| }, | |
| { | |
| "epoch": 0.5644021739130435, | |
| "grad_norm": 1.3678765296936035, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7943, | |
| "step": 2077 | |
| }, | |
| { | |
| "epoch": 0.5646739130434782, | |
| "grad_norm": 1.6229230165481567, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6795, | |
| "step": 2078 | |
| }, | |
| { | |
| "epoch": 0.564945652173913, | |
| "grad_norm": 1.3019323348999023, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2213, | |
| "step": 2079 | |
| }, | |
| { | |
| "epoch": 0.5652173913043478, | |
| "grad_norm": 1.5816229581832886, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9553, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.5654891304347827, | |
| "grad_norm": 1.5422357320785522, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4725, | |
| "step": 2081 | |
| }, | |
| { | |
| "epoch": 0.5657608695652174, | |
| "grad_norm": 1.4155722856521606, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6369, | |
| "step": 2082 | |
| }, | |
| { | |
| "epoch": 0.5660326086956522, | |
| "grad_norm": 1.52280592918396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.205, | |
| "step": 2083 | |
| }, | |
| { | |
| "epoch": 0.566304347826087, | |
| "grad_norm": 1.4019720554351807, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5101, | |
| "step": 2084 | |
| }, | |
| { | |
| "epoch": 0.5665760869565217, | |
| "grad_norm": 1.2434489727020264, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5674, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.5668478260869565, | |
| "grad_norm": 1.4792324304580688, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8302, | |
| "step": 2086 | |
| }, | |
| { | |
| "epoch": 0.5671195652173913, | |
| "grad_norm": 2.289886236190796, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8648, | |
| "step": 2087 | |
| }, | |
| { | |
| "epoch": 0.5673913043478261, | |
| "grad_norm": 1.3174787759780884, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2811, | |
| "step": 2088 | |
| }, | |
| { | |
| "epoch": 0.5676630434782609, | |
| "grad_norm": 1.2132010459899902, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6173, | |
| "step": 2089 | |
| }, | |
| { | |
| "epoch": 0.5679347826086957, | |
| "grad_norm": 1.528756856918335, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5117, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.5682065217391304, | |
| "grad_norm": 1.3807978630065918, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0206, | |
| "step": 2091 | |
| }, | |
| { | |
| "epoch": 0.5684782608695652, | |
| "grad_norm": 1.0497171878814697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2278, | |
| "step": 2092 | |
| }, | |
| { | |
| "epoch": 0.56875, | |
| "grad_norm": 1.6013227701187134, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4125, | |
| "step": 2093 | |
| }, | |
| { | |
| "epoch": 0.5690217391304347, | |
| "grad_norm": 0.9287932515144348, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1837, | |
| "step": 2094 | |
| }, | |
| { | |
| "epoch": 0.5692934782608695, | |
| "grad_norm": 1.1170742511749268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5987, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.5695652173913044, | |
| "grad_norm": 0.9773249626159668, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4219, | |
| "step": 2096 | |
| }, | |
| { | |
| "epoch": 0.5698369565217392, | |
| "grad_norm": 0.9633996486663818, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3764, | |
| "step": 2097 | |
| }, | |
| { | |
| "epoch": 0.5701086956521739, | |
| "grad_norm": 1.2055789232254028, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3378, | |
| "step": 2098 | |
| }, | |
| { | |
| "epoch": 0.5703804347826087, | |
| "grad_norm": 0.9966376423835754, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8607, | |
| "step": 2099 | |
| }, | |
| { | |
| "epoch": 0.5706521739130435, | |
| "grad_norm": 1.2070221900939941, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7183, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.5709239130434782, | |
| "grad_norm": 1.4068926572799683, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8481, | |
| "step": 2101 | |
| }, | |
| { | |
| "epoch": 0.571195652173913, | |
| "grad_norm": 1.5947731733322144, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9881, | |
| "step": 2102 | |
| }, | |
| { | |
| "epoch": 0.5714673913043479, | |
| "grad_norm": 1.2691866159439087, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4337, | |
| "step": 2103 | |
| }, | |
| { | |
| "epoch": 0.5717391304347826, | |
| "grad_norm": 1.850817322731018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4459, | |
| "step": 2104 | |
| }, | |
| { | |
| "epoch": 0.5720108695652174, | |
| "grad_norm": 1.5024229288101196, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1217, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.5722826086956522, | |
| "grad_norm": 1.167541742324829, | |
| "learning_rate": 3e-05, | |
| "loss": 3.584, | |
| "step": 2106 | |
| }, | |
| { | |
| "epoch": 0.5725543478260869, | |
| "grad_norm": 1.89936363697052, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7818, | |
| "step": 2107 | |
| }, | |
| { | |
| "epoch": 0.5728260869565217, | |
| "grad_norm": 1.5929173231124878, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4213, | |
| "step": 2108 | |
| }, | |
| { | |
| "epoch": 0.5730978260869565, | |
| "grad_norm": 1.122673749923706, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6769, | |
| "step": 2109 | |
| }, | |
| { | |
| "epoch": 0.5733695652173914, | |
| "grad_norm": 1.0892047882080078, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2629, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.5736413043478261, | |
| "grad_norm": 0.9661053419113159, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0303, | |
| "step": 2111 | |
| }, | |
| { | |
| "epoch": 0.5739130434782609, | |
| "grad_norm": 1.2852338552474976, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6586, | |
| "step": 2112 | |
| }, | |
| { | |
| "epoch": 0.5741847826086957, | |
| "grad_norm": 1.211471676826477, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1448, | |
| "step": 2113 | |
| }, | |
| { | |
| "epoch": 0.5744565217391304, | |
| "grad_norm": 1.1296757459640503, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9875, | |
| "step": 2114 | |
| }, | |
| { | |
| "epoch": 0.5747282608695652, | |
| "grad_norm": 1.3020541667938232, | |
| "learning_rate": 3e-05, | |
| "loss": 3.111, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.575, | |
| "grad_norm": 1.0208520889282227, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7882, | |
| "step": 2116 | |
| }, | |
| { | |
| "epoch": 0.5752717391304348, | |
| "grad_norm": 1.4555299282073975, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3721, | |
| "step": 2117 | |
| }, | |
| { | |
| "epoch": 0.5755434782608696, | |
| "grad_norm": 2.195035934448242, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1556, | |
| "step": 2118 | |
| }, | |
| { | |
| "epoch": 0.5758152173913044, | |
| "grad_norm": 1.4949889183044434, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6564, | |
| "step": 2119 | |
| }, | |
| { | |
| "epoch": 0.5760869565217391, | |
| "grad_norm": 1.1324836015701294, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1073, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.5763586956521739, | |
| "grad_norm": 1.3076951503753662, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6178, | |
| "step": 2121 | |
| }, | |
| { | |
| "epoch": 0.5766304347826087, | |
| "grad_norm": 1.322385311126709, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9284, | |
| "step": 2122 | |
| }, | |
| { | |
| "epoch": 0.5769021739130434, | |
| "grad_norm": 1.567959189414978, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0229, | |
| "step": 2123 | |
| }, | |
| { | |
| "epoch": 0.5771739130434783, | |
| "grad_norm": 1.4464517831802368, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3669, | |
| "step": 2124 | |
| }, | |
| { | |
| "epoch": 0.5774456521739131, | |
| "grad_norm": 1.5971894264221191, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1792, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.5777173913043478, | |
| "grad_norm": 1.4091331958770752, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3196, | |
| "step": 2126 | |
| }, | |
| { | |
| "epoch": 0.5779891304347826, | |
| "grad_norm": 1.4950571060180664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6238, | |
| "step": 2127 | |
| }, | |
| { | |
| "epoch": 0.5782608695652174, | |
| "grad_norm": 1.0344115495681763, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8622, | |
| "step": 2128 | |
| }, | |
| { | |
| "epoch": 0.5785326086956522, | |
| "grad_norm": 1.1362147331237793, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7986, | |
| "step": 2129 | |
| }, | |
| { | |
| "epoch": 0.5788043478260869, | |
| "grad_norm": 1.1396723985671997, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7951, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.5790760869565217, | |
| "grad_norm": 1.355010747909546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1708, | |
| "step": 2131 | |
| }, | |
| { | |
| "epoch": 0.5793478260869566, | |
| "grad_norm": 1.5592600107192993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5758, | |
| "step": 2132 | |
| }, | |
| { | |
| "epoch": 0.5796195652173913, | |
| "grad_norm": 1.56844961643219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4122, | |
| "step": 2133 | |
| }, | |
| { | |
| "epoch": 0.5798913043478261, | |
| "grad_norm": 1.3906667232513428, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7639, | |
| "step": 2134 | |
| }, | |
| { | |
| "epoch": 0.5801630434782609, | |
| "grad_norm": 1.131856918334961, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3024, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.5804347826086956, | |
| "grad_norm": 1.2349239587783813, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2338, | |
| "step": 2136 | |
| }, | |
| { | |
| "epoch": 0.5807065217391304, | |
| "grad_norm": 1.320646047592163, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2089, | |
| "step": 2137 | |
| }, | |
| { | |
| "epoch": 0.5809782608695652, | |
| "grad_norm": 1.0962297916412354, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9312, | |
| "step": 2138 | |
| }, | |
| { | |
| "epoch": 0.58125, | |
| "grad_norm": 1.2109733819961548, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6762, | |
| "step": 2139 | |
| }, | |
| { | |
| "epoch": 0.5815217391304348, | |
| "grad_norm": 1.3556448221206665, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6022, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.5817934782608696, | |
| "grad_norm": 1.216602087020874, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9107, | |
| "step": 2141 | |
| }, | |
| { | |
| "epoch": 0.5820652173913043, | |
| "grad_norm": 1.2305996417999268, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2447, | |
| "step": 2142 | |
| }, | |
| { | |
| "epoch": 0.5823369565217391, | |
| "grad_norm": 1.2739956378936768, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4232, | |
| "step": 2143 | |
| }, | |
| { | |
| "epoch": 0.5826086956521739, | |
| "grad_norm": 1.2404019832611084, | |
| "learning_rate": 3e-05, | |
| "loss": 3.467, | |
| "step": 2144 | |
| }, | |
| { | |
| "epoch": 0.5828804347826086, | |
| "grad_norm": 1.3793765306472778, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4627, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.5831521739130435, | |
| "grad_norm": 1.6223127841949463, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2725, | |
| "step": 2146 | |
| }, | |
| { | |
| "epoch": 0.5834239130434783, | |
| "grad_norm": 1.055362343788147, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2517, | |
| "step": 2147 | |
| }, | |
| { | |
| "epoch": 0.5836956521739131, | |
| "grad_norm": 1.2135682106018066, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0423, | |
| "step": 2148 | |
| }, | |
| { | |
| "epoch": 0.5839673913043478, | |
| "grad_norm": 1.7276960611343384, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2823, | |
| "step": 2149 | |
| }, | |
| { | |
| "epoch": 0.5842391304347826, | |
| "grad_norm": 1.1459511518478394, | |
| "learning_rate": 3e-05, | |
| "loss": 3.129, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.5845108695652174, | |
| "grad_norm": 1.7933846712112427, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6957, | |
| "step": 2151 | |
| }, | |
| { | |
| "epoch": 0.5847826086956521, | |
| "grad_norm": 1.3888658285140991, | |
| "learning_rate": 3e-05, | |
| "loss": 4.2824, | |
| "step": 2152 | |
| }, | |
| { | |
| "epoch": 0.585054347826087, | |
| "grad_norm": 1.18975031375885, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9272, | |
| "step": 2153 | |
| }, | |
| { | |
| "epoch": 0.5853260869565218, | |
| "grad_norm": 1.0910744667053223, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4617, | |
| "step": 2154 | |
| }, | |
| { | |
| "epoch": 0.5855978260869565, | |
| "grad_norm": 1.3888293504714966, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9697, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.5858695652173913, | |
| "grad_norm": 1.1816877126693726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5301, | |
| "step": 2156 | |
| }, | |
| { | |
| "epoch": 0.5861413043478261, | |
| "grad_norm": 1.2659231424331665, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5646, | |
| "step": 2157 | |
| }, | |
| { | |
| "epoch": 0.5864130434782608, | |
| "grad_norm": 1.5958353281021118, | |
| "learning_rate": 3e-05, | |
| "loss": 3.378, | |
| "step": 2158 | |
| }, | |
| { | |
| "epoch": 0.5866847826086956, | |
| "grad_norm": 1.2585597038269043, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0523, | |
| "step": 2159 | |
| }, | |
| { | |
| "epoch": 0.5869565217391305, | |
| "grad_norm": 1.0975054502487183, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0933, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.5872282608695653, | |
| "grad_norm": 1.5233417749404907, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2665, | |
| "step": 2161 | |
| }, | |
| { | |
| "epoch": 0.5875, | |
| "grad_norm": 2.02248215675354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.754, | |
| "step": 2162 | |
| }, | |
| { | |
| "epoch": 0.5877717391304348, | |
| "grad_norm": 0.9959807395935059, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9276, | |
| "step": 2163 | |
| }, | |
| { | |
| "epoch": 0.5880434782608696, | |
| "grad_norm": 1.1323211193084717, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0789, | |
| "step": 2164 | |
| }, | |
| { | |
| "epoch": 0.5883152173913043, | |
| "grad_norm": 1.0242100954055786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3039, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.5885869565217391, | |
| "grad_norm": 1.0082273483276367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3631, | |
| "step": 2166 | |
| }, | |
| { | |
| "epoch": 0.5888586956521739, | |
| "grad_norm": 1.1713404655456543, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7448, | |
| "step": 2167 | |
| }, | |
| { | |
| "epoch": 0.5891304347826087, | |
| "grad_norm": 1.1318339109420776, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3552, | |
| "step": 2168 | |
| }, | |
| { | |
| "epoch": 0.5894021739130435, | |
| "grad_norm": 1.3056354522705078, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8349, | |
| "step": 2169 | |
| }, | |
| { | |
| "epoch": 0.5896739130434783, | |
| "grad_norm": 1.1058902740478516, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9931, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.589945652173913, | |
| "grad_norm": 1.3379881381988525, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0709, | |
| "step": 2171 | |
| }, | |
| { | |
| "epoch": 0.5902173913043478, | |
| "grad_norm": 1.3458565473556519, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5521, | |
| "step": 2172 | |
| }, | |
| { | |
| "epoch": 0.5904891304347826, | |
| "grad_norm": 1.2749676704406738, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3535, | |
| "step": 2173 | |
| }, | |
| { | |
| "epoch": 0.5907608695652173, | |
| "grad_norm": 1.3268214464187622, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6918, | |
| "step": 2174 | |
| }, | |
| { | |
| "epoch": 0.5910326086956522, | |
| "grad_norm": 1.4857879877090454, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6046, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.591304347826087, | |
| "grad_norm": 1.009945034980774, | |
| "learning_rate": 3e-05, | |
| "loss": 3.435, | |
| "step": 2176 | |
| }, | |
| { | |
| "epoch": 0.5915760869565218, | |
| "grad_norm": 1.1983013153076172, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5871, | |
| "step": 2177 | |
| }, | |
| { | |
| "epoch": 0.5918478260869565, | |
| "grad_norm": 0.9718300700187683, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3072, | |
| "step": 2178 | |
| }, | |
| { | |
| "epoch": 0.5921195652173913, | |
| "grad_norm": 1.101165771484375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2276, | |
| "step": 2179 | |
| }, | |
| { | |
| "epoch": 0.592391304347826, | |
| "grad_norm": 1.012749433517456, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1112, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.5926630434782608, | |
| "grad_norm": 0.9688409566879272, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0918, | |
| "step": 2181 | |
| }, | |
| { | |
| "epoch": 0.5929347826086957, | |
| "grad_norm": 0.9854469895362854, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1777, | |
| "step": 2182 | |
| }, | |
| { | |
| "epoch": 0.5932065217391305, | |
| "grad_norm": 0.9278433918952942, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1989, | |
| "step": 2183 | |
| }, | |
| { | |
| "epoch": 0.5934782608695652, | |
| "grad_norm": 0.983065664768219, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3761, | |
| "step": 2184 | |
| }, | |
| { | |
| "epoch": 0.59375, | |
| "grad_norm": 1.473644733428955, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6095, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.5940217391304348, | |
| "grad_norm": 1.2839797735214233, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9857, | |
| "step": 2186 | |
| }, | |
| { | |
| "epoch": 0.5942934782608695, | |
| "grad_norm": 1.7509822845458984, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7711, | |
| "step": 2187 | |
| }, | |
| { | |
| "epoch": 0.5945652173913043, | |
| "grad_norm": 1.171937346458435, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6335, | |
| "step": 2188 | |
| }, | |
| { | |
| "epoch": 0.5948369565217392, | |
| "grad_norm": 1.4512766599655151, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9629, | |
| "step": 2189 | |
| }, | |
| { | |
| "epoch": 0.595108695652174, | |
| "grad_norm": 1.0719918012619019, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8943, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.5953804347826087, | |
| "grad_norm": 1.11574387550354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7779, | |
| "step": 2191 | |
| }, | |
| { | |
| "epoch": 0.5956521739130435, | |
| "grad_norm": 1.6633427143096924, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7736, | |
| "step": 2192 | |
| }, | |
| { | |
| "epoch": 0.5959239130434782, | |
| "grad_norm": 1.1903231143951416, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9944, | |
| "step": 2193 | |
| }, | |
| { | |
| "epoch": 0.596195652173913, | |
| "grad_norm": 1.2331459522247314, | |
| "learning_rate": 3e-05, | |
| "loss": 3.639, | |
| "step": 2194 | |
| }, | |
| { | |
| "epoch": 0.5964673913043478, | |
| "grad_norm": 1.8560791015625, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0684, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.5967391304347827, | |
| "grad_norm": 1.2733112573623657, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1905, | |
| "step": 2196 | |
| }, | |
| { | |
| "epoch": 0.5970108695652174, | |
| "grad_norm": 1.156823992729187, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3605, | |
| "step": 2197 | |
| }, | |
| { | |
| "epoch": 0.5972826086956522, | |
| "grad_norm": 1.0290706157684326, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2214, | |
| "step": 2198 | |
| }, | |
| { | |
| "epoch": 0.597554347826087, | |
| "grad_norm": 1.1795909404754639, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9137, | |
| "step": 2199 | |
| }, | |
| { | |
| "epoch": 0.5978260869565217, | |
| "grad_norm": 1.3558768033981323, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6129, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.5980978260869565, | |
| "grad_norm": 0.9279618859291077, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5655, | |
| "step": 2201 | |
| }, | |
| { | |
| "epoch": 0.5983695652173913, | |
| "grad_norm": 1.2650532722473145, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8838, | |
| "step": 2202 | |
| }, | |
| { | |
| "epoch": 0.5986413043478261, | |
| "grad_norm": 1.2165402173995972, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5675, | |
| "step": 2203 | |
| }, | |
| { | |
| "epoch": 0.5989130434782609, | |
| "grad_norm": 1.1845487356185913, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9909, | |
| "step": 2204 | |
| }, | |
| { | |
| "epoch": 0.5991847826086957, | |
| "grad_norm": 1.1865789890289307, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6638, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.5994565217391304, | |
| "grad_norm": 0.9880366921424866, | |
| "learning_rate": 3e-05, | |
| "loss": 3.023, | |
| "step": 2206 | |
| }, | |
| { | |
| "epoch": 0.5997282608695652, | |
| "grad_norm": 1.1242929697036743, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1062, | |
| "step": 2207 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.1712896823883057, | |
| "learning_rate": 3e-05, | |
| "loss": 3.029, | |
| "step": 2208 | |
| }, | |
| { | |
| "epoch": 0.6002717391304347, | |
| "grad_norm": 1.5733115673065186, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7851, | |
| "step": 2209 | |
| }, | |
| { | |
| "epoch": 0.6005434782608695, | |
| "grad_norm": 1.2813576459884644, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4965, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.6008152173913044, | |
| "grad_norm": 1.3263084888458252, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1356, | |
| "step": 2211 | |
| }, | |
| { | |
| "epoch": 0.6010869565217392, | |
| "grad_norm": 1.1176859140396118, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4671, | |
| "step": 2212 | |
| }, | |
| { | |
| "epoch": 0.6013586956521739, | |
| "grad_norm": 1.2956467866897583, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8482, | |
| "step": 2213 | |
| }, | |
| { | |
| "epoch": 0.6016304347826087, | |
| "grad_norm": 1.8947595357894897, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2963, | |
| "step": 2214 | |
| }, | |
| { | |
| "epoch": 0.6019021739130435, | |
| "grad_norm": 1.3561947345733643, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1028, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.6021739130434782, | |
| "grad_norm": 1.1945207118988037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2949, | |
| "step": 2216 | |
| }, | |
| { | |
| "epoch": 0.602445652173913, | |
| "grad_norm": 1.3198587894439697, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5389, | |
| "step": 2217 | |
| }, | |
| { | |
| "epoch": 0.6027173913043479, | |
| "grad_norm": 1.6522905826568604, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2054, | |
| "step": 2218 | |
| }, | |
| { | |
| "epoch": 0.6029891304347826, | |
| "grad_norm": 1.2735859155654907, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4955, | |
| "step": 2219 | |
| }, | |
| { | |
| "epoch": 0.6032608695652174, | |
| "grad_norm": 1.2329673767089844, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7713, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.6035326086956522, | |
| "grad_norm": 1.1366288661956787, | |
| "learning_rate": 3e-05, | |
| "loss": 3.333, | |
| "step": 2221 | |
| }, | |
| { | |
| "epoch": 0.6038043478260869, | |
| "grad_norm": 1.0295037031173706, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2991, | |
| "step": 2222 | |
| }, | |
| { | |
| "epoch": 0.6040760869565217, | |
| "grad_norm": 1.0760974884033203, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6266, | |
| "step": 2223 | |
| }, | |
| { | |
| "epoch": 0.6043478260869565, | |
| "grad_norm": 1.0044188499450684, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1854, | |
| "step": 2224 | |
| }, | |
| { | |
| "epoch": 0.6046195652173914, | |
| "grad_norm": 0.79954993724823, | |
| "learning_rate": 3e-05, | |
| "loss": 2.5833, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.6048913043478261, | |
| "grad_norm": 1.3376729488372803, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8299, | |
| "step": 2226 | |
| }, | |
| { | |
| "epoch": 0.6051630434782609, | |
| "grad_norm": 1.3323661088943481, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5493, | |
| "step": 2227 | |
| }, | |
| { | |
| "epoch": 0.6054347826086957, | |
| "grad_norm": 0.9946414828300476, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0806, | |
| "step": 2228 | |
| }, | |
| { | |
| "epoch": 0.6057065217391304, | |
| "grad_norm": 1.486099123954773, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5046, | |
| "step": 2229 | |
| }, | |
| { | |
| "epoch": 0.6059782608695652, | |
| "grad_norm": 1.4763749837875366, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2249, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.60625, | |
| "grad_norm": 1.1273298263549805, | |
| "learning_rate": 3e-05, | |
| "loss": 3.097, | |
| "step": 2231 | |
| }, | |
| { | |
| "epoch": 0.6065217391304348, | |
| "grad_norm": 1.273410677909851, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3963, | |
| "step": 2232 | |
| }, | |
| { | |
| "epoch": 0.6067934782608696, | |
| "grad_norm": 1.6567277908325195, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4074, | |
| "step": 2233 | |
| }, | |
| { | |
| "epoch": 0.6070652173913044, | |
| "grad_norm": 1.272204875946045, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5062, | |
| "step": 2234 | |
| }, | |
| { | |
| "epoch": 0.6073369565217391, | |
| "grad_norm": 0.9623717665672302, | |
| "learning_rate": 3e-05, | |
| "loss": 3.348, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.6076086956521739, | |
| "grad_norm": 1.7306931018829346, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5976, | |
| "step": 2236 | |
| }, | |
| { | |
| "epoch": 0.6078804347826087, | |
| "grad_norm": 1.2716137170791626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8244, | |
| "step": 2237 | |
| }, | |
| { | |
| "epoch": 0.6081521739130434, | |
| "grad_norm": 1.3230034112930298, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4558, | |
| "step": 2238 | |
| }, | |
| { | |
| "epoch": 0.6084239130434783, | |
| "grad_norm": 1.733973503112793, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0608, | |
| "step": 2239 | |
| }, | |
| { | |
| "epoch": 0.6086956521739131, | |
| "grad_norm": 1.2597112655639648, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8695, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.6089673913043478, | |
| "grad_norm": 1.033078670501709, | |
| "learning_rate": 3e-05, | |
| "loss": 2.528, | |
| "step": 2241 | |
| }, | |
| { | |
| "epoch": 0.6092391304347826, | |
| "grad_norm": 1.4419554471969604, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5569, | |
| "step": 2242 | |
| }, | |
| { | |
| "epoch": 0.6095108695652174, | |
| "grad_norm": 1.376265287399292, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4915, | |
| "step": 2243 | |
| }, | |
| { | |
| "epoch": 0.6097826086956522, | |
| "grad_norm": 1.1557855606079102, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4314, | |
| "step": 2244 | |
| }, | |
| { | |
| "epoch": 0.6100543478260869, | |
| "grad_norm": 1.3031344413757324, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5682, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.6103260869565217, | |
| "grad_norm": 1.2761789560317993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4629, | |
| "step": 2246 | |
| }, | |
| { | |
| "epoch": 0.6105978260869566, | |
| "grad_norm": 1.1963956356048584, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2122, | |
| "step": 2247 | |
| }, | |
| { | |
| "epoch": 0.6108695652173913, | |
| "grad_norm": 1.357904314994812, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0868, | |
| "step": 2248 | |
| }, | |
| { | |
| "epoch": 0.6111413043478261, | |
| "grad_norm": 1.3014525175094604, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9693, | |
| "step": 2249 | |
| }, | |
| { | |
| "epoch": 0.6114130434782609, | |
| "grad_norm": 1.3314582109451294, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3714, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.6116847826086956, | |
| "grad_norm": 1.3849120140075684, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7755, | |
| "step": 2251 | |
| }, | |
| { | |
| "epoch": 0.6119565217391304, | |
| "grad_norm": 0.9548225402832031, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0885, | |
| "step": 2252 | |
| }, | |
| { | |
| "epoch": 0.6122282608695652, | |
| "grad_norm": 1.4415459632873535, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8783, | |
| "step": 2253 | |
| }, | |
| { | |
| "epoch": 0.6125, | |
| "grad_norm": 1.7835477590560913, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6217, | |
| "step": 2254 | |
| }, | |
| { | |
| "epoch": 0.6127717391304348, | |
| "grad_norm": 1.4011732339859009, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9328, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.6130434782608696, | |
| "grad_norm": 1.0685231685638428, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2949, | |
| "step": 2256 | |
| }, | |
| { | |
| "epoch": 0.6133152173913043, | |
| "grad_norm": 1.2185416221618652, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5132, | |
| "step": 2257 | |
| }, | |
| { | |
| "epoch": 0.6135869565217391, | |
| "grad_norm": 1.0910768508911133, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0972, | |
| "step": 2258 | |
| }, | |
| { | |
| "epoch": 0.6138586956521739, | |
| "grad_norm": 1.0861088037490845, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3816, | |
| "step": 2259 | |
| }, | |
| { | |
| "epoch": 0.6141304347826086, | |
| "grad_norm": 1.0227428674697876, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1733, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.6144021739130435, | |
| "grad_norm": 1.3343944549560547, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2618, | |
| "step": 2261 | |
| }, | |
| { | |
| "epoch": 0.6146739130434783, | |
| "grad_norm": 1.3029199838638306, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4307, | |
| "step": 2262 | |
| }, | |
| { | |
| "epoch": 0.6149456521739131, | |
| "grad_norm": 1.152552843093872, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4878, | |
| "step": 2263 | |
| }, | |
| { | |
| "epoch": 0.6152173913043478, | |
| "grad_norm": 1.5189865827560425, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4252, | |
| "step": 2264 | |
| }, | |
| { | |
| "epoch": 0.6154891304347826, | |
| "grad_norm": 1.1164250373840332, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5027, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.6157608695652174, | |
| "grad_norm": 0.9906913638114929, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9876, | |
| "step": 2266 | |
| }, | |
| { | |
| "epoch": 0.6160326086956521, | |
| "grad_norm": 1.1681506633758545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2722, | |
| "step": 2267 | |
| }, | |
| { | |
| "epoch": 0.616304347826087, | |
| "grad_norm": 1.183396816253662, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2224, | |
| "step": 2268 | |
| }, | |
| { | |
| "epoch": 0.6165760869565218, | |
| "grad_norm": 1.0938737392425537, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2871, | |
| "step": 2269 | |
| }, | |
| { | |
| "epoch": 0.6168478260869565, | |
| "grad_norm": 1.5161495208740234, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7379, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.6171195652173913, | |
| "grad_norm": 1.2433841228485107, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9899, | |
| "step": 2271 | |
| }, | |
| { | |
| "epoch": 0.6173913043478261, | |
| "grad_norm": 1.092914342880249, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7804, | |
| "step": 2272 | |
| }, | |
| { | |
| "epoch": 0.6176630434782608, | |
| "grad_norm": 1.2224652767181396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.628, | |
| "step": 2273 | |
| }, | |
| { | |
| "epoch": 0.6179347826086956, | |
| "grad_norm": 1.3725236654281616, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6826, | |
| "step": 2274 | |
| }, | |
| { | |
| "epoch": 0.6182065217391305, | |
| "grad_norm": 1.0092295408248901, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9988, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.6184782608695653, | |
| "grad_norm": 1.259446144104004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4234, | |
| "step": 2276 | |
| }, | |
| { | |
| "epoch": 0.61875, | |
| "grad_norm": 1.220897912979126, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2565, | |
| "step": 2277 | |
| }, | |
| { | |
| "epoch": 0.6190217391304348, | |
| "grad_norm": 0.9857214093208313, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2311, | |
| "step": 2278 | |
| }, | |
| { | |
| "epoch": 0.6192934782608696, | |
| "grad_norm": 1.256536841392517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4595, | |
| "step": 2279 | |
| }, | |
| { | |
| "epoch": 0.6195652173913043, | |
| "grad_norm": 1.0857690572738647, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8696, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.6198369565217391, | |
| "grad_norm": 0.9030542969703674, | |
| "learning_rate": 3e-05, | |
| "loss": 2.931, | |
| "step": 2281 | |
| }, | |
| { | |
| "epoch": 0.6201086956521739, | |
| "grad_norm": 1.245595097541809, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0565, | |
| "step": 2282 | |
| }, | |
| { | |
| "epoch": 0.6203804347826087, | |
| "grad_norm": 1.130018949508667, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6569, | |
| "step": 2283 | |
| }, | |
| { | |
| "epoch": 0.6206521739130435, | |
| "grad_norm": 1.1791630983352661, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9299, | |
| "step": 2284 | |
| }, | |
| { | |
| "epoch": 0.6209239130434783, | |
| "grad_norm": 1.6365814208984375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7201, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.621195652173913, | |
| "grad_norm": 1.023141622543335, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4364, | |
| "step": 2286 | |
| }, | |
| { | |
| "epoch": 0.6214673913043478, | |
| "grad_norm": 1.3121358156204224, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4811, | |
| "step": 2287 | |
| }, | |
| { | |
| "epoch": 0.6217391304347826, | |
| "grad_norm": 1.009194254875183, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0576, | |
| "step": 2288 | |
| }, | |
| { | |
| "epoch": 0.6220108695652173, | |
| "grad_norm": 1.0715727806091309, | |
| "learning_rate": 3e-05, | |
| "loss": 3.193, | |
| "step": 2289 | |
| }, | |
| { | |
| "epoch": 0.6222826086956522, | |
| "grad_norm": 1.0925318002700806, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1963, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.622554347826087, | |
| "grad_norm": 1.6540546417236328, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7859, | |
| "step": 2291 | |
| }, | |
| { | |
| "epoch": 0.6228260869565218, | |
| "grad_norm": 1.0642199516296387, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8112, | |
| "step": 2292 | |
| }, | |
| { | |
| "epoch": 0.6230978260869565, | |
| "grad_norm": 1.2560967206954956, | |
| "learning_rate": 3e-05, | |
| "loss": 3.528, | |
| "step": 2293 | |
| }, | |
| { | |
| "epoch": 0.6233695652173913, | |
| "grad_norm": 1.2816760540008545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1706, | |
| "step": 2294 | |
| }, | |
| { | |
| "epoch": 0.623641304347826, | |
| "grad_norm": 0.9735274314880371, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2275, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.6239130434782608, | |
| "grad_norm": 1.1143537759780884, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2471, | |
| "step": 2296 | |
| }, | |
| { | |
| "epoch": 0.6241847826086957, | |
| "grad_norm": 1.2723908424377441, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8132, | |
| "step": 2297 | |
| }, | |
| { | |
| "epoch": 0.6244565217391305, | |
| "grad_norm": 1.269571304321289, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2307, | |
| "step": 2298 | |
| }, | |
| { | |
| "epoch": 0.6247282608695652, | |
| "grad_norm": 1.0772123336791992, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2448, | |
| "step": 2299 | |
| }, | |
| { | |
| "epoch": 0.625, | |
| "grad_norm": 1.5265069007873535, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2314, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.6252717391304348, | |
| "grad_norm": 1.133500576019287, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1259, | |
| "step": 2301 | |
| }, | |
| { | |
| "epoch": 0.6255434782608695, | |
| "grad_norm": 1.0310444831848145, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1374, | |
| "step": 2302 | |
| }, | |
| { | |
| "epoch": 0.6258152173913043, | |
| "grad_norm": 1.789737582206726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.657, | |
| "step": 2303 | |
| }, | |
| { | |
| "epoch": 0.6260869565217392, | |
| "grad_norm": 1.028426170349121, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0235, | |
| "step": 2304 | |
| }, | |
| { | |
| "epoch": 0.626358695652174, | |
| "grad_norm": 1.0289982557296753, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0216, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.6266304347826087, | |
| "grad_norm": 1.2136385440826416, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2615, | |
| "step": 2306 | |
| }, | |
| { | |
| "epoch": 0.6269021739130435, | |
| "grad_norm": 1.1237839460372925, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1519, | |
| "step": 2307 | |
| }, | |
| { | |
| "epoch": 0.6271739130434782, | |
| "grad_norm": 1.0916386842727661, | |
| "learning_rate": 3e-05, | |
| "loss": 3.404, | |
| "step": 2308 | |
| }, | |
| { | |
| "epoch": 0.627445652173913, | |
| "grad_norm": 1.1526830196380615, | |
| "learning_rate": 3e-05, | |
| "loss": 3.671, | |
| "step": 2309 | |
| }, | |
| { | |
| "epoch": 0.6277173913043478, | |
| "grad_norm": 1.0757122039794922, | |
| "learning_rate": 3e-05, | |
| "loss": 3.069, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.6279891304347827, | |
| "grad_norm": 1.0339561700820923, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9702, | |
| "step": 2311 | |
| }, | |
| { | |
| "epoch": 0.6282608695652174, | |
| "grad_norm": 1.2000614404678345, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9536, | |
| "step": 2312 | |
| }, | |
| { | |
| "epoch": 0.6285326086956522, | |
| "grad_norm": 1.3104127645492554, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1653, | |
| "step": 2313 | |
| }, | |
| { | |
| "epoch": 0.628804347826087, | |
| "grad_norm": 1.2230424880981445, | |
| "learning_rate": 3e-05, | |
| "loss": 3.464, | |
| "step": 2314 | |
| }, | |
| { | |
| "epoch": 0.6290760869565217, | |
| "grad_norm": 1.258482813835144, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4273, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.6293478260869565, | |
| "grad_norm": 1.1824918985366821, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5311, | |
| "step": 2316 | |
| }, | |
| { | |
| "epoch": 0.6296195652173913, | |
| "grad_norm": 0.9511439800262451, | |
| "learning_rate": 3e-05, | |
| "loss": 2.979, | |
| "step": 2317 | |
| }, | |
| { | |
| "epoch": 0.6298913043478261, | |
| "grad_norm": 1.0357087850570679, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8735, | |
| "step": 2318 | |
| }, | |
| { | |
| "epoch": 0.6301630434782609, | |
| "grad_norm": 1.392822504043579, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3224, | |
| "step": 2319 | |
| }, | |
| { | |
| "epoch": 0.6304347826086957, | |
| "grad_norm": 1.4284133911132812, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8771, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.6307065217391304, | |
| "grad_norm": 1.2343987226486206, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4731, | |
| "step": 2321 | |
| }, | |
| { | |
| "epoch": 0.6309782608695652, | |
| "grad_norm": 1.3236418962478638, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0388, | |
| "step": 2322 | |
| }, | |
| { | |
| "epoch": 0.63125, | |
| "grad_norm": 1.332114577293396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1262, | |
| "step": 2323 | |
| }, | |
| { | |
| "epoch": 0.6315217391304347, | |
| "grad_norm": 1.0655372142791748, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2109, | |
| "step": 2324 | |
| }, | |
| { | |
| "epoch": 0.6317934782608695, | |
| "grad_norm": 1.0099713802337646, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2809, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.6320652173913044, | |
| "grad_norm": 1.0744123458862305, | |
| "learning_rate": 3e-05, | |
| "loss": 2.862, | |
| "step": 2326 | |
| }, | |
| { | |
| "epoch": 0.6323369565217392, | |
| "grad_norm": 1.051435112953186, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8705, | |
| "step": 2327 | |
| }, | |
| { | |
| "epoch": 0.6326086956521739, | |
| "grad_norm": 1.400758147239685, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7773, | |
| "step": 2328 | |
| }, | |
| { | |
| "epoch": 0.6328804347826087, | |
| "grad_norm": 1.1519436836242676, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9399, | |
| "step": 2329 | |
| }, | |
| { | |
| "epoch": 0.6331521739130435, | |
| "grad_norm": 1.264049768447876, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7764, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.6334239130434782, | |
| "grad_norm": 1.0223599672317505, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9021, | |
| "step": 2331 | |
| }, | |
| { | |
| "epoch": 0.633695652173913, | |
| "grad_norm": 1.255913257598877, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6456, | |
| "step": 2332 | |
| }, | |
| { | |
| "epoch": 0.6339673913043479, | |
| "grad_norm": 1.7114654779434204, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0404, | |
| "step": 2333 | |
| }, | |
| { | |
| "epoch": 0.6342391304347826, | |
| "grad_norm": 2.052917718887329, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1781, | |
| "step": 2334 | |
| }, | |
| { | |
| "epoch": 0.6345108695652174, | |
| "grad_norm": 1.376478672027588, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4097, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.6347826086956522, | |
| "grad_norm": 1.3323421478271484, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0153, | |
| "step": 2336 | |
| }, | |
| { | |
| "epoch": 0.6350543478260869, | |
| "grad_norm": 1.7771837711334229, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7671, | |
| "step": 2337 | |
| }, | |
| { | |
| "epoch": 0.6353260869565217, | |
| "grad_norm": 2.060122489929199, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5854, | |
| "step": 2338 | |
| }, | |
| { | |
| "epoch": 0.6355978260869565, | |
| "grad_norm": 1.3856672048568726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0497, | |
| "step": 2339 | |
| }, | |
| { | |
| "epoch": 0.6358695652173914, | |
| "grad_norm": 1.3146904706954956, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1533, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.6361413043478261, | |
| "grad_norm": 1.4848661422729492, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5613, | |
| "step": 2341 | |
| }, | |
| { | |
| "epoch": 0.6364130434782609, | |
| "grad_norm": 1.4037156105041504, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8576, | |
| "step": 2342 | |
| }, | |
| { | |
| "epoch": 0.6366847826086957, | |
| "grad_norm": 1.288294792175293, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5626, | |
| "step": 2343 | |
| }, | |
| { | |
| "epoch": 0.6369565217391304, | |
| "grad_norm": 1.080474853515625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2761, | |
| "step": 2344 | |
| }, | |
| { | |
| "epoch": 0.6372282608695652, | |
| "grad_norm": 1.1792863607406616, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9453, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.6375, | |
| "grad_norm": 1.4226300716400146, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8206, | |
| "step": 2346 | |
| }, | |
| { | |
| "epoch": 0.6377717391304348, | |
| "grad_norm": 1.5433789491653442, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4793, | |
| "step": 2347 | |
| }, | |
| { | |
| "epoch": 0.6380434782608696, | |
| "grad_norm": 1.0570566654205322, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0796, | |
| "step": 2348 | |
| }, | |
| { | |
| "epoch": 0.6383152173913044, | |
| "grad_norm": 1.4246288537979126, | |
| "learning_rate": 3e-05, | |
| "loss": 3.259, | |
| "step": 2349 | |
| }, | |
| { | |
| "epoch": 0.6385869565217391, | |
| "grad_norm": 2.0091404914855957, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2464, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.6388586956521739, | |
| "grad_norm": 1.7661538124084473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0348, | |
| "step": 2351 | |
| }, | |
| { | |
| "epoch": 0.6391304347826087, | |
| "grad_norm": 1.6644985675811768, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2656, | |
| "step": 2352 | |
| }, | |
| { | |
| "epoch": 0.6394021739130434, | |
| "grad_norm": 1.192597508430481, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2846, | |
| "step": 2353 | |
| }, | |
| { | |
| "epoch": 0.6396739130434783, | |
| "grad_norm": 1.234485149383545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2553, | |
| "step": 2354 | |
| }, | |
| { | |
| "epoch": 0.6399456521739131, | |
| "grad_norm": 1.2841020822525024, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8951, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.6402173913043478, | |
| "grad_norm": 1.119943618774414, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7422, | |
| "step": 2356 | |
| }, | |
| { | |
| "epoch": 0.6404891304347826, | |
| "grad_norm": 1.5436328649520874, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9712, | |
| "step": 2357 | |
| }, | |
| { | |
| "epoch": 0.6407608695652174, | |
| "grad_norm": 1.1272965669631958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1857, | |
| "step": 2358 | |
| }, | |
| { | |
| "epoch": 0.6410326086956522, | |
| "grad_norm": 1.340259075164795, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7488, | |
| "step": 2359 | |
| }, | |
| { | |
| "epoch": 0.6413043478260869, | |
| "grad_norm": 1.486802577972412, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8155, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.6415760869565217, | |
| "grad_norm": 0.9581712484359741, | |
| "learning_rate": 3e-05, | |
| "loss": 3.117, | |
| "step": 2361 | |
| }, | |
| { | |
| "epoch": 0.6418478260869566, | |
| "grad_norm": 1.012144923210144, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0744, | |
| "step": 2362 | |
| }, | |
| { | |
| "epoch": 0.6421195652173913, | |
| "grad_norm": 1.1179242134094238, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2825, | |
| "step": 2363 | |
| }, | |
| { | |
| "epoch": 0.6423913043478261, | |
| "grad_norm": 1.3681845664978027, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1833, | |
| "step": 2364 | |
| }, | |
| { | |
| "epoch": 0.6426630434782609, | |
| "grad_norm": 1.192631721496582, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3842, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.6429347826086956, | |
| "grad_norm": 1.2317907810211182, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1083, | |
| "step": 2366 | |
| }, | |
| { | |
| "epoch": 0.6432065217391304, | |
| "grad_norm": 1.0752184391021729, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2455, | |
| "step": 2367 | |
| }, | |
| { | |
| "epoch": 0.6434782608695652, | |
| "grad_norm": 1.035239338874817, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9436, | |
| "step": 2368 | |
| }, | |
| { | |
| "epoch": 0.64375, | |
| "grad_norm": 1.3296703100204468, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2708, | |
| "step": 2369 | |
| }, | |
| { | |
| "epoch": 0.6440217391304348, | |
| "grad_norm": 1.2996970415115356, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8312, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.6442934782608696, | |
| "grad_norm": 0.9831019639968872, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2478, | |
| "step": 2371 | |
| }, | |
| { | |
| "epoch": 0.6445652173913043, | |
| "grad_norm": 0.9067811965942383, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1556, | |
| "step": 2372 | |
| }, | |
| { | |
| "epoch": 0.6448369565217391, | |
| "grad_norm": 1.2001372575759888, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7225, | |
| "step": 2373 | |
| }, | |
| { | |
| "epoch": 0.6451086956521739, | |
| "grad_norm": 1.1935441493988037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3109, | |
| "step": 2374 | |
| }, | |
| { | |
| "epoch": 0.6453804347826086, | |
| "grad_norm": 0.9141666889190674, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0955, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.6456521739130435, | |
| "grad_norm": 1.0943185091018677, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3094, | |
| "step": 2376 | |
| }, | |
| { | |
| "epoch": 0.6459239130434783, | |
| "grad_norm": 1.1921093463897705, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3715, | |
| "step": 2377 | |
| }, | |
| { | |
| "epoch": 0.6461956521739131, | |
| "grad_norm": 1.0566505193710327, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1073, | |
| "step": 2378 | |
| }, | |
| { | |
| "epoch": 0.6464673913043478, | |
| "grad_norm": 1.5691936016082764, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4321, | |
| "step": 2379 | |
| }, | |
| { | |
| "epoch": 0.6467391304347826, | |
| "grad_norm": 1.041152000427246, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6529, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.6470108695652174, | |
| "grad_norm": 1.2687933444976807, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9547, | |
| "step": 2381 | |
| }, | |
| { | |
| "epoch": 0.6472826086956521, | |
| "grad_norm": 1.074779748916626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0635, | |
| "step": 2382 | |
| }, | |
| { | |
| "epoch": 0.647554347826087, | |
| "grad_norm": 1.0904431343078613, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8928, | |
| "step": 2383 | |
| }, | |
| { | |
| "epoch": 0.6478260869565218, | |
| "grad_norm": 1.3329259157180786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6509, | |
| "step": 2384 | |
| }, | |
| { | |
| "epoch": 0.6480978260869565, | |
| "grad_norm": 1.2522311210632324, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6357, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.6483695652173913, | |
| "grad_norm": 1.2754337787628174, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7422, | |
| "step": 2386 | |
| }, | |
| { | |
| "epoch": 0.6486413043478261, | |
| "grad_norm": 1.338080883026123, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3859, | |
| "step": 2387 | |
| }, | |
| { | |
| "epoch": 0.6489130434782608, | |
| "grad_norm": 1.1422319412231445, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8218, | |
| "step": 2388 | |
| }, | |
| { | |
| "epoch": 0.6491847826086956, | |
| "grad_norm": 1.647072196006775, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6864, | |
| "step": 2389 | |
| }, | |
| { | |
| "epoch": 0.6494565217391305, | |
| "grad_norm": 1.1428362131118774, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8359, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.6497282608695653, | |
| "grad_norm": 1.1382925510406494, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1163, | |
| "step": 2391 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.0694186687469482, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1842, | |
| "step": 2392 | |
| }, | |
| { | |
| "epoch": 0.6502717391304348, | |
| "grad_norm": 1.413272738456726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7837, | |
| "step": 2393 | |
| }, | |
| { | |
| "epoch": 0.6505434782608696, | |
| "grad_norm": 1.353119134902954, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2845, | |
| "step": 2394 | |
| }, | |
| { | |
| "epoch": 0.6508152173913043, | |
| "grad_norm": 1.542140245437622, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4894, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.6510869565217391, | |
| "grad_norm": 1.6058558225631714, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4778, | |
| "step": 2396 | |
| }, | |
| { | |
| "epoch": 0.6513586956521739, | |
| "grad_norm": 1.719277262687683, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5328, | |
| "step": 2397 | |
| }, | |
| { | |
| "epoch": 0.6516304347826087, | |
| "grad_norm": 1.7350586652755737, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5851, | |
| "step": 2398 | |
| }, | |
| { | |
| "epoch": 0.6519021739130435, | |
| "grad_norm": 1.4615671634674072, | |
| "learning_rate": 3e-05, | |
| "loss": 4.099, | |
| "step": 2399 | |
| }, | |
| { | |
| "epoch": 0.6521739130434783, | |
| "grad_norm": 1.194122314453125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.353, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.652445652173913, | |
| "grad_norm": 1.4127830266952515, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8697, | |
| "step": 2401 | |
| }, | |
| { | |
| "epoch": 0.6527173913043478, | |
| "grad_norm": 1.6707136631011963, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9621, | |
| "step": 2402 | |
| }, | |
| { | |
| "epoch": 0.6529891304347826, | |
| "grad_norm": 1.4606815576553345, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7677, | |
| "step": 2403 | |
| }, | |
| { | |
| "epoch": 0.6532608695652173, | |
| "grad_norm": 1.2474243640899658, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5581, | |
| "step": 2404 | |
| }, | |
| { | |
| "epoch": 0.6535326086956522, | |
| "grad_norm": 1.1824653148651123, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9751, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.653804347826087, | |
| "grad_norm": 1.22133469581604, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3986, | |
| "step": 2406 | |
| }, | |
| { | |
| "epoch": 0.6540760869565218, | |
| "grad_norm": 1.160557746887207, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2008, | |
| "step": 2407 | |
| }, | |
| { | |
| "epoch": 0.6543478260869565, | |
| "grad_norm": 1.2002159357070923, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6029, | |
| "step": 2408 | |
| }, | |
| { | |
| "epoch": 0.6546195652173913, | |
| "grad_norm": 1.1110812425613403, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2051, | |
| "step": 2409 | |
| }, | |
| { | |
| "epoch": 0.654891304347826, | |
| "grad_norm": 0.9706496596336365, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9527, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.6551630434782608, | |
| "grad_norm": 1.0990405082702637, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6553, | |
| "step": 2411 | |
| }, | |
| { | |
| "epoch": 0.6554347826086957, | |
| "grad_norm": 1.065306544303894, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3728, | |
| "step": 2412 | |
| }, | |
| { | |
| "epoch": 0.6557065217391305, | |
| "grad_norm": 0.956051766872406, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9055, | |
| "step": 2413 | |
| }, | |
| { | |
| "epoch": 0.6559782608695652, | |
| "grad_norm": 1.1310175657272339, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8269, | |
| "step": 2414 | |
| }, | |
| { | |
| "epoch": 0.65625, | |
| "grad_norm": 0.9513333439826965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2811, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.6565217391304348, | |
| "grad_norm": 1.301697015762329, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5579, | |
| "step": 2416 | |
| }, | |
| { | |
| "epoch": 0.6567934782608695, | |
| "grad_norm": 0.9810994267463684, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9914, | |
| "step": 2417 | |
| }, | |
| { | |
| "epoch": 0.6570652173913043, | |
| "grad_norm": 1.3848271369934082, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8109, | |
| "step": 2418 | |
| }, | |
| { | |
| "epoch": 0.6573369565217392, | |
| "grad_norm": 1.169350504875183, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8291, | |
| "step": 2419 | |
| }, | |
| { | |
| "epoch": 0.657608695652174, | |
| "grad_norm": 1.3478142023086548, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5609, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.6578804347826087, | |
| "grad_norm": 1.1918085813522339, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5339, | |
| "step": 2421 | |
| }, | |
| { | |
| "epoch": 0.6581521739130435, | |
| "grad_norm": 1.4257116317749023, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4636, | |
| "step": 2422 | |
| }, | |
| { | |
| "epoch": 0.6584239130434782, | |
| "grad_norm": 1.2269481420516968, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2741, | |
| "step": 2423 | |
| }, | |
| { | |
| "epoch": 0.658695652173913, | |
| "grad_norm": 1.2066056728363037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2177, | |
| "step": 2424 | |
| }, | |
| { | |
| "epoch": 0.6589673913043478, | |
| "grad_norm": 1.3328955173492432, | |
| "learning_rate": 3e-05, | |
| "loss": 3.527, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.6592391304347827, | |
| "grad_norm": 1.1746604442596436, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1194, | |
| "step": 2426 | |
| }, | |
| { | |
| "epoch": 0.6595108695652174, | |
| "grad_norm": 1.028620958328247, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1119, | |
| "step": 2427 | |
| }, | |
| { | |
| "epoch": 0.6597826086956522, | |
| "grad_norm": 1.0080275535583496, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7994, | |
| "step": 2428 | |
| }, | |
| { | |
| "epoch": 0.660054347826087, | |
| "grad_norm": 0.8145027756690979, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9428, | |
| "step": 2429 | |
| }, | |
| { | |
| "epoch": 0.6603260869565217, | |
| "grad_norm": 1.0437175035476685, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9878, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.6605978260869565, | |
| "grad_norm": 1.130467414855957, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7007, | |
| "step": 2431 | |
| }, | |
| { | |
| "epoch": 0.6608695652173913, | |
| "grad_norm": 1.0865567922592163, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1175, | |
| "step": 2432 | |
| }, | |
| { | |
| "epoch": 0.6611413043478261, | |
| "grad_norm": 1.1606049537658691, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3582, | |
| "step": 2433 | |
| }, | |
| { | |
| "epoch": 0.6614130434782609, | |
| "grad_norm": 0.9023473262786865, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9861, | |
| "step": 2434 | |
| }, | |
| { | |
| "epoch": 0.6616847826086957, | |
| "grad_norm": 0.856779932975769, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0402, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.6619565217391304, | |
| "grad_norm": 1.0253337621688843, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0909, | |
| "step": 2436 | |
| }, | |
| { | |
| "epoch": 0.6622282608695652, | |
| "grad_norm": 1.0018707513809204, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9996, | |
| "step": 2437 | |
| }, | |
| { | |
| "epoch": 0.6625, | |
| "grad_norm": 1.0722147226333618, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3102, | |
| "step": 2438 | |
| }, | |
| { | |
| "epoch": 0.6627717391304347, | |
| "grad_norm": 1.0798076391220093, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9907, | |
| "step": 2439 | |
| }, | |
| { | |
| "epoch": 0.6630434782608695, | |
| "grad_norm": 1.1593507528305054, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3741, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.6633152173913044, | |
| "grad_norm": 1.0947400331497192, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0087, | |
| "step": 2441 | |
| }, | |
| { | |
| "epoch": 0.6635869565217392, | |
| "grad_norm": 1.2882115840911865, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1978, | |
| "step": 2442 | |
| }, | |
| { | |
| "epoch": 0.6638586956521739, | |
| "grad_norm": 1.2732001543045044, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8371, | |
| "step": 2443 | |
| }, | |
| { | |
| "epoch": 0.6641304347826087, | |
| "grad_norm": 0.9186998605728149, | |
| "learning_rate": 3e-05, | |
| "loss": 2.643, | |
| "step": 2444 | |
| }, | |
| { | |
| "epoch": 0.6644021739130435, | |
| "grad_norm": 1.0889346599578857, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9247, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.6646739130434782, | |
| "grad_norm": 1.094017505645752, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1954, | |
| "step": 2446 | |
| }, | |
| { | |
| "epoch": 0.664945652173913, | |
| "grad_norm": 0.9859854578971863, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7335, | |
| "step": 2447 | |
| }, | |
| { | |
| "epoch": 0.6652173913043479, | |
| "grad_norm": 1.8823328018188477, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4934, | |
| "step": 2448 | |
| }, | |
| { | |
| "epoch": 0.6654891304347826, | |
| "grad_norm": 0.9667043685913086, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0711, | |
| "step": 2449 | |
| }, | |
| { | |
| "epoch": 0.6657608695652174, | |
| "grad_norm": 1.0037490129470825, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1836, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.6660326086956522, | |
| "grad_norm": 1.0636874437332153, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3269, | |
| "step": 2451 | |
| }, | |
| { | |
| "epoch": 0.6663043478260869, | |
| "grad_norm": 1.02604079246521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4928, | |
| "step": 2452 | |
| }, | |
| { | |
| "epoch": 0.6665760869565217, | |
| "grad_norm": 1.050881028175354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3983, | |
| "step": 2453 | |
| }, | |
| { | |
| "epoch": 0.6668478260869565, | |
| "grad_norm": 1.1651184558868408, | |
| "learning_rate": 3e-05, | |
| "loss": 3.573, | |
| "step": 2454 | |
| }, | |
| { | |
| "epoch": 0.6671195652173914, | |
| "grad_norm": 0.9464408755302429, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4415, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.6673913043478261, | |
| "grad_norm": 0.9894149899482727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4124, | |
| "step": 2456 | |
| }, | |
| { | |
| "epoch": 0.6676630434782609, | |
| "grad_norm": 1.1918611526489258, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1563, | |
| "step": 2457 | |
| }, | |
| { | |
| "epoch": 0.6679347826086957, | |
| "grad_norm": 1.1251368522644043, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0224, | |
| "step": 2458 | |
| }, | |
| { | |
| "epoch": 0.6682065217391304, | |
| "grad_norm": 0.9575350880622864, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1342, | |
| "step": 2459 | |
| }, | |
| { | |
| "epoch": 0.6684782608695652, | |
| "grad_norm": 1.1674573421478271, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5287, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.66875, | |
| "grad_norm": 1.3104429244995117, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3732, | |
| "step": 2461 | |
| }, | |
| { | |
| "epoch": 0.6690217391304348, | |
| "grad_norm": 1.0205787420272827, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3107, | |
| "step": 2462 | |
| }, | |
| { | |
| "epoch": 0.6692934782608696, | |
| "grad_norm": 1.1491562128067017, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1578, | |
| "step": 2463 | |
| }, | |
| { | |
| "epoch": 0.6695652173913044, | |
| "grad_norm": 1.2550066709518433, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9625, | |
| "step": 2464 | |
| }, | |
| { | |
| "epoch": 0.6698369565217391, | |
| "grad_norm": 1.396528720855713, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4526, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.6701086956521739, | |
| "grad_norm": 1.3484258651733398, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0718, | |
| "step": 2466 | |
| }, | |
| { | |
| "epoch": 0.6703804347826087, | |
| "grad_norm": 1.2837543487548828, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9352, | |
| "step": 2467 | |
| }, | |
| { | |
| "epoch": 0.6706521739130434, | |
| "grad_norm": 1.126395583152771, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2975, | |
| "step": 2468 | |
| }, | |
| { | |
| "epoch": 0.6709239130434783, | |
| "grad_norm": 1.0975403785705566, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9911, | |
| "step": 2469 | |
| }, | |
| { | |
| "epoch": 0.6711956521739131, | |
| "grad_norm": 1.260852336883545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4067, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.6714673913043478, | |
| "grad_norm": 1.3457390069961548, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3277, | |
| "step": 2471 | |
| }, | |
| { | |
| "epoch": 0.6717391304347826, | |
| "grad_norm": 1.6091570854187012, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8467, | |
| "step": 2472 | |
| }, | |
| { | |
| "epoch": 0.6720108695652174, | |
| "grad_norm": 1.3068939447402954, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5756, | |
| "step": 2473 | |
| }, | |
| { | |
| "epoch": 0.6722826086956522, | |
| "grad_norm": 1.532850742340088, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6188, | |
| "step": 2474 | |
| }, | |
| { | |
| "epoch": 0.6725543478260869, | |
| "grad_norm": 1.179315447807312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3408, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.6728260869565217, | |
| "grad_norm": 1.278639793395996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9328, | |
| "step": 2476 | |
| }, | |
| { | |
| "epoch": 0.6730978260869566, | |
| "grad_norm": 0.9712943434715271, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7786, | |
| "step": 2477 | |
| }, | |
| { | |
| "epoch": 0.6733695652173913, | |
| "grad_norm": 1.1749228239059448, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7182, | |
| "step": 2478 | |
| }, | |
| { | |
| "epoch": 0.6736413043478261, | |
| "grad_norm": 1.3901921510696411, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2455, | |
| "step": 2479 | |
| }, | |
| { | |
| "epoch": 0.6739130434782609, | |
| "grad_norm": 1.3088897466659546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4134, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.6741847826086956, | |
| "grad_norm": 1.2072702646255493, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7264, | |
| "step": 2481 | |
| }, | |
| { | |
| "epoch": 0.6744565217391304, | |
| "grad_norm": 1.3006356954574585, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1591, | |
| "step": 2482 | |
| }, | |
| { | |
| "epoch": 0.6747282608695652, | |
| "grad_norm": 2.3080663681030273, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6425, | |
| "step": 2483 | |
| }, | |
| { | |
| "epoch": 0.675, | |
| "grad_norm": 1.3426471948623657, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9936, | |
| "step": 2484 | |
| }, | |
| { | |
| "epoch": 0.6752717391304348, | |
| "grad_norm": 1.1594306230545044, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3481, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.6755434782608696, | |
| "grad_norm": 1.0936838388442993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0749, | |
| "step": 2486 | |
| }, | |
| { | |
| "epoch": 0.6758152173913043, | |
| "grad_norm": 1.3417373895645142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2727, | |
| "step": 2487 | |
| }, | |
| { | |
| "epoch": 0.6760869565217391, | |
| "grad_norm": 1.6536362171173096, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3898, | |
| "step": 2488 | |
| }, | |
| { | |
| "epoch": 0.6763586956521739, | |
| "grad_norm": 1.2044962644577026, | |
| "learning_rate": 3e-05, | |
| "loss": 3.06, | |
| "step": 2489 | |
| }, | |
| { | |
| "epoch": 0.6766304347826086, | |
| "grad_norm": 1.6772176027297974, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6963, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.6769021739130435, | |
| "grad_norm": 1.2547991275787354, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9258, | |
| "step": 2491 | |
| }, | |
| { | |
| "epoch": 0.6771739130434783, | |
| "grad_norm": 1.2318508625030518, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4305, | |
| "step": 2492 | |
| }, | |
| { | |
| "epoch": 0.6774456521739131, | |
| "grad_norm": 1.4965567588806152, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4216, | |
| "step": 2493 | |
| }, | |
| { | |
| "epoch": 0.6777173913043478, | |
| "grad_norm": 1.6711788177490234, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8026, | |
| "step": 2494 | |
| }, | |
| { | |
| "epoch": 0.6779891304347826, | |
| "grad_norm": 1.285457968711853, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9369, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.6782608695652174, | |
| "grad_norm": 1.1391198635101318, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6831, | |
| "step": 2496 | |
| }, | |
| { | |
| "epoch": 0.6785326086956521, | |
| "grad_norm": 1.4873690605163574, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5054, | |
| "step": 2497 | |
| }, | |
| { | |
| "epoch": 0.678804347826087, | |
| "grad_norm": 1.5623383522033691, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5831, | |
| "step": 2498 | |
| }, | |
| { | |
| "epoch": 0.6790760869565218, | |
| "grad_norm": 1.0868808031082153, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5378, | |
| "step": 2499 | |
| }, | |
| { | |
| "epoch": 0.6793478260869565, | |
| "grad_norm": 1.5260769128799438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6442, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.6796195652173913, | |
| "grad_norm": 1.5094234943389893, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8721, | |
| "step": 2501 | |
| }, | |
| { | |
| "epoch": 0.6798913043478261, | |
| "grad_norm": 1.03408944606781, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2489, | |
| "step": 2502 | |
| }, | |
| { | |
| "epoch": 0.6801630434782608, | |
| "grad_norm": 1.5139715671539307, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1632, | |
| "step": 2503 | |
| }, | |
| { | |
| "epoch": 0.6804347826086956, | |
| "grad_norm": 0.8952838778495789, | |
| "learning_rate": 3e-05, | |
| "loss": 2.678, | |
| "step": 2504 | |
| }, | |
| { | |
| "epoch": 0.6807065217391305, | |
| "grad_norm": 0.886978268623352, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9663, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.6809782608695653, | |
| "grad_norm": 1.4015369415283203, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4536, | |
| "step": 2506 | |
| }, | |
| { | |
| "epoch": 0.68125, | |
| "grad_norm": 1.1252248287200928, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1302, | |
| "step": 2507 | |
| }, | |
| { | |
| "epoch": 0.6815217391304348, | |
| "grad_norm": 1.2304083108901978, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5204, | |
| "step": 2508 | |
| }, | |
| { | |
| "epoch": 0.6817934782608696, | |
| "grad_norm": 1.2945035696029663, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7562, | |
| "step": 2509 | |
| }, | |
| { | |
| "epoch": 0.6820652173913043, | |
| "grad_norm": 1.1755324602127075, | |
| "learning_rate": 3e-05, | |
| "loss": 2.787, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.6823369565217391, | |
| "grad_norm": 1.413987159729004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5305, | |
| "step": 2511 | |
| }, | |
| { | |
| "epoch": 0.6826086956521739, | |
| "grad_norm": 1.0807994604110718, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2698, | |
| "step": 2512 | |
| }, | |
| { | |
| "epoch": 0.6828804347826087, | |
| "grad_norm": 1.5065345764160156, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3808, | |
| "step": 2513 | |
| }, | |
| { | |
| "epoch": 0.6831521739130435, | |
| "grad_norm": 1.2206026315689087, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9492, | |
| "step": 2514 | |
| }, | |
| { | |
| "epoch": 0.6834239130434783, | |
| "grad_norm": 1.1851751804351807, | |
| "learning_rate": 3e-05, | |
| "loss": 3.425, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 0.683695652173913, | |
| "grad_norm": 1.1727910041809082, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6993, | |
| "step": 2516 | |
| }, | |
| { | |
| "epoch": 0.6839673913043478, | |
| "grad_norm": 1.5769599676132202, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3359, | |
| "step": 2517 | |
| }, | |
| { | |
| "epoch": 0.6842391304347826, | |
| "grad_norm": 1.5909152030944824, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1606, | |
| "step": 2518 | |
| }, | |
| { | |
| "epoch": 0.6845108695652173, | |
| "grad_norm": 1.094890832901001, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2958, | |
| "step": 2519 | |
| }, | |
| { | |
| "epoch": 0.6847826086956522, | |
| "grad_norm": 1.5951069593429565, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6897, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.685054347826087, | |
| "grad_norm": 1.3260122537612915, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5941, | |
| "step": 2521 | |
| }, | |
| { | |
| "epoch": 0.6853260869565218, | |
| "grad_norm": 1.431256651878357, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8581, | |
| "step": 2522 | |
| }, | |
| { | |
| "epoch": 0.6855978260869565, | |
| "grad_norm": 1.3032617568969727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3358, | |
| "step": 2523 | |
| }, | |
| { | |
| "epoch": 0.6858695652173913, | |
| "grad_norm": 1.0501641035079956, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9872, | |
| "step": 2524 | |
| }, | |
| { | |
| "epoch": 0.686141304347826, | |
| "grad_norm": 1.1217620372772217, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1064, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 0.6864130434782608, | |
| "grad_norm": 1.0204346179962158, | |
| "learning_rate": 3e-05, | |
| "loss": 3.13, | |
| "step": 2526 | |
| }, | |
| { | |
| "epoch": 0.6866847826086957, | |
| "grad_norm": 1.479447603225708, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3739, | |
| "step": 2527 | |
| }, | |
| { | |
| "epoch": 0.6869565217391305, | |
| "grad_norm": 1.3281017541885376, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1517, | |
| "step": 2528 | |
| }, | |
| { | |
| "epoch": 0.6872282608695652, | |
| "grad_norm": 1.358286738395691, | |
| "learning_rate": 3e-05, | |
| "loss": 3.215, | |
| "step": 2529 | |
| }, | |
| { | |
| "epoch": 0.6875, | |
| "grad_norm": 1.169724464416504, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9665, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.6877717391304348, | |
| "grad_norm": 1.373205542564392, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3161, | |
| "step": 2531 | |
| }, | |
| { | |
| "epoch": 0.6880434782608695, | |
| "grad_norm": 1.0364346504211426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.135, | |
| "step": 2532 | |
| }, | |
| { | |
| "epoch": 0.6883152173913043, | |
| "grad_norm": 1.2910298109054565, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2629, | |
| "step": 2533 | |
| }, | |
| { | |
| "epoch": 0.6885869565217392, | |
| "grad_norm": 1.7945650815963745, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5541, | |
| "step": 2534 | |
| }, | |
| { | |
| "epoch": 0.688858695652174, | |
| "grad_norm": 1.3837659358978271, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5515, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 0.6891304347826087, | |
| "grad_norm": 1.3154077529907227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6571, | |
| "step": 2536 | |
| }, | |
| { | |
| "epoch": 0.6894021739130435, | |
| "grad_norm": 1.3466911315917969, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0786, | |
| "step": 2537 | |
| }, | |
| { | |
| "epoch": 0.6896739130434782, | |
| "grad_norm": 1.3170411586761475, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9075, | |
| "step": 2538 | |
| }, | |
| { | |
| "epoch": 0.689945652173913, | |
| "grad_norm": 1.1219418048858643, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1026, | |
| "step": 2539 | |
| }, | |
| { | |
| "epoch": 0.6902173913043478, | |
| "grad_norm": 1.15764319896698, | |
| "learning_rate": 3e-05, | |
| "loss": 3.45, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.6904891304347827, | |
| "grad_norm": 1.196807861328125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.185, | |
| "step": 2541 | |
| }, | |
| { | |
| "epoch": 0.6907608695652174, | |
| "grad_norm": 1.187324047088623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4707, | |
| "step": 2542 | |
| }, | |
| { | |
| "epoch": 0.6910326086956522, | |
| "grad_norm": 1.0559121370315552, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9871, | |
| "step": 2543 | |
| }, | |
| { | |
| "epoch": 0.691304347826087, | |
| "grad_norm": 1.1243149042129517, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5086, | |
| "step": 2544 | |
| }, | |
| { | |
| "epoch": 0.6915760869565217, | |
| "grad_norm": 1.1120831966400146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1197, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 0.6918478260869565, | |
| "grad_norm": 1.294692039489746, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9801, | |
| "step": 2546 | |
| }, | |
| { | |
| "epoch": 0.6921195652173913, | |
| "grad_norm": 1.3371599912643433, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8999, | |
| "step": 2547 | |
| }, | |
| { | |
| "epoch": 0.6923913043478261, | |
| "grad_norm": 1.219671368598938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3713, | |
| "step": 2548 | |
| }, | |
| { | |
| "epoch": 0.6926630434782609, | |
| "grad_norm": 0.9593479037284851, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8705, | |
| "step": 2549 | |
| }, | |
| { | |
| "epoch": 0.6929347826086957, | |
| "grad_norm": 1.2361105680465698, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6656, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.6932065217391304, | |
| "grad_norm": 0.9579165577888489, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9437, | |
| "step": 2551 | |
| }, | |
| { | |
| "epoch": 0.6934782608695652, | |
| "grad_norm": 1.0255228281021118, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0341, | |
| "step": 2552 | |
| }, | |
| { | |
| "epoch": 0.69375, | |
| "grad_norm": 1.2201510667800903, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6407, | |
| "step": 2553 | |
| }, | |
| { | |
| "epoch": 0.6940217391304347, | |
| "grad_norm": 1.1329454183578491, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4387, | |
| "step": 2554 | |
| }, | |
| { | |
| "epoch": 0.6942934782608695, | |
| "grad_norm": 1.0753947496414185, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1663, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 0.6945652173913044, | |
| "grad_norm": 1.8046010732650757, | |
| "learning_rate": 3e-05, | |
| "loss": 3.624, | |
| "step": 2556 | |
| }, | |
| { | |
| "epoch": 0.6948369565217392, | |
| "grad_norm": 1.0631359815597534, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2537, | |
| "step": 2557 | |
| }, | |
| { | |
| "epoch": 0.6951086956521739, | |
| "grad_norm": 1.0694079399108887, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8613, | |
| "step": 2558 | |
| }, | |
| { | |
| "epoch": 0.6953804347826087, | |
| "grad_norm": 1.2790333032608032, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0732, | |
| "step": 2559 | |
| }, | |
| { | |
| "epoch": 0.6956521739130435, | |
| "grad_norm": 1.253812551498413, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0818, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.6959239130434782, | |
| "grad_norm": 1.2076342105865479, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7011, | |
| "step": 2561 | |
| }, | |
| { | |
| "epoch": 0.696195652173913, | |
| "grad_norm": 1.2318416833877563, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7658, | |
| "step": 2562 | |
| }, | |
| { | |
| "epoch": 0.6964673913043479, | |
| "grad_norm": 1.442963719367981, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7564, | |
| "step": 2563 | |
| }, | |
| { | |
| "epoch": 0.6967391304347826, | |
| "grad_norm": 1.3947499990463257, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4476, | |
| "step": 2564 | |
| }, | |
| { | |
| "epoch": 0.6970108695652174, | |
| "grad_norm": 1.8033784627914429, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7798, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 0.6972826086956522, | |
| "grad_norm": 1.3678721189498901, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3788, | |
| "step": 2566 | |
| }, | |
| { | |
| "epoch": 0.6975543478260869, | |
| "grad_norm": 1.0561424493789673, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1749, | |
| "step": 2567 | |
| }, | |
| { | |
| "epoch": 0.6978260869565217, | |
| "grad_norm": 1.2744587659835815, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3584, | |
| "step": 2568 | |
| }, | |
| { | |
| "epoch": 0.6980978260869565, | |
| "grad_norm": 1.2167631387710571, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4043, | |
| "step": 2569 | |
| }, | |
| { | |
| "epoch": 0.6983695652173914, | |
| "grad_norm": 1.1461526155471802, | |
| "learning_rate": 3e-05, | |
| "loss": 3.381, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.6986413043478261, | |
| "grad_norm": 1.173651933670044, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8328, | |
| "step": 2571 | |
| }, | |
| { | |
| "epoch": 0.6989130434782609, | |
| "grad_norm": 1.3456881046295166, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9301, | |
| "step": 2572 | |
| }, | |
| { | |
| "epoch": 0.6991847826086957, | |
| "grad_norm": 1.6022355556488037, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0052, | |
| "step": 2573 | |
| }, | |
| { | |
| "epoch": 0.6994565217391304, | |
| "grad_norm": 1.4133738279342651, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6391, | |
| "step": 2574 | |
| }, | |
| { | |
| "epoch": 0.6997282608695652, | |
| "grad_norm": 1.2311711311340332, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6895, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0733444690704346, | |
| "learning_rate": 3e-05, | |
| "loss": 2.685, | |
| "step": 2576 | |
| }, | |
| { | |
| "epoch": 0.7002717391304348, | |
| "grad_norm": 1.596375823020935, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2422, | |
| "step": 2577 | |
| }, | |
| { | |
| "epoch": 0.7005434782608696, | |
| "grad_norm": 1.098339319229126, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0258, | |
| "step": 2578 | |
| }, | |
| { | |
| "epoch": 0.7008152173913044, | |
| "grad_norm": 1.215551495552063, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6583, | |
| "step": 2579 | |
| }, | |
| { | |
| "epoch": 0.7010869565217391, | |
| "grad_norm": 1.175678014755249, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9082, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.7013586956521739, | |
| "grad_norm": 1.285050868988037, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0373, | |
| "step": 2581 | |
| }, | |
| { | |
| "epoch": 0.7016304347826087, | |
| "grad_norm": 1.0280994176864624, | |
| "learning_rate": 3e-05, | |
| "loss": 3.027, | |
| "step": 2582 | |
| }, | |
| { | |
| "epoch": 0.7019021739130434, | |
| "grad_norm": 1.1611984968185425, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9426, | |
| "step": 2583 | |
| }, | |
| { | |
| "epoch": 0.7021739130434783, | |
| "grad_norm": 1.475341558456421, | |
| "learning_rate": 3e-05, | |
| "loss": 3.366, | |
| "step": 2584 | |
| }, | |
| { | |
| "epoch": 0.7024456521739131, | |
| "grad_norm": 1.6377277374267578, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4074, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 0.7027173913043478, | |
| "grad_norm": 1.4452388286590576, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9369, | |
| "step": 2586 | |
| }, | |
| { | |
| "epoch": 0.7029891304347826, | |
| "grad_norm": 1.533266305923462, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9352, | |
| "step": 2587 | |
| }, | |
| { | |
| "epoch": 0.7032608695652174, | |
| "grad_norm": 1.6853458881378174, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4523, | |
| "step": 2588 | |
| }, | |
| { | |
| "epoch": 0.7035326086956522, | |
| "grad_norm": 1.3552666902542114, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7867, | |
| "step": 2589 | |
| }, | |
| { | |
| "epoch": 0.7038043478260869, | |
| "grad_norm": 1.5494905710220337, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4699, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.7040760869565217, | |
| "grad_norm": 1.74055016040802, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3874, | |
| "step": 2591 | |
| }, | |
| { | |
| "epoch": 0.7043478260869566, | |
| "grad_norm": 1.250354290008545, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1795, | |
| "step": 2592 | |
| }, | |
| { | |
| "epoch": 0.7046195652173913, | |
| "grad_norm": 1.302441954612732, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9465, | |
| "step": 2593 | |
| }, | |
| { | |
| "epoch": 0.7048913043478261, | |
| "grad_norm": 1.7971372604370117, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6084, | |
| "step": 2594 | |
| }, | |
| { | |
| "epoch": 0.7051630434782609, | |
| "grad_norm": 1.188605546951294, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6716, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 0.7054347826086956, | |
| "grad_norm": 0.9954262971878052, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1982, | |
| "step": 2596 | |
| }, | |
| { | |
| "epoch": 0.7057065217391304, | |
| "grad_norm": 1.174482822418213, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1708, | |
| "step": 2597 | |
| }, | |
| { | |
| "epoch": 0.7059782608695652, | |
| "grad_norm": 1.039506435394287, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1857, | |
| "step": 2598 | |
| }, | |
| { | |
| "epoch": 0.70625, | |
| "grad_norm": 0.9666784405708313, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6005, | |
| "step": 2599 | |
| }, | |
| { | |
| "epoch": 0.7065217391304348, | |
| "grad_norm": 0.921465277671814, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0565, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.7067934782608696, | |
| "grad_norm": 1.018864393234253, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7895, | |
| "step": 2601 | |
| }, | |
| { | |
| "epoch": 0.7070652173913043, | |
| "grad_norm": 1.0360243320465088, | |
| "learning_rate": 3e-05, | |
| "loss": 3.125, | |
| "step": 2602 | |
| }, | |
| { | |
| "epoch": 0.7073369565217391, | |
| "grad_norm": 0.9393570423126221, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1148, | |
| "step": 2603 | |
| }, | |
| { | |
| "epoch": 0.7076086956521739, | |
| "grad_norm": 1.1420108079910278, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1415, | |
| "step": 2604 | |
| }, | |
| { | |
| "epoch": 0.7078804347826086, | |
| "grad_norm": 1.1641017198562622, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0936, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 0.7081521739130435, | |
| "grad_norm": 1.6107722520828247, | |
| "learning_rate": 3e-05, | |
| "loss": 4.061, | |
| "step": 2606 | |
| }, | |
| { | |
| "epoch": 0.7084239130434783, | |
| "grad_norm": 1.2036430835723877, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1156, | |
| "step": 2607 | |
| }, | |
| { | |
| "epoch": 0.7086956521739131, | |
| "grad_norm": 1.2490811347961426, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0078, | |
| "step": 2608 | |
| }, | |
| { | |
| "epoch": 0.7089673913043478, | |
| "grad_norm": 1.0128928422927856, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8499, | |
| "step": 2609 | |
| }, | |
| { | |
| "epoch": 0.7092391304347826, | |
| "grad_norm": 1.363193154335022, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9375, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.7095108695652174, | |
| "grad_norm": 0.8343818187713623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1519, | |
| "step": 2611 | |
| }, | |
| { | |
| "epoch": 0.7097826086956521, | |
| "grad_norm": 1.0047922134399414, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8608, | |
| "step": 2612 | |
| }, | |
| { | |
| "epoch": 0.710054347826087, | |
| "grad_norm": 1.18521249294281, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7399, | |
| "step": 2613 | |
| }, | |
| { | |
| "epoch": 0.7103260869565218, | |
| "grad_norm": 1.3591742515563965, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5885, | |
| "step": 2614 | |
| }, | |
| { | |
| "epoch": 0.7105978260869565, | |
| "grad_norm": 1.1936426162719727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7192, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 0.7108695652173913, | |
| "grad_norm": 1.114531397819519, | |
| "learning_rate": 3e-05, | |
| "loss": 3.22, | |
| "step": 2616 | |
| }, | |
| { | |
| "epoch": 0.7111413043478261, | |
| "grad_norm": 1.1929512023925781, | |
| "learning_rate": 3e-05, | |
| "loss": 2.751, | |
| "step": 2617 | |
| }, | |
| { | |
| "epoch": 0.7114130434782608, | |
| "grad_norm": 1.3775954246520996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6234, | |
| "step": 2618 | |
| }, | |
| { | |
| "epoch": 0.7116847826086956, | |
| "grad_norm": 1.313889980316162, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2312, | |
| "step": 2619 | |
| }, | |
| { | |
| "epoch": 0.7119565217391305, | |
| "grad_norm": 1.4679901599884033, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2006, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.7122282608695653, | |
| "grad_norm": 0.9835330843925476, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2365, | |
| "step": 2621 | |
| }, | |
| { | |
| "epoch": 0.7125, | |
| "grad_norm": 1.9934345483779907, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4249, | |
| "step": 2622 | |
| }, | |
| { | |
| "epoch": 0.7127717391304348, | |
| "grad_norm": 1.2943999767303467, | |
| "learning_rate": 3e-05, | |
| "loss": 3.517, | |
| "step": 2623 | |
| }, | |
| { | |
| "epoch": 0.7130434782608696, | |
| "grad_norm": 1.1526103019714355, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6404, | |
| "step": 2624 | |
| }, | |
| { | |
| "epoch": 0.7133152173913043, | |
| "grad_norm": 1.3137702941894531, | |
| "learning_rate": 3e-05, | |
| "loss": 3.305, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 0.7135869565217391, | |
| "grad_norm": 1.2102550268173218, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0545, | |
| "step": 2626 | |
| }, | |
| { | |
| "epoch": 0.7138586956521739, | |
| "grad_norm": 1.0959724187850952, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0383, | |
| "step": 2627 | |
| }, | |
| { | |
| "epoch": 0.7141304347826087, | |
| "grad_norm": 1.7450772523880005, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5891, | |
| "step": 2628 | |
| }, | |
| { | |
| "epoch": 0.7144021739130435, | |
| "grad_norm": 1.3901242017745972, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5405, | |
| "step": 2629 | |
| }, | |
| { | |
| "epoch": 0.7146739130434783, | |
| "grad_norm": 1.2243647575378418, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4307, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.714945652173913, | |
| "grad_norm": 2.3953914642333984, | |
| "learning_rate": 3e-05, | |
| "loss": 4.3341, | |
| "step": 2631 | |
| }, | |
| { | |
| "epoch": 0.7152173913043478, | |
| "grad_norm": 1.8682715892791748, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0704, | |
| "step": 2632 | |
| }, | |
| { | |
| "epoch": 0.7154891304347826, | |
| "grad_norm": 1.4622515439987183, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3376, | |
| "step": 2633 | |
| }, | |
| { | |
| "epoch": 0.7157608695652173, | |
| "grad_norm": 1.2892646789550781, | |
| "learning_rate": 3e-05, | |
| "loss": 3.312, | |
| "step": 2634 | |
| }, | |
| { | |
| "epoch": 0.7160326086956522, | |
| "grad_norm": 1.4950429201126099, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3923, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 0.716304347826087, | |
| "grad_norm": 1.380149245262146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5931, | |
| "step": 2636 | |
| }, | |
| { | |
| "epoch": 0.7165760869565218, | |
| "grad_norm": 1.433959722518921, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9042, | |
| "step": 2637 | |
| }, | |
| { | |
| "epoch": 0.7168478260869565, | |
| "grad_norm": 1.2683796882629395, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3012, | |
| "step": 2638 | |
| }, | |
| { | |
| "epoch": 0.7171195652173913, | |
| "grad_norm": 1.0387176275253296, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9117, | |
| "step": 2639 | |
| }, | |
| { | |
| "epoch": 0.717391304347826, | |
| "grad_norm": 1.5096675157546997, | |
| "learning_rate": 3e-05, | |
| "loss": 3.125, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.7176630434782608, | |
| "grad_norm": 1.1815725564956665, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9721, | |
| "step": 2641 | |
| }, | |
| { | |
| "epoch": 0.7179347826086957, | |
| "grad_norm": 1.1765364408493042, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3088, | |
| "step": 2642 | |
| }, | |
| { | |
| "epoch": 0.7182065217391305, | |
| "grad_norm": 1.087518572807312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1549, | |
| "step": 2643 | |
| }, | |
| { | |
| "epoch": 0.7184782608695652, | |
| "grad_norm": 1.1884835958480835, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2079, | |
| "step": 2644 | |
| }, | |
| { | |
| "epoch": 0.71875, | |
| "grad_norm": 1.1198290586471558, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9726, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 0.7190217391304348, | |
| "grad_norm": 1.2286672592163086, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2783, | |
| "step": 2646 | |
| }, | |
| { | |
| "epoch": 0.7192934782608695, | |
| "grad_norm": 1.3148912191390991, | |
| "learning_rate": 3e-05, | |
| "loss": 3.914, | |
| "step": 2647 | |
| }, | |
| { | |
| "epoch": 0.7195652173913043, | |
| "grad_norm": 1.1227190494537354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1269, | |
| "step": 2648 | |
| }, | |
| { | |
| "epoch": 0.7198369565217392, | |
| "grad_norm": 1.3971885442733765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3385, | |
| "step": 2649 | |
| }, | |
| { | |
| "epoch": 0.720108695652174, | |
| "grad_norm": 1.0572166442871094, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8119, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.7203804347826087, | |
| "grad_norm": 1.3299329280853271, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5842, | |
| "step": 2651 | |
| }, | |
| { | |
| "epoch": 0.7206521739130435, | |
| "grad_norm": 1.2632654905319214, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8276, | |
| "step": 2652 | |
| }, | |
| { | |
| "epoch": 0.7209239130434782, | |
| "grad_norm": 1.2086924314498901, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3622, | |
| "step": 2653 | |
| }, | |
| { | |
| "epoch": 0.721195652173913, | |
| "grad_norm": 1.1867645978927612, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1258, | |
| "step": 2654 | |
| }, | |
| { | |
| "epoch": 0.7214673913043478, | |
| "grad_norm": 1.123406171798706, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3561, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 0.7217391304347827, | |
| "grad_norm": 1.3678390979766846, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4183, | |
| "step": 2656 | |
| }, | |
| { | |
| "epoch": 0.7220108695652174, | |
| "grad_norm": 1.21256685256958, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5207, | |
| "step": 2657 | |
| }, | |
| { | |
| "epoch": 0.7222826086956522, | |
| "grad_norm": 1.1133040189743042, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8218, | |
| "step": 2658 | |
| }, | |
| { | |
| "epoch": 0.722554347826087, | |
| "grad_norm": 1.0846513509750366, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9519, | |
| "step": 2659 | |
| }, | |
| { | |
| "epoch": 0.7228260869565217, | |
| "grad_norm": 1.0870620012283325, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8142, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.7230978260869565, | |
| "grad_norm": 1.2407505512237549, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0965, | |
| "step": 2661 | |
| }, | |
| { | |
| "epoch": 0.7233695652173913, | |
| "grad_norm": 1.0677931308746338, | |
| "learning_rate": 3e-05, | |
| "loss": 2.841, | |
| "step": 2662 | |
| }, | |
| { | |
| "epoch": 0.7236413043478261, | |
| "grad_norm": 1.1866375207901, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2199, | |
| "step": 2663 | |
| }, | |
| { | |
| "epoch": 0.7239130434782609, | |
| "grad_norm": 1.4401320219039917, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1391, | |
| "step": 2664 | |
| }, | |
| { | |
| "epoch": 0.7241847826086957, | |
| "grad_norm": 1.8525235652923584, | |
| "learning_rate": 3e-05, | |
| "loss": 2.954, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 0.7244565217391304, | |
| "grad_norm": 1.4307048320770264, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6121, | |
| "step": 2666 | |
| }, | |
| { | |
| "epoch": 0.7247282608695652, | |
| "grad_norm": 1.4132686853408813, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2657, | |
| "step": 2667 | |
| }, | |
| { | |
| "epoch": 0.725, | |
| "grad_norm": 1.1710587739944458, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5075, | |
| "step": 2668 | |
| }, | |
| { | |
| "epoch": 0.7252717391304347, | |
| "grad_norm": 1.4402532577514648, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5403, | |
| "step": 2669 | |
| }, | |
| { | |
| "epoch": 0.7255434782608695, | |
| "grad_norm": 1.0295389890670776, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2313, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.7258152173913044, | |
| "grad_norm": 1.9836853742599487, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7605, | |
| "step": 2671 | |
| }, | |
| { | |
| "epoch": 0.7260869565217392, | |
| "grad_norm": 1.313320279121399, | |
| "learning_rate": 3e-05, | |
| "loss": 2.892, | |
| "step": 2672 | |
| }, | |
| { | |
| "epoch": 0.7263586956521739, | |
| "grad_norm": 1.227516770362854, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4072, | |
| "step": 2673 | |
| }, | |
| { | |
| "epoch": 0.7266304347826087, | |
| "grad_norm": 1.343525767326355, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1093, | |
| "step": 2674 | |
| }, | |
| { | |
| "epoch": 0.7269021739130435, | |
| "grad_norm": 1.6472978591918945, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2588, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 0.7271739130434782, | |
| "grad_norm": 1.313114047050476, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0819, | |
| "step": 2676 | |
| }, | |
| { | |
| "epoch": 0.727445652173913, | |
| "grad_norm": 1.3546169996261597, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3884, | |
| "step": 2677 | |
| }, | |
| { | |
| "epoch": 0.7277173913043479, | |
| "grad_norm": 1.4308792352676392, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2228, | |
| "step": 2678 | |
| }, | |
| { | |
| "epoch": 0.7279891304347826, | |
| "grad_norm": 1.2303006649017334, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9464, | |
| "step": 2679 | |
| }, | |
| { | |
| "epoch": 0.7282608695652174, | |
| "grad_norm": 2.12964129447937, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1515, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.7285326086956522, | |
| "grad_norm": 1.1187695264816284, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2245, | |
| "step": 2681 | |
| }, | |
| { | |
| "epoch": 0.7288043478260869, | |
| "grad_norm": 1.440588355064392, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0875, | |
| "step": 2682 | |
| }, | |
| { | |
| "epoch": 0.7290760869565217, | |
| "grad_norm": 1.2928602695465088, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4652, | |
| "step": 2683 | |
| }, | |
| { | |
| "epoch": 0.7293478260869565, | |
| "grad_norm": 1.1873844861984253, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8352, | |
| "step": 2684 | |
| }, | |
| { | |
| "epoch": 0.7296195652173914, | |
| "grad_norm": 1.1879850625991821, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8956, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 0.7298913043478261, | |
| "grad_norm": 1.206374168395996, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3039, | |
| "step": 2686 | |
| }, | |
| { | |
| "epoch": 0.7301630434782609, | |
| "grad_norm": 1.1491773128509521, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0314, | |
| "step": 2687 | |
| }, | |
| { | |
| "epoch": 0.7304347826086957, | |
| "grad_norm": 1.3866368532180786, | |
| "learning_rate": 3e-05, | |
| "loss": 3.055, | |
| "step": 2688 | |
| }, | |
| { | |
| "epoch": 0.7307065217391304, | |
| "grad_norm": 1.5674668550491333, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1808, | |
| "step": 2689 | |
| }, | |
| { | |
| "epoch": 0.7309782608695652, | |
| "grad_norm": 1.562813639640808, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2706, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.73125, | |
| "grad_norm": 1.6883511543273926, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2192, | |
| "step": 2691 | |
| }, | |
| { | |
| "epoch": 0.7315217391304348, | |
| "grad_norm": 1.1254132986068726, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1529, | |
| "step": 2692 | |
| }, | |
| { | |
| "epoch": 0.7317934782608696, | |
| "grad_norm": 1.0081984996795654, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0864, | |
| "step": 2693 | |
| }, | |
| { | |
| "epoch": 0.7320652173913044, | |
| "grad_norm": 1.453481674194336, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2169, | |
| "step": 2694 | |
| }, | |
| { | |
| "epoch": 0.7323369565217391, | |
| "grad_norm": 1.3893227577209473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3761, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 0.7326086956521739, | |
| "grad_norm": 1.7900173664093018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4638, | |
| "step": 2696 | |
| }, | |
| { | |
| "epoch": 0.7328804347826087, | |
| "grad_norm": 1.1031737327575684, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6962, | |
| "step": 2697 | |
| }, | |
| { | |
| "epoch": 0.7331521739130434, | |
| "grad_norm": 1.3984167575836182, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3052, | |
| "step": 2698 | |
| }, | |
| { | |
| "epoch": 0.7334239130434783, | |
| "grad_norm": 1.6182535886764526, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4213, | |
| "step": 2699 | |
| }, | |
| { | |
| "epoch": 0.7336956521739131, | |
| "grad_norm": 1.7064732313156128, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0381, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.7339673913043478, | |
| "grad_norm": 1.357704520225525, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6925, | |
| "step": 2701 | |
| }, | |
| { | |
| "epoch": 0.7342391304347826, | |
| "grad_norm": 1.6365474462509155, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2891, | |
| "step": 2702 | |
| }, | |
| { | |
| "epoch": 0.7345108695652174, | |
| "grad_norm": 1.8574589490890503, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3637, | |
| "step": 2703 | |
| }, | |
| { | |
| "epoch": 0.7347826086956522, | |
| "grad_norm": 1.3467528820037842, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1417, | |
| "step": 2704 | |
| }, | |
| { | |
| "epoch": 0.7350543478260869, | |
| "grad_norm": 1.4658093452453613, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3401, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 0.7353260869565217, | |
| "grad_norm": 1.2771729230880737, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3731, | |
| "step": 2706 | |
| }, | |
| { | |
| "epoch": 0.7355978260869566, | |
| "grad_norm": 1.0597422122955322, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1315, | |
| "step": 2707 | |
| }, | |
| { | |
| "epoch": 0.7358695652173913, | |
| "grad_norm": 1.1597671508789062, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6791, | |
| "step": 2708 | |
| }, | |
| { | |
| "epoch": 0.7361413043478261, | |
| "grad_norm": 1.7157272100448608, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8816, | |
| "step": 2709 | |
| }, | |
| { | |
| "epoch": 0.7364130434782609, | |
| "grad_norm": 1.3123220205307007, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6255, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.7366847826086956, | |
| "grad_norm": 1.2993775606155396, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6012, | |
| "step": 2711 | |
| }, | |
| { | |
| "epoch": 0.7369565217391304, | |
| "grad_norm": 1.0929709672927856, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3147, | |
| "step": 2712 | |
| }, | |
| { | |
| "epoch": 0.7372282608695652, | |
| "grad_norm": 1.2103468179702759, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3148, | |
| "step": 2713 | |
| }, | |
| { | |
| "epoch": 0.7375, | |
| "grad_norm": 0.9965810179710388, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2745, | |
| "step": 2714 | |
| }, | |
| { | |
| "epoch": 0.7377717391304348, | |
| "grad_norm": 1.7804696559906006, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6753, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 0.7380434782608696, | |
| "grad_norm": 1.2880659103393555, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4252, | |
| "step": 2716 | |
| }, | |
| { | |
| "epoch": 0.7383152173913043, | |
| "grad_norm": 0.9781152606010437, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9325, | |
| "step": 2717 | |
| }, | |
| { | |
| "epoch": 0.7385869565217391, | |
| "grad_norm": 1.724921464920044, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6383, | |
| "step": 2718 | |
| }, | |
| { | |
| "epoch": 0.7388586956521739, | |
| "grad_norm": 1.3509818315505981, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2334, | |
| "step": 2719 | |
| }, | |
| { | |
| "epoch": 0.7391304347826086, | |
| "grad_norm": 1.2037723064422607, | |
| "learning_rate": 3e-05, | |
| "loss": 3.242, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.7394021739130435, | |
| "grad_norm": 1.337058424949646, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5805, | |
| "step": 2721 | |
| }, | |
| { | |
| "epoch": 0.7396739130434783, | |
| "grad_norm": 1.3748770952224731, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9626, | |
| "step": 2722 | |
| }, | |
| { | |
| "epoch": 0.7399456521739131, | |
| "grad_norm": 1.3539892435073853, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0298, | |
| "step": 2723 | |
| }, | |
| { | |
| "epoch": 0.7402173913043478, | |
| "grad_norm": 0.979083776473999, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7295, | |
| "step": 2724 | |
| }, | |
| { | |
| "epoch": 0.7404891304347826, | |
| "grad_norm": 1.7237200736999512, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5329, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 0.7407608695652174, | |
| "grad_norm": 1.4173531532287598, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5202, | |
| "step": 2726 | |
| }, | |
| { | |
| "epoch": 0.7410326086956521, | |
| "grad_norm": 1.3420840501785278, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3239, | |
| "step": 2727 | |
| }, | |
| { | |
| "epoch": 0.741304347826087, | |
| "grad_norm": 1.3799097537994385, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4807, | |
| "step": 2728 | |
| }, | |
| { | |
| "epoch": 0.7415760869565218, | |
| "grad_norm": 1.2215850353240967, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5144, | |
| "step": 2729 | |
| }, | |
| { | |
| "epoch": 0.7418478260869565, | |
| "grad_norm": 1.2649718523025513, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4898, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.7421195652173913, | |
| "grad_norm": 1.1176096200942993, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4977, | |
| "step": 2731 | |
| }, | |
| { | |
| "epoch": 0.7423913043478261, | |
| "grad_norm": 1.0005884170532227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1848, | |
| "step": 2732 | |
| }, | |
| { | |
| "epoch": 0.7426630434782608, | |
| "grad_norm": 1.6306564807891846, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8107, | |
| "step": 2733 | |
| }, | |
| { | |
| "epoch": 0.7429347826086956, | |
| "grad_norm": 1.6788973808288574, | |
| "learning_rate": 3e-05, | |
| "loss": 3.985, | |
| "step": 2734 | |
| }, | |
| { | |
| "epoch": 0.7432065217391305, | |
| "grad_norm": 1.16837739944458, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8648, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 0.7434782608695653, | |
| "grad_norm": 1.324316382408142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5445, | |
| "step": 2736 | |
| }, | |
| { | |
| "epoch": 0.74375, | |
| "grad_norm": 1.2631101608276367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2311, | |
| "step": 2737 | |
| }, | |
| { | |
| "epoch": 0.7440217391304348, | |
| "grad_norm": 1.030339241027832, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8981, | |
| "step": 2738 | |
| }, | |
| { | |
| "epoch": 0.7442934782608696, | |
| "grad_norm": 1.5260175466537476, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2335, | |
| "step": 2739 | |
| }, | |
| { | |
| "epoch": 0.7445652173913043, | |
| "grad_norm": 1.610879898071289, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3666, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.7448369565217391, | |
| "grad_norm": 1.290809988975525, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9172, | |
| "step": 2741 | |
| }, | |
| { | |
| "epoch": 0.7451086956521739, | |
| "grad_norm": 1.2643457651138306, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1168, | |
| "step": 2742 | |
| }, | |
| { | |
| "epoch": 0.7453804347826087, | |
| "grad_norm": 1.2268829345703125, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2188, | |
| "step": 2743 | |
| }, | |
| { | |
| "epoch": 0.7456521739130435, | |
| "grad_norm": 1.022884726524353, | |
| "learning_rate": 3e-05, | |
| "loss": 2.884, | |
| "step": 2744 | |
| }, | |
| { | |
| "epoch": 0.7459239130434783, | |
| "grad_norm": 1.9909472465515137, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1557, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 0.746195652173913, | |
| "grad_norm": 2.2413318157196045, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0246, | |
| "step": 2746 | |
| }, | |
| { | |
| "epoch": 0.7464673913043478, | |
| "grad_norm": 1.0311791896820068, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8403, | |
| "step": 2747 | |
| }, | |
| { | |
| "epoch": 0.7467391304347826, | |
| "grad_norm": 1.0483710765838623, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7621, | |
| "step": 2748 | |
| }, | |
| { | |
| "epoch": 0.7470108695652173, | |
| "grad_norm": 0.9712722897529602, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7713, | |
| "step": 2749 | |
| }, | |
| { | |
| "epoch": 0.7472826086956522, | |
| "grad_norm": 1.5642272233963013, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2916, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.747554347826087, | |
| "grad_norm": 1.0362199544906616, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0884, | |
| "step": 2751 | |
| }, | |
| { | |
| "epoch": 0.7478260869565218, | |
| "grad_norm": 1.2575013637542725, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3603, | |
| "step": 2752 | |
| }, | |
| { | |
| "epoch": 0.7480978260869565, | |
| "grad_norm": 1.2161589860916138, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3827, | |
| "step": 2753 | |
| }, | |
| { | |
| "epoch": 0.7483695652173913, | |
| "grad_norm": 1.0151880979537964, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9711, | |
| "step": 2754 | |
| }, | |
| { | |
| "epoch": 0.748641304347826, | |
| "grad_norm": 1.3773422241210938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9046, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 0.7489130434782608, | |
| "grad_norm": 0.8108665943145752, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9052, | |
| "step": 2756 | |
| }, | |
| { | |
| "epoch": 0.7491847826086957, | |
| "grad_norm": 0.8243381381034851, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8174, | |
| "step": 2757 | |
| }, | |
| { | |
| "epoch": 0.7494565217391305, | |
| "grad_norm": 1.3421036005020142, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3869, | |
| "step": 2758 | |
| }, | |
| { | |
| "epoch": 0.7497282608695652, | |
| "grad_norm": 1.1268045902252197, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4369, | |
| "step": 2759 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.982552707195282, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0077, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.7502717391304348, | |
| "grad_norm": 1.548680305480957, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9879, | |
| "step": 2761 | |
| }, | |
| { | |
| "epoch": 0.7505434782608695, | |
| "grad_norm": 1.1556179523468018, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9654, | |
| "step": 2762 | |
| }, | |
| { | |
| "epoch": 0.7508152173913043, | |
| "grad_norm": 1.310678482055664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6287, | |
| "step": 2763 | |
| }, | |
| { | |
| "epoch": 0.7510869565217392, | |
| "grad_norm": 1.4762641191482544, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6961, | |
| "step": 2764 | |
| }, | |
| { | |
| "epoch": 0.751358695652174, | |
| "grad_norm": 1.3265812397003174, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2219, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 0.7516304347826087, | |
| "grad_norm": 1.2346985340118408, | |
| "learning_rate": 3e-05, | |
| "loss": 3.259, | |
| "step": 2766 | |
| }, | |
| { | |
| "epoch": 0.7519021739130435, | |
| "grad_norm": 0.9740216135978699, | |
| "learning_rate": 3e-05, | |
| "loss": 2.854, | |
| "step": 2767 | |
| }, | |
| { | |
| "epoch": 0.7521739130434782, | |
| "grad_norm": 1.2616466283798218, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9113, | |
| "step": 2768 | |
| }, | |
| { | |
| "epoch": 0.752445652173913, | |
| "grad_norm": 1.4515007734298706, | |
| "learning_rate": 3e-05, | |
| "loss": 4.4309, | |
| "step": 2769 | |
| }, | |
| { | |
| "epoch": 0.7527173913043478, | |
| "grad_norm": 1.2112640142440796, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1847, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.7529891304347827, | |
| "grad_norm": 1.123841643333435, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3345, | |
| "step": 2771 | |
| }, | |
| { | |
| "epoch": 0.7532608695652174, | |
| "grad_norm": 1.1541368961334229, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8234, | |
| "step": 2772 | |
| }, | |
| { | |
| "epoch": 0.7535326086956522, | |
| "grad_norm": 1.0209699869155884, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1726, | |
| "step": 2773 | |
| }, | |
| { | |
| "epoch": 0.753804347826087, | |
| "grad_norm": 1.2185944318771362, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8311, | |
| "step": 2774 | |
| }, | |
| { | |
| "epoch": 0.7540760869565217, | |
| "grad_norm": 1.553025722503662, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2896, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 0.7543478260869565, | |
| "grad_norm": 1.5103728771209717, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0133, | |
| "step": 2776 | |
| }, | |
| { | |
| "epoch": 0.7546195652173913, | |
| "grad_norm": 1.0733397006988525, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0122, | |
| "step": 2777 | |
| }, | |
| { | |
| "epoch": 0.7548913043478261, | |
| "grad_norm": 1.4341579675674438, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5646, | |
| "step": 2778 | |
| }, | |
| { | |
| "epoch": 0.7551630434782609, | |
| "grad_norm": 1.6829897165298462, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6338, | |
| "step": 2779 | |
| }, | |
| { | |
| "epoch": 0.7554347826086957, | |
| "grad_norm": 1.0816172361373901, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1679, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.7557065217391304, | |
| "grad_norm": 1.3089653253555298, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6569, | |
| "step": 2781 | |
| }, | |
| { | |
| "epoch": 0.7559782608695652, | |
| "grad_norm": 1.329774022102356, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0589, | |
| "step": 2782 | |
| }, | |
| { | |
| "epoch": 0.75625, | |
| "grad_norm": 1.5232934951782227, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5649, | |
| "step": 2783 | |
| }, | |
| { | |
| "epoch": 0.7565217391304347, | |
| "grad_norm": 1.501591444015503, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7119, | |
| "step": 2784 | |
| }, | |
| { | |
| "epoch": 0.7567934782608695, | |
| "grad_norm": 0.89922696352005, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9755, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 0.7570652173913044, | |
| "grad_norm": 1.0538735389709473, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4823, | |
| "step": 2786 | |
| }, | |
| { | |
| "epoch": 0.7573369565217392, | |
| "grad_norm": 0.8761924505233765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1571, | |
| "step": 2787 | |
| }, | |
| { | |
| "epoch": 0.7576086956521739, | |
| "grad_norm": 1.2133067846298218, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3668, | |
| "step": 2788 | |
| }, | |
| { | |
| "epoch": 0.7578804347826087, | |
| "grad_norm": 1.0436729192733765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.338, | |
| "step": 2789 | |
| }, | |
| { | |
| "epoch": 0.7581521739130435, | |
| "grad_norm": 1.0040472745895386, | |
| "learning_rate": 3e-05, | |
| "loss": 3.314, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.7584239130434782, | |
| "grad_norm": 1.019578456878662, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3716, | |
| "step": 2791 | |
| }, | |
| { | |
| "epoch": 0.758695652173913, | |
| "grad_norm": 1.209367036819458, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7626, | |
| "step": 2792 | |
| }, | |
| { | |
| "epoch": 0.7589673913043479, | |
| "grad_norm": 1.0667492151260376, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4984, | |
| "step": 2793 | |
| }, | |
| { | |
| "epoch": 0.7592391304347826, | |
| "grad_norm": 1.200366497039795, | |
| "learning_rate": 3e-05, | |
| "loss": 3.228, | |
| "step": 2794 | |
| }, | |
| { | |
| "epoch": 0.7595108695652174, | |
| "grad_norm": 1.2726914882659912, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9393, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 0.7597826086956522, | |
| "grad_norm": 1.244170904159546, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9552, | |
| "step": 2796 | |
| }, | |
| { | |
| "epoch": 0.7600543478260869, | |
| "grad_norm": 1.113420844078064, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1033, | |
| "step": 2797 | |
| }, | |
| { | |
| "epoch": 0.7603260869565217, | |
| "grad_norm": 1.9228954315185547, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7877, | |
| "step": 2798 | |
| }, | |
| { | |
| "epoch": 0.7605978260869565, | |
| "grad_norm": 1.1893031597137451, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0852, | |
| "step": 2799 | |
| }, | |
| { | |
| "epoch": 0.7608695652173914, | |
| "grad_norm": 1.524493932723999, | |
| "learning_rate": 3e-05, | |
| "loss": 4.1925, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.7611413043478261, | |
| "grad_norm": 1.5600111484527588, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4646, | |
| "step": 2801 | |
| }, | |
| { | |
| "epoch": 0.7614130434782609, | |
| "grad_norm": 0.9608951210975647, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8357, | |
| "step": 2802 | |
| }, | |
| { | |
| "epoch": 0.7616847826086957, | |
| "grad_norm": 1.2598545551300049, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2298, | |
| "step": 2803 | |
| }, | |
| { | |
| "epoch": 0.7619565217391304, | |
| "grad_norm": 1.4729243516921997, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2004, | |
| "step": 2804 | |
| }, | |
| { | |
| "epoch": 0.7622282608695652, | |
| "grad_norm": 1.04935622215271, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9192, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 0.7625, | |
| "grad_norm": 1.1317492723464966, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5182, | |
| "step": 2806 | |
| }, | |
| { | |
| "epoch": 0.7627717391304348, | |
| "grad_norm": 0.8651010394096375, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7038, | |
| "step": 2807 | |
| }, | |
| { | |
| "epoch": 0.7630434782608696, | |
| "grad_norm": 1.4115127325057983, | |
| "learning_rate": 3e-05, | |
| "loss": 3.225, | |
| "step": 2808 | |
| }, | |
| { | |
| "epoch": 0.7633152173913044, | |
| "grad_norm": 1.0244847536087036, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1254, | |
| "step": 2809 | |
| }, | |
| { | |
| "epoch": 0.7635869565217391, | |
| "grad_norm": 1.4117242097854614, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4427, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.7638586956521739, | |
| "grad_norm": 1.3557825088500977, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2669, | |
| "step": 2811 | |
| }, | |
| { | |
| "epoch": 0.7641304347826087, | |
| "grad_norm": 1.2034603357315063, | |
| "learning_rate": 3e-05, | |
| "loss": 3.77, | |
| "step": 2812 | |
| }, | |
| { | |
| "epoch": 0.7644021739130434, | |
| "grad_norm": 1.2670207023620605, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1149, | |
| "step": 2813 | |
| }, | |
| { | |
| "epoch": 0.7646739130434783, | |
| "grad_norm": 0.9181430339813232, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0427, | |
| "step": 2814 | |
| }, | |
| { | |
| "epoch": 0.7649456521739131, | |
| "grad_norm": 1.2132922410964966, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4703, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 0.7652173913043478, | |
| "grad_norm": 0.9925717115402222, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1396, | |
| "step": 2816 | |
| }, | |
| { | |
| "epoch": 0.7654891304347826, | |
| "grad_norm": 1.0748741626739502, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8827, | |
| "step": 2817 | |
| }, | |
| { | |
| "epoch": 0.7657608695652174, | |
| "grad_norm": 1.0484018325805664, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1837, | |
| "step": 2818 | |
| }, | |
| { | |
| "epoch": 0.7660326086956522, | |
| "grad_norm": 1.1004911661148071, | |
| "learning_rate": 3e-05, | |
| "loss": 3.135, | |
| "step": 2819 | |
| }, | |
| { | |
| "epoch": 0.7663043478260869, | |
| "grad_norm": 1.3505610227584839, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3415, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.7665760869565217, | |
| "grad_norm": 1.1511911153793335, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3407, | |
| "step": 2821 | |
| }, | |
| { | |
| "epoch": 0.7668478260869566, | |
| "grad_norm": 1.2488864660263062, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0697, | |
| "step": 2822 | |
| }, | |
| { | |
| "epoch": 0.7671195652173913, | |
| "grad_norm": 0.970241129398346, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6801, | |
| "step": 2823 | |
| }, | |
| { | |
| "epoch": 0.7673913043478261, | |
| "grad_norm": 1.3036609888076782, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7613, | |
| "step": 2824 | |
| }, | |
| { | |
| "epoch": 0.7676630434782609, | |
| "grad_norm": 1.1077839136123657, | |
| "learning_rate": 3e-05, | |
| "loss": 3.829, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 0.7679347826086956, | |
| "grad_norm": 1.1794723272323608, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1147, | |
| "step": 2826 | |
| }, | |
| { | |
| "epoch": 0.7682065217391304, | |
| "grad_norm": 1.460880994796753, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7514, | |
| "step": 2827 | |
| }, | |
| { | |
| "epoch": 0.7684782608695652, | |
| "grad_norm": 1.2111482620239258, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5524, | |
| "step": 2828 | |
| }, | |
| { | |
| "epoch": 0.76875, | |
| "grad_norm": 1.426535725593567, | |
| "learning_rate": 3e-05, | |
| "loss": 4.0174, | |
| "step": 2829 | |
| }, | |
| { | |
| "epoch": 0.7690217391304348, | |
| "grad_norm": 1.4052951335906982, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3475, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.7692934782608696, | |
| "grad_norm": 0.9852263331413269, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9218, | |
| "step": 2831 | |
| }, | |
| { | |
| "epoch": 0.7695652173913043, | |
| "grad_norm": 1.2342568635940552, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1937, | |
| "step": 2832 | |
| }, | |
| { | |
| "epoch": 0.7698369565217391, | |
| "grad_norm": 2.569143772125244, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8627, | |
| "step": 2833 | |
| }, | |
| { | |
| "epoch": 0.7701086956521739, | |
| "grad_norm": 1.07598876953125, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8528, | |
| "step": 2834 | |
| }, | |
| { | |
| "epoch": 0.7703804347826086, | |
| "grad_norm": 1.0930018424987793, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6242, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 0.7706521739130435, | |
| "grad_norm": 1.571661353111267, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2027, | |
| "step": 2836 | |
| }, | |
| { | |
| "epoch": 0.7709239130434783, | |
| "grad_norm": 1.623907446861267, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9722, | |
| "step": 2837 | |
| }, | |
| { | |
| "epoch": 0.7711956521739131, | |
| "grad_norm": 1.4214915037155151, | |
| "learning_rate": 3e-05, | |
| "loss": 3.483, | |
| "step": 2838 | |
| }, | |
| { | |
| "epoch": 0.7714673913043478, | |
| "grad_norm": 1.1177326440811157, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2388, | |
| "step": 2839 | |
| }, | |
| { | |
| "epoch": 0.7717391304347826, | |
| "grad_norm": 1.2823110818862915, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2644, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.7720108695652174, | |
| "grad_norm": 1.169673204421997, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8629, | |
| "step": 2841 | |
| }, | |
| { | |
| "epoch": 0.7722826086956521, | |
| "grad_norm": 1.5543997287750244, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6887, | |
| "step": 2842 | |
| }, | |
| { | |
| "epoch": 0.772554347826087, | |
| "grad_norm": 0.8735353946685791, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7277, | |
| "step": 2843 | |
| }, | |
| { | |
| "epoch": 0.7728260869565218, | |
| "grad_norm": 0.8694882988929749, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9491, | |
| "step": 2844 | |
| }, | |
| { | |
| "epoch": 0.7730978260869565, | |
| "grad_norm": 1.2447638511657715, | |
| "learning_rate": 3e-05, | |
| "loss": 3.473, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 0.7733695652173913, | |
| "grad_norm": 0.869049072265625, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0047, | |
| "step": 2846 | |
| }, | |
| { | |
| "epoch": 0.7736413043478261, | |
| "grad_norm": 1.1336791515350342, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5196, | |
| "step": 2847 | |
| }, | |
| { | |
| "epoch": 0.7739130434782608, | |
| "grad_norm": 1.0776828527450562, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2469, | |
| "step": 2848 | |
| }, | |
| { | |
| "epoch": 0.7741847826086956, | |
| "grad_norm": 1.1838096380233765, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5876, | |
| "step": 2849 | |
| }, | |
| { | |
| "epoch": 0.7744565217391305, | |
| "grad_norm": 0.9228270649909973, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0038, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.7747282608695653, | |
| "grad_norm": 0.9622864127159119, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7338, | |
| "step": 2851 | |
| }, | |
| { | |
| "epoch": 0.775, | |
| "grad_norm": 1.3739656209945679, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3167, | |
| "step": 2852 | |
| }, | |
| { | |
| "epoch": 0.7752717391304348, | |
| "grad_norm": 1.2962409257888794, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9095, | |
| "step": 2853 | |
| }, | |
| { | |
| "epoch": 0.7755434782608696, | |
| "grad_norm": 1.3018954992294312, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4756, | |
| "step": 2854 | |
| }, | |
| { | |
| "epoch": 0.7758152173913043, | |
| "grad_norm": 1.2301543951034546, | |
| "learning_rate": 3e-05, | |
| "loss": 3.658, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 0.7760869565217391, | |
| "grad_norm": 0.9971374869346619, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8921, | |
| "step": 2856 | |
| }, | |
| { | |
| "epoch": 0.7763586956521739, | |
| "grad_norm": 1.0504611730575562, | |
| "learning_rate": 3e-05, | |
| "loss": 2.907, | |
| "step": 2857 | |
| }, | |
| { | |
| "epoch": 0.7766304347826087, | |
| "grad_norm": 0.9376839399337769, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9993, | |
| "step": 2858 | |
| }, | |
| { | |
| "epoch": 0.7769021739130435, | |
| "grad_norm": 1.1995012760162354, | |
| "learning_rate": 3e-05, | |
| "loss": 3.505, | |
| "step": 2859 | |
| }, | |
| { | |
| "epoch": 0.7771739130434783, | |
| "grad_norm": 1.2391998767852783, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5183, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.777445652173913, | |
| "grad_norm": 1.0271795988082886, | |
| "learning_rate": 3e-05, | |
| "loss": 2.764, | |
| "step": 2861 | |
| }, | |
| { | |
| "epoch": 0.7777173913043478, | |
| "grad_norm": 1.1367555856704712, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4225, | |
| "step": 2862 | |
| }, | |
| { | |
| "epoch": 0.7779891304347826, | |
| "grad_norm": 1.258692979812622, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9128, | |
| "step": 2863 | |
| }, | |
| { | |
| "epoch": 0.7782608695652173, | |
| "grad_norm": 1.050280213356018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1644, | |
| "step": 2864 | |
| }, | |
| { | |
| "epoch": 0.7785326086956522, | |
| "grad_norm": 1.4861702919006348, | |
| "learning_rate": 3e-05, | |
| "loss": 3.651, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 0.778804347826087, | |
| "grad_norm": 1.9443000555038452, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1064, | |
| "step": 2866 | |
| }, | |
| { | |
| "epoch": 0.7790760869565218, | |
| "grad_norm": 1.2355011701583862, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2686, | |
| "step": 2867 | |
| }, | |
| { | |
| "epoch": 0.7793478260869565, | |
| "grad_norm": 1.06037175655365, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4073, | |
| "step": 2868 | |
| }, | |
| { | |
| "epoch": 0.7796195652173913, | |
| "grad_norm": 1.3563976287841797, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9419, | |
| "step": 2869 | |
| }, | |
| { | |
| "epoch": 0.779891304347826, | |
| "grad_norm": 1.411892294883728, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4305, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.7801630434782608, | |
| "grad_norm": 2.1324245929718018, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5014, | |
| "step": 2871 | |
| }, | |
| { | |
| "epoch": 0.7804347826086957, | |
| "grad_norm": 1.3413655757904053, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4645, | |
| "step": 2872 | |
| }, | |
| { | |
| "epoch": 0.7807065217391305, | |
| "grad_norm": 1.2617512941360474, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1992, | |
| "step": 2873 | |
| }, | |
| { | |
| "epoch": 0.7809782608695652, | |
| "grad_norm": 1.1358786821365356, | |
| "learning_rate": 3e-05, | |
| "loss": 3.223, | |
| "step": 2874 | |
| }, | |
| { | |
| "epoch": 0.78125, | |
| "grad_norm": 1.284077525138855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2483, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 0.7815217391304348, | |
| "grad_norm": 2.2102537155151367, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3587, | |
| "step": 2876 | |
| }, | |
| { | |
| "epoch": 0.7817934782608695, | |
| "grad_norm": 1.2199740409851074, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0096, | |
| "step": 2877 | |
| }, | |
| { | |
| "epoch": 0.7820652173913043, | |
| "grad_norm": 1.0134555101394653, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0385, | |
| "step": 2878 | |
| }, | |
| { | |
| "epoch": 0.7823369565217392, | |
| "grad_norm": 0.9114274978637695, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7133, | |
| "step": 2879 | |
| }, | |
| { | |
| "epoch": 0.782608695652174, | |
| "grad_norm": 1.0371575355529785, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1674, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.7828804347826087, | |
| "grad_norm": 1.2628506422042847, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5175, | |
| "step": 2881 | |
| }, | |
| { | |
| "epoch": 0.7831521739130435, | |
| "grad_norm": 1.2910369634628296, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1921, | |
| "step": 2882 | |
| }, | |
| { | |
| "epoch": 0.7834239130434782, | |
| "grad_norm": 1.460605263710022, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2101, | |
| "step": 2883 | |
| }, | |
| { | |
| "epoch": 0.783695652173913, | |
| "grad_norm": 1.4789795875549316, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5643, | |
| "step": 2884 | |
| }, | |
| { | |
| "epoch": 0.7839673913043478, | |
| "grad_norm": 1.3687986135482788, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5187, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 0.7842391304347827, | |
| "grad_norm": 1.4191471338272095, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8682, | |
| "step": 2886 | |
| }, | |
| { | |
| "epoch": 0.7845108695652174, | |
| "grad_norm": 1.3148168325424194, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0195, | |
| "step": 2887 | |
| }, | |
| { | |
| "epoch": 0.7847826086956522, | |
| "grad_norm": 1.15743088722229, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9205, | |
| "step": 2888 | |
| }, | |
| { | |
| "epoch": 0.785054347826087, | |
| "grad_norm": 1.167377233505249, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3717, | |
| "step": 2889 | |
| }, | |
| { | |
| "epoch": 0.7853260869565217, | |
| "grad_norm": 1.369253158569336, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3943, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.7855978260869565, | |
| "grad_norm": 1.3081823587417603, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1458, | |
| "step": 2891 | |
| }, | |
| { | |
| "epoch": 0.7858695652173913, | |
| "grad_norm": 1.0637850761413574, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5853, | |
| "step": 2892 | |
| }, | |
| { | |
| "epoch": 0.7861413043478261, | |
| "grad_norm": 1.325531005859375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6246, | |
| "step": 2893 | |
| }, | |
| { | |
| "epoch": 0.7864130434782609, | |
| "grad_norm": 1.2909876108169556, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7209, | |
| "step": 2894 | |
| }, | |
| { | |
| "epoch": 0.7866847826086957, | |
| "grad_norm": 1.072770357131958, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7364, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 0.7869565217391304, | |
| "grad_norm": 1.2623767852783203, | |
| "learning_rate": 3e-05, | |
| "loss": 3.193, | |
| "step": 2896 | |
| }, | |
| { | |
| "epoch": 0.7872282608695652, | |
| "grad_norm": 1.262104868888855, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4054, | |
| "step": 2897 | |
| }, | |
| { | |
| "epoch": 0.7875, | |
| "grad_norm": 0.9568426012992859, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0418, | |
| "step": 2898 | |
| }, | |
| { | |
| "epoch": 0.7877717391304347, | |
| "grad_norm": 1.0304343700408936, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1849, | |
| "step": 2899 | |
| }, | |
| { | |
| "epoch": 0.7880434782608695, | |
| "grad_norm": 1.1511303186416626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3258, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.7883152173913044, | |
| "grad_norm": 0.9511053562164307, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9477, | |
| "step": 2901 | |
| }, | |
| { | |
| "epoch": 0.7885869565217392, | |
| "grad_norm": 1.1826517581939697, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9749, | |
| "step": 2902 | |
| }, | |
| { | |
| "epoch": 0.7888586956521739, | |
| "grad_norm": 0.9371961355209351, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0142, | |
| "step": 2903 | |
| }, | |
| { | |
| "epoch": 0.7891304347826087, | |
| "grad_norm": 1.3846261501312256, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3297, | |
| "step": 2904 | |
| }, | |
| { | |
| "epoch": 0.7894021739130435, | |
| "grad_norm": 1.09294593334198, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1767, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 0.7896739130434782, | |
| "grad_norm": 1.326731562614441, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8467, | |
| "step": 2906 | |
| }, | |
| { | |
| "epoch": 0.789945652173913, | |
| "grad_norm": 1.1063183546066284, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0861, | |
| "step": 2907 | |
| }, | |
| { | |
| "epoch": 0.7902173913043479, | |
| "grad_norm": 1.174807071685791, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0249, | |
| "step": 2908 | |
| }, | |
| { | |
| "epoch": 0.7904891304347826, | |
| "grad_norm": 0.9046750068664551, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0051, | |
| "step": 2909 | |
| }, | |
| { | |
| "epoch": 0.7907608695652174, | |
| "grad_norm": 2.0610415935516357, | |
| "learning_rate": 3e-05, | |
| "loss": 3.9009, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.7910326086956522, | |
| "grad_norm": 1.4573287963867188, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1832, | |
| "step": 2911 | |
| }, | |
| { | |
| "epoch": 0.7913043478260869, | |
| "grad_norm": 1.1538214683532715, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7802, | |
| "step": 2912 | |
| }, | |
| { | |
| "epoch": 0.7915760869565217, | |
| "grad_norm": 1.0094918012619019, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0695, | |
| "step": 2913 | |
| }, | |
| { | |
| "epoch": 0.7918478260869565, | |
| "grad_norm": 1.1341625452041626, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4901, | |
| "step": 2914 | |
| }, | |
| { | |
| "epoch": 0.7921195652173914, | |
| "grad_norm": 1.0618107318878174, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1211, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 0.7923913043478261, | |
| "grad_norm": 1.4313887357711792, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8207, | |
| "step": 2916 | |
| }, | |
| { | |
| "epoch": 0.7926630434782609, | |
| "grad_norm": 1.4587217569351196, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5706, | |
| "step": 2917 | |
| }, | |
| { | |
| "epoch": 0.7929347826086957, | |
| "grad_norm": 1.4848532676696777, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6406, | |
| "step": 2918 | |
| }, | |
| { | |
| "epoch": 0.7932065217391304, | |
| "grad_norm": 1.006595253944397, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0846, | |
| "step": 2919 | |
| }, | |
| { | |
| "epoch": 0.7934782608695652, | |
| "grad_norm": 1.184501051902771, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3719, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.79375, | |
| "grad_norm": 1.3015398979187012, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9707, | |
| "step": 2921 | |
| }, | |
| { | |
| "epoch": 0.7940217391304348, | |
| "grad_norm": 1.001300573348999, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9032, | |
| "step": 2922 | |
| }, | |
| { | |
| "epoch": 0.7942934782608696, | |
| "grad_norm": 1.4572199583053589, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7695, | |
| "step": 2923 | |
| }, | |
| { | |
| "epoch": 0.7945652173913044, | |
| "grad_norm": 1.3405942916870117, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8765, | |
| "step": 2924 | |
| }, | |
| { | |
| "epoch": 0.7948369565217391, | |
| "grad_norm": 1.6628684997558594, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6848, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 0.7951086956521739, | |
| "grad_norm": 1.3848426342010498, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3296, | |
| "step": 2926 | |
| }, | |
| { | |
| "epoch": 0.7953804347826087, | |
| "grad_norm": 1.5421236753463745, | |
| "learning_rate": 3e-05, | |
| "loss": 3.363, | |
| "step": 2927 | |
| }, | |
| { | |
| "epoch": 0.7956521739130434, | |
| "grad_norm": 1.3111261129379272, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8346, | |
| "step": 2928 | |
| }, | |
| { | |
| "epoch": 0.7959239130434783, | |
| "grad_norm": 1.0846003293991089, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3071, | |
| "step": 2929 | |
| }, | |
| { | |
| "epoch": 0.7961956521739131, | |
| "grad_norm": 0.9776524305343628, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9533, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.7964673913043478, | |
| "grad_norm": 1.3840489387512207, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4739, | |
| "step": 2931 | |
| }, | |
| { | |
| "epoch": 0.7967391304347826, | |
| "grad_norm": 1.2059979438781738, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9474, | |
| "step": 2932 | |
| }, | |
| { | |
| "epoch": 0.7970108695652174, | |
| "grad_norm": 1.2110824584960938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2581, | |
| "step": 2933 | |
| }, | |
| { | |
| "epoch": 0.7972826086956522, | |
| "grad_norm": 1.0525647401809692, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9519, | |
| "step": 2934 | |
| }, | |
| { | |
| "epoch": 0.7975543478260869, | |
| "grad_norm": 1.2633483409881592, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1686, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 0.7978260869565217, | |
| "grad_norm": 0.8925679922103882, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8761, | |
| "step": 2936 | |
| }, | |
| { | |
| "epoch": 0.7980978260869566, | |
| "grad_norm": 0.9802154898643494, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2334, | |
| "step": 2937 | |
| }, | |
| { | |
| "epoch": 0.7983695652173913, | |
| "grad_norm": 1.3679046630859375, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2836, | |
| "step": 2938 | |
| }, | |
| { | |
| "epoch": 0.7986413043478261, | |
| "grad_norm": 1.0334969758987427, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1355, | |
| "step": 2939 | |
| }, | |
| { | |
| "epoch": 0.7989130434782609, | |
| "grad_norm": 1.0484052896499634, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1652, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.7991847826086956, | |
| "grad_norm": 1.0501008033752441, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9963, | |
| "step": 2941 | |
| }, | |
| { | |
| "epoch": 0.7994565217391304, | |
| "grad_norm": 1.2090955972671509, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3553, | |
| "step": 2942 | |
| }, | |
| { | |
| "epoch": 0.7997282608695652, | |
| "grad_norm": 1.1819127798080444, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1212, | |
| "step": 2943 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.1051278114318848, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2308, | |
| "step": 2944 | |
| }, | |
| { | |
| "epoch": 0.8002717391304348, | |
| "grad_norm": 0.9563131928443909, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7561, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 0.8005434782608696, | |
| "grad_norm": 1.1029945611953735, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6245, | |
| "step": 2946 | |
| }, | |
| { | |
| "epoch": 0.8008152173913043, | |
| "grad_norm": 1.3487423658370972, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4707, | |
| "step": 2947 | |
| }, | |
| { | |
| "epoch": 0.8010869565217391, | |
| "grad_norm": 1.7481412887573242, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6706, | |
| "step": 2948 | |
| }, | |
| { | |
| "epoch": 0.8013586956521739, | |
| "grad_norm": 1.0554581880569458, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0406, | |
| "step": 2949 | |
| }, | |
| { | |
| "epoch": 0.8016304347826086, | |
| "grad_norm": 1.0953891277313232, | |
| "learning_rate": 3e-05, | |
| "loss": 2.7938, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.8019021739130435, | |
| "grad_norm": 1.3923141956329346, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0691, | |
| "step": 2951 | |
| }, | |
| { | |
| "epoch": 0.8021739130434783, | |
| "grad_norm": 1.1058765649795532, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9356, | |
| "step": 2952 | |
| }, | |
| { | |
| "epoch": 0.8024456521739131, | |
| "grad_norm": 1.1275757551193237, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8867, | |
| "step": 2953 | |
| }, | |
| { | |
| "epoch": 0.8027173913043478, | |
| "grad_norm": 1.2130517959594727, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1133, | |
| "step": 2954 | |
| }, | |
| { | |
| "epoch": 0.8029891304347826, | |
| "grad_norm": 1.6094809770584106, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8152, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 0.8032608695652174, | |
| "grad_norm": 0.9870564341545105, | |
| "learning_rate": 3e-05, | |
| "loss": 2.721, | |
| "step": 2956 | |
| }, | |
| { | |
| "epoch": 0.8035326086956521, | |
| "grad_norm": 1.2996047735214233, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6848, | |
| "step": 2957 | |
| }, | |
| { | |
| "epoch": 0.803804347826087, | |
| "grad_norm": 1.2500205039978027, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3352, | |
| "step": 2958 | |
| }, | |
| { | |
| "epoch": 0.8040760869565218, | |
| "grad_norm": 1.3211798667907715, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7284, | |
| "step": 2959 | |
| }, | |
| { | |
| "epoch": 0.8043478260869565, | |
| "grad_norm": 1.514905571937561, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2425, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.8046195652173913, | |
| "grad_norm": 1.2107200622558594, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4771, | |
| "step": 2961 | |
| }, | |
| { | |
| "epoch": 0.8048913043478261, | |
| "grad_norm": 1.707463264465332, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6518, | |
| "step": 2962 | |
| }, | |
| { | |
| "epoch": 0.8051630434782608, | |
| "grad_norm": 0.9443992376327515, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8471, | |
| "step": 2963 | |
| }, | |
| { | |
| "epoch": 0.8054347826086956, | |
| "grad_norm": 1.3588606119155884, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3993, | |
| "step": 2964 | |
| }, | |
| { | |
| "epoch": 0.8057065217391305, | |
| "grad_norm": 1.2722697257995605, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2316, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 0.8059782608695653, | |
| "grad_norm": 1.6636009216308594, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5648, | |
| "step": 2966 | |
| }, | |
| { | |
| "epoch": 0.80625, | |
| "grad_norm": 1.3926972150802612, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6403, | |
| "step": 2967 | |
| }, | |
| { | |
| "epoch": 0.8065217391304348, | |
| "grad_norm": 1.2489240169525146, | |
| "learning_rate": 3e-05, | |
| "loss": 3.6428, | |
| "step": 2968 | |
| }, | |
| { | |
| "epoch": 0.8067934782608696, | |
| "grad_norm": 1.3040508031845093, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3698, | |
| "step": 2969 | |
| }, | |
| { | |
| "epoch": 0.8070652173913043, | |
| "grad_norm": 1.024529218673706, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1765, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.8073369565217391, | |
| "grad_norm": 1.127133846282959, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4167, | |
| "step": 2971 | |
| }, | |
| { | |
| "epoch": 0.8076086956521739, | |
| "grad_norm": 1.5928646326065063, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2939, | |
| "step": 2972 | |
| }, | |
| { | |
| "epoch": 0.8078804347826087, | |
| "grad_norm": 1.099817156791687, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3166, | |
| "step": 2973 | |
| }, | |
| { | |
| "epoch": 0.8081521739130435, | |
| "grad_norm": 0.9949479699134827, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4637, | |
| "step": 2974 | |
| }, | |
| { | |
| "epoch": 0.8084239130434783, | |
| "grad_norm": 1.3012653589248657, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3314, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 0.808695652173913, | |
| "grad_norm": 1.1788009405136108, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9917, | |
| "step": 2976 | |
| }, | |
| { | |
| "epoch": 0.8089673913043478, | |
| "grad_norm": 1.5235824584960938, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0784, | |
| "step": 2977 | |
| }, | |
| { | |
| "epoch": 0.8092391304347826, | |
| "grad_norm": 1.4393073320388794, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8917, | |
| "step": 2978 | |
| }, | |
| { | |
| "epoch": 0.8095108695652173, | |
| "grad_norm": 1.432785987854004, | |
| "learning_rate": 3e-05, | |
| "loss": 3.8753, | |
| "step": 2979 | |
| }, | |
| { | |
| "epoch": 0.8097826086956522, | |
| "grad_norm": 1.4102272987365723, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0096, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.810054347826087, | |
| "grad_norm": 1.5349915027618408, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2429, | |
| "step": 2981 | |
| }, | |
| { | |
| "epoch": 0.8103260869565218, | |
| "grad_norm": 0.9741056561470032, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2216, | |
| "step": 2982 | |
| }, | |
| { | |
| "epoch": 0.8105978260869565, | |
| "grad_norm": 1.3699191808700562, | |
| "learning_rate": 3e-05, | |
| "loss": 2.8594, | |
| "step": 2983 | |
| }, | |
| { | |
| "epoch": 0.8108695652173913, | |
| "grad_norm": 1.0808976888656616, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1742, | |
| "step": 2984 | |
| }, | |
| { | |
| "epoch": 0.811141304347826, | |
| "grad_norm": 0.9032256007194519, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0112, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 0.8114130434782608, | |
| "grad_norm": 1.119470477104187, | |
| "learning_rate": 3e-05, | |
| "loss": 3.1507, | |
| "step": 2986 | |
| }, | |
| { | |
| "epoch": 0.8116847826086957, | |
| "grad_norm": 1.3185955286026, | |
| "learning_rate": 3e-05, | |
| "loss": 3.7403, | |
| "step": 2987 | |
| }, | |
| { | |
| "epoch": 0.8119565217391305, | |
| "grad_norm": 1.283615231513977, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2814, | |
| "step": 2988 | |
| }, | |
| { | |
| "epoch": 0.8122282608695652, | |
| "grad_norm": 1.2075817584991455, | |
| "learning_rate": 3e-05, | |
| "loss": 3.5796, | |
| "step": 2989 | |
| }, | |
| { | |
| "epoch": 0.8125, | |
| "grad_norm": 1.1944336891174316, | |
| "learning_rate": 3e-05, | |
| "loss": 3.3505, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.8127717391304348, | |
| "grad_norm": 1.1111395359039307, | |
| "learning_rate": 3e-05, | |
| "loss": 3.2605, | |
| "step": 2991 | |
| }, | |
| { | |
| "epoch": 0.8130434782608695, | |
| "grad_norm": 1.2699017524719238, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4753, | |
| "step": 2992 | |
| }, | |
| { | |
| "epoch": 0.8133152173913043, | |
| "grad_norm": 1.1877753734588623, | |
| "learning_rate": 3e-05, | |
| "loss": 3.0174, | |
| "step": 2993 | |
| }, | |
| { | |
| "epoch": 0.8135869565217392, | |
| "grad_norm": 1.198492407798767, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4792, | |
| "step": 2994 | |
| }, | |
| { | |
| "epoch": 0.813858695652174, | |
| "grad_norm": 1.2535799741744995, | |
| "learning_rate": 3e-05, | |
| "loss": 3.4246, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 0.8141304347826087, | |
| "grad_norm": 1.0552626848220825, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9214, | |
| "step": 2996 | |
| }, | |
| { | |
| "epoch": 0.8144021739130435, | |
| "grad_norm": 1.1369494199752808, | |
| "learning_rate": 3e-05, | |
| "loss": 2.9704, | |
| "step": 2997 | |
| }, | |
| { | |
| "epoch": 0.8146739130434782, | |
| "grad_norm": 1.04477858543396, | |
| "learning_rate": 3e-05, | |
| "loss": 2.916, | |
| "step": 2998 | |
| }, | |
| { | |
| "epoch": 0.814945652173913, | |
| "grad_norm": 0.9620361328125, | |
| "learning_rate": 3e-05, | |
| "loss": 2.6156, | |
| "step": 2999 | |
| }, | |
| { | |
| "epoch": 0.8152173913043478, | |
| "grad_norm": 1.0974937677383423, | |
| "learning_rate": 3e-05, | |
| "loss": 3.362, | |
| "step": 3000 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 3680, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.262770368118784e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |