|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.13671875, |
|
"eval_steps": 500, |
|
"global_step": 1400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 9.765625e-05, |
|
"grad_norm": 79.3869857788086, |
|
"learning_rate": 3.3333333333333337e-06, |
|
"loss": 7.7773, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0001953125, |
|
"grad_norm": 80.6998291015625, |
|
"learning_rate": 6.6666666666666675e-06, |
|
"loss": 7.7617, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00029296875, |
|
"grad_norm": 67.86173248291016, |
|
"learning_rate": 1e-05, |
|
"loss": 7.6523, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.000390625, |
|
"grad_norm": 31.295886993408203, |
|
"learning_rate": 1.3333333333333335e-05, |
|
"loss": 7.3242, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00048828125, |
|
"grad_norm": 27.488962173461914, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 7.2773, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0005859375, |
|
"grad_norm": 16.267820358276367, |
|
"learning_rate": 2e-05, |
|
"loss": 7.1641, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00068359375, |
|
"grad_norm": 37.23747253417969, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 7.2812, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.00078125, |
|
"grad_norm": 17.467384338378906, |
|
"learning_rate": 2.666666666666667e-05, |
|
"loss": 7.0508, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00087890625, |
|
"grad_norm": 9.459285736083984, |
|
"learning_rate": 3e-05, |
|
"loss": 6.9375, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0009765625, |
|
"grad_norm": 8.56839656829834, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 6.8789, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.00107421875, |
|
"grad_norm": 24.074443817138672, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 6.8672, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.001171875, |
|
"grad_norm": 11.778696060180664, |
|
"learning_rate": 4e-05, |
|
"loss": 6.7969, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.00126953125, |
|
"grad_norm": 14.282492637634277, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 6.8047, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0013671875, |
|
"grad_norm": 8.801307678222656, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 6.7188, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.00146484375, |
|
"grad_norm": 8.514760971069336, |
|
"learning_rate": 5e-05, |
|
"loss": 6.7148, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0015625, |
|
"grad_norm": 9.48074722290039, |
|
"learning_rate": 5.333333333333334e-05, |
|
"loss": 6.6641, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.00166015625, |
|
"grad_norm": 7.557260036468506, |
|
"learning_rate": 5.6666666666666664e-05, |
|
"loss": 6.6172, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0017578125, |
|
"grad_norm": 6.613265037536621, |
|
"learning_rate": 6e-05, |
|
"loss": 6.5977, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.00185546875, |
|
"grad_norm": 13.777167320251465, |
|
"learning_rate": 6.333333333333335e-05, |
|
"loss": 6.6328, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.001953125, |
|
"grad_norm": 6.775121688842773, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 6.5391, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.00205078125, |
|
"grad_norm": 9.053375244140625, |
|
"learning_rate": 7.000000000000001e-05, |
|
"loss": 6.543, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0021484375, |
|
"grad_norm": 5.147669792175293, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 6.5039, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.00224609375, |
|
"grad_norm": 9.595170974731445, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 6.5, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.00234375, |
|
"grad_norm": 6.9720964431762695, |
|
"learning_rate": 8e-05, |
|
"loss": 6.4883, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.00244140625, |
|
"grad_norm": 9.805245399475098, |
|
"learning_rate": 8.333333333333333e-05, |
|
"loss": 6.4414, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0025390625, |
|
"grad_norm": 5.9478044509887695, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 6.4297, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.00263671875, |
|
"grad_norm": 8.962249755859375, |
|
"learning_rate": 8.999999999999999e-05, |
|
"loss": 6.4297, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.002734375, |
|
"grad_norm": 5.202462673187256, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 6.375, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.00283203125, |
|
"grad_norm": 5.034671306610107, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 6.3555, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0029296875, |
|
"grad_norm": 7.446832656860352, |
|
"learning_rate": 0.0001, |
|
"loss": 6.3477, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.00302734375, |
|
"grad_norm": 5.554169654846191, |
|
"learning_rate": 0.00010333333333333333, |
|
"loss": 6.3086, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.003125, |
|
"grad_norm": 5.340260028839111, |
|
"learning_rate": 0.00010666666666666668, |
|
"loss": 6.293, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.00322265625, |
|
"grad_norm": 6.529757976531982, |
|
"learning_rate": 0.00011, |
|
"loss": 6.2812, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0033203125, |
|
"grad_norm": 8.140447616577148, |
|
"learning_rate": 0.00011333333333333333, |
|
"loss": 6.2656, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.00341796875, |
|
"grad_norm": 5.15455961227417, |
|
"learning_rate": 0.00011666666666666667, |
|
"loss": 6.2266, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.003515625, |
|
"grad_norm": 7.219542980194092, |
|
"learning_rate": 0.00012, |
|
"loss": 6.2539, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.00361328125, |
|
"grad_norm": 5.522006988525391, |
|
"learning_rate": 0.00012333333333333334, |
|
"loss": 6.2305, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0037109375, |
|
"grad_norm": 4.108240127563477, |
|
"learning_rate": 0.0001266666666666667, |
|
"loss": 6.2109, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.00380859375, |
|
"grad_norm": 5.117391586303711, |
|
"learning_rate": 0.00013000000000000002, |
|
"loss": 6.1992, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.00390625, |
|
"grad_norm": 9.84037971496582, |
|
"learning_rate": 0.00013333333333333334, |
|
"loss": 6.2227, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.00400390625, |
|
"grad_norm": 5.794247150421143, |
|
"learning_rate": 0.00013666666666666666, |
|
"loss": 6.1953, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0041015625, |
|
"grad_norm": 5.9350810050964355, |
|
"learning_rate": 0.00014000000000000001, |
|
"loss": 6.1836, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.00419921875, |
|
"grad_norm": 6.219013214111328, |
|
"learning_rate": 0.00014333333333333334, |
|
"loss": 6.168, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.004296875, |
|
"grad_norm": 4.501729488372803, |
|
"learning_rate": 0.00014666666666666666, |
|
"loss": 6.1055, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.00439453125, |
|
"grad_norm": 6.2689948081970215, |
|
"learning_rate": 0.00015, |
|
"loss": 6.1328, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.0044921875, |
|
"grad_norm": 4.54839563369751, |
|
"learning_rate": 0.00015333333333333334, |
|
"loss": 6.1055, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.00458984375, |
|
"grad_norm": 4.619740009307861, |
|
"learning_rate": 0.0001566666666666667, |
|
"loss": 6.082, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0046875, |
|
"grad_norm": 7.260074615478516, |
|
"learning_rate": 0.00016, |
|
"loss": 6.0938, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.00478515625, |
|
"grad_norm": 5.270298480987549, |
|
"learning_rate": 0.00016333333333333334, |
|
"loss": 6.0859, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.0048828125, |
|
"grad_norm": 6.634472846984863, |
|
"learning_rate": 0.00016666666666666666, |
|
"loss": 6.0625, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.00498046875, |
|
"grad_norm": 4.172934532165527, |
|
"learning_rate": 0.00017, |
|
"loss": 6.0469, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.005078125, |
|
"grad_norm": 5.719873905181885, |
|
"learning_rate": 0.00017333333333333334, |
|
"loss": 6.0273, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.00517578125, |
|
"grad_norm": 4.851602554321289, |
|
"learning_rate": 0.00017666666666666666, |
|
"loss": 6.0234, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0052734375, |
|
"grad_norm": 4.784290313720703, |
|
"learning_rate": 0.00017999999999999998, |
|
"loss": 5.9883, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.00537109375, |
|
"grad_norm": 5.738783359527588, |
|
"learning_rate": 0.00018333333333333334, |
|
"loss": 6.0078, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.00546875, |
|
"grad_norm": 4.525517463684082, |
|
"learning_rate": 0.0001866666666666667, |
|
"loss": 5.9844, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.00556640625, |
|
"grad_norm": 5.545010566711426, |
|
"learning_rate": 0.00019, |
|
"loss": 6.0078, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.0056640625, |
|
"grad_norm": 5.725442409515381, |
|
"learning_rate": 0.00019333333333333333, |
|
"loss": 5.9688, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.00576171875, |
|
"grad_norm": 4.413902282714844, |
|
"learning_rate": 0.00019666666666666666, |
|
"loss": 5.9414, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.005859375, |
|
"grad_norm": 4.734899520874023, |
|
"learning_rate": 0.0002, |
|
"loss": 5.9609, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.00595703125, |
|
"grad_norm": 4.493259906768799, |
|
"learning_rate": 0.00020333333333333333, |
|
"loss": 5.9648, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0060546875, |
|
"grad_norm": 4.654330253601074, |
|
"learning_rate": 0.00020666666666666666, |
|
"loss": 5.957, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.00615234375, |
|
"grad_norm": 5.410141944885254, |
|
"learning_rate": 0.00021, |
|
"loss": 5.9766, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.00625, |
|
"grad_norm": 3.376593589782715, |
|
"learning_rate": 0.00021333333333333336, |
|
"loss": 5.9062, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.00634765625, |
|
"grad_norm": 5.334528923034668, |
|
"learning_rate": 0.00021666666666666668, |
|
"loss": 5.9141, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0064453125, |
|
"grad_norm": 4.844014644622803, |
|
"learning_rate": 0.00022, |
|
"loss": 5.9062, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.00654296875, |
|
"grad_norm": 4.3729939460754395, |
|
"learning_rate": 0.00022333333333333333, |
|
"loss": 5.9023, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.006640625, |
|
"grad_norm": 6.744809627532959, |
|
"learning_rate": 0.00022666666666666666, |
|
"loss": 5.9648, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.00673828125, |
|
"grad_norm": 4.010498046875, |
|
"learning_rate": 0.00023, |
|
"loss": 5.8945, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.0068359375, |
|
"grad_norm": 5.671877384185791, |
|
"learning_rate": 0.00023333333333333333, |
|
"loss": 5.9414, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.00693359375, |
|
"grad_norm": 3.9647581577301025, |
|
"learning_rate": 0.00023666666666666668, |
|
"loss": 5.8672, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.00703125, |
|
"grad_norm": 4.879064083099365, |
|
"learning_rate": 0.00024, |
|
"loss": 5.8672, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.00712890625, |
|
"grad_norm": 5.130377292633057, |
|
"learning_rate": 0.00024333333333333336, |
|
"loss": 5.8711, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.0072265625, |
|
"grad_norm": 5.8605122566223145, |
|
"learning_rate": 0.0002466666666666667, |
|
"loss": 5.875, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.00732421875, |
|
"grad_norm": 5.346503257751465, |
|
"learning_rate": 0.00025, |
|
"loss": 5.9023, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.007421875, |
|
"grad_norm": 5.273228645324707, |
|
"learning_rate": 0.0002533333333333334, |
|
"loss": 5.8867, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.00751953125, |
|
"grad_norm": 3.8530828952789307, |
|
"learning_rate": 0.00025666666666666665, |
|
"loss": 5.8477, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0076171875, |
|
"grad_norm": 5.188773155212402, |
|
"learning_rate": 0.00026000000000000003, |
|
"loss": 5.8633, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.00771484375, |
|
"grad_norm": 3.4620485305786133, |
|
"learning_rate": 0.0002633333333333333, |
|
"loss": 5.8398, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.0078125, |
|
"grad_norm": 4.68696403503418, |
|
"learning_rate": 0.0002666666666666667, |
|
"loss": 5.8359, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.00791015625, |
|
"grad_norm": 4.243693828582764, |
|
"learning_rate": 0.00027, |
|
"loss": 5.8555, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.0080078125, |
|
"grad_norm": 5.6479363441467285, |
|
"learning_rate": 0.00027333333333333333, |
|
"loss": 5.832, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.00810546875, |
|
"grad_norm": 3.7417171001434326, |
|
"learning_rate": 0.00027666666666666665, |
|
"loss": 5.8281, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.008203125, |
|
"grad_norm": 4.857771873474121, |
|
"learning_rate": 0.00028000000000000003, |
|
"loss": 5.8086, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.00830078125, |
|
"grad_norm": 4.6594085693359375, |
|
"learning_rate": 0.00028333333333333335, |
|
"loss": 5.832, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.0083984375, |
|
"grad_norm": 4.48290491104126, |
|
"learning_rate": 0.0002866666666666667, |
|
"loss": 5.8164, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.00849609375, |
|
"grad_norm": 6.183064937591553, |
|
"learning_rate": 0.00029, |
|
"loss": 5.8398, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.00859375, |
|
"grad_norm": 4.494201183319092, |
|
"learning_rate": 0.0002933333333333333, |
|
"loss": 5.7969, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.00869140625, |
|
"grad_norm": 5.650720596313477, |
|
"learning_rate": 0.0002966666666666667, |
|
"loss": 5.8438, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.0087890625, |
|
"grad_norm": 5.558387756347656, |
|
"learning_rate": 0.0003, |
|
"loss": 5.8555, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.00888671875, |
|
"grad_norm": 5.16208553314209, |
|
"learning_rate": 0.00030333333333333335, |
|
"loss": 5.8086, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.008984375, |
|
"grad_norm": 6.598042964935303, |
|
"learning_rate": 0.0003066666666666667, |
|
"loss": 5.8516, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.00908203125, |
|
"grad_norm": 4.591591835021973, |
|
"learning_rate": 0.00031, |
|
"loss": 5.8555, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0091796875, |
|
"grad_norm": 4.879761695861816, |
|
"learning_rate": 0.0003133333333333334, |
|
"loss": 5.8203, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.00927734375, |
|
"grad_norm": 5.21987771987915, |
|
"learning_rate": 0.00031666666666666665, |
|
"loss": 5.8203, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.009375, |
|
"grad_norm": 4.257232666015625, |
|
"learning_rate": 0.00032, |
|
"loss": 5.8047, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.00947265625, |
|
"grad_norm": 6.07011604309082, |
|
"learning_rate": 0.0003233333333333333, |
|
"loss": 5.8359, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.0095703125, |
|
"grad_norm": 5.662813186645508, |
|
"learning_rate": 0.0003266666666666667, |
|
"loss": 5.8633, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.00966796875, |
|
"grad_norm": 4.59433650970459, |
|
"learning_rate": 0.00033, |
|
"loss": 5.8086, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.009765625, |
|
"grad_norm": 4.295780181884766, |
|
"learning_rate": 0.0003333333333333333, |
|
"loss": 5.8008, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.00986328125, |
|
"grad_norm": 4.587396144866943, |
|
"learning_rate": 0.0003366666666666667, |
|
"loss": 5.8008, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.0099609375, |
|
"grad_norm": 4.299502849578857, |
|
"learning_rate": 0.00034, |
|
"loss": 5.7773, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.01005859375, |
|
"grad_norm": 4.284260272979736, |
|
"learning_rate": 0.00034333333333333335, |
|
"loss": 5.793, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.01015625, |
|
"grad_norm": 6.048828601837158, |
|
"learning_rate": 0.00034666666666666667, |
|
"loss": 5.8008, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.01025390625, |
|
"grad_norm": 4.235161304473877, |
|
"learning_rate": 0.00035, |
|
"loss": 5.7656, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.0103515625, |
|
"grad_norm": 5.187899589538574, |
|
"learning_rate": 0.0003533333333333333, |
|
"loss": 5.8125, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.01044921875, |
|
"grad_norm": 4.133037567138672, |
|
"learning_rate": 0.0003566666666666667, |
|
"loss": 5.8047, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.010546875, |
|
"grad_norm": 5.822926044464111, |
|
"learning_rate": 0.00035999999999999997, |
|
"loss": 5.8281, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.01064453125, |
|
"grad_norm": 4.3357672691345215, |
|
"learning_rate": 0.00036333333333333335, |
|
"loss": 5.7734, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.0107421875, |
|
"grad_norm": 4.930606842041016, |
|
"learning_rate": 0.00036666666666666667, |
|
"loss": 5.7812, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.01083984375, |
|
"grad_norm": 4.797028064727783, |
|
"learning_rate": 0.00037, |
|
"loss": 5.7695, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.0109375, |
|
"grad_norm": 3.1973586082458496, |
|
"learning_rate": 0.0003733333333333334, |
|
"loss": 5.7539, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.01103515625, |
|
"grad_norm": 4.5399980545043945, |
|
"learning_rate": 0.00037666666666666664, |
|
"loss": 5.7852, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.0111328125, |
|
"grad_norm": 4.550619602203369, |
|
"learning_rate": 0.00038, |
|
"loss": 5.7773, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.01123046875, |
|
"grad_norm": 5.377904415130615, |
|
"learning_rate": 0.00038333333333333334, |
|
"loss": 5.7891, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.011328125, |
|
"grad_norm": 4.06483268737793, |
|
"learning_rate": 0.00038666666666666667, |
|
"loss": 5.6992, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.01142578125, |
|
"grad_norm": 3.8046791553497314, |
|
"learning_rate": 0.00039000000000000005, |
|
"loss": 5.7344, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.0115234375, |
|
"grad_norm": 4.709420204162598, |
|
"learning_rate": 0.0003933333333333333, |
|
"loss": 5.7461, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.01162109375, |
|
"grad_norm": 4.36158561706543, |
|
"learning_rate": 0.0003966666666666667, |
|
"loss": 5.75, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.01171875, |
|
"grad_norm": 4.972657680511475, |
|
"learning_rate": 0.0004, |
|
"loss": 5.7188, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.01181640625, |
|
"grad_norm": 3.862230062484741, |
|
"learning_rate": 0.00040333333333333334, |
|
"loss": 5.7617, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.0119140625, |
|
"grad_norm": 6.694273948669434, |
|
"learning_rate": 0.00040666666666666667, |
|
"loss": 5.7734, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.01201171875, |
|
"grad_norm": 4.289857864379883, |
|
"learning_rate": 0.00041, |
|
"loss": 5.7539, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.012109375, |
|
"grad_norm": 4.241764545440674, |
|
"learning_rate": 0.0004133333333333333, |
|
"loss": 5.7422, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.01220703125, |
|
"grad_norm": 5.350276947021484, |
|
"learning_rate": 0.0004166666666666667, |
|
"loss": 5.7578, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0123046875, |
|
"grad_norm": 4.058553695678711, |
|
"learning_rate": 0.00042, |
|
"loss": 5.6992, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.01240234375, |
|
"grad_norm": 4.70885705947876, |
|
"learning_rate": 0.00042333333333333334, |
|
"loss": 5.7383, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.0125, |
|
"grad_norm": 4.190490245819092, |
|
"learning_rate": 0.0004266666666666667, |
|
"loss": 5.7305, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.01259765625, |
|
"grad_norm": 6.176610469818115, |
|
"learning_rate": 0.00043, |
|
"loss": 5.7344, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.0126953125, |
|
"grad_norm": 4.583580017089844, |
|
"learning_rate": 0.00043333333333333337, |
|
"loss": 5.7266, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01279296875, |
|
"grad_norm": 3.8871593475341797, |
|
"learning_rate": 0.00043666666666666664, |
|
"loss": 5.7422, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.012890625, |
|
"grad_norm": 6.952763557434082, |
|
"learning_rate": 0.00044, |
|
"loss": 5.7812, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.01298828125, |
|
"grad_norm": 4.236309051513672, |
|
"learning_rate": 0.00044333333333333334, |
|
"loss": 5.75, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.0130859375, |
|
"grad_norm": 7.308345794677734, |
|
"learning_rate": 0.00044666666666666666, |
|
"loss": 5.8125, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.01318359375, |
|
"grad_norm": 8.599268913269043, |
|
"learning_rate": 0.00045000000000000004, |
|
"loss": 5.918, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.01328125, |
|
"grad_norm": 4.937841415405273, |
|
"learning_rate": 0.0004533333333333333, |
|
"loss": 5.8242, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.01337890625, |
|
"grad_norm": 4.781856536865234, |
|
"learning_rate": 0.0004566666666666667, |
|
"loss": 5.7891, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.0134765625, |
|
"grad_norm": 7.261330604553223, |
|
"learning_rate": 0.00046, |
|
"loss": 5.8203, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.01357421875, |
|
"grad_norm": 5.891605854034424, |
|
"learning_rate": 0.00046333333333333334, |
|
"loss": 5.875, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.013671875, |
|
"grad_norm": 7.775816440582275, |
|
"learning_rate": 0.00046666666666666666, |
|
"loss": 5.8672, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.01376953125, |
|
"grad_norm": 3.8871777057647705, |
|
"learning_rate": 0.00047, |
|
"loss": 5.8359, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.0138671875, |
|
"grad_norm": 5.772259712219238, |
|
"learning_rate": 0.00047333333333333336, |
|
"loss": 5.8477, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.01396484375, |
|
"grad_norm": 4.485783100128174, |
|
"learning_rate": 0.0004766666666666667, |
|
"loss": 5.793, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.0140625, |
|
"grad_norm": 4.418201446533203, |
|
"learning_rate": 0.00048, |
|
"loss": 5.7812, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.01416015625, |
|
"grad_norm": 7.3408074378967285, |
|
"learning_rate": 0.00048333333333333334, |
|
"loss": 5.8516, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0142578125, |
|
"grad_norm": 5.989964962005615, |
|
"learning_rate": 0.0004866666666666667, |
|
"loss": 5.8945, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.01435546875, |
|
"grad_norm": 4.477835655212402, |
|
"learning_rate": 0.00049, |
|
"loss": 5.8477, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.014453125, |
|
"grad_norm": 6.493783950805664, |
|
"learning_rate": 0.0004933333333333334, |
|
"loss": 5.8594, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.01455078125, |
|
"grad_norm": 6.190314292907715, |
|
"learning_rate": 0.0004966666666666666, |
|
"loss": 5.8906, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.0146484375, |
|
"grad_norm": 6.708803653717041, |
|
"learning_rate": 0.0005, |
|
"loss": 5.8711, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01474609375, |
|
"grad_norm": 5.7710113525390625, |
|
"learning_rate": 0.0004999999890938886, |
|
"loss": 5.8633, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.01484375, |
|
"grad_norm": 5.553577899932861, |
|
"learning_rate": 0.0004999999563755552, |
|
"loss": 5.8594, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.01494140625, |
|
"grad_norm": 4.852464199066162, |
|
"learning_rate": 0.0004999999018450032, |
|
"loss": 5.8555, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.0150390625, |
|
"grad_norm": 4.127274990081787, |
|
"learning_rate": 0.0004999998255022377, |
|
"loss": 5.793, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.01513671875, |
|
"grad_norm": 5.139339923858643, |
|
"learning_rate": 0.0004999997273472664, |
|
"loss": 5.8398, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.015234375, |
|
"grad_norm": 3.9165873527526855, |
|
"learning_rate": 0.0004999996073800985, |
|
"loss": 5.7852, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.01533203125, |
|
"grad_norm": 4.544178485870361, |
|
"learning_rate": 0.0004999994656007457, |
|
"loss": 5.8125, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.0154296875, |
|
"grad_norm": 4.998808860778809, |
|
"learning_rate": 0.0004999993020092219, |
|
"loss": 5.8359, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.01552734375, |
|
"grad_norm": 5.184920787811279, |
|
"learning_rate": 0.0004999991166055426, |
|
"loss": 5.8281, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.015625, |
|
"grad_norm": 3.716250419616699, |
|
"learning_rate": 0.0004999989093897262, |
|
"loss": 5.7969, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01572265625, |
|
"grad_norm": 3.8317952156066895, |
|
"learning_rate": 0.0004999986803617926, |
|
"loss": 5.7617, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.0158203125, |
|
"grad_norm": 4.461795330047607, |
|
"learning_rate": 0.0004999984295217641, |
|
"loss": 5.8047, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.01591796875, |
|
"grad_norm": 3.0825703144073486, |
|
"learning_rate": 0.0004999981568696648, |
|
"loss": 5.7656, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.016015625, |
|
"grad_norm": 4.151459217071533, |
|
"learning_rate": 0.0004999978624055212, |
|
"loss": 5.7617, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.01611328125, |
|
"grad_norm": 3.66987681388855, |
|
"learning_rate": 0.0004999975461293621, |
|
"loss": 5.7461, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.0162109375, |
|
"grad_norm": 4.092290878295898, |
|
"learning_rate": 0.0004999972080412177, |
|
"loss": 5.75, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.01630859375, |
|
"grad_norm": 4.104146480560303, |
|
"learning_rate": 0.0004999968481411212, |
|
"loss": 5.7656, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.01640625, |
|
"grad_norm": 4.437919616699219, |
|
"learning_rate": 0.0004999964664291073, |
|
"loss": 5.7812, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.01650390625, |
|
"grad_norm": 7.200653553009033, |
|
"learning_rate": 0.0004999960629052131, |
|
"loss": 5.7578, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.0166015625, |
|
"grad_norm": 2.731268882751465, |
|
"learning_rate": 0.0004999956375694776, |
|
"loss": 5.707, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01669921875, |
|
"grad_norm": 5.332357406616211, |
|
"learning_rate": 0.0004999951904219421, |
|
"loss": 5.7461, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.016796875, |
|
"grad_norm": 3.130514144897461, |
|
"learning_rate": 0.0004999947214626501, |
|
"loss": 5.7109, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.01689453125, |
|
"grad_norm": 3.6535258293151855, |
|
"learning_rate": 0.0004999942306916466, |
|
"loss": 5.7344, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.0169921875, |
|
"grad_norm": 3.5438027381896973, |
|
"learning_rate": 0.0004999937181089796, |
|
"loss": 5.6953, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.01708984375, |
|
"grad_norm": 4.228607177734375, |
|
"learning_rate": 0.0004999931837146987, |
|
"loss": 5.7031, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.0171875, |
|
"grad_norm": 3.217113971710205, |
|
"learning_rate": 0.0004999926275088556, |
|
"loss": 5.7148, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.01728515625, |
|
"grad_norm": 4.9072041511535645, |
|
"learning_rate": 0.0004999920494915043, |
|
"loss": 5.75, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.0173828125, |
|
"grad_norm": 2.8025128841400146, |
|
"learning_rate": 0.0004999914496627009, |
|
"loss": 5.7148, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.01748046875, |
|
"grad_norm": 2.481431245803833, |
|
"learning_rate": 0.0004999908280225035, |
|
"loss": 5.6445, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.017578125, |
|
"grad_norm": 3.0694146156311035, |
|
"learning_rate": 0.0004999901845709722, |
|
"loss": 5.6602, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01767578125, |
|
"grad_norm": 2.6255242824554443, |
|
"learning_rate": 0.0004999895193081698, |
|
"loss": 5.6523, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.0177734375, |
|
"grad_norm": 3.0787289142608643, |
|
"learning_rate": 0.0004999888322341602, |
|
"loss": 5.6367, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.01787109375, |
|
"grad_norm": 3.5675113201141357, |
|
"learning_rate": 0.0004999881233490104, |
|
"loss": 5.6875, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.01796875, |
|
"grad_norm": 3.278024673461914, |
|
"learning_rate": 0.0004999873926527891, |
|
"loss": 5.6406, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.01806640625, |
|
"grad_norm": 3.4470205307006836, |
|
"learning_rate": 0.0004999866401455671, |
|
"loss": 5.6328, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.0181640625, |
|
"grad_norm": 2.537705659866333, |
|
"learning_rate": 0.0004999858658274172, |
|
"loss": 5.625, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.01826171875, |
|
"grad_norm": 3.446849822998047, |
|
"learning_rate": 0.0004999850696984147, |
|
"loss": 5.6367, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.018359375, |
|
"grad_norm": 2.9557571411132812, |
|
"learning_rate": 0.0004999842517586367, |
|
"loss": 5.6523, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.01845703125, |
|
"grad_norm": 2.224954605102539, |
|
"learning_rate": 0.0004999834120081624, |
|
"loss": 5.6133, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.0185546875, |
|
"grad_norm": 2.847534418106079, |
|
"learning_rate": 0.0004999825504470732, |
|
"loss": 5.5977, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01865234375, |
|
"grad_norm": 3.08176851272583, |
|
"learning_rate": 0.0004999816670754527, |
|
"loss": 5.6094, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.01875, |
|
"grad_norm": 2.509631872177124, |
|
"learning_rate": 0.0004999807618933866, |
|
"loss": 5.582, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.01884765625, |
|
"grad_norm": 2.551470994949341, |
|
"learning_rate": 0.0004999798349009626, |
|
"loss": 5.582, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.0189453125, |
|
"grad_norm": 2.8080625534057617, |
|
"learning_rate": 0.0004999788860982706, |
|
"loss": 5.5898, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.01904296875, |
|
"grad_norm": 2.784714698791504, |
|
"learning_rate": 0.0004999779154854024, |
|
"loss": 5.6016, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.019140625, |
|
"grad_norm": 2.6262359619140625, |
|
"learning_rate": 0.0004999769230624524, |
|
"loss": 5.5664, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.01923828125, |
|
"grad_norm": 2.2458934783935547, |
|
"learning_rate": 0.0004999759088295165, |
|
"loss": 5.5508, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.0193359375, |
|
"grad_norm": 2.5689525604248047, |
|
"learning_rate": 0.0004999748727866932, |
|
"loss": 5.5625, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.01943359375, |
|
"grad_norm": 2.44174861907959, |
|
"learning_rate": 0.0004999738149340828, |
|
"loss": 5.5352, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.01953125, |
|
"grad_norm": 2.5862767696380615, |
|
"learning_rate": 0.000499972735271788, |
|
"loss": 5.5234, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01962890625, |
|
"grad_norm": 2.2569644451141357, |
|
"learning_rate": 0.0004999716337999135, |
|
"loss": 5.5078, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.0197265625, |
|
"grad_norm": 2.5576300621032715, |
|
"learning_rate": 0.0004999705105185659, |
|
"loss": 5.5391, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.01982421875, |
|
"grad_norm": 2.7671685218811035, |
|
"learning_rate": 0.0004999693654278542, |
|
"loss": 5.5234, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.019921875, |
|
"grad_norm": 1.9716945886611938, |
|
"learning_rate": 0.0004999681985278894, |
|
"loss": 5.4961, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.02001953125, |
|
"grad_norm": 2.345658302307129, |
|
"learning_rate": 0.0004999670098187846, |
|
"loss": 5.4961, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.0201171875, |
|
"grad_norm": 2.1516318321228027, |
|
"learning_rate": 0.0004999657993006551, |
|
"loss": 5.5078, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.02021484375, |
|
"grad_norm": 2.983196258544922, |
|
"learning_rate": 0.0004999645669736181, |
|
"loss": 5.5273, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.0203125, |
|
"grad_norm": 2.5061614513397217, |
|
"learning_rate": 0.0004999633128377932, |
|
"loss": 5.5195, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.02041015625, |
|
"grad_norm": 1.7015206813812256, |
|
"learning_rate": 0.000499962036893302, |
|
"loss": 5.4961, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.0205078125, |
|
"grad_norm": 2.2488410472869873, |
|
"learning_rate": 0.0004999607391402681, |
|
"loss": 5.4258, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.02060546875, |
|
"grad_norm": 2.7714498043060303, |
|
"learning_rate": 0.0004999594195788175, |
|
"loss": 5.5039, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.020703125, |
|
"grad_norm": 2.1639745235443115, |
|
"learning_rate": 0.0004999580782090778, |
|
"loss": 5.4922, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.02080078125, |
|
"grad_norm": 1.9490896463394165, |
|
"learning_rate": 0.0004999567150311793, |
|
"loss": 5.4492, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.0208984375, |
|
"grad_norm": 2.291276454925537, |
|
"learning_rate": 0.0004999553300452541, |
|
"loss": 5.4648, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.02099609375, |
|
"grad_norm": 2.4977705478668213, |
|
"learning_rate": 0.0004999539232514363, |
|
"loss": 5.4688, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.02109375, |
|
"grad_norm": 2.013157367706299, |
|
"learning_rate": 0.0004999524946498624, |
|
"loss": 5.4727, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.02119140625, |
|
"grad_norm": 2.35416579246521, |
|
"learning_rate": 0.000499951044240671, |
|
"loss": 5.457, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.0212890625, |
|
"grad_norm": 1.7172154188156128, |
|
"learning_rate": 0.0004999495720240027, |
|
"loss": 5.4531, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.02138671875, |
|
"grad_norm": 2.345547676086426, |
|
"learning_rate": 0.000499948078, |
|
"loss": 5.4609, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.021484375, |
|
"grad_norm": 2.2435221672058105, |
|
"learning_rate": 0.0004999465621688078, |
|
"loss": 5.4453, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.02158203125, |
|
"grad_norm": 2.306879997253418, |
|
"learning_rate": 0.0004999450245305732, |
|
"loss": 5.4414, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.0216796875, |
|
"grad_norm": 2.2427804470062256, |
|
"learning_rate": 0.0004999434650854452, |
|
"loss": 5.4531, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.02177734375, |
|
"grad_norm": 1.790202260017395, |
|
"learning_rate": 0.000499941883833575, |
|
"loss": 5.457, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.021875, |
|
"grad_norm": 1.838149070739746, |
|
"learning_rate": 0.0004999402807751157, |
|
"loss": 5.4297, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.02197265625, |
|
"grad_norm": 2.016653299331665, |
|
"learning_rate": 0.000499938655910223, |
|
"loss": 5.4141, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.0220703125, |
|
"grad_norm": 2.0161890983581543, |
|
"learning_rate": 0.0004999370092390541, |
|
"loss": 5.3906, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.02216796875, |
|
"grad_norm": 2.036649465560913, |
|
"learning_rate": 0.0004999353407617689, |
|
"loss": 5.4297, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.022265625, |
|
"grad_norm": 1.5296927690505981, |
|
"learning_rate": 0.000499933650478529, |
|
"loss": 5.4023, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.02236328125, |
|
"grad_norm": 1.713457703590393, |
|
"learning_rate": 0.0004999319383894985, |
|
"loss": 5.4219, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.0224609375, |
|
"grad_norm": 2.3026342391967773, |
|
"learning_rate": 0.000499930204494843, |
|
"loss": 5.4453, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.02255859375, |
|
"grad_norm": 2.2491424083709717, |
|
"learning_rate": 0.0004999284487947308, |
|
"loss": 5.4219, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.02265625, |
|
"grad_norm": 2.2838447093963623, |
|
"learning_rate": 0.0004999266712893322, |
|
"loss": 5.4336, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.02275390625, |
|
"grad_norm": 1.7964040040969849, |
|
"learning_rate": 0.0004999248719788193, |
|
"loss": 5.4141, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.0228515625, |
|
"grad_norm": 2.0387890338897705, |
|
"learning_rate": 0.0004999230508633667, |
|
"loss": 5.4102, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.02294921875, |
|
"grad_norm": 2.3577651977539062, |
|
"learning_rate": 0.0004999212079431507, |
|
"loss": 5.4297, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.023046875, |
|
"grad_norm": 1.8143409490585327, |
|
"learning_rate": 0.0004999193432183504, |
|
"loss": 5.4023, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.02314453125, |
|
"grad_norm": 1.6521782875061035, |
|
"learning_rate": 0.0004999174566891461, |
|
"loss": 5.3906, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.0232421875, |
|
"grad_norm": 1.8481347560882568, |
|
"learning_rate": 0.000499915548355721, |
|
"loss": 5.3984, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.02333984375, |
|
"grad_norm": 2.1952035427093506, |
|
"learning_rate": 0.0004999136182182601, |
|
"loss": 5.3984, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.0234375, |
|
"grad_norm": 2.0808660984039307, |
|
"learning_rate": 0.0004999116662769502, |
|
"loss": 5.3945, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.02353515625, |
|
"grad_norm": 2.293430805206299, |
|
"learning_rate": 0.0004999096925319808, |
|
"loss": 5.4102, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.0236328125, |
|
"grad_norm": 1.6741044521331787, |
|
"learning_rate": 0.0004999076969835432, |
|
"loss": 5.4062, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.02373046875, |
|
"grad_norm": 1.95404851436615, |
|
"learning_rate": 0.0004999056796318308, |
|
"loss": 5.3672, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.023828125, |
|
"grad_norm": 1.8848334550857544, |
|
"learning_rate": 0.0004999036404770391, |
|
"loss": 5.375, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.02392578125, |
|
"grad_norm": 1.6347148418426514, |
|
"learning_rate": 0.0004999015795193661, |
|
"loss": 5.3711, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.0240234375, |
|
"grad_norm": 2.3380987644195557, |
|
"learning_rate": 0.0004998994967590113, |
|
"loss": 5.4023, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.02412109375, |
|
"grad_norm": 1.4733819961547852, |
|
"learning_rate": 0.0004998973921961767, |
|
"loss": 5.3711, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.02421875, |
|
"grad_norm": 2.443174362182617, |
|
"learning_rate": 0.0004998952658310662, |
|
"loss": 5.3633, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.02431640625, |
|
"grad_norm": 1.9105093479156494, |
|
"learning_rate": 0.0004998931176638861, |
|
"loss": 5.375, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.0244140625, |
|
"grad_norm": 2.14117431640625, |
|
"learning_rate": 0.0004998909476948446, |
|
"loss": 5.3672, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.02451171875, |
|
"grad_norm": 1.5175701379776, |
|
"learning_rate": 0.0004998887559241521, |
|
"loss": 5.3711, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.024609375, |
|
"grad_norm": 1.7954213619232178, |
|
"learning_rate": 0.000499886542352021, |
|
"loss": 5.3359, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.02470703125, |
|
"grad_norm": 2.0219264030456543, |
|
"learning_rate": 0.0004998843069786659, |
|
"loss": 5.3516, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.0248046875, |
|
"grad_norm": 2.42618465423584, |
|
"learning_rate": 0.0004998820498043036, |
|
"loss": 5.3594, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.02490234375, |
|
"grad_norm": 2.4111266136169434, |
|
"learning_rate": 0.0004998797708291528, |
|
"loss": 5.3477, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.025, |
|
"grad_norm": 1.6236835718154907, |
|
"learning_rate": 0.0004998774700534346, |
|
"loss": 5.3516, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.02509765625, |
|
"grad_norm": 2.249431610107422, |
|
"learning_rate": 0.0004998751474773718, |
|
"loss": 5.3789, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.0251953125, |
|
"grad_norm": 1.878957986831665, |
|
"learning_rate": 0.0004998728031011897, |
|
"loss": 5.3359, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.02529296875, |
|
"grad_norm": 1.7500239610671997, |
|
"learning_rate": 0.0004998704369251156, |
|
"loss": 5.3086, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.025390625, |
|
"grad_norm": 1.5108247995376587, |
|
"learning_rate": 0.0004998680489493788, |
|
"loss": 5.3008, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.02548828125, |
|
"grad_norm": 1.8362010717391968, |
|
"learning_rate": 0.0004998656391742108, |
|
"loss": 5.3398, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.0255859375, |
|
"grad_norm": 1.7218053340911865, |
|
"learning_rate": 0.0004998632075998453, |
|
"loss": 5.3633, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.02568359375, |
|
"grad_norm": 1.8602724075317383, |
|
"learning_rate": 0.000499860754226518, |
|
"loss": 5.3477, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.02578125, |
|
"grad_norm": 1.3672821521759033, |
|
"learning_rate": 0.0004998582790544667, |
|
"loss": 5.3047, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.02587890625, |
|
"grad_norm": 1.5733098983764648, |
|
"learning_rate": 0.0004998557820839313, |
|
"loss": 5.3203, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.0259765625, |
|
"grad_norm": 1.547677993774414, |
|
"learning_rate": 0.0004998532633151539, |
|
"loss": 5.3203, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.02607421875, |
|
"grad_norm": 1.8635348081588745, |
|
"learning_rate": 0.0004998507227483789, |
|
"loss": 5.332, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.026171875, |
|
"grad_norm": 1.7884019613265991, |
|
"learning_rate": 0.0004998481603838523, |
|
"loss": 5.3281, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.02626953125, |
|
"grad_norm": 1.6009703874588013, |
|
"learning_rate": 0.0004998455762218225, |
|
"loss": 5.3125, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.0263671875, |
|
"grad_norm": 1.79560387134552, |
|
"learning_rate": 0.0004998429702625401, |
|
"loss": 5.3125, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.02646484375, |
|
"grad_norm": 2.2909066677093506, |
|
"learning_rate": 0.0004998403425062579, |
|
"loss": 5.3242, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.0265625, |
|
"grad_norm": 1.209547758102417, |
|
"learning_rate": 0.0004998376929532305, |
|
"loss": 5.3164, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.02666015625, |
|
"grad_norm": 1.811440110206604, |
|
"learning_rate": 0.0004998350216037146, |
|
"loss": 5.3047, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.0267578125, |
|
"grad_norm": 1.956322193145752, |
|
"learning_rate": 0.0004998323284579694, |
|
"loss": 5.3086, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.02685546875, |
|
"grad_norm": 2.4985668659210205, |
|
"learning_rate": 0.000499829613516256, |
|
"loss": 5.3047, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.026953125, |
|
"grad_norm": 2.2196872234344482, |
|
"learning_rate": 0.0004998268767788373, |
|
"loss": 5.3125, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.02705078125, |
|
"grad_norm": 2.2135283946990967, |
|
"learning_rate": 0.0004998241182459789, |
|
"loss": 5.3242, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.0271484375, |
|
"grad_norm": 1.4025847911834717, |
|
"learning_rate": 0.0004998213379179481, |
|
"loss": 5.3398, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.02724609375, |
|
"grad_norm": 2.832472801208496, |
|
"learning_rate": 0.0004998185357950144, |
|
"loss": 5.3047, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.02734375, |
|
"grad_norm": 1.4372154474258423, |
|
"learning_rate": 0.0004998157118774496, |
|
"loss": 5.3047, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.02744140625, |
|
"grad_norm": 2.209719657897949, |
|
"learning_rate": 0.0004998128661655274, |
|
"loss": 5.3008, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.0275390625, |
|
"grad_norm": 2.680424928665161, |
|
"learning_rate": 0.0004998099986595235, |
|
"loss": 5.3047, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.02763671875, |
|
"grad_norm": 2.1611225605010986, |
|
"learning_rate": 0.0004998071093597162, |
|
"loss": 5.3008, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.027734375, |
|
"grad_norm": 1.9571424722671509, |
|
"learning_rate": 0.0004998041982663851, |
|
"loss": 5.2891, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.02783203125, |
|
"grad_norm": 1.3447707891464233, |
|
"learning_rate": 0.000499801265379813, |
|
"loss": 5.2852, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.0279296875, |
|
"grad_norm": 2.0131936073303223, |
|
"learning_rate": 0.0004997983107002838, |
|
"loss": 5.3086, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.02802734375, |
|
"grad_norm": 1.1873258352279663, |
|
"learning_rate": 0.0004997953342280843, |
|
"loss": 5.2656, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.028125, |
|
"grad_norm": 2.239952564239502, |
|
"learning_rate": 0.0004997923359635027, |
|
"loss": 5.2773, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.02822265625, |
|
"grad_norm": 1.3604589700698853, |
|
"learning_rate": 0.0004997893159068297, |
|
"loss": 5.2773, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.0283203125, |
|
"grad_norm": 1.888407826423645, |
|
"learning_rate": 0.0004997862740583584, |
|
"loss": 5.2852, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.02841796875, |
|
"grad_norm": 1.8944205045700073, |
|
"learning_rate": 0.0004997832104183833, |
|
"loss": 5.2812, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.028515625, |
|
"grad_norm": 1.1297378540039062, |
|
"learning_rate": 0.0004997801249872016, |
|
"loss": 5.25, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.02861328125, |
|
"grad_norm": 1.6378464698791504, |
|
"learning_rate": 0.0004997770177651123, |
|
"loss": 5.2461, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.0287109375, |
|
"grad_norm": 1.3006982803344727, |
|
"learning_rate": 0.0004997738887524168, |
|
"loss": 5.2578, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.02880859375, |
|
"grad_norm": 1.8940247297286987, |
|
"learning_rate": 0.0004997707379494183, |
|
"loss": 5.2773, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.02890625, |
|
"grad_norm": 1.5350399017333984, |
|
"learning_rate": 0.0004997675653564223, |
|
"loss": 5.2656, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.02900390625, |
|
"grad_norm": 1.58504056930542, |
|
"learning_rate": 0.0004997643709737363, |
|
"loss": 5.2695, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.0291015625, |
|
"grad_norm": 1.5148996114730835, |
|
"learning_rate": 0.0004997611548016699, |
|
"loss": 5.2734, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.02919921875, |
|
"grad_norm": 1.2484545707702637, |
|
"learning_rate": 0.0004997579168405352, |
|
"loss": 5.25, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.029296875, |
|
"grad_norm": 1.4745291471481323, |
|
"learning_rate": 0.0004997546570906458, |
|
"loss": 5.2773, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.02939453125, |
|
"grad_norm": 1.0975582599639893, |
|
"learning_rate": 0.0004997513755523179, |
|
"loss": 5.2344, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.0294921875, |
|
"grad_norm": 1.4826176166534424, |
|
"learning_rate": 0.0004997480722258694, |
|
"loss": 5.2617, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.02958984375, |
|
"grad_norm": 1.3512847423553467, |
|
"learning_rate": 0.0004997447471116207, |
|
"loss": 5.2422, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.0296875, |
|
"grad_norm": 1.2088016271591187, |
|
"learning_rate": 0.0004997414002098941, |
|
"loss": 5.2539, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.02978515625, |
|
"grad_norm": 1.3594310283660889, |
|
"learning_rate": 0.0004997380315210142, |
|
"loss": 5.2695, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.0298828125, |
|
"grad_norm": 1.6119142770767212, |
|
"learning_rate": 0.0004997346410453074, |
|
"loss": 5.2383, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.02998046875, |
|
"grad_norm": 1.2976305484771729, |
|
"learning_rate": 0.0004997312287831024, |
|
"loss": 5.2773, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.030078125, |
|
"grad_norm": 1.1194605827331543, |
|
"learning_rate": 0.00049972779473473, |
|
"loss": 5.2656, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.03017578125, |
|
"grad_norm": 1.244971752166748, |
|
"learning_rate": 0.0004997243389005232, |
|
"loss": 5.2383, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.0302734375, |
|
"grad_norm": 1.1144415140151978, |
|
"learning_rate": 0.0004997208612808168, |
|
"loss": 5.2188, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.03037109375, |
|
"grad_norm": 1.1849431991577148, |
|
"learning_rate": 0.0004997173618759482, |
|
"loss": 5.2422, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.03046875, |
|
"grad_norm": 1.3395856618881226, |
|
"learning_rate": 0.0004997138406862565, |
|
"loss": 5.2422, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.03056640625, |
|
"grad_norm": 1.1774636507034302, |
|
"learning_rate": 0.0004997102977120832, |
|
"loss": 5.2188, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.0306640625, |
|
"grad_norm": 1.4251941442489624, |
|
"learning_rate": 0.0004997067329537714, |
|
"loss": 5.2578, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.03076171875, |
|
"grad_norm": 1.4117344617843628, |
|
"learning_rate": 0.0004997031464116672, |
|
"loss": 5.207, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.030859375, |
|
"grad_norm": 1.3700896501541138, |
|
"learning_rate": 0.0004996995380861178, |
|
"loss": 5.2305, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.03095703125, |
|
"grad_norm": 1.682862401008606, |
|
"learning_rate": 0.0004996959079774734, |
|
"loss": 5.25, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.0310546875, |
|
"grad_norm": 1.1571077108383179, |
|
"learning_rate": 0.0004996922560860856, |
|
"loss": 5.2109, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.03115234375, |
|
"grad_norm": 1.1706202030181885, |
|
"learning_rate": 0.0004996885824123087, |
|
"loss": 5.2344, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.03125, |
|
"grad_norm": 1.116774082183838, |
|
"learning_rate": 0.0004996848869564986, |
|
"loss": 5.2227, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.03134765625, |
|
"grad_norm": 1.3174951076507568, |
|
"learning_rate": 0.0004996811697190137, |
|
"loss": 5.207, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.0314453125, |
|
"grad_norm": 1.7159594297409058, |
|
"learning_rate": 0.0004996774307002142, |
|
"loss": 5.2305, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.03154296875, |
|
"grad_norm": 1.1668405532836914, |
|
"learning_rate": 0.0004996736699004628, |
|
"loss": 5.2344, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.031640625, |
|
"grad_norm": 1.4254777431488037, |
|
"learning_rate": 0.000499669887320124, |
|
"loss": 5.2188, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.03173828125, |
|
"grad_norm": 1.4150874614715576, |
|
"learning_rate": 0.0004996660829595643, |
|
"loss": 5.2461, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.0318359375, |
|
"grad_norm": 1.4464291334152222, |
|
"learning_rate": 0.0004996622568191529, |
|
"loss": 5.207, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.03193359375, |
|
"grad_norm": 1.3123974800109863, |
|
"learning_rate": 0.0004996584088992603, |
|
"loss": 5.2227, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.03203125, |
|
"grad_norm": 1.2821930646896362, |
|
"learning_rate": 0.0004996545392002597, |
|
"loss": 5.2344, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.03212890625, |
|
"grad_norm": 1.6003687381744385, |
|
"learning_rate": 0.0004996506477225264, |
|
"loss": 5.1836, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.0322265625, |
|
"grad_norm": 1.305853009223938, |
|
"learning_rate": 0.0004996467344664374, |
|
"loss": 5.2266, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.03232421875, |
|
"grad_norm": 1.6081973314285278, |
|
"learning_rate": 0.0004996427994323723, |
|
"loss": 5.2031, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.032421875, |
|
"grad_norm": 1.0995270013809204, |
|
"learning_rate": 0.0004996388426207123, |
|
"loss": 5.1797, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.03251953125, |
|
"grad_norm": 1.1569955348968506, |
|
"learning_rate": 0.0004996348640318412, |
|
"loss": 5.2305, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.0326171875, |
|
"grad_norm": 1.3775110244750977, |
|
"learning_rate": 0.0004996308636661447, |
|
"loss": 5.2266, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.03271484375, |
|
"grad_norm": 1.2185978889465332, |
|
"learning_rate": 0.0004996268415240104, |
|
"loss": 5.1641, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.0328125, |
|
"grad_norm": 1.349692702293396, |
|
"learning_rate": 0.0004996227976058285, |
|
"loss": 5.1914, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.03291015625, |
|
"grad_norm": 0.9755382537841797, |
|
"learning_rate": 0.0004996187319119908, |
|
"loss": 5.1914, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.0330078125, |
|
"grad_norm": 1.187360405921936, |
|
"learning_rate": 0.0004996146444428916, |
|
"loss": 5.1875, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.03310546875, |
|
"grad_norm": 1.2067431211471558, |
|
"learning_rate": 0.000499610535198927, |
|
"loss": 5.2109, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.033203125, |
|
"grad_norm": 1.2557358741760254, |
|
"learning_rate": 0.0004996064041804956, |
|
"loss": 5.1953, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.03330078125, |
|
"grad_norm": 1.191425085067749, |
|
"learning_rate": 0.0004996022513879977, |
|
"loss": 5.1719, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0333984375, |
|
"grad_norm": 0.985287606716156, |
|
"learning_rate": 0.0004995980768218358, |
|
"loss": 5.1719, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.03349609375, |
|
"grad_norm": 2.132200002670288, |
|
"learning_rate": 0.0004995938804824146, |
|
"loss": 5.2109, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.03359375, |
|
"grad_norm": 0.8363533616065979, |
|
"learning_rate": 0.0004995896623701412, |
|
"loss": 5.1914, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.03369140625, |
|
"grad_norm": 1.090147614479065, |
|
"learning_rate": 0.0004995854224854243, |
|
"loss": 5.2031, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.0337890625, |
|
"grad_norm": 1.313947319984436, |
|
"learning_rate": 0.000499581160828675, |
|
"loss": 5.1523, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.03388671875, |
|
"grad_norm": 1.3441343307495117, |
|
"learning_rate": 0.0004995768774003062, |
|
"loss": 5.1797, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.033984375, |
|
"grad_norm": 1.4852529764175415, |
|
"learning_rate": 0.0004995725722007335, |
|
"loss": 5.2031, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.03408203125, |
|
"grad_norm": 1.9018231630325317, |
|
"learning_rate": 0.0004995682452303741, |
|
"loss": 5.1797, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.0341796875, |
|
"grad_norm": 0.8266469836235046, |
|
"learning_rate": 0.0004995638964896475, |
|
"loss": 5.1758, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.03427734375, |
|
"grad_norm": 0.971101701259613, |
|
"learning_rate": 0.0004995595259789752, |
|
"loss": 5.168, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.034375, |
|
"grad_norm": 1.2720019817352295, |
|
"learning_rate": 0.0004995551336987809, |
|
"loss": 5.1914, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.03447265625, |
|
"grad_norm": 1.524877667427063, |
|
"learning_rate": 0.0004995507196494905, |
|
"loss": 5.1836, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.0345703125, |
|
"grad_norm": 1.155196189880371, |
|
"learning_rate": 0.0004995462838315319, |
|
"loss": 5.1484, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.03466796875, |
|
"grad_norm": 1.2725322246551514, |
|
"learning_rate": 0.0004995418262453351, |
|
"loss": 5.1562, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.034765625, |
|
"grad_norm": 1.064315915107727, |
|
"learning_rate": 0.000499537346891332, |
|
"loss": 5.1641, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.03486328125, |
|
"grad_norm": 1.1174203157424927, |
|
"learning_rate": 0.0004995328457699573, |
|
"loss": 5.1289, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.0349609375, |
|
"grad_norm": 1.5893704891204834, |
|
"learning_rate": 0.000499528322881647, |
|
"loss": 5.1523, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.03505859375, |
|
"grad_norm": 1.3740280866622925, |
|
"learning_rate": 0.0004995237782268397, |
|
"loss": 5.1289, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.03515625, |
|
"grad_norm": 1.560889720916748, |
|
"learning_rate": 0.000499519211805976, |
|
"loss": 5.1875, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.03525390625, |
|
"grad_norm": 1.1352694034576416, |
|
"learning_rate": 0.0004995146236194984, |
|
"loss": 5.1758, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.0353515625, |
|
"grad_norm": 1.4046801328659058, |
|
"learning_rate": 0.0004995100136678519, |
|
"loss": 5.1758, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.03544921875, |
|
"grad_norm": 1.10200035572052, |
|
"learning_rate": 0.0004995053819514834, |
|
"loss": 5.1523, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.035546875, |
|
"grad_norm": 1.179033875465393, |
|
"learning_rate": 0.0004995007284708418, |
|
"loss": 5.1367, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.03564453125, |
|
"grad_norm": 1.1701732873916626, |
|
"learning_rate": 0.0004994960532263782, |
|
"loss": 5.1406, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.0357421875, |
|
"grad_norm": 1.096832513809204, |
|
"learning_rate": 0.000499491356218546, |
|
"loss": 5.1484, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.03583984375, |
|
"grad_norm": 1.4660563468933105, |
|
"learning_rate": 0.0004994866374478003, |
|
"loss": 5.1562, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.0359375, |
|
"grad_norm": 1.2725329399108887, |
|
"learning_rate": 0.0004994818969145989, |
|
"loss": 5.1719, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.03603515625, |
|
"grad_norm": 1.5378856658935547, |
|
"learning_rate": 0.000499477134619401, |
|
"loss": 5.1758, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.0361328125, |
|
"grad_norm": 1.0054930448532104, |
|
"learning_rate": 0.0004994723505626685, |
|
"loss": 5.2031, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.03623046875, |
|
"grad_norm": 1.3905240297317505, |
|
"learning_rate": 0.0004994675447448651, |
|
"loss": 5.1562, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.036328125, |
|
"grad_norm": 1.2946956157684326, |
|
"learning_rate": 0.0004994627171664565, |
|
"loss": 5.1641, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.03642578125, |
|
"grad_norm": 0.8594855070114136, |
|
"learning_rate": 0.0004994578678279112, |
|
"loss": 5.1133, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.0365234375, |
|
"grad_norm": 1.0982156991958618, |
|
"learning_rate": 0.0004994529967296989, |
|
"loss": 5.1445, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.03662109375, |
|
"grad_norm": 1.197832703590393, |
|
"learning_rate": 0.0004994481038722919, |
|
"loss": 5.1875, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.03671875, |
|
"grad_norm": 0.903097927570343, |
|
"learning_rate": 0.0004994431892561646, |
|
"loss": 5.1523, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.03681640625, |
|
"grad_norm": 1.064278483390808, |
|
"learning_rate": 0.0004994382528817935, |
|
"loss": 5.1602, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.0369140625, |
|
"grad_norm": 1.0776413679122925, |
|
"learning_rate": 0.0004994332947496568, |
|
"loss": 5.1406, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.03701171875, |
|
"grad_norm": 0.99156254529953, |
|
"learning_rate": 0.0004994283148602357, |
|
"loss": 5.1484, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.037109375, |
|
"grad_norm": 1.25286066532135, |
|
"learning_rate": 0.0004994233132140125, |
|
"loss": 5.1367, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.03720703125, |
|
"grad_norm": 0.9926852583885193, |
|
"learning_rate": 0.0004994182898114722, |
|
"loss": 5.1328, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.0373046875, |
|
"grad_norm": 1.0031254291534424, |
|
"learning_rate": 0.0004994132446531019, |
|
"loss": 5.125, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.03740234375, |
|
"grad_norm": 1.1352587938308716, |
|
"learning_rate": 0.0004994081777393907, |
|
"loss": 5.1719, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.0375, |
|
"grad_norm": 1.145836591720581, |
|
"learning_rate": 0.0004994030890708297, |
|
"loss": 5.1211, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.03759765625, |
|
"grad_norm": 1.125534176826477, |
|
"learning_rate": 0.0004993979786479121, |
|
"loss": 5.125, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.0376953125, |
|
"grad_norm": 0.9138064384460449, |
|
"learning_rate": 0.0004993928464711337, |
|
"loss": 5.1289, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.03779296875, |
|
"grad_norm": 1.0376123189926147, |
|
"learning_rate": 0.0004993876925409917, |
|
"loss": 5.1328, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.037890625, |
|
"grad_norm": 1.0181726217269897, |
|
"learning_rate": 0.0004993825168579857, |
|
"loss": 5.1289, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.03798828125, |
|
"grad_norm": 1.191735029220581, |
|
"learning_rate": 0.0004993773194226178, |
|
"loss": 5.1172, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.0380859375, |
|
"grad_norm": 1.0628931522369385, |
|
"learning_rate": 0.0004993721002353915, |
|
"loss": 5.1133, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.03818359375, |
|
"grad_norm": 1.2050843238830566, |
|
"learning_rate": 0.0004993668592968129, |
|
"loss": 5.1172, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.03828125, |
|
"grad_norm": 0.8847583532333374, |
|
"learning_rate": 0.0004993615966073902, |
|
"loss": 5.1328, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.03837890625, |
|
"grad_norm": 0.9037215709686279, |
|
"learning_rate": 0.0004993563121676332, |
|
"loss": 5.1406, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.0384765625, |
|
"grad_norm": 0.8788461685180664, |
|
"learning_rate": 0.0004993510059780546, |
|
"loss": 5.1523, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.03857421875, |
|
"grad_norm": 0.788327693939209, |
|
"learning_rate": 0.0004993456780391686, |
|
"loss": 5.1016, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.038671875, |
|
"grad_norm": 0.874692440032959, |
|
"learning_rate": 0.0004993403283514919, |
|
"loss": 5.0938, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.03876953125, |
|
"grad_norm": 1.0633156299591064, |
|
"learning_rate": 0.0004993349569155428, |
|
"loss": 5.1289, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.0388671875, |
|
"grad_norm": 1.076167106628418, |
|
"learning_rate": 0.0004993295637318423, |
|
"loss": 5.1172, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.03896484375, |
|
"grad_norm": 0.9785029292106628, |
|
"learning_rate": 0.0004993241488009131, |
|
"loss": 5.0977, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.0390625, |
|
"grad_norm": 1.0597959756851196, |
|
"learning_rate": 0.0004993187121232801, |
|
"loss": 5.1094, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.03916015625, |
|
"grad_norm": 1.2436349391937256, |
|
"learning_rate": 0.0004993132536994705, |
|
"loss": 5.1289, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.0392578125, |
|
"grad_norm": 1.0709871053695679, |
|
"learning_rate": 0.0004993077735300133, |
|
"loss": 5.1328, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.03935546875, |
|
"grad_norm": 1.018754005432129, |
|
"learning_rate": 0.0004993022716154399, |
|
"loss": 5.1094, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.039453125, |
|
"grad_norm": 0.8745573163032532, |
|
"learning_rate": 0.0004992967479562836, |
|
"loss": 5.1172, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.03955078125, |
|
"grad_norm": 0.8379338979721069, |
|
"learning_rate": 0.0004992912025530799, |
|
"loss": 5.1406, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.0396484375, |
|
"grad_norm": 0.8244563341140747, |
|
"learning_rate": 0.0004992856354063663, |
|
"loss": 5.1328, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.03974609375, |
|
"grad_norm": 1.0282996892929077, |
|
"learning_rate": 0.0004992800465166826, |
|
"loss": 5.1016, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.03984375, |
|
"grad_norm": 0.8590074777603149, |
|
"learning_rate": 0.0004992744358845707, |
|
"loss": 5.1094, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.03994140625, |
|
"grad_norm": 0.7324565052986145, |
|
"learning_rate": 0.0004992688035105744, |
|
"loss": 5.1094, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.0400390625, |
|
"grad_norm": 0.8005876541137695, |
|
"learning_rate": 0.0004992631493952395, |
|
"loss": 5.1094, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.04013671875, |
|
"grad_norm": 1.217882513999939, |
|
"learning_rate": 0.0004992574735391144, |
|
"loss": 5.0938, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.040234375, |
|
"grad_norm": 1.186151385307312, |
|
"learning_rate": 0.0004992517759427494, |
|
"loss": 5.1094, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.04033203125, |
|
"grad_norm": 0.851876974105835, |
|
"learning_rate": 0.0004992460566066967, |
|
"loss": 5.0781, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.0404296875, |
|
"grad_norm": 0.8220931887626648, |
|
"learning_rate": 0.0004992403155315107, |
|
"loss": 5.0977, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.04052734375, |
|
"grad_norm": 0.848565399646759, |
|
"learning_rate": 0.0004992345527177482, |
|
"loss": 5.0898, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.040625, |
|
"grad_norm": 1.1592659950256348, |
|
"learning_rate": 0.0004992287681659676, |
|
"loss": 5.0781, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.04072265625, |
|
"grad_norm": 1.0059559345245361, |
|
"learning_rate": 0.0004992229618767298, |
|
"loss": 5.0781, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.0408203125, |
|
"grad_norm": 0.9418927431106567, |
|
"learning_rate": 0.0004992171338505975, |
|
"loss": 5.0898, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.04091796875, |
|
"grad_norm": 0.8450960516929626, |
|
"learning_rate": 0.0004992112840881359, |
|
"loss": 5.0898, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.041015625, |
|
"grad_norm": 0.739745557308197, |
|
"learning_rate": 0.0004992054125899122, |
|
"loss": 5.0625, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.04111328125, |
|
"grad_norm": 0.6499938368797302, |
|
"learning_rate": 0.0004991995193564953, |
|
"loss": 5.0742, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.0412109375, |
|
"grad_norm": 0.5470448136329651, |
|
"learning_rate": 0.0004991936043884566, |
|
"loss": 5.0859, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.04130859375, |
|
"grad_norm": 0.582874059677124, |
|
"learning_rate": 0.0004991876676863697, |
|
"loss": 5.0664, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.04140625, |
|
"grad_norm": 0.61722731590271, |
|
"learning_rate": 0.0004991817092508099, |
|
"loss": 5.0898, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.04150390625, |
|
"grad_norm": 0.6884714365005493, |
|
"learning_rate": 0.0004991757290823548, |
|
"loss": 5.0703, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.0416015625, |
|
"grad_norm": 0.8316398859024048, |
|
"learning_rate": 0.0004991697271815845, |
|
"loss": 5.1367, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.04169921875, |
|
"grad_norm": 0.9839829802513123, |
|
"learning_rate": 0.0004991637035490805, |
|
"loss": 5.0703, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.041796875, |
|
"grad_norm": 1.0404824018478394, |
|
"learning_rate": 0.0004991576581854267, |
|
"loss": 5.0586, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.04189453125, |
|
"grad_norm": 0.8250628709793091, |
|
"learning_rate": 0.0004991515910912095, |
|
"loss": 5.0625, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.0419921875, |
|
"grad_norm": 0.7997605204582214, |
|
"learning_rate": 0.0004991455022670168, |
|
"loss": 5.0938, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.04208984375, |
|
"grad_norm": 0.6865594983100891, |
|
"learning_rate": 0.0004991393917134388, |
|
"loss": 5.0781, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.0421875, |
|
"grad_norm": 0.6902148723602295, |
|
"learning_rate": 0.0004991332594310682, |
|
"loss": 5.0586, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.04228515625, |
|
"grad_norm": 0.6424334645271301, |
|
"learning_rate": 0.0004991271054204992, |
|
"loss": 5.0742, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.0423828125, |
|
"grad_norm": 0.6057596802711487, |
|
"learning_rate": 0.0004991209296823285, |
|
"loss": 5.0625, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.04248046875, |
|
"grad_norm": 0.6817944645881653, |
|
"learning_rate": 0.0004991147322171548, |
|
"loss": 5.082, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.042578125, |
|
"grad_norm": 0.7707593441009521, |
|
"learning_rate": 0.0004991085130255789, |
|
"loss": 5.0586, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.04267578125, |
|
"grad_norm": 0.9107893109321594, |
|
"learning_rate": 0.0004991022721082037, |
|
"loss": 5.0898, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.0427734375, |
|
"grad_norm": 1.1807111501693726, |
|
"learning_rate": 0.0004990960094656342, |
|
"loss": 5.0195, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.04287109375, |
|
"grad_norm": 1.1950851678848267, |
|
"learning_rate": 0.0004990897250984774, |
|
"loss": 5.0469, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.04296875, |
|
"grad_norm": 0.7908074855804443, |
|
"learning_rate": 0.0004990834190073428, |
|
"loss": 5.1016, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.04306640625, |
|
"grad_norm": 0.7892453670501709, |
|
"learning_rate": 0.0004990770911928416, |
|
"loss": 5.0938, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.0431640625, |
|
"grad_norm": 0.7331576347351074, |
|
"learning_rate": 0.0004990707416555871, |
|
"loss": 5.0508, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.04326171875, |
|
"grad_norm": 0.7229627966880798, |
|
"learning_rate": 0.000499064370396195, |
|
"loss": 5.0234, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.043359375, |
|
"grad_norm": 0.8491083979606628, |
|
"learning_rate": 0.000499057977415283, |
|
"loss": 5.0859, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.04345703125, |
|
"grad_norm": 1.1333506107330322, |
|
"learning_rate": 0.0004990515627134706, |
|
"loss": 5.0859, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.0435546875, |
|
"grad_norm": 1.0400718450546265, |
|
"learning_rate": 0.00049904512629138, |
|
"loss": 5.0703, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.04365234375, |
|
"grad_norm": 0.8677104115486145, |
|
"learning_rate": 0.000499038668149635, |
|
"loss": 5.0273, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.04375, |
|
"grad_norm": 1.0702966451644897, |
|
"learning_rate": 0.0004990321882888617, |
|
"loss": 5.0664, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.04384765625, |
|
"grad_norm": 0.9019585847854614, |
|
"learning_rate": 0.0004990256867096881, |
|
"loss": 5.0352, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.0439453125, |
|
"grad_norm": 0.7547096014022827, |
|
"learning_rate": 0.0004990191634127448, |
|
"loss": 5.0664, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.04404296875, |
|
"grad_norm": 0.6731748580932617, |
|
"learning_rate": 0.0004990126183986639, |
|
"loss": 5.0625, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.044140625, |
|
"grad_norm": 0.738862931728363, |
|
"learning_rate": 0.0004990060516680802, |
|
"loss": 5.082, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.04423828125, |
|
"grad_norm": 0.7783480286598206, |
|
"learning_rate": 0.00049899946322163, |
|
"loss": 5.0703, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.0443359375, |
|
"grad_norm": 0.7385779023170471, |
|
"learning_rate": 0.0004989928530599521, |
|
"loss": 5.0547, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.04443359375, |
|
"grad_norm": 1.394942283630371, |
|
"learning_rate": 0.0004989862211836873, |
|
"loss": 5.0547, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.04453125, |
|
"grad_norm": 0.5369975566864014, |
|
"learning_rate": 0.0004989795675934787, |
|
"loss": 5.0781, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.04462890625, |
|
"grad_norm": 0.6641459465026855, |
|
"learning_rate": 0.0004989728922899712, |
|
"loss": 5.0117, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.0447265625, |
|
"grad_norm": 0.7093449831008911, |
|
"learning_rate": 0.0004989661952738118, |
|
"loss": 5.0586, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.04482421875, |
|
"grad_norm": 0.8085068464279175, |
|
"learning_rate": 0.0004989594765456498, |
|
"loss": 5.0508, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.044921875, |
|
"grad_norm": 1.188184142112732, |
|
"learning_rate": 0.0004989527361061366, |
|
"loss": 5.0391, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.04501953125, |
|
"grad_norm": 0.9588766098022461, |
|
"learning_rate": 0.0004989459739559257, |
|
"loss": 5.0195, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.0451171875, |
|
"grad_norm": 0.8630503416061401, |
|
"learning_rate": 0.0004989391900956725, |
|
"loss": 5.0195, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.04521484375, |
|
"grad_norm": 0.8916110396385193, |
|
"learning_rate": 0.0004989323845260346, |
|
"loss": 5.0586, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.0453125, |
|
"grad_norm": 0.8974289298057556, |
|
"learning_rate": 0.0004989255572476719, |
|
"loss": 5.0547, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.04541015625, |
|
"grad_norm": 0.9970776438713074, |
|
"learning_rate": 0.0004989187082612462, |
|
"loss": 5.0469, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.0455078125, |
|
"grad_norm": 0.885099470615387, |
|
"learning_rate": 0.0004989118375674216, |
|
"loss": 5.043, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.04560546875, |
|
"grad_norm": 0.659532368183136, |
|
"learning_rate": 0.0004989049451668639, |
|
"loss": 5.0547, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.045703125, |
|
"grad_norm": 0.8999412655830383, |
|
"learning_rate": 0.0004988980310602415, |
|
"loss": 5.0117, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.04580078125, |
|
"grad_norm": 0.7855193018913269, |
|
"learning_rate": 0.0004988910952482246, |
|
"loss": 5.0859, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.0458984375, |
|
"grad_norm": 0.9827584028244019, |
|
"learning_rate": 0.0004988841377314855, |
|
"loss": 5.0586, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.04599609375, |
|
"grad_norm": 1.13511323928833, |
|
"learning_rate": 0.0004988771585106988, |
|
"loss": 5.0547, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.04609375, |
|
"grad_norm": 0.8385241627693176, |
|
"learning_rate": 0.000498870157586541, |
|
"loss": 5.0586, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.04619140625, |
|
"grad_norm": 0.7410266995429993, |
|
"learning_rate": 0.0004988631349596909, |
|
"loss": 5.0664, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.0462890625, |
|
"grad_norm": 0.6939350962638855, |
|
"learning_rate": 0.0004988560906308291, |
|
"loss": 5.043, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.04638671875, |
|
"grad_norm": 0.7858873605728149, |
|
"learning_rate": 0.0004988490246006388, |
|
"loss": 5.0469, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.046484375, |
|
"grad_norm": 1.0619468688964844, |
|
"learning_rate": 0.0004988419368698047, |
|
"loss": 5.0234, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.04658203125, |
|
"grad_norm": 1.3020342588424683, |
|
"learning_rate": 0.0004988348274390141, |
|
"loss": 5.0469, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.0466796875, |
|
"grad_norm": 0.7949604988098145, |
|
"learning_rate": 0.0004988276963089561, |
|
"loss": 5.0391, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.04677734375, |
|
"grad_norm": 0.8782525658607483, |
|
"learning_rate": 0.0004988205434803222, |
|
"loss": 5.043, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.046875, |
|
"grad_norm": 0.9638606309890747, |
|
"learning_rate": 0.0004988133689538055, |
|
"loss": 5.0703, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.04697265625, |
|
"grad_norm": 0.8633697032928467, |
|
"learning_rate": 0.0004988061727301019, |
|
"loss": 5.0, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.0470703125, |
|
"grad_norm": 0.719272255897522, |
|
"learning_rate": 0.0004987989548099087, |
|
"loss": 5.043, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.04716796875, |
|
"grad_norm": 0.7947590947151184, |
|
"learning_rate": 0.0004987917151939257, |
|
"loss": 5.0273, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.047265625, |
|
"grad_norm": 0.8721004128456116, |
|
"learning_rate": 0.000498784453882855, |
|
"loss": 5.0469, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.04736328125, |
|
"grad_norm": 0.840782880783081, |
|
"learning_rate": 0.0004987771708774001, |
|
"loss": 5.0273, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.0474609375, |
|
"grad_norm": 0.6539798974990845, |
|
"learning_rate": 0.0004987698661782674, |
|
"loss": 5.0273, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.04755859375, |
|
"grad_norm": 0.6629400253295898, |
|
"learning_rate": 0.0004987625397861649, |
|
"loss": 5.0273, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.04765625, |
|
"grad_norm": 0.69793301820755, |
|
"learning_rate": 0.0004987551917018028, |
|
"loss": 4.9961, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.04775390625, |
|
"grad_norm": 0.8525344133377075, |
|
"learning_rate": 0.0004987478219258936, |
|
"loss": 5.0508, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.0478515625, |
|
"grad_norm": 0.9601410627365112, |
|
"learning_rate": 0.0004987404304591514, |
|
"loss": 5.0391, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.04794921875, |
|
"grad_norm": 0.8520921468734741, |
|
"learning_rate": 0.0004987330173022932, |
|
"loss": 5.0273, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.048046875, |
|
"grad_norm": 0.8635746240615845, |
|
"learning_rate": 0.0004987255824560374, |
|
"loss": 5.0234, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.04814453125, |
|
"grad_norm": 0.998202383518219, |
|
"learning_rate": 0.0004987181259211048, |
|
"loss": 5.0273, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.0482421875, |
|
"grad_norm": 1.2011066675186157, |
|
"learning_rate": 0.0004987106476982184, |
|
"loss": 5.0312, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.04833984375, |
|
"grad_norm": 0.7638375163078308, |
|
"learning_rate": 0.0004987031477881029, |
|
"loss": 5.0312, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.0484375, |
|
"grad_norm": 0.70158451795578, |
|
"learning_rate": 0.0004986956261914856, |
|
"loss": 5.0156, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.04853515625, |
|
"grad_norm": 0.7986351847648621, |
|
"learning_rate": 0.0004986880829090954, |
|
"loss": 5.0234, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.0486328125, |
|
"grad_norm": 0.8812294006347656, |
|
"learning_rate": 0.0004986805179416638, |
|
"loss": 5.0156, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.04873046875, |
|
"grad_norm": 0.8842159509658813, |
|
"learning_rate": 0.0004986729312899242, |
|
"loss": 5.0156, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.048828125, |
|
"grad_norm": 0.8196664452552795, |
|
"learning_rate": 0.000498665322954612, |
|
"loss": 5.0078, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.04892578125, |
|
"grad_norm": 0.774849534034729, |
|
"learning_rate": 0.0004986576929364646, |
|
"loss": 5.0273, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.0490234375, |
|
"grad_norm": 0.6376418471336365, |
|
"learning_rate": 0.0004986500412362219, |
|
"loss": 5.0234, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.04912109375, |
|
"grad_norm": 0.49704745411872864, |
|
"learning_rate": 0.0004986423678546257, |
|
"loss": 4.9922, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.04921875, |
|
"grad_norm": 0.49575215578079224, |
|
"learning_rate": 0.0004986346727924197, |
|
"loss": 5.0234, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.04931640625, |
|
"grad_norm": 0.5091367363929749, |
|
"learning_rate": 0.0004986269560503501, |
|
"loss": 4.9961, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.0494140625, |
|
"grad_norm": 0.7224910855293274, |
|
"learning_rate": 0.0004986192176291647, |
|
"loss": 4.9922, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.04951171875, |
|
"grad_norm": 0.8590059280395508, |
|
"learning_rate": 0.0004986114575296141, |
|
"loss": 5.0234, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.049609375, |
|
"grad_norm": 0.7906845808029175, |
|
"learning_rate": 0.0004986036757524501, |
|
"loss": 5.0195, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.04970703125, |
|
"grad_norm": 0.6573899388313293, |
|
"learning_rate": 0.0004985958722984275, |
|
"loss": 5.0117, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.0498046875, |
|
"grad_norm": 0.5708054900169373, |
|
"learning_rate": 0.0004985880471683026, |
|
"loss": 5.0, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.04990234375, |
|
"grad_norm": 0.6391012668609619, |
|
"learning_rate": 0.000498580200362834, |
|
"loss": 5.0156, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.6893157362937927, |
|
"learning_rate": 0.0004985723318827824, |
|
"loss": 5.0078, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.05009765625, |
|
"grad_norm": 0.7473337054252625, |
|
"learning_rate": 0.0004985644417289107, |
|
"loss": 5.0352, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.0501953125, |
|
"grad_norm": 0.6992818713188171, |
|
"learning_rate": 0.0004985565299019836, |
|
"loss": 4.9766, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.05029296875, |
|
"grad_norm": 0.5503939986228943, |
|
"learning_rate": 0.0004985485964027682, |
|
"loss": 5.0156, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.050390625, |
|
"grad_norm": 0.5531403422355652, |
|
"learning_rate": 0.0004985406412320337, |
|
"loss": 5.0234, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.05048828125, |
|
"grad_norm": 0.5232397317886353, |
|
"learning_rate": 0.0004985326643905511, |
|
"loss": 4.9922, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.0505859375, |
|
"grad_norm": 0.5153787732124329, |
|
"learning_rate": 0.0004985246658790939, |
|
"loss": 4.9922, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.05068359375, |
|
"grad_norm": 0.7395737767219543, |
|
"learning_rate": 0.0004985166456984375, |
|
"loss": 4.9961, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.05078125, |
|
"grad_norm": 0.7297258973121643, |
|
"learning_rate": 0.0004985086038493591, |
|
"loss": 5.0195, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.05087890625, |
|
"grad_norm": 0.8792772889137268, |
|
"learning_rate": 0.0004985005403326387, |
|
"loss": 5.0195, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.0509765625, |
|
"grad_norm": 0.8485317230224609, |
|
"learning_rate": 0.0004984924551490577, |
|
"loss": 5.0039, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.05107421875, |
|
"grad_norm": 0.7504170536994934, |
|
"learning_rate": 0.0004984843482994001, |
|
"loss": 5.0391, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.051171875, |
|
"grad_norm": 0.6931923031806946, |
|
"learning_rate": 0.0004984762197844516, |
|
"loss": 4.9883, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.05126953125, |
|
"grad_norm": 0.7201303839683533, |
|
"learning_rate": 0.0004984680696050004, |
|
"loss": 4.9844, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.0513671875, |
|
"grad_norm": 0.8321727514266968, |
|
"learning_rate": 0.0004984598977618366, |
|
"loss": 5.0234, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.05146484375, |
|
"grad_norm": 1.0283933877944946, |
|
"learning_rate": 0.0004984517042557523, |
|
"loss": 5.0156, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.0515625, |
|
"grad_norm": 1.3810787200927734, |
|
"learning_rate": 0.0004984434890875417, |
|
"loss": 5.0273, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.05166015625, |
|
"grad_norm": 0.75733482837677, |
|
"learning_rate": 0.0004984352522580015, |
|
"loss": 4.9844, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.0517578125, |
|
"grad_norm": 0.7140693068504333, |
|
"learning_rate": 0.0004984269937679301, |
|
"loss": 4.9961, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.05185546875, |
|
"grad_norm": 0.9317624568939209, |
|
"learning_rate": 0.000498418713618128, |
|
"loss": 4.9805, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.051953125, |
|
"grad_norm": 0.8527510762214661, |
|
"learning_rate": 0.0004984104118093979, |
|
"loss": 4.9961, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.05205078125, |
|
"grad_norm": 0.8085970878601074, |
|
"learning_rate": 0.0004984020883425447, |
|
"loss": 4.9883, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.0521484375, |
|
"grad_norm": 0.8414132595062256, |
|
"learning_rate": 0.0004983937432183754, |
|
"loss": 4.9727, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.05224609375, |
|
"grad_norm": 0.7883987426757812, |
|
"learning_rate": 0.0004983853764376987, |
|
"loss": 4.9766, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.05234375, |
|
"grad_norm": 0.8301931619644165, |
|
"learning_rate": 0.000498376988001326, |
|
"loss": 4.9688, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.05244140625, |
|
"grad_norm": 0.815565288066864, |
|
"learning_rate": 0.0004983685779100702, |
|
"loss": 5.0273, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.0525390625, |
|
"grad_norm": 0.7249122262001038, |
|
"learning_rate": 0.0004983601461647469, |
|
"loss": 5.0234, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.05263671875, |
|
"grad_norm": 0.6938775181770325, |
|
"learning_rate": 0.0004983516927661733, |
|
"loss": 4.9805, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.052734375, |
|
"grad_norm": 0.6271894574165344, |
|
"learning_rate": 0.0004983432177151691, |
|
"loss": 4.9922, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.05283203125, |
|
"grad_norm": 0.5879946351051331, |
|
"learning_rate": 0.0004983347210125558, |
|
"loss": 4.9883, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.0529296875, |
|
"grad_norm": 0.6193601489067078, |
|
"learning_rate": 0.0004983262026591569, |
|
"loss": 4.9688, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.05302734375, |
|
"grad_norm": 0.7117279767990112, |
|
"learning_rate": 0.0004983176626557986, |
|
"loss": 5.0195, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.053125, |
|
"grad_norm": 0.6014550924301147, |
|
"learning_rate": 0.0004983091010033083, |
|
"loss": 5.0, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.05322265625, |
|
"grad_norm": 0.5970187783241272, |
|
"learning_rate": 0.0004983005177025164, |
|
"loss": 4.9688, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.0533203125, |
|
"grad_norm": 0.602258563041687, |
|
"learning_rate": 0.0004982919127542549, |
|
"loss": 5.0156, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.05341796875, |
|
"grad_norm": 0.5443257689476013, |
|
"learning_rate": 0.0004982832861593579, |
|
"loss": 4.9805, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.053515625, |
|
"grad_norm": 0.5458142161369324, |
|
"learning_rate": 0.0004982746379186616, |
|
"loss": 4.9531, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.05361328125, |
|
"grad_norm": 0.5225309729576111, |
|
"learning_rate": 0.0004982659680330047, |
|
"loss": 5.0039, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.0537109375, |
|
"grad_norm": 0.5642147660255432, |
|
"learning_rate": 0.0004982572765032275, |
|
"loss": 4.9961, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.05380859375, |
|
"grad_norm": 0.6520267724990845, |
|
"learning_rate": 0.0004982485633301725, |
|
"loss": 5.0078, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.05390625, |
|
"grad_norm": 0.7051550149917603, |
|
"learning_rate": 0.0004982398285146846, |
|
"loss": 5.0, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.05400390625, |
|
"grad_norm": 0.6687464714050293, |
|
"learning_rate": 0.0004982310720576103, |
|
"loss": 4.9766, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.0541015625, |
|
"grad_norm": 0.60595703125, |
|
"learning_rate": 0.0004982222939597989, |
|
"loss": 4.957, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.05419921875, |
|
"grad_norm": 0.5594586133956909, |
|
"learning_rate": 0.0004982134942221009, |
|
"loss": 4.9883, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.054296875, |
|
"grad_norm": 0.5148446559906006, |
|
"learning_rate": 0.0004982046728453696, |
|
"loss": 4.9844, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.05439453125, |
|
"grad_norm": 0.5946763753890991, |
|
"learning_rate": 0.0004981958298304602, |
|
"loss": 5.0156, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.0544921875, |
|
"grad_norm": 0.716316282749176, |
|
"learning_rate": 0.0004981869651782299, |
|
"loss": 4.9805, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.05458984375, |
|
"grad_norm": 0.794476330280304, |
|
"learning_rate": 0.0004981780788895382, |
|
"loss": 5.0117, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.0546875, |
|
"grad_norm": 0.7105295658111572, |
|
"learning_rate": 0.0004981691709652464, |
|
"loss": 4.9766, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.05478515625, |
|
"grad_norm": 0.5912784337997437, |
|
"learning_rate": 0.0004981602414062181, |
|
"loss": 4.9414, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.0548828125, |
|
"grad_norm": 0.5801540613174438, |
|
"learning_rate": 0.000498151290213319, |
|
"loss": 4.9883, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.05498046875, |
|
"grad_norm": 0.6369305849075317, |
|
"learning_rate": 0.0004981423173874169, |
|
"loss": 5.0039, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.055078125, |
|
"grad_norm": 0.6735507845878601, |
|
"learning_rate": 0.0004981333229293816, |
|
"loss": 4.9648, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.05517578125, |
|
"grad_norm": 0.6315239667892456, |
|
"learning_rate": 0.000498124306840085, |
|
"loss": 5.0078, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.0552734375, |
|
"grad_norm": 0.710253119468689, |
|
"learning_rate": 0.0004981152691204011, |
|
"loss": 4.9844, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.05537109375, |
|
"grad_norm": 0.6913606524467468, |
|
"learning_rate": 0.0004981062097712064, |
|
"loss": 4.9766, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.05546875, |
|
"grad_norm": 0.5899546146392822, |
|
"learning_rate": 0.0004980971287933787, |
|
"loss": 4.9688, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.05556640625, |
|
"grad_norm": 0.5400623083114624, |
|
"learning_rate": 0.0004980880261877986, |
|
"loss": 4.9805, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.0556640625, |
|
"grad_norm": 0.5193256139755249, |
|
"learning_rate": 0.0004980789019553484, |
|
"loss": 4.9453, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.05576171875, |
|
"grad_norm": 0.5622848272323608, |
|
"learning_rate": 0.0004980697560969127, |
|
"loss": 4.957, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.055859375, |
|
"grad_norm": 0.6163820028305054, |
|
"learning_rate": 0.0004980605886133782, |
|
"loss": 4.9766, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.05595703125, |
|
"grad_norm": 0.6021759510040283, |
|
"learning_rate": 0.0004980513995056334, |
|
"loss": 4.957, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.0560546875, |
|
"grad_norm": 0.5321788191795349, |
|
"learning_rate": 0.0004980421887745695, |
|
"loss": 4.9805, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.05615234375, |
|
"grad_norm": 0.4939315617084503, |
|
"learning_rate": 0.0004980329564210792, |
|
"loss": 5.0195, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.05625, |
|
"grad_norm": 0.4567241370677948, |
|
"learning_rate": 0.0004980237024460573, |
|
"loss": 4.9648, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.05634765625, |
|
"grad_norm": 0.44762229919433594, |
|
"learning_rate": 0.0004980144268504012, |
|
"loss": 4.9961, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.0564453125, |
|
"grad_norm": 0.5556433200836182, |
|
"learning_rate": 0.00049800512963501, |
|
"loss": 4.9844, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.05654296875, |
|
"grad_norm": 0.6940642595291138, |
|
"learning_rate": 0.0004979958108007851, |
|
"loss": 4.9844, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.056640625, |
|
"grad_norm": 0.7792614102363586, |
|
"learning_rate": 0.0004979864703486297, |
|
"loss": 4.9648, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.05673828125, |
|
"grad_norm": 0.8071824908256531, |
|
"learning_rate": 0.0004979771082794495, |
|
"loss": 4.9727, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.0568359375, |
|
"grad_norm": 0.9120080471038818, |
|
"learning_rate": 0.0004979677245941519, |
|
"loss": 4.9883, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.05693359375, |
|
"grad_norm": 0.9239040017127991, |
|
"learning_rate": 0.0004979583192936468, |
|
"loss": 4.9609, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.05703125, |
|
"grad_norm": 1.002406120300293, |
|
"learning_rate": 0.0004979488923788459, |
|
"loss": 4.9766, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.05712890625, |
|
"grad_norm": 0.8661404848098755, |
|
"learning_rate": 0.0004979394438506629, |
|
"loss": 5.0, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.0572265625, |
|
"grad_norm": 0.6225184798240662, |
|
"learning_rate": 0.000497929973710014, |
|
"loss": 4.9727, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.05732421875, |
|
"grad_norm": 0.5944101810455322, |
|
"learning_rate": 0.0004979204819578172, |
|
"loss": 4.9414, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.057421875, |
|
"grad_norm": 0.5297685265541077, |
|
"learning_rate": 0.0004979109685949926, |
|
"loss": 4.9844, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.05751953125, |
|
"grad_norm": 0.43607059121131897, |
|
"learning_rate": 0.0004979014336224625, |
|
"loss": 4.9961, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.0576171875, |
|
"grad_norm": 0.48334574699401855, |
|
"learning_rate": 0.0004978918770411513, |
|
"loss": 4.9648, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.05771484375, |
|
"grad_norm": 0.5257386565208435, |
|
"learning_rate": 0.0004978822988519853, |
|
"loss": 4.9766, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.0578125, |
|
"grad_norm": 0.6425731182098389, |
|
"learning_rate": 0.0004978726990558931, |
|
"loss": 4.9492, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.05791015625, |
|
"grad_norm": 1.0080450773239136, |
|
"learning_rate": 0.0004978630776538056, |
|
"loss": 4.9727, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.0580078125, |
|
"grad_norm": 1.2617547512054443, |
|
"learning_rate": 0.000497853434646655, |
|
"loss": 5.0156, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.05810546875, |
|
"grad_norm": 0.8185104727745056, |
|
"learning_rate": 0.0004978437700353766, |
|
"loss": 4.9648, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.058203125, |
|
"grad_norm": 0.996583104133606, |
|
"learning_rate": 0.0004978340838209071, |
|
"loss": 4.9336, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.05830078125, |
|
"grad_norm": 0.8999793529510498, |
|
"learning_rate": 0.0004978243760041855, |
|
"loss": 4.9805, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.0583984375, |
|
"grad_norm": 0.7666017413139343, |
|
"learning_rate": 0.0004978146465861531, |
|
"loss": 4.918, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.05849609375, |
|
"grad_norm": 0.7835460901260376, |
|
"learning_rate": 0.0004978048955677529, |
|
"loss": 4.9766, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.05859375, |
|
"grad_norm": 0.7298296689987183, |
|
"learning_rate": 0.0004977951229499302, |
|
"loss": 4.9648, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.05869140625, |
|
"grad_norm": 0.7160419225692749, |
|
"learning_rate": 0.0004977853287336325, |
|
"loss": 4.9531, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.0587890625, |
|
"grad_norm": 0.9255022406578064, |
|
"learning_rate": 0.0004977755129198092, |
|
"loss": 4.9727, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.05888671875, |
|
"grad_norm": 1.1262624263763428, |
|
"learning_rate": 0.0004977656755094119, |
|
"loss": 4.9766, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.058984375, |
|
"grad_norm": 0.8687927722930908, |
|
"learning_rate": 0.0004977558165033942, |
|
"loss": 4.9727, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.05908203125, |
|
"grad_norm": 0.827407717704773, |
|
"learning_rate": 0.0004977459359027121, |
|
"loss": 4.957, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.0591796875, |
|
"grad_norm": 0.7043539881706238, |
|
"learning_rate": 0.0004977360337083232, |
|
"loss": 4.9961, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.05927734375, |
|
"grad_norm": 0.6146707534790039, |
|
"learning_rate": 0.0004977261099211876, |
|
"loss": 4.957, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.059375, |
|
"grad_norm": 0.5867034196853638, |
|
"learning_rate": 0.0004977161645422672, |
|
"loss": 4.9648, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.05947265625, |
|
"grad_norm": 0.456230491399765, |
|
"learning_rate": 0.0004977061975725264, |
|
"loss": 4.9805, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.0595703125, |
|
"grad_norm": 0.4468291997909546, |
|
"learning_rate": 0.000497696209012931, |
|
"loss": 4.9844, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.05966796875, |
|
"grad_norm": 0.4772776961326599, |
|
"learning_rate": 0.0004976861988644498, |
|
"loss": 4.9453, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.059765625, |
|
"grad_norm": 0.510353147983551, |
|
"learning_rate": 0.0004976761671280529, |
|
"loss": 4.9688, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.05986328125, |
|
"grad_norm": 0.4383707046508789, |
|
"learning_rate": 0.0004976661138047128, |
|
"loss": 4.9531, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.0599609375, |
|
"grad_norm": 0.42035961151123047, |
|
"learning_rate": 0.0004976560388954044, |
|
"loss": 4.9453, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.06005859375, |
|
"grad_norm": 0.45699962973594666, |
|
"learning_rate": 0.0004976459424011041, |
|
"loss": 4.9531, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.06015625, |
|
"grad_norm": 0.47027596831321716, |
|
"learning_rate": 0.0004976358243227908, |
|
"loss": 4.9219, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.06025390625, |
|
"grad_norm": 0.4678795337677002, |
|
"learning_rate": 0.0004976256846614454, |
|
"loss": 4.9609, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.0603515625, |
|
"grad_norm": 0.4823755919933319, |
|
"learning_rate": 0.0004976155234180507, |
|
"loss": 4.957, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.06044921875, |
|
"grad_norm": 0.4279949367046356, |
|
"learning_rate": 0.0004976053405935921, |
|
"loss": 4.918, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.060546875, |
|
"grad_norm": 0.44116416573524475, |
|
"learning_rate": 0.0004975951361890565, |
|
"loss": 4.9297, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.06064453125, |
|
"grad_norm": 0.5708990097045898, |
|
"learning_rate": 0.0004975849102054332, |
|
"loss": 4.9492, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.0607421875, |
|
"grad_norm": 0.6033137440681458, |
|
"learning_rate": 0.0004975746626437135, |
|
"loss": 4.9336, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.06083984375, |
|
"grad_norm": 0.7036421895027161, |
|
"learning_rate": 0.0004975643935048908, |
|
"loss": 4.9297, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.0609375, |
|
"grad_norm": 0.6817371249198914, |
|
"learning_rate": 0.0004975541027899609, |
|
"loss": 4.9805, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.06103515625, |
|
"grad_norm": 0.6419718265533447, |
|
"learning_rate": 0.0004975437904999211, |
|
"loss": 4.9375, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.0611328125, |
|
"grad_norm": 0.6562728881835938, |
|
"learning_rate": 0.0004975334566357712, |
|
"loss": 4.957, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.06123046875, |
|
"grad_norm": 0.7153109312057495, |
|
"learning_rate": 0.000497523101198513, |
|
"loss": 4.9336, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.061328125, |
|
"grad_norm": 0.7886594533920288, |
|
"learning_rate": 0.0004975127241891505, |
|
"loss": 4.9492, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.06142578125, |
|
"grad_norm": 0.8648955225944519, |
|
"learning_rate": 0.0004975023256086896, |
|
"loss": 4.9453, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.0615234375, |
|
"grad_norm": 0.7228974103927612, |
|
"learning_rate": 0.0004974919054581382, |
|
"loss": 4.9531, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.06162109375, |
|
"grad_norm": 0.563917875289917, |
|
"learning_rate": 0.0004974814637385067, |
|
"loss": 4.9219, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.06171875, |
|
"grad_norm": 0.6323292851448059, |
|
"learning_rate": 0.0004974710004508073, |
|
"loss": 4.957, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.06181640625, |
|
"grad_norm": 0.6532883048057556, |
|
"learning_rate": 0.0004974605155960545, |
|
"loss": 4.957, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.0619140625, |
|
"grad_norm": 0.5852863788604736, |
|
"learning_rate": 0.0004974500091752643, |
|
"loss": 4.9492, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.06201171875, |
|
"grad_norm": 0.5243951082229614, |
|
"learning_rate": 0.0004974394811894555, |
|
"loss": 4.9102, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.062109375, |
|
"grad_norm": 0.4946766495704651, |
|
"learning_rate": 0.0004974289316396487, |
|
"loss": 4.957, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.06220703125, |
|
"grad_norm": 0.5417998433113098, |
|
"learning_rate": 0.0004974183605268667, |
|
"loss": 4.9531, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.0623046875, |
|
"grad_norm": 0.5387380719184875, |
|
"learning_rate": 0.0004974077678521343, |
|
"loss": 4.9805, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.06240234375, |
|
"grad_norm": 0.4830368161201477, |
|
"learning_rate": 0.000497397153616478, |
|
"loss": 4.9375, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.0625, |
|
"grad_norm": 0.5016372203826904, |
|
"learning_rate": 0.0004973865178209274, |
|
"loss": 4.9297, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.06259765625, |
|
"grad_norm": 0.5527303814888, |
|
"learning_rate": 0.000497375860466513, |
|
"loss": 4.9297, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.0626953125, |
|
"grad_norm": 0.5242739319801331, |
|
"learning_rate": 0.0004973651815542682, |
|
"loss": 4.9805, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.06279296875, |
|
"grad_norm": 0.48130086064338684, |
|
"learning_rate": 0.0004973544810852284, |
|
"loss": 4.957, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.062890625, |
|
"grad_norm": 0.5349105596542358, |
|
"learning_rate": 0.0004973437590604307, |
|
"loss": 4.9648, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.06298828125, |
|
"grad_norm": 0.5169801712036133, |
|
"learning_rate": 0.0004973330154809146, |
|
"loss": 4.9375, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.0630859375, |
|
"grad_norm": 0.480214387178421, |
|
"learning_rate": 0.0004973222503477216, |
|
"loss": 4.9531, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.06318359375, |
|
"grad_norm": 0.4224573075771332, |
|
"learning_rate": 0.0004973114636618954, |
|
"loss": 4.9219, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.06328125, |
|
"grad_norm": 0.41642358899116516, |
|
"learning_rate": 0.0004973006554244816, |
|
"loss": 4.918, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.06337890625, |
|
"grad_norm": 0.4622966945171356, |
|
"learning_rate": 0.000497289825636528, |
|
"loss": 4.9297, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.0634765625, |
|
"grad_norm": 0.5849423408508301, |
|
"learning_rate": 0.0004972789742990846, |
|
"loss": 4.9258, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.06357421875, |
|
"grad_norm": 0.5855206847190857, |
|
"learning_rate": 0.0004972681014132031, |
|
"loss": 4.9414, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.063671875, |
|
"grad_norm": 0.6138719320297241, |
|
"learning_rate": 0.0004972572069799378, |
|
"loss": 4.9141, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.06376953125, |
|
"grad_norm": 0.5948016047477722, |
|
"learning_rate": 0.0004972462910003447, |
|
"loss": 4.9414, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.0638671875, |
|
"grad_norm": 0.558039665222168, |
|
"learning_rate": 0.0004972353534754821, |
|
"loss": 4.918, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.06396484375, |
|
"grad_norm": 0.6696071624755859, |
|
"learning_rate": 0.0004972243944064103, |
|
"loss": 4.9258, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.0640625, |
|
"grad_norm": 0.7494735717773438, |
|
"learning_rate": 0.0004972134137941918, |
|
"loss": 4.8945, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.06416015625, |
|
"grad_norm": 0.8260038495063782, |
|
"learning_rate": 0.0004972024116398908, |
|
"loss": 4.9336, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.0642578125, |
|
"grad_norm": 0.9088923931121826, |
|
"learning_rate": 0.0004971913879445742, |
|
"loss": 4.9531, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.06435546875, |
|
"grad_norm": 0.7802959084510803, |
|
"learning_rate": 0.0004971803427093105, |
|
"loss": 4.9219, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.064453125, |
|
"grad_norm": 0.6456225514411926, |
|
"learning_rate": 0.0004971692759351705, |
|
"loss": 4.9414, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.06455078125, |
|
"grad_norm": 0.6266503930091858, |
|
"learning_rate": 0.0004971581876232272, |
|
"loss": 4.9219, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.0646484375, |
|
"grad_norm": 0.5530596971511841, |
|
"learning_rate": 0.0004971470777745553, |
|
"loss": 4.9336, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.06474609375, |
|
"grad_norm": 0.5629117488861084, |
|
"learning_rate": 0.0004971359463902319, |
|
"loss": 4.9727, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.06484375, |
|
"grad_norm": 0.5490591526031494, |
|
"learning_rate": 0.0004971247934713362, |
|
"loss": 4.9453, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.06494140625, |
|
"grad_norm": 0.5852833986282349, |
|
"learning_rate": 0.0004971136190189494, |
|
"loss": 4.9414, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.0650390625, |
|
"grad_norm": 0.6156993508338928, |
|
"learning_rate": 0.0004971024230341546, |
|
"loss": 4.957, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.06513671875, |
|
"grad_norm": 0.6128381490707397, |
|
"learning_rate": 0.0004970912055180373, |
|
"loss": 4.8867, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.065234375, |
|
"grad_norm": 0.4521043598651886, |
|
"learning_rate": 0.000497079966471685, |
|
"loss": 4.9141, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.06533203125, |
|
"grad_norm": 0.42642197012901306, |
|
"learning_rate": 0.0004970687058961873, |
|
"loss": 4.9336, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.0654296875, |
|
"grad_norm": 0.41460588574409485, |
|
"learning_rate": 0.0004970574237926356, |
|
"loss": 4.9062, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.06552734375, |
|
"grad_norm": 0.4534105956554413, |
|
"learning_rate": 0.000497046120162124, |
|
"loss": 4.8945, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.065625, |
|
"grad_norm": 0.45969158411026, |
|
"learning_rate": 0.0004970347950057478, |
|
"loss": 4.918, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.06572265625, |
|
"grad_norm": 0.45954084396362305, |
|
"learning_rate": 0.0004970234483246053, |
|
"loss": 4.9375, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.0658203125, |
|
"grad_norm": 0.5340402126312256, |
|
"learning_rate": 0.0004970120801197964, |
|
"loss": 4.9219, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.06591796875, |
|
"grad_norm": 0.5017228722572327, |
|
"learning_rate": 0.0004970006903924231, |
|
"loss": 4.9141, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.066015625, |
|
"grad_norm": 0.4929012060165405, |
|
"learning_rate": 0.0004969892791435896, |
|
"loss": 4.9062, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.06611328125, |
|
"grad_norm": 0.5938189625740051, |
|
"learning_rate": 0.0004969778463744021, |
|
"loss": 4.9414, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.0662109375, |
|
"grad_norm": 0.6704724431037903, |
|
"learning_rate": 0.000496966392085969, |
|
"loss": 4.9297, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.06630859375, |
|
"grad_norm": 0.6303629875183105, |
|
"learning_rate": 0.0004969549162794007, |
|
"loss": 4.9258, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.06640625, |
|
"grad_norm": 0.5393560528755188, |
|
"learning_rate": 0.0004969434189558096, |
|
"loss": 4.9258, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.06650390625, |
|
"grad_norm": 0.4611497223377228, |
|
"learning_rate": 0.0004969319001163104, |
|
"loss": 4.9102, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.0666015625, |
|
"grad_norm": 0.47802746295928955, |
|
"learning_rate": 0.0004969203597620197, |
|
"loss": 4.9219, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.06669921875, |
|
"grad_norm": 0.5258490443229675, |
|
"learning_rate": 0.0004969087978940564, |
|
"loss": 4.9492, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.066796875, |
|
"grad_norm": 0.5325090289115906, |
|
"learning_rate": 0.0004968972145135412, |
|
"loss": 4.9727, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.06689453125, |
|
"grad_norm": 0.5076428651809692, |
|
"learning_rate": 0.0004968856096215971, |
|
"loss": 4.8984, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.0669921875, |
|
"grad_norm": 0.5641031861305237, |
|
"learning_rate": 0.000496873983219349, |
|
"loss": 4.9414, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.06708984375, |
|
"grad_norm": 0.7779815793037415, |
|
"learning_rate": 0.0004968623353079242, |
|
"loss": 4.9102, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.0671875, |
|
"grad_norm": 0.618226170539856, |
|
"learning_rate": 0.0004968506658884517, |
|
"loss": 4.9336, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.06728515625, |
|
"grad_norm": 0.6069994568824768, |
|
"learning_rate": 0.0004968389749620629, |
|
"loss": 4.9453, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.0673828125, |
|
"grad_norm": 0.602695643901825, |
|
"learning_rate": 0.000496827262529891, |
|
"loss": 4.9141, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.06748046875, |
|
"grad_norm": 0.8353970050811768, |
|
"learning_rate": 0.0004968155285930717, |
|
"loss": 4.957, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.067578125, |
|
"grad_norm": 0.7601311206817627, |
|
"learning_rate": 0.0004968037731527422, |
|
"loss": 4.8867, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.06767578125, |
|
"grad_norm": 0.8417662978172302, |
|
"learning_rate": 0.0004967919962100424, |
|
"loss": 4.918, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.0677734375, |
|
"grad_norm": 0.864201009273529, |
|
"learning_rate": 0.0004967801977661138, |
|
"loss": 4.9336, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.06787109375, |
|
"grad_norm": 0.6699989438056946, |
|
"learning_rate": 0.0004967683778221003, |
|
"loss": 4.9531, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.06796875, |
|
"grad_norm": 0.5989636778831482, |
|
"learning_rate": 0.0004967565363791478, |
|
"loss": 4.9023, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.06806640625, |
|
"grad_norm": 0.5926920771598816, |
|
"learning_rate": 0.000496744673438404, |
|
"loss": 4.9141, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.0681640625, |
|
"grad_norm": 0.6211676597595215, |
|
"learning_rate": 0.0004967327890010192, |
|
"loss": 4.8984, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.06826171875, |
|
"grad_norm": 0.985095739364624, |
|
"learning_rate": 0.0004967208830681454, |
|
"loss": 4.9102, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.068359375, |
|
"grad_norm": 0.8903228044509888, |
|
"learning_rate": 0.0004967089556409367, |
|
"loss": 4.918, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.06845703125, |
|
"grad_norm": 0.7326058745384216, |
|
"learning_rate": 0.0004966970067205496, |
|
"loss": 4.9219, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.0685546875, |
|
"grad_norm": 0.5591720938682556, |
|
"learning_rate": 0.0004966850363081423, |
|
"loss": 4.9062, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.06865234375, |
|
"grad_norm": 0.5330259799957275, |
|
"learning_rate": 0.0004966730444048754, |
|
"loss": 4.9258, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.06875, |
|
"grad_norm": 0.5547099113464355, |
|
"learning_rate": 0.0004966610310119113, |
|
"loss": 4.9141, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.06884765625, |
|
"grad_norm": 0.5156508088111877, |
|
"learning_rate": 0.0004966489961304147, |
|
"loss": 4.918, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.0689453125, |
|
"grad_norm": 0.5710839629173279, |
|
"learning_rate": 0.0004966369397615522, |
|
"loss": 4.875, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.06904296875, |
|
"grad_norm": 0.6018747091293335, |
|
"learning_rate": 0.0004966248619064927, |
|
"loss": 4.9102, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.069140625, |
|
"grad_norm": 0.7630068063735962, |
|
"learning_rate": 0.000496612762566407, |
|
"loss": 4.9375, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.06923828125, |
|
"grad_norm": 0.9202441573143005, |
|
"learning_rate": 0.000496600641742468, |
|
"loss": 4.9375, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.0693359375, |
|
"grad_norm": 0.801213800907135, |
|
"learning_rate": 0.0004965884994358508, |
|
"loss": 4.9102, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.06943359375, |
|
"grad_norm": 0.6991446614265442, |
|
"learning_rate": 0.0004965763356477326, |
|
"loss": 4.9219, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.06953125, |
|
"grad_norm": 0.6677018404006958, |
|
"learning_rate": 0.0004965641503792924, |
|
"loss": 4.8984, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.06962890625, |
|
"grad_norm": 0.5393925905227661, |
|
"learning_rate": 0.0004965519436317115, |
|
"loss": 4.9141, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.0697265625, |
|
"grad_norm": 0.4885493814945221, |
|
"learning_rate": 0.0004965397154061736, |
|
"loss": 4.9102, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.06982421875, |
|
"grad_norm": 0.42598259449005127, |
|
"learning_rate": 0.0004965274657038637, |
|
"loss": 4.8867, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.069921875, |
|
"grad_norm": 0.3984520733356476, |
|
"learning_rate": 0.0004965151945259696, |
|
"loss": 4.918, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.07001953125, |
|
"grad_norm": 0.4602973163127899, |
|
"learning_rate": 0.0004965029018736807, |
|
"loss": 4.9336, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.0701171875, |
|
"grad_norm": 0.4962187111377716, |
|
"learning_rate": 0.0004964905877481889, |
|
"loss": 4.9141, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.07021484375, |
|
"grad_norm": 0.572699785232544, |
|
"learning_rate": 0.0004964782521506879, |
|
"loss": 4.9219, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.0703125, |
|
"grad_norm": 0.6187204718589783, |
|
"learning_rate": 0.0004964658950823734, |
|
"loss": 4.9219, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.07041015625, |
|
"grad_norm": 0.5086959600448608, |
|
"learning_rate": 0.0004964535165444436, |
|
"loss": 4.8945, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.0705078125, |
|
"grad_norm": 0.4606127142906189, |
|
"learning_rate": 0.0004964411165380983, |
|
"loss": 4.9102, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.07060546875, |
|
"grad_norm": 0.4611072242259979, |
|
"learning_rate": 0.0004964286950645397, |
|
"loss": 4.8945, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.070703125, |
|
"grad_norm": 0.4993979036808014, |
|
"learning_rate": 0.000496416252124972, |
|
"loss": 4.9023, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.07080078125, |
|
"grad_norm": 0.5374334454536438, |
|
"learning_rate": 0.0004964037877206014, |
|
"loss": 4.9023, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.0708984375, |
|
"grad_norm": 0.5764836668968201, |
|
"learning_rate": 0.0004963913018526363, |
|
"loss": 4.8789, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.07099609375, |
|
"grad_norm": 0.8342440724372864, |
|
"learning_rate": 0.000496378794522287, |
|
"loss": 4.8945, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.07109375, |
|
"grad_norm": 0.8987082839012146, |
|
"learning_rate": 0.0004963662657307661, |
|
"loss": 4.9375, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.07119140625, |
|
"grad_norm": 0.7202743291854858, |
|
"learning_rate": 0.0004963537154792881, |
|
"loss": 4.918, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.0712890625, |
|
"grad_norm": 0.8998580574989319, |
|
"learning_rate": 0.0004963411437690696, |
|
"loss": 4.8867, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.07138671875, |
|
"grad_norm": 0.808499813079834, |
|
"learning_rate": 0.0004963285506013297, |
|
"loss": 4.9414, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.071484375, |
|
"grad_norm": 0.5454872250556946, |
|
"learning_rate": 0.0004963159359772889, |
|
"loss": 4.9219, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.07158203125, |
|
"grad_norm": 0.4970705807209015, |
|
"learning_rate": 0.0004963032998981702, |
|
"loss": 4.918, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.0716796875, |
|
"grad_norm": 0.4672599732875824, |
|
"learning_rate": 0.0004962906423651985, |
|
"loss": 4.9102, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.07177734375, |
|
"grad_norm": 0.4656890630722046, |
|
"learning_rate": 0.000496277963379601, |
|
"loss": 4.9062, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.071875, |
|
"grad_norm": 0.4242008626461029, |
|
"learning_rate": 0.0004962652629426068, |
|
"loss": 4.9023, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.07197265625, |
|
"grad_norm": 0.38737159967422485, |
|
"learning_rate": 0.000496252541055447, |
|
"loss": 4.9102, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.0720703125, |
|
"grad_norm": 0.4093025326728821, |
|
"learning_rate": 0.000496239797719355, |
|
"loss": 4.8984, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.07216796875, |
|
"grad_norm": 0.4339083135128021, |
|
"learning_rate": 0.0004962270329355662, |
|
"loss": 4.9414, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.072265625, |
|
"grad_norm": 0.5110611915588379, |
|
"learning_rate": 0.000496214246705318, |
|
"loss": 4.9141, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.07236328125, |
|
"grad_norm": 0.5828584432601929, |
|
"learning_rate": 0.00049620143902985, |
|
"loss": 4.918, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.0724609375, |
|
"grad_norm": 0.5776299834251404, |
|
"learning_rate": 0.0004961886099104038, |
|
"loss": 4.8945, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.07255859375, |
|
"grad_norm": 0.48184171319007874, |
|
"learning_rate": 0.0004961757593482229, |
|
"loss": 4.8945, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.07265625, |
|
"grad_norm": 0.4601769745349884, |
|
"learning_rate": 0.0004961628873445535, |
|
"loss": 4.9023, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.07275390625, |
|
"grad_norm": 0.44548672437667847, |
|
"learning_rate": 0.0004961499939006431, |
|
"loss": 4.9141, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.0728515625, |
|
"grad_norm": 0.5392785668373108, |
|
"learning_rate": 0.0004961370790177418, |
|
"loss": 4.8945, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.07294921875, |
|
"grad_norm": 0.5730900764465332, |
|
"learning_rate": 0.0004961241426971014, |
|
"loss": 4.9258, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.073046875, |
|
"grad_norm": 0.6566439270973206, |
|
"learning_rate": 0.0004961111849399763, |
|
"loss": 4.9102, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.07314453125, |
|
"grad_norm": 0.6477753520011902, |
|
"learning_rate": 0.0004960982057476224, |
|
"loss": 4.918, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.0732421875, |
|
"grad_norm": 0.5533462166786194, |
|
"learning_rate": 0.0004960852051212982, |
|
"loss": 4.8711, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.07333984375, |
|
"grad_norm": 0.5516164302825928, |
|
"learning_rate": 0.0004960721830622637, |
|
"loss": 4.8828, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.0734375, |
|
"grad_norm": 0.5473514795303345, |
|
"learning_rate": 0.0004960591395717816, |
|
"loss": 4.9297, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.07353515625, |
|
"grad_norm": 0.5541347861289978, |
|
"learning_rate": 0.0004960460746511162, |
|
"loss": 4.8906, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.0736328125, |
|
"grad_norm": 0.5999252200126648, |
|
"learning_rate": 0.0004960329883015341, |
|
"loss": 4.9023, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.07373046875, |
|
"grad_norm": 0.5422800779342651, |
|
"learning_rate": 0.0004960198805243039, |
|
"loss": 4.9023, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.073828125, |
|
"grad_norm": 0.4828493595123291, |
|
"learning_rate": 0.0004960067513206964, |
|
"loss": 4.8555, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.07392578125, |
|
"grad_norm": 0.44730255007743835, |
|
"learning_rate": 0.0004959936006919843, |
|
"loss": 4.9141, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.0740234375, |
|
"grad_norm": 0.5136485695838928, |
|
"learning_rate": 0.0004959804286394425, |
|
"loss": 4.9062, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.07412109375, |
|
"grad_norm": 0.558743417263031, |
|
"learning_rate": 0.000495967235164348, |
|
"loss": 4.9297, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.07421875, |
|
"grad_norm": 0.503806471824646, |
|
"learning_rate": 0.0004959540202679797, |
|
"loss": 4.9102, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.07431640625, |
|
"grad_norm": 0.4152722954750061, |
|
"learning_rate": 0.0004959407839516188, |
|
"loss": 4.8711, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.0744140625, |
|
"grad_norm": 0.38589704036712646, |
|
"learning_rate": 0.0004959275262165485, |
|
"loss": 4.8906, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.07451171875, |
|
"grad_norm": 0.43141141533851624, |
|
"learning_rate": 0.0004959142470640539, |
|
"loss": 4.8516, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.074609375, |
|
"grad_norm": 0.42636358737945557, |
|
"learning_rate": 0.0004959009464954224, |
|
"loss": 4.8789, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.07470703125, |
|
"grad_norm": 0.4754694700241089, |
|
"learning_rate": 0.0004958876245119433, |
|
"loss": 4.8828, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.0748046875, |
|
"grad_norm": 0.4849173128604889, |
|
"learning_rate": 0.0004958742811149083, |
|
"loss": 4.9062, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.07490234375, |
|
"grad_norm": 0.5047995448112488, |
|
"learning_rate": 0.0004958609163056108, |
|
"loss": 4.8828, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.075, |
|
"grad_norm": 0.5260794758796692, |
|
"learning_rate": 0.0004958475300853464, |
|
"loss": 4.8828, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.07509765625, |
|
"grad_norm": 0.4804491102695465, |
|
"learning_rate": 0.0004958341224554129, |
|
"loss": 4.8906, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.0751953125, |
|
"grad_norm": 0.4267667233943939, |
|
"learning_rate": 0.00049582069341711, |
|
"loss": 4.875, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.07529296875, |
|
"grad_norm": 0.460480660200119, |
|
"learning_rate": 0.0004958072429717395, |
|
"loss": 4.8789, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.075390625, |
|
"grad_norm": 0.4774636924266815, |
|
"learning_rate": 0.0004957937711206055, |
|
"loss": 4.8945, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.07548828125, |
|
"grad_norm": 0.44169384241104126, |
|
"learning_rate": 0.000495780277865014, |
|
"loss": 4.875, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.0755859375, |
|
"grad_norm": 0.4531261622905731, |
|
"learning_rate": 0.000495766763206273, |
|
"loss": 4.8945, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.07568359375, |
|
"grad_norm": 0.49369335174560547, |
|
"learning_rate": 0.0004957532271456926, |
|
"loss": 4.9023, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.07578125, |
|
"grad_norm": 0.5407163500785828, |
|
"learning_rate": 0.000495739669684585, |
|
"loss": 4.9023, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.07587890625, |
|
"grad_norm": 0.6588068008422852, |
|
"learning_rate": 0.0004957260908242647, |
|
"loss": 4.8906, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.0759765625, |
|
"grad_norm": 0.708760142326355, |
|
"learning_rate": 0.000495712490566048, |
|
"loss": 4.8828, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.07607421875, |
|
"grad_norm": 0.6345651745796204, |
|
"learning_rate": 0.0004956988689112533, |
|
"loss": 4.8984, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.076171875, |
|
"grad_norm": 0.5177327990531921, |
|
"learning_rate": 0.0004956852258612011, |
|
"loss": 4.8906, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.07626953125, |
|
"grad_norm": 0.5784163475036621, |
|
"learning_rate": 0.0004956715614172141, |
|
"loss": 4.8906, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.0763671875, |
|
"grad_norm": 0.4898063540458679, |
|
"learning_rate": 0.0004956578755806168, |
|
"loss": 4.8945, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.07646484375, |
|
"grad_norm": 0.43215012550354004, |
|
"learning_rate": 0.0004956441683527361, |
|
"loss": 4.9219, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.0765625, |
|
"grad_norm": 0.42248815298080444, |
|
"learning_rate": 0.0004956304397349009, |
|
"loss": 4.8711, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.07666015625, |
|
"grad_norm": 0.4588373303413391, |
|
"learning_rate": 0.0004956166897284419, |
|
"loss": 4.8828, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.0767578125, |
|
"grad_norm": 0.5150899887084961, |
|
"learning_rate": 0.0004956029183346922, |
|
"loss": 4.875, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.07685546875, |
|
"grad_norm": 0.49109452962875366, |
|
"learning_rate": 0.0004955891255549868, |
|
"loss": 4.8828, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.076953125, |
|
"grad_norm": 0.5134442448616028, |
|
"learning_rate": 0.0004955753113906629, |
|
"loss": 4.8789, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.07705078125, |
|
"grad_norm": 0.5372751951217651, |
|
"learning_rate": 0.0004955614758430594, |
|
"loss": 4.8711, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.0771484375, |
|
"grad_norm": 0.5520480871200562, |
|
"learning_rate": 0.0004955476189135179, |
|
"loss": 4.875, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.07724609375, |
|
"grad_norm": 0.5645185112953186, |
|
"learning_rate": 0.0004955337406033817, |
|
"loss": 4.8555, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.07734375, |
|
"grad_norm": 0.5430117249488831, |
|
"learning_rate": 0.000495519840913996, |
|
"loss": 4.8906, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.07744140625, |
|
"grad_norm": 0.5487677454948425, |
|
"learning_rate": 0.0004955059198467085, |
|
"loss": 4.8906, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.0775390625, |
|
"grad_norm": 0.5324546098709106, |
|
"learning_rate": 0.0004954919774028685, |
|
"loss": 4.8672, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.07763671875, |
|
"grad_norm": 0.48905646800994873, |
|
"learning_rate": 0.0004954780135838278, |
|
"loss": 4.8906, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.077734375, |
|
"grad_norm": 0.49760356545448303, |
|
"learning_rate": 0.0004954640283909401, |
|
"loss": 4.8984, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.07783203125, |
|
"grad_norm": 0.5531620979309082, |
|
"learning_rate": 0.0004954500218255613, |
|
"loss": 4.9062, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.0779296875, |
|
"grad_norm": 0.5735150575637817, |
|
"learning_rate": 0.0004954359938890489, |
|
"loss": 4.8594, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.07802734375, |
|
"grad_norm": 0.5884649753570557, |
|
"learning_rate": 0.000495421944582763, |
|
"loss": 4.918, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.078125, |
|
"grad_norm": 0.48785004019737244, |
|
"learning_rate": 0.0004954078739080656, |
|
"loss": 4.9102, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.07822265625, |
|
"grad_norm": 0.47542282938957214, |
|
"learning_rate": 0.0004953937818663208, |
|
"loss": 4.8789, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.0783203125, |
|
"grad_norm": 0.5039560794830322, |
|
"learning_rate": 0.0004953796684588946, |
|
"loss": 4.8906, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.07841796875, |
|
"grad_norm": 0.47064536809921265, |
|
"learning_rate": 0.0004953655336871553, |
|
"loss": 4.8594, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.078515625, |
|
"grad_norm": 0.44826674461364746, |
|
"learning_rate": 0.0004953513775524731, |
|
"loss": 4.8633, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.07861328125, |
|
"grad_norm": 0.403845876455307, |
|
"learning_rate": 0.0004953372000562204, |
|
"loss": 4.8984, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.0787109375, |
|
"grad_norm": 0.4083589017391205, |
|
"learning_rate": 0.0004953230011997716, |
|
"loss": 4.9141, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.07880859375, |
|
"grad_norm": 0.3878018260002136, |
|
"learning_rate": 0.0004953087809845031, |
|
"loss": 4.875, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.07890625, |
|
"grad_norm": 0.39088737964630127, |
|
"learning_rate": 0.0004952945394117936, |
|
"loss": 4.9336, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.07900390625, |
|
"grad_norm": 0.46204832196235657, |
|
"learning_rate": 0.0004952802764830236, |
|
"loss": 4.8477, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.0791015625, |
|
"grad_norm": 0.5762801766395569, |
|
"learning_rate": 0.0004952659921995758, |
|
"loss": 4.8945, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.07919921875, |
|
"grad_norm": 0.6087040305137634, |
|
"learning_rate": 0.0004952516865628352, |
|
"loss": 4.8945, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.079296875, |
|
"grad_norm": 0.6062765717506409, |
|
"learning_rate": 0.0004952373595741883, |
|
"loss": 4.8711, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.07939453125, |
|
"grad_norm": 0.6468778252601624, |
|
"learning_rate": 0.0004952230112350241, |
|
"loss": 4.8633, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.0794921875, |
|
"grad_norm": 0.6803391575813293, |
|
"learning_rate": 0.0004952086415467337, |
|
"loss": 4.8945, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.07958984375, |
|
"grad_norm": 0.6036785244941711, |
|
"learning_rate": 0.0004951942505107101, |
|
"loss": 4.8633, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.0796875, |
|
"grad_norm": 0.5079777240753174, |
|
"learning_rate": 0.0004951798381283482, |
|
"loss": 4.8555, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.07978515625, |
|
"grad_norm": 0.47106999158859253, |
|
"learning_rate": 0.0004951654044010455, |
|
"loss": 4.832, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.0798828125, |
|
"grad_norm": 0.418499618768692, |
|
"learning_rate": 0.0004951509493302011, |
|
"loss": 4.8711, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.07998046875, |
|
"grad_norm": 0.3655601143836975, |
|
"learning_rate": 0.0004951364729172163, |
|
"loss": 4.8516, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.080078125, |
|
"grad_norm": 0.42946258187294006, |
|
"learning_rate": 0.0004951219751634945, |
|
"loss": 4.8789, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.08017578125, |
|
"grad_norm": 0.412626177072525, |
|
"learning_rate": 0.0004951074560704412, |
|
"loss": 4.8828, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.0802734375, |
|
"grad_norm": 0.44532856345176697, |
|
"learning_rate": 0.0004950929156394639, |
|
"loss": 4.8828, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.08037109375, |
|
"grad_norm": 0.4855706989765167, |
|
"learning_rate": 0.0004950783538719723, |
|
"loss": 4.9062, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.08046875, |
|
"grad_norm": 0.504410982131958, |
|
"learning_rate": 0.0004950637707693779, |
|
"loss": 4.8945, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.08056640625, |
|
"grad_norm": 0.5246967077255249, |
|
"learning_rate": 0.0004950491663330946, |
|
"loss": 4.875, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.0806640625, |
|
"grad_norm": 0.5594045519828796, |
|
"learning_rate": 0.000495034540564538, |
|
"loss": 4.8867, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.08076171875, |
|
"grad_norm": 0.5870455503463745, |
|
"learning_rate": 0.000495019893465126, |
|
"loss": 4.8789, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.080859375, |
|
"grad_norm": 0.5400936007499695, |
|
"learning_rate": 0.0004950052250362786, |
|
"loss": 4.8906, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.08095703125, |
|
"grad_norm": 0.4675130546092987, |
|
"learning_rate": 0.000494990535279418, |
|
"loss": 4.8789, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.0810546875, |
|
"grad_norm": 0.45326483249664307, |
|
"learning_rate": 0.0004949758241959679, |
|
"loss": 4.8906, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.08115234375, |
|
"grad_norm": 0.43965944647789, |
|
"learning_rate": 0.0004949610917873547, |
|
"loss": 4.8555, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.08125, |
|
"grad_norm": 0.5271297693252563, |
|
"learning_rate": 0.0004949463380550065, |
|
"loss": 4.8398, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.08134765625, |
|
"grad_norm": 0.6631085872650146, |
|
"learning_rate": 0.0004949315630003537, |
|
"loss": 4.8711, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.0814453125, |
|
"grad_norm": 0.5798559188842773, |
|
"learning_rate": 0.0004949167666248285, |
|
"loss": 4.8672, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.08154296875, |
|
"grad_norm": 0.45223212242126465, |
|
"learning_rate": 0.0004949019489298653, |
|
"loss": 4.8555, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.081640625, |
|
"grad_norm": 0.4504897892475128, |
|
"learning_rate": 0.0004948871099169006, |
|
"loss": 4.8555, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.08173828125, |
|
"grad_norm": 0.4103120267391205, |
|
"learning_rate": 0.0004948722495873732, |
|
"loss": 4.8906, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.0818359375, |
|
"grad_norm": 0.4391990602016449, |
|
"learning_rate": 0.0004948573679427233, |
|
"loss": 4.8633, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.08193359375, |
|
"grad_norm": 0.5236042141914368, |
|
"learning_rate": 0.0004948424649843938, |
|
"loss": 4.8633, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.08203125, |
|
"grad_norm": 0.6059775948524475, |
|
"learning_rate": 0.0004948275407138293, |
|
"loss": 4.8477, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.08212890625, |
|
"grad_norm": 0.6779847145080566, |
|
"learning_rate": 0.0004948125951324768, |
|
"loss": 4.8828, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.0822265625, |
|
"grad_norm": 0.6596094369888306, |
|
"learning_rate": 0.000494797628241785, |
|
"loss": 4.8633, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.08232421875, |
|
"grad_norm": 0.550703763961792, |
|
"learning_rate": 0.000494782640043205, |
|
"loss": 4.8672, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.082421875, |
|
"grad_norm": 0.4469245970249176, |
|
"learning_rate": 0.0004947676305381897, |
|
"loss": 4.8984, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.08251953125, |
|
"grad_norm": 0.4176386892795563, |
|
"learning_rate": 0.0004947525997281941, |
|
"loss": 4.8516, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.0826171875, |
|
"grad_norm": 0.362447589635849, |
|
"learning_rate": 0.0004947375476146755, |
|
"loss": 4.8633, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.08271484375, |
|
"grad_norm": 0.39331042766571045, |
|
"learning_rate": 0.000494722474199093, |
|
"loss": 4.8711, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.0828125, |
|
"grad_norm": 0.37120670080184937, |
|
"learning_rate": 0.0004947073794829079, |
|
"loss": 4.8828, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.08291015625, |
|
"grad_norm": 0.40028196573257446, |
|
"learning_rate": 0.0004946922634675836, |
|
"loss": 4.8789, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.0830078125, |
|
"grad_norm": 0.47256696224212646, |
|
"learning_rate": 0.0004946771261545853, |
|
"loss": 4.8828, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.08310546875, |
|
"grad_norm": 0.5021369457244873, |
|
"learning_rate": 0.0004946619675453806, |
|
"loss": 4.8555, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.083203125, |
|
"grad_norm": 0.4181872010231018, |
|
"learning_rate": 0.000494646787641439, |
|
"loss": 4.8672, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.08330078125, |
|
"grad_norm": 0.40219172835350037, |
|
"learning_rate": 0.000494631586444232, |
|
"loss": 4.8672, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.0833984375, |
|
"grad_norm": 0.42699527740478516, |
|
"learning_rate": 0.0004946163639552335, |
|
"loss": 4.8984, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.08349609375, |
|
"grad_norm": 0.46114829182624817, |
|
"learning_rate": 0.0004946011201759189, |
|
"loss": 4.832, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.08359375, |
|
"grad_norm": 0.5074323415756226, |
|
"learning_rate": 0.0004945858551077662, |
|
"loss": 4.8789, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.08369140625, |
|
"grad_norm": 0.5219331979751587, |
|
"learning_rate": 0.0004945705687522552, |
|
"loss": 4.8438, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.0837890625, |
|
"grad_norm": 0.6114364266395569, |
|
"learning_rate": 0.0004945552611108679, |
|
"loss": 4.8672, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.08388671875, |
|
"grad_norm": 0.7798656821250916, |
|
"learning_rate": 0.0004945399321850879, |
|
"loss": 4.8789, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.083984375, |
|
"grad_norm": 0.7879101634025574, |
|
"learning_rate": 0.0004945245819764016, |
|
"loss": 4.8828, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.08408203125, |
|
"grad_norm": 0.7016037106513977, |
|
"learning_rate": 0.0004945092104862971, |
|
"loss": 4.875, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.0841796875, |
|
"grad_norm": 0.49669718742370605, |
|
"learning_rate": 0.0004944938177162644, |
|
"loss": 4.8359, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.08427734375, |
|
"grad_norm": 0.4770626425743103, |
|
"learning_rate": 0.0004944784036677958, |
|
"loss": 4.8594, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.084375, |
|
"grad_norm": 0.4248046278953552, |
|
"learning_rate": 0.0004944629683423855, |
|
"loss": 4.8867, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.08447265625, |
|
"grad_norm": 0.4632871747016907, |
|
"learning_rate": 0.00049444751174153, |
|
"loss": 4.8594, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.0845703125, |
|
"grad_norm": 0.48169225454330444, |
|
"learning_rate": 0.0004944320338667276, |
|
"loss": 4.8633, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.08466796875, |
|
"grad_norm": 0.42320355772972107, |
|
"learning_rate": 0.0004944165347194788, |
|
"loss": 4.8867, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.084765625, |
|
"grad_norm": 0.39030560851097107, |
|
"learning_rate": 0.0004944010143012861, |
|
"loss": 4.8828, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.08486328125, |
|
"grad_norm": 0.4671940207481384, |
|
"learning_rate": 0.0004943854726136542, |
|
"loss": 4.8203, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.0849609375, |
|
"grad_norm": 0.4762789309024811, |
|
"learning_rate": 0.0004943699096580897, |
|
"loss": 4.8906, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.08505859375, |
|
"grad_norm": 0.5251975655555725, |
|
"learning_rate": 0.0004943543254361013, |
|
"loss": 4.8672, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.08515625, |
|
"grad_norm": 0.5693690180778503, |
|
"learning_rate": 0.0004943387199491998, |
|
"loss": 4.9102, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.08525390625, |
|
"grad_norm": 0.5571799278259277, |
|
"learning_rate": 0.0004943230931988981, |
|
"loss": 4.8672, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.0853515625, |
|
"grad_norm": 0.4701318144798279, |
|
"learning_rate": 0.0004943074451867111, |
|
"loss": 4.8789, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.08544921875, |
|
"grad_norm": 0.43155744671821594, |
|
"learning_rate": 0.0004942917759141556, |
|
"loss": 4.8555, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.085546875, |
|
"grad_norm": 0.43980658054351807, |
|
"learning_rate": 0.0004942760853827509, |
|
"loss": 4.8789, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.08564453125, |
|
"grad_norm": 0.46005746722221375, |
|
"learning_rate": 0.0004942603735940179, |
|
"loss": 4.8867, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.0857421875, |
|
"grad_norm": 0.4600110948085785, |
|
"learning_rate": 0.0004942446405494798, |
|
"loss": 4.8633, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.08583984375, |
|
"grad_norm": 0.40931013226509094, |
|
"learning_rate": 0.0004942288862506618, |
|
"loss": 4.8516, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.0859375, |
|
"grad_norm": 0.4222780466079712, |
|
"learning_rate": 0.0004942131106990911, |
|
"loss": 4.8789, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.08603515625, |
|
"grad_norm": 0.44859063625335693, |
|
"learning_rate": 0.0004941973138962973, |
|
"loss": 4.8672, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.0861328125, |
|
"grad_norm": 0.48440831899642944, |
|
"learning_rate": 0.0004941814958438115, |
|
"loss": 4.8555, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.08623046875, |
|
"grad_norm": 0.525878369808197, |
|
"learning_rate": 0.0004941656565431673, |
|
"loss": 4.8281, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.086328125, |
|
"grad_norm": 0.6066216230392456, |
|
"learning_rate": 0.0004941497959959003, |
|
"loss": 4.8477, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.08642578125, |
|
"grad_norm": 0.6154394149780273, |
|
"learning_rate": 0.0004941339142035478, |
|
"loss": 4.875, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.0865234375, |
|
"grad_norm": 0.5415538549423218, |
|
"learning_rate": 0.0004941180111676497, |
|
"loss": 4.8359, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.08662109375, |
|
"grad_norm": 0.43851184844970703, |
|
"learning_rate": 0.0004941020868897474, |
|
"loss": 4.8477, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.08671875, |
|
"grad_norm": 0.4420389235019684, |
|
"learning_rate": 0.0004940861413713849, |
|
"loss": 4.8516, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.08681640625, |
|
"grad_norm": 0.49204596877098083, |
|
"learning_rate": 0.000494070174614108, |
|
"loss": 4.8516, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.0869140625, |
|
"grad_norm": 0.4975183308124542, |
|
"learning_rate": 0.0004940541866194645, |
|
"loss": 4.875, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.08701171875, |
|
"grad_norm": 0.4811098873615265, |
|
"learning_rate": 0.0004940381773890043, |
|
"loss": 4.8555, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.087109375, |
|
"grad_norm": 0.5079295039176941, |
|
"learning_rate": 0.0004940221469242794, |
|
"loss": 4.8633, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.08720703125, |
|
"grad_norm": 0.5155340433120728, |
|
"learning_rate": 0.000494006095226844, |
|
"loss": 4.8828, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.0873046875, |
|
"grad_norm": 0.47939589619636536, |
|
"learning_rate": 0.0004939900222982539, |
|
"loss": 4.8164, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.08740234375, |
|
"grad_norm": 0.5215303897857666, |
|
"learning_rate": 0.0004939739281400674, |
|
"loss": 4.8633, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.0875, |
|
"grad_norm": 0.48365482687950134, |
|
"learning_rate": 0.0004939578127538449, |
|
"loss": 4.8711, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.08759765625, |
|
"grad_norm": 0.414248526096344, |
|
"learning_rate": 0.0004939416761411484, |
|
"loss": 4.8594, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.0876953125, |
|
"grad_norm": 0.4160768389701843, |
|
"learning_rate": 0.0004939255183035424, |
|
"loss": 4.8633, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.08779296875, |
|
"grad_norm": 0.4438508152961731, |
|
"learning_rate": 0.0004939093392425933, |
|
"loss": 4.8398, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.087890625, |
|
"grad_norm": 0.5406934022903442, |
|
"learning_rate": 0.0004938931389598695, |
|
"loss": 4.832, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.08798828125, |
|
"grad_norm": 0.6314405202865601, |
|
"learning_rate": 0.0004938769174569413, |
|
"loss": 4.8672, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.0880859375, |
|
"grad_norm": 0.6316208839416504, |
|
"learning_rate": 0.0004938606747353818, |
|
"loss": 4.8711, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.08818359375, |
|
"grad_norm": 0.49028605222702026, |
|
"learning_rate": 0.0004938444107967651, |
|
"loss": 4.8398, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.08828125, |
|
"grad_norm": 0.40383410453796387, |
|
"learning_rate": 0.0004938281256426681, |
|
"loss": 4.8281, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.08837890625, |
|
"grad_norm": 0.4249477982521057, |
|
"learning_rate": 0.0004938118192746695, |
|
"loss": 4.8633, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.0884765625, |
|
"grad_norm": 0.4824749827384949, |
|
"learning_rate": 0.0004937954916943502, |
|
"loss": 4.8164, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.08857421875, |
|
"grad_norm": 0.5105504989624023, |
|
"learning_rate": 0.0004937791429032929, |
|
"loss": 4.8477, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.088671875, |
|
"grad_norm": 0.4530431032180786, |
|
"learning_rate": 0.0004937627729030825, |
|
"loss": 4.8672, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.08876953125, |
|
"grad_norm": 0.37337881326675415, |
|
"learning_rate": 0.0004937463816953061, |
|
"loss": 4.875, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.0888671875, |
|
"grad_norm": 0.3546503484249115, |
|
"learning_rate": 0.0004937299692815525, |
|
"loss": 4.8555, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.08896484375, |
|
"grad_norm": 0.3876660466194153, |
|
"learning_rate": 0.000493713535663413, |
|
"loss": 4.8594, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.0890625, |
|
"grad_norm": 0.4789651036262512, |
|
"learning_rate": 0.0004936970808424807, |
|
"loss": 4.8477, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.08916015625, |
|
"grad_norm": 0.5593496561050415, |
|
"learning_rate": 0.0004936806048203506, |
|
"loss": 4.8711, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.0892578125, |
|
"grad_norm": 0.6056161522865295, |
|
"learning_rate": 0.0004936641075986201, |
|
"loss": 4.8516, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.08935546875, |
|
"grad_norm": 0.5245595574378967, |
|
"learning_rate": 0.0004936475891788883, |
|
"loss": 4.8164, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.089453125, |
|
"grad_norm": 0.49999815225601196, |
|
"learning_rate": 0.0004936310495627569, |
|
"loss": 4.832, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.08955078125, |
|
"grad_norm": 0.6258394718170166, |
|
"learning_rate": 0.0004936144887518291, |
|
"loss": 4.8711, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.0896484375, |
|
"grad_norm": 0.6583293080329895, |
|
"learning_rate": 0.0004935979067477103, |
|
"loss": 4.8633, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.08974609375, |
|
"grad_norm": 0.5308080911636353, |
|
"learning_rate": 0.000493581303552008, |
|
"loss": 4.8672, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.08984375, |
|
"grad_norm": 0.42577865719795227, |
|
"learning_rate": 0.0004935646791663318, |
|
"loss": 4.8555, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.08994140625, |
|
"grad_norm": 0.3947119414806366, |
|
"learning_rate": 0.0004935480335922935, |
|
"loss": 4.8359, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.0900390625, |
|
"grad_norm": 0.400806725025177, |
|
"learning_rate": 0.0004935313668315065, |
|
"loss": 4.8359, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.09013671875, |
|
"grad_norm": 0.3871156573295593, |
|
"learning_rate": 0.0004935146788855868, |
|
"loss": 4.875, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.090234375, |
|
"grad_norm": 0.3819091022014618, |
|
"learning_rate": 0.000493497969756152, |
|
"loss": 4.8242, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.09033203125, |
|
"grad_norm": 0.3758518397808075, |
|
"learning_rate": 0.000493481239444822, |
|
"loss": 4.8477, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.0904296875, |
|
"grad_norm": 0.4341889023780823, |
|
"learning_rate": 0.0004934644879532185, |
|
"loss": 4.832, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.09052734375, |
|
"grad_norm": 0.44294634461402893, |
|
"learning_rate": 0.0004934477152829658, |
|
"loss": 4.832, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.090625, |
|
"grad_norm": 0.4348030984401703, |
|
"learning_rate": 0.0004934309214356897, |
|
"loss": 4.8203, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.09072265625, |
|
"grad_norm": 0.497432142496109, |
|
"learning_rate": 0.0004934141064130181, |
|
"loss": 4.8633, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.0908203125, |
|
"grad_norm": 0.5283389687538147, |
|
"learning_rate": 0.0004933972702165814, |
|
"loss": 4.8672, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.09091796875, |
|
"grad_norm": 0.5673421025276184, |
|
"learning_rate": 0.0004933804128480117, |
|
"loss": 4.8359, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.091015625, |
|
"grad_norm": 0.5137133002281189, |
|
"learning_rate": 0.000493363534308943, |
|
"loss": 4.8359, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.09111328125, |
|
"grad_norm": 0.40789496898651123, |
|
"learning_rate": 0.0004933466346010117, |
|
"loss": 4.8359, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.0912109375, |
|
"grad_norm": 0.4613848328590393, |
|
"learning_rate": 0.0004933297137258561, |
|
"loss": 4.8594, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.09130859375, |
|
"grad_norm": 0.49260833859443665, |
|
"learning_rate": 0.0004933127716851167, |
|
"loss": 4.8359, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.09140625, |
|
"grad_norm": 0.4962104856967926, |
|
"learning_rate": 0.0004932958084804356, |
|
"loss": 4.8594, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.09150390625, |
|
"grad_norm": 0.5404455661773682, |
|
"learning_rate": 0.0004932788241134576, |
|
"loss": 4.8438, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.0916015625, |
|
"grad_norm": 0.5084996819496155, |
|
"learning_rate": 0.0004932618185858288, |
|
"loss": 4.8633, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.09169921875, |
|
"grad_norm": 0.4125216603279114, |
|
"learning_rate": 0.0004932447918991983, |
|
"loss": 4.8438, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.091796875, |
|
"grad_norm": 0.3724025785923004, |
|
"learning_rate": 0.0004932277440552164, |
|
"loss": 4.8359, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.09189453125, |
|
"grad_norm": 0.4485456645488739, |
|
"learning_rate": 0.0004932106750555358, |
|
"loss": 4.8594, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.0919921875, |
|
"grad_norm": 0.4383455216884613, |
|
"learning_rate": 0.0004931935849018112, |
|
"loss": 4.8359, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.09208984375, |
|
"grad_norm": 0.4460577368736267, |
|
"learning_rate": 0.0004931764735956995, |
|
"loss": 4.8516, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.0921875, |
|
"grad_norm": 0.4385360777378082, |
|
"learning_rate": 0.0004931593411388594, |
|
"loss": 4.8633, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.09228515625, |
|
"grad_norm": 0.44357937574386597, |
|
"learning_rate": 0.0004931421875329519, |
|
"loss": 4.8398, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.0923828125, |
|
"grad_norm": 0.5505833625793457, |
|
"learning_rate": 0.0004931250127796398, |
|
"loss": 4.8398, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.09248046875, |
|
"grad_norm": 0.5369737148284912, |
|
"learning_rate": 0.0004931078168805881, |
|
"loss": 4.8555, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.092578125, |
|
"grad_norm": 0.5083466172218323, |
|
"learning_rate": 0.0004930905998374639, |
|
"loss": 4.8398, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.09267578125, |
|
"grad_norm": 0.48525020480155945, |
|
"learning_rate": 0.0004930733616519363, |
|
"loss": 4.8594, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.0927734375, |
|
"grad_norm": 0.42595598101615906, |
|
"learning_rate": 0.0004930561023256762, |
|
"loss": 4.8359, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.09287109375, |
|
"grad_norm": 0.40371179580688477, |
|
"learning_rate": 0.000493038821860357, |
|
"loss": 4.8711, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.09296875, |
|
"grad_norm": 0.4778882563114166, |
|
"learning_rate": 0.0004930215202576539, |
|
"loss": 4.8359, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.09306640625, |
|
"grad_norm": 0.5457689166069031, |
|
"learning_rate": 0.000493004197519244, |
|
"loss": 4.8242, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.0931640625, |
|
"grad_norm": 0.4889971613883972, |
|
"learning_rate": 0.0004929868536468069, |
|
"loss": 4.8477, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.09326171875, |
|
"grad_norm": 0.4244244396686554, |
|
"learning_rate": 0.0004929694886420239, |
|
"loss": 4.8672, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.093359375, |
|
"grad_norm": 0.44931143522262573, |
|
"learning_rate": 0.0004929521025065782, |
|
"loss": 4.8594, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.09345703125, |
|
"grad_norm": 0.5113864541053772, |
|
"learning_rate": 0.0004929346952421553, |
|
"loss": 4.8359, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.0935546875, |
|
"grad_norm": 0.47967684268951416, |
|
"learning_rate": 0.000492917266850443, |
|
"loss": 4.8438, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.09365234375, |
|
"grad_norm": 0.41450557112693787, |
|
"learning_rate": 0.0004928998173331306, |
|
"loss": 4.8594, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.09375, |
|
"grad_norm": 0.4654901325702667, |
|
"learning_rate": 0.0004928823466919098, |
|
"loss": 4.8672, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.09384765625, |
|
"grad_norm": 0.3816904127597809, |
|
"learning_rate": 0.0004928648549284743, |
|
"loss": 4.8164, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.0939453125, |
|
"grad_norm": 0.37036556005477905, |
|
"learning_rate": 0.0004928473420445198, |
|
"loss": 4.8359, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.09404296875, |
|
"grad_norm": 0.427219420671463, |
|
"learning_rate": 0.0004928298080417439, |
|
"loss": 4.8516, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.094140625, |
|
"grad_norm": 0.47933682799339294, |
|
"learning_rate": 0.0004928122529218467, |
|
"loss": 4.8359, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.09423828125, |
|
"grad_norm": 0.5197924375534058, |
|
"learning_rate": 0.0004927946766865298, |
|
"loss": 4.8281, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.0943359375, |
|
"grad_norm": 0.5642294883728027, |
|
"learning_rate": 0.0004927770793374971, |
|
"loss": 4.8203, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.09443359375, |
|
"grad_norm": 0.5989418029785156, |
|
"learning_rate": 0.0004927594608764546, |
|
"loss": 4.8086, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.09453125, |
|
"grad_norm": 0.5477548837661743, |
|
"learning_rate": 0.0004927418213051104, |
|
"loss": 4.8555, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.09462890625, |
|
"grad_norm": 0.43198466300964355, |
|
"learning_rate": 0.0004927241606251745, |
|
"loss": 4.8125, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.0947265625, |
|
"grad_norm": 0.42610111832618713, |
|
"learning_rate": 0.0004927064788383587, |
|
"loss": 4.8477, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.09482421875, |
|
"grad_norm": 0.4663979709148407, |
|
"learning_rate": 0.0004926887759463776, |
|
"loss": 4.8281, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.094921875, |
|
"grad_norm": 0.46881988644599915, |
|
"learning_rate": 0.000492671051950947, |
|
"loss": 4.8438, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.09501953125, |
|
"grad_norm": 0.36195266246795654, |
|
"learning_rate": 0.0004926533068537852, |
|
"loss": 4.8477, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.0951171875, |
|
"grad_norm": 0.3922168016433716, |
|
"learning_rate": 0.0004926355406566127, |
|
"loss": 4.8555, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.09521484375, |
|
"grad_norm": 0.425672709941864, |
|
"learning_rate": 0.0004926177533611514, |
|
"loss": 4.8164, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.0953125, |
|
"grad_norm": 0.5194612145423889, |
|
"learning_rate": 0.0004925999449691261, |
|
"loss": 4.8281, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.09541015625, |
|
"grad_norm": 0.5651121735572815, |
|
"learning_rate": 0.000492582115482263, |
|
"loss": 4.8164, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.0955078125, |
|
"grad_norm": 0.5677608847618103, |
|
"learning_rate": 0.0004925642649022903, |
|
"loss": 4.8711, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.09560546875, |
|
"grad_norm": 0.5311588644981384, |
|
"learning_rate": 0.000492546393230939, |
|
"loss": 4.8242, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.095703125, |
|
"grad_norm": 0.5084513425827026, |
|
"learning_rate": 0.0004925285004699411, |
|
"loss": 4.832, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.09580078125, |
|
"grad_norm": 0.6232282519340515, |
|
"learning_rate": 0.0004925105866210316, |
|
"loss": 4.8672, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.0958984375, |
|
"grad_norm": 0.695502519607544, |
|
"learning_rate": 0.0004924926516859469, |
|
"loss": 4.8281, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.09599609375, |
|
"grad_norm": 0.6766898036003113, |
|
"learning_rate": 0.000492474695666426, |
|
"loss": 4.8203, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.09609375, |
|
"grad_norm": 0.5815792679786682, |
|
"learning_rate": 0.0004924567185642091, |
|
"loss": 4.8359, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.09619140625, |
|
"grad_norm": 0.48480623960494995, |
|
"learning_rate": 0.0004924387203810393, |
|
"loss": 4.8633, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.0962890625, |
|
"grad_norm": 0.46070924401283264, |
|
"learning_rate": 0.0004924207011186613, |
|
"loss": 4.8516, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.09638671875, |
|
"grad_norm": 0.4500672221183777, |
|
"learning_rate": 0.0004924026607788219, |
|
"loss": 4.8438, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.096484375, |
|
"grad_norm": 0.42609307169914246, |
|
"learning_rate": 0.0004923845993632702, |
|
"loss": 4.8164, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.09658203125, |
|
"grad_norm": 0.4055596590042114, |
|
"learning_rate": 0.000492366516873757, |
|
"loss": 4.8633, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.0966796875, |
|
"grad_norm": 0.37348657846450806, |
|
"learning_rate": 0.0004923484133120351, |
|
"loss": 4.8203, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.09677734375, |
|
"grad_norm": 0.3880004584789276, |
|
"learning_rate": 0.0004923302886798598, |
|
"loss": 4.8125, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.096875, |
|
"grad_norm": 0.381511926651001, |
|
"learning_rate": 0.000492312142978988, |
|
"loss": 4.8359, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.09697265625, |
|
"grad_norm": 0.4143732786178589, |
|
"learning_rate": 0.0004922939762111788, |
|
"loss": 4.8281, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.0970703125, |
|
"grad_norm": 0.4248274266719818, |
|
"learning_rate": 0.0004922757883781934, |
|
"loss": 4.8555, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.09716796875, |
|
"grad_norm": 0.43763217329978943, |
|
"learning_rate": 0.000492257579481795, |
|
"loss": 4.8047, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.097265625, |
|
"grad_norm": 0.4777696430683136, |
|
"learning_rate": 0.0004922393495237488, |
|
"loss": 4.8125, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.09736328125, |
|
"grad_norm": 0.5218069553375244, |
|
"learning_rate": 0.000492221098505822, |
|
"loss": 4.8125, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.0974609375, |
|
"grad_norm": 0.5178037285804749, |
|
"learning_rate": 0.000492202826429784, |
|
"loss": 4.8242, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.09755859375, |
|
"grad_norm": 0.45934903621673584, |
|
"learning_rate": 0.0004921845332974062, |
|
"loss": 4.8125, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.09765625, |
|
"grad_norm": 0.4091397821903229, |
|
"learning_rate": 0.0004921662191104619, |
|
"loss": 4.8359, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.09775390625, |
|
"grad_norm": 0.3937183618545532, |
|
"learning_rate": 0.0004921478838707266, |
|
"loss": 4.8359, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.0978515625, |
|
"grad_norm": 0.4575563073158264, |
|
"learning_rate": 0.0004921295275799778, |
|
"loss": 4.8281, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.09794921875, |
|
"grad_norm": 0.490375280380249, |
|
"learning_rate": 0.0004921111502399949, |
|
"loss": 4.832, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.098046875, |
|
"grad_norm": 0.4874227046966553, |
|
"learning_rate": 0.0004920927518525594, |
|
"loss": 4.8203, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.09814453125, |
|
"grad_norm": 0.42199403047561646, |
|
"learning_rate": 0.0004920743324194552, |
|
"loss": 4.8555, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.0982421875, |
|
"grad_norm": 0.49443531036376953, |
|
"learning_rate": 0.0004920558919424677, |
|
"loss": 4.8047, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.09833984375, |
|
"grad_norm": 0.39844444394111633, |
|
"learning_rate": 0.0004920374304233846, |
|
"loss": 4.8203, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.0984375, |
|
"grad_norm": 0.462501585483551, |
|
"learning_rate": 0.0004920189478639957, |
|
"loss": 4.8477, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.09853515625, |
|
"grad_norm": 0.5070082545280457, |
|
"learning_rate": 0.0004920004442660927, |
|
"loss": 4.8203, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.0986328125, |
|
"grad_norm": 0.47550520300865173, |
|
"learning_rate": 0.0004919819196314695, |
|
"loss": 4.8008, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.09873046875, |
|
"grad_norm": 0.4895987808704376, |
|
"learning_rate": 0.0004919633739619218, |
|
"loss": 4.8125, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.098828125, |
|
"grad_norm": 0.48678672313690186, |
|
"learning_rate": 0.0004919448072592474, |
|
"loss": 4.8086, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.09892578125, |
|
"grad_norm": 0.4505287706851959, |
|
"learning_rate": 0.0004919262195252465, |
|
"loss": 4.8281, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.0990234375, |
|
"grad_norm": 0.8226277232170105, |
|
"learning_rate": 0.0004919076107617209, |
|
"loss": 4.8047, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.09912109375, |
|
"grad_norm": 0.6768991947174072, |
|
"learning_rate": 0.0004918889809704745, |
|
"loss": 4.8359, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.09921875, |
|
"grad_norm": 0.573598325252533, |
|
"learning_rate": 0.0004918703301533135, |
|
"loss": 4.832, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.09931640625, |
|
"grad_norm": 0.49422210454940796, |
|
"learning_rate": 0.0004918516583120458, |
|
"loss": 4.8555, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.0994140625, |
|
"grad_norm": 0.5521137714385986, |
|
"learning_rate": 0.0004918329654484817, |
|
"loss": 4.8203, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.09951171875, |
|
"grad_norm": 0.6036257743835449, |
|
"learning_rate": 0.0004918142515644332, |
|
"loss": 4.8008, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.099609375, |
|
"grad_norm": 0.8331734538078308, |
|
"learning_rate": 0.0004917955166617146, |
|
"loss": 4.8203, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.09970703125, |
|
"grad_norm": 1.3778009414672852, |
|
"learning_rate": 0.000491776760742142, |
|
"loss": 4.8281, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.0998046875, |
|
"grad_norm": 0.785008430480957, |
|
"learning_rate": 0.0004917579838075337, |
|
"loss": 4.8477, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.09990234375, |
|
"grad_norm": 0.7883325815200806, |
|
"learning_rate": 0.0004917391858597099, |
|
"loss": 4.8398, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.5901327729225159, |
|
"learning_rate": 0.0004917203669004932, |
|
"loss": 4.8555, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.10009765625, |
|
"grad_norm": 0.4483931064605713, |
|
"learning_rate": 0.0004917015269317079, |
|
"loss": 4.832, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.1001953125, |
|
"grad_norm": 0.6025552749633789, |
|
"learning_rate": 0.0004916826659551802, |
|
"loss": 4.8438, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.10029296875, |
|
"grad_norm": 0.691528856754303, |
|
"learning_rate": 0.0004916637839727387, |
|
"loss": 4.8477, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.100390625, |
|
"grad_norm": 0.922143816947937, |
|
"learning_rate": 0.0004916448809862137, |
|
"loss": 4.8398, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.10048828125, |
|
"grad_norm": 0.7501768469810486, |
|
"learning_rate": 0.000491625956997438, |
|
"loss": 4.8438, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.1005859375, |
|
"grad_norm": 0.5718377828598022, |
|
"learning_rate": 0.000491607012008246, |
|
"loss": 4.8047, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.10068359375, |
|
"grad_norm": 0.5166970491409302, |
|
"learning_rate": 0.0004915880460204743, |
|
"loss": 4.832, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.10078125, |
|
"grad_norm": 0.41339635848999023, |
|
"learning_rate": 0.0004915690590359615, |
|
"loss": 4.832, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.10087890625, |
|
"grad_norm": 0.42061278223991394, |
|
"learning_rate": 0.0004915500510565483, |
|
"loss": 4.832, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.1009765625, |
|
"grad_norm": 0.8583418130874634, |
|
"learning_rate": 0.0004915310220840774, |
|
"loss": 4.8047, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.10107421875, |
|
"grad_norm": 1.503048062324524, |
|
"learning_rate": 0.0004915119721203935, |
|
"loss": 4.8555, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.101171875, |
|
"grad_norm": 1.4394514560699463, |
|
"learning_rate": 0.0004914929011673434, |
|
"loss": 4.8281, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.10126953125, |
|
"grad_norm": 1.117308259010315, |
|
"learning_rate": 0.0004914738092267758, |
|
"loss": 4.8398, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.1013671875, |
|
"grad_norm": 0.6745375990867615, |
|
"learning_rate": 0.0004914546963005416, |
|
"loss": 4.8438, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.10146484375, |
|
"grad_norm": 1.0236302614212036, |
|
"learning_rate": 0.0004914355623904938, |
|
"loss": 4.8398, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.1015625, |
|
"grad_norm": 0.8115978240966797, |
|
"learning_rate": 0.0004914164074984872, |
|
"loss": 4.8633, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.10166015625, |
|
"grad_norm": 0.5963826775550842, |
|
"learning_rate": 0.0004913972316263785, |
|
"loss": 4.8203, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.1017578125, |
|
"grad_norm": 0.49297934770584106, |
|
"learning_rate": 0.000491378034776027, |
|
"loss": 4.8086, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.10185546875, |
|
"grad_norm": 0.4009886085987091, |
|
"learning_rate": 0.0004913588169492937, |
|
"loss": 4.8203, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.101953125, |
|
"grad_norm": 0.4827432930469513, |
|
"learning_rate": 0.0004913395781480414, |
|
"loss": 4.8398, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.10205078125, |
|
"grad_norm": 0.402046799659729, |
|
"learning_rate": 0.0004913203183741354, |
|
"loss": 4.832, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.1021484375, |
|
"grad_norm": 0.4627341330051422, |
|
"learning_rate": 0.0004913010376294425, |
|
"loss": 4.8125, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.10224609375, |
|
"grad_norm": 0.4753624200820923, |
|
"learning_rate": 0.0004912817359158322, |
|
"loss": 4.8203, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.10234375, |
|
"grad_norm": 0.5196328163146973, |
|
"learning_rate": 0.0004912624132351755, |
|
"loss": 4.7969, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.10244140625, |
|
"grad_norm": 0.9553819894790649, |
|
"learning_rate": 0.0004912430695893456, |
|
"loss": 4.7969, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.1025390625, |
|
"grad_norm": 0.7087034583091736, |
|
"learning_rate": 0.0004912237049802178, |
|
"loss": 4.8008, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.10263671875, |
|
"grad_norm": 0.6159484386444092, |
|
"learning_rate": 0.0004912043194096693, |
|
"loss": 4.8047, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.102734375, |
|
"grad_norm": 0.5763092637062073, |
|
"learning_rate": 0.0004911849128795793, |
|
"loss": 4.8359, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.10283203125, |
|
"grad_norm": 0.5660399198532104, |
|
"learning_rate": 0.0004911654853918293, |
|
"loss": 4.7891, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.1029296875, |
|
"grad_norm": 0.5883915424346924, |
|
"learning_rate": 0.0004911460369483026, |
|
"loss": 4.8203, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.10302734375, |
|
"grad_norm": 0.5926139950752258, |
|
"learning_rate": 0.0004911265675508847, |
|
"loss": 4.8438, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.103125, |
|
"grad_norm": 0.5742599964141846, |
|
"learning_rate": 0.000491107077201463, |
|
"loss": 4.8203, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.10322265625, |
|
"grad_norm": 0.4908924102783203, |
|
"learning_rate": 0.0004910875659019267, |
|
"loss": 4.8164, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.1033203125, |
|
"grad_norm": 0.42929133772850037, |
|
"learning_rate": 0.0004910680336541676, |
|
"loss": 4.8125, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.10341796875, |
|
"grad_norm": 0.384483277797699, |
|
"learning_rate": 0.000491048480460079, |
|
"loss": 4.8008, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.103515625, |
|
"grad_norm": 0.36012735962867737, |
|
"learning_rate": 0.0004910289063215564, |
|
"loss": 4.8203, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.10361328125, |
|
"grad_norm": 0.3993228077888489, |
|
"learning_rate": 0.0004910093112404978, |
|
"loss": 4.8477, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.1037109375, |
|
"grad_norm": 0.3865804970264435, |
|
"learning_rate": 0.0004909896952188024, |
|
"loss": 4.8125, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.10380859375, |
|
"grad_norm": 0.4794972240924835, |
|
"learning_rate": 0.0004909700582583721, |
|
"loss": 4.7969, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.10390625, |
|
"grad_norm": 0.5843653082847595, |
|
"learning_rate": 0.0004909504003611103, |
|
"loss": 4.8008, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.10400390625, |
|
"grad_norm": 0.7108316421508789, |
|
"learning_rate": 0.000490930721528923, |
|
"loss": 4.8164, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.1041015625, |
|
"grad_norm": 0.7192037105560303, |
|
"learning_rate": 0.0004909110217637177, |
|
"loss": 4.8477, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.10419921875, |
|
"grad_norm": 0.6272011995315552, |
|
"learning_rate": 0.0004908913010674041, |
|
"loss": 4.8125, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.104296875, |
|
"grad_norm": 0.5953921675682068, |
|
"learning_rate": 0.0004908715594418942, |
|
"loss": 4.8398, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.10439453125, |
|
"grad_norm": 0.4558872878551483, |
|
"learning_rate": 0.0004908517968891018, |
|
"loss": 4.8125, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.1044921875, |
|
"grad_norm": 0.3889329135417938, |
|
"learning_rate": 0.0004908320134109427, |
|
"loss": 4.8281, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.10458984375, |
|
"grad_norm": 0.3157101571559906, |
|
"learning_rate": 0.0004908122090093347, |
|
"loss": 4.7969, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.1046875, |
|
"grad_norm": 0.3802729547023773, |
|
"learning_rate": 0.0004907923836861978, |
|
"loss": 4.8086, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.10478515625, |
|
"grad_norm": 0.402535080909729, |
|
"learning_rate": 0.0004907725374434539, |
|
"loss": 4.8086, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.1048828125, |
|
"grad_norm": 0.4550858438014984, |
|
"learning_rate": 0.0004907526702830268, |
|
"loss": 4.8125, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.10498046875, |
|
"grad_norm": 0.45571887493133545, |
|
"learning_rate": 0.0004907327822068427, |
|
"loss": 4.8086, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.105078125, |
|
"grad_norm": 0.3914882242679596, |
|
"learning_rate": 0.0004907128732168297, |
|
"loss": 4.8242, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.10517578125, |
|
"grad_norm": 0.34561851620674133, |
|
"learning_rate": 0.0004906929433149175, |
|
"loss": 4.8203, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.1052734375, |
|
"grad_norm": 0.3450526297092438, |
|
"learning_rate": 0.0004906729925030385, |
|
"loss": 4.8203, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.10537109375, |
|
"grad_norm": 0.3435506224632263, |
|
"learning_rate": 0.0004906530207831266, |
|
"loss": 4.7969, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.10546875, |
|
"grad_norm": 0.3784290552139282, |
|
"learning_rate": 0.0004906330281571179, |
|
"loss": 4.8359, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.10556640625, |
|
"grad_norm": 0.4976685345172882, |
|
"learning_rate": 0.0004906130146269507, |
|
"loss": 4.793, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.1056640625, |
|
"grad_norm": 0.5724058747291565, |
|
"learning_rate": 0.000490592980194565, |
|
"loss": 4.8008, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.10576171875, |
|
"grad_norm": 0.546415388584137, |
|
"learning_rate": 0.0004905729248619032, |
|
"loss": 4.7891, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.105859375, |
|
"grad_norm": 0.5786755681037903, |
|
"learning_rate": 0.0004905528486309095, |
|
"loss": 4.8125, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.10595703125, |
|
"grad_norm": 0.46551647782325745, |
|
"learning_rate": 0.00049053275150353, |
|
"loss": 4.793, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.1060546875, |
|
"grad_norm": 0.3895544707775116, |
|
"learning_rate": 0.0004905126334817131, |
|
"loss": 4.7891, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.10615234375, |
|
"grad_norm": 0.3977148234844208, |
|
"learning_rate": 0.000490492494567409, |
|
"loss": 4.8516, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.10625, |
|
"grad_norm": 0.41639018058776855, |
|
"learning_rate": 0.0004904723347625702, |
|
"loss": 4.8125, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.10634765625, |
|
"grad_norm": 0.4226835370063782, |
|
"learning_rate": 0.0004904521540691509, |
|
"loss": 4.8242, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.1064453125, |
|
"grad_norm": 0.47172993421554565, |
|
"learning_rate": 0.0004904319524891076, |
|
"loss": 4.7734, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.10654296875, |
|
"grad_norm": 0.438104510307312, |
|
"learning_rate": 0.0004904117300243986, |
|
"loss": 4.8203, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.106640625, |
|
"grad_norm": 0.38510963320732117, |
|
"learning_rate": 0.0004903914866769843, |
|
"loss": 4.8164, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.10673828125, |
|
"grad_norm": 0.404243528842926, |
|
"learning_rate": 0.0004903712224488273, |
|
"loss": 4.8359, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.1068359375, |
|
"grad_norm": 0.4017542898654938, |
|
"learning_rate": 0.0004903509373418921, |
|
"loss": 4.8242, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.10693359375, |
|
"grad_norm": 0.4477319121360779, |
|
"learning_rate": 0.000490330631358145, |
|
"loss": 4.7969, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.10703125, |
|
"grad_norm": 0.3956342041492462, |
|
"learning_rate": 0.0004903103044995548, |
|
"loss": 4.8047, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.10712890625, |
|
"grad_norm": 0.43987393379211426, |
|
"learning_rate": 0.0004902899567680917, |
|
"loss": 4.7734, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.1072265625, |
|
"grad_norm": 0.4343857169151306, |
|
"learning_rate": 0.0004902695881657286, |
|
"loss": 4.8477, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.10732421875, |
|
"grad_norm": 0.3674246370792389, |
|
"learning_rate": 0.0004902491986944399, |
|
"loss": 4.8125, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.107421875, |
|
"grad_norm": 0.3673463761806488, |
|
"learning_rate": 0.0004902287883562023, |
|
"loss": 4.8125, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.10751953125, |
|
"grad_norm": 0.4112495481967926, |
|
"learning_rate": 0.0004902083571529944, |
|
"loss": 4.8047, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.1076171875, |
|
"grad_norm": 0.4617629647254944, |
|
"learning_rate": 0.000490187905086797, |
|
"loss": 4.7695, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.10771484375, |
|
"grad_norm": 0.49318692088127136, |
|
"learning_rate": 0.0004901674321595925, |
|
"loss": 4.7812, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.1078125, |
|
"grad_norm": 0.5360081195831299, |
|
"learning_rate": 0.0004901469383733659, |
|
"loss": 4.8242, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.10791015625, |
|
"grad_norm": 0.5867539048194885, |
|
"learning_rate": 0.0004901264237301039, |
|
"loss": 4.7852, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.1080078125, |
|
"grad_norm": 0.5440882444381714, |
|
"learning_rate": 0.0004901058882317951, |
|
"loss": 4.8203, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.10810546875, |
|
"grad_norm": 0.4591827094554901, |
|
"learning_rate": 0.0004900853318804304, |
|
"loss": 4.8164, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.108203125, |
|
"grad_norm": 0.4257602095603943, |
|
"learning_rate": 0.0004900647546780026, |
|
"loss": 4.8008, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.10830078125, |
|
"grad_norm": 0.418489009141922, |
|
"learning_rate": 0.0004900441566265064, |
|
"loss": 4.832, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.1083984375, |
|
"grad_norm": 0.4998464286327362, |
|
"learning_rate": 0.0004900235377279388, |
|
"loss": 4.7773, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.10849609375, |
|
"grad_norm": 0.47938549518585205, |
|
"learning_rate": 0.0004900028979842986, |
|
"loss": 4.8125, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.10859375, |
|
"grad_norm": 0.4215286672115326, |
|
"learning_rate": 0.0004899822373975866, |
|
"loss": 4.7812, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.10869140625, |
|
"grad_norm": 0.41954976320266724, |
|
"learning_rate": 0.0004899615559698058, |
|
"loss": 4.8047, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.1087890625, |
|
"grad_norm": 0.4177006781101227, |
|
"learning_rate": 0.0004899408537029611, |
|
"loss": 4.8047, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.10888671875, |
|
"grad_norm": 0.5628485679626465, |
|
"learning_rate": 0.0004899201305990594, |
|
"loss": 4.7969, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.108984375, |
|
"grad_norm": 0.6657236814498901, |
|
"learning_rate": 0.0004898993866601098, |
|
"loss": 4.8086, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.10908203125, |
|
"grad_norm": 0.6637690663337708, |
|
"learning_rate": 0.0004898786218881232, |
|
"loss": 4.8203, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.1091796875, |
|
"grad_norm": 0.5784990787506104, |
|
"learning_rate": 0.0004898578362851124, |
|
"loss": 4.832, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.10927734375, |
|
"grad_norm": 0.42443713545799255, |
|
"learning_rate": 0.0004898370298530928, |
|
"loss": 4.8008, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.109375, |
|
"grad_norm": 0.3830028474330902, |
|
"learning_rate": 0.0004898162025940812, |
|
"loss": 4.8398, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.10947265625, |
|
"grad_norm": 0.45647111535072327, |
|
"learning_rate": 0.0004897953545100966, |
|
"loss": 4.8086, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.1095703125, |
|
"grad_norm": 0.49465152621269226, |
|
"learning_rate": 0.0004897744856031604, |
|
"loss": 4.8477, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.10966796875, |
|
"grad_norm": 0.5248700380325317, |
|
"learning_rate": 0.0004897535958752954, |
|
"loss": 4.7891, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.109765625, |
|
"grad_norm": 0.5552516579627991, |
|
"learning_rate": 0.0004897326853285268, |
|
"loss": 4.8125, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.10986328125, |
|
"grad_norm": 0.4863637387752533, |
|
"learning_rate": 0.0004897117539648818, |
|
"loss": 4.8203, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.1099609375, |
|
"grad_norm": 0.4048306941986084, |
|
"learning_rate": 0.0004896908017863895, |
|
"loss": 4.7852, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.11005859375, |
|
"grad_norm": 0.4323672652244568, |
|
"learning_rate": 0.0004896698287950812, |
|
"loss": 4.7695, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.11015625, |
|
"grad_norm": 0.5875673890113831, |
|
"learning_rate": 0.0004896488349929898, |
|
"loss": 4.793, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.11025390625, |
|
"grad_norm": 0.5504245758056641, |
|
"learning_rate": 0.0004896278203821506, |
|
"loss": 4.7852, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.1103515625, |
|
"grad_norm": 0.44111916422843933, |
|
"learning_rate": 0.0004896067849646011, |
|
"loss": 4.7852, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.11044921875, |
|
"grad_norm": 0.37442272901535034, |
|
"learning_rate": 0.0004895857287423802, |
|
"loss": 4.7578, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.110546875, |
|
"grad_norm": 0.38579708337783813, |
|
"learning_rate": 0.0004895646517175294, |
|
"loss": 4.7734, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.11064453125, |
|
"grad_norm": 0.350492388010025, |
|
"learning_rate": 0.0004895435538920918, |
|
"loss": 4.7969, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.1107421875, |
|
"grad_norm": 0.3320639431476593, |
|
"learning_rate": 0.0004895224352681127, |
|
"loss": 4.7969, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.11083984375, |
|
"grad_norm": 0.33712780475616455, |
|
"learning_rate": 0.0004895012958476396, |
|
"loss": 4.793, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.1109375, |
|
"grad_norm": 0.3295286297798157, |
|
"learning_rate": 0.0004894801356327217, |
|
"loss": 4.8047, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.11103515625, |
|
"grad_norm": 0.3195537030696869, |
|
"learning_rate": 0.0004894589546254102, |
|
"loss": 4.8047, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.1111328125, |
|
"grad_norm": 0.36060598492622375, |
|
"learning_rate": 0.0004894377528277587, |
|
"loss": 4.8008, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.11123046875, |
|
"grad_norm": 0.4128583073616028, |
|
"learning_rate": 0.0004894165302418224, |
|
"loss": 4.8125, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.111328125, |
|
"grad_norm": 0.6390661001205444, |
|
"learning_rate": 0.0004893952868696588, |
|
"loss": 4.8086, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.11142578125, |
|
"grad_norm": 0.8795298933982849, |
|
"learning_rate": 0.0004893740227133272, |
|
"loss": 4.793, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.1115234375, |
|
"grad_norm": 0.5830650329589844, |
|
"learning_rate": 0.000489352737774889, |
|
"loss": 4.8164, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.11162109375, |
|
"grad_norm": 0.6256621479988098, |
|
"learning_rate": 0.0004893314320564078, |
|
"loss": 4.8086, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.11171875, |
|
"grad_norm": 0.6185265779495239, |
|
"learning_rate": 0.0004893101055599488, |
|
"loss": 4.7969, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.11181640625, |
|
"grad_norm": 0.5876705646514893, |
|
"learning_rate": 0.0004892887582875797, |
|
"loss": 4.7969, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.1119140625, |
|
"grad_norm": 0.5868121981620789, |
|
"learning_rate": 0.0004892673902413699, |
|
"loss": 4.8125, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.11201171875, |
|
"grad_norm": 0.46918195486068726, |
|
"learning_rate": 0.0004892460014233907, |
|
"loss": 4.8086, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.112109375, |
|
"grad_norm": 0.3593907058238983, |
|
"learning_rate": 0.0004892245918357159, |
|
"loss": 4.832, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.11220703125, |
|
"grad_norm": 0.4415476620197296, |
|
"learning_rate": 0.0004892031614804208, |
|
"loss": 4.7852, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.1123046875, |
|
"grad_norm": 0.7689213156700134, |
|
"learning_rate": 0.000489181710359583, |
|
"loss": 4.7891, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.11240234375, |
|
"grad_norm": 1.3944097757339478, |
|
"learning_rate": 0.000489160238475282, |
|
"loss": 4.8086, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.1125, |
|
"grad_norm": 0.6750385165214539, |
|
"learning_rate": 0.0004891387458295995, |
|
"loss": 4.8047, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.11259765625, |
|
"grad_norm": 0.6533737778663635, |
|
"learning_rate": 0.0004891172324246189, |
|
"loss": 4.8164, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.1126953125, |
|
"grad_norm": 0.698330283164978, |
|
"learning_rate": 0.0004890956982624258, |
|
"loss": 4.8203, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.11279296875, |
|
"grad_norm": 0.5275533199310303, |
|
"learning_rate": 0.0004890741433451079, |
|
"loss": 4.7969, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.112890625, |
|
"grad_norm": 0.39748457074165344, |
|
"learning_rate": 0.0004890525676747547, |
|
"loss": 4.8086, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.11298828125, |
|
"grad_norm": 0.49489057064056396, |
|
"learning_rate": 0.0004890309712534578, |
|
"loss": 4.8047, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.1130859375, |
|
"grad_norm": 0.4166368544101715, |
|
"learning_rate": 0.0004890093540833108, |
|
"loss": 4.832, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.11318359375, |
|
"grad_norm": 0.3709654211997986, |
|
"learning_rate": 0.0004889877161664096, |
|
"loss": 4.8281, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.11328125, |
|
"grad_norm": 0.41362860798835754, |
|
"learning_rate": 0.0004889660575048515, |
|
"loss": 4.7773, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.11337890625, |
|
"grad_norm": 0.3981860280036926, |
|
"learning_rate": 0.0004889443781007364, |
|
"loss": 4.7969, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.1134765625, |
|
"grad_norm": 0.4014473557472229, |
|
"learning_rate": 0.000488922677956166, |
|
"loss": 4.793, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.11357421875, |
|
"grad_norm": 0.5000125765800476, |
|
"learning_rate": 0.0004889009570732436, |
|
"loss": 4.7891, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.113671875, |
|
"grad_norm": 0.6138625144958496, |
|
"learning_rate": 0.0004888792154540753, |
|
"loss": 4.7812, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.11376953125, |
|
"grad_norm": 0.6484094858169556, |
|
"learning_rate": 0.0004888574531007687, |
|
"loss": 4.8008, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.1138671875, |
|
"grad_norm": 0.44802355766296387, |
|
"learning_rate": 0.0004888356700154333, |
|
"loss": 4.8047, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.11396484375, |
|
"grad_norm": 0.42703965306282043, |
|
"learning_rate": 0.0004888138662001813, |
|
"loss": 4.7969, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.1140625, |
|
"grad_norm": 0.41006794571876526, |
|
"learning_rate": 0.0004887920416571259, |
|
"loss": 4.793, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.11416015625, |
|
"grad_norm": 0.39348462224006653, |
|
"learning_rate": 0.0004887701963883831, |
|
"loss": 4.793, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.1142578125, |
|
"grad_norm": 0.3154531419277191, |
|
"learning_rate": 0.0004887483303960706, |
|
"loss": 4.793, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.11435546875, |
|
"grad_norm": 0.3364889323711395, |
|
"learning_rate": 0.0004887264436823083, |
|
"loss": 4.7812, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.114453125, |
|
"grad_norm": 0.3992083966732025, |
|
"learning_rate": 0.0004887045362492178, |
|
"loss": 4.793, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.11455078125, |
|
"grad_norm": 0.42253056168556213, |
|
"learning_rate": 0.0004886826080989229, |
|
"loss": 4.7734, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.1146484375, |
|
"grad_norm": 0.36724087595939636, |
|
"learning_rate": 0.0004886606592335495, |
|
"loss": 4.793, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.11474609375, |
|
"grad_norm": 0.3279463052749634, |
|
"learning_rate": 0.0004886386896552252, |
|
"loss": 4.7891, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.11484375, |
|
"grad_norm": 0.3356574773788452, |
|
"learning_rate": 0.0004886166993660799, |
|
"loss": 4.7891, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.11494140625, |
|
"grad_norm": 0.42385661602020264, |
|
"learning_rate": 0.0004885946883682455, |
|
"loss": 4.7969, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.1150390625, |
|
"grad_norm": 0.582700252532959, |
|
"learning_rate": 0.0004885726566638557, |
|
"loss": 4.7812, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.11513671875, |
|
"grad_norm": 0.5969910025596619, |
|
"learning_rate": 0.0004885506042550464, |
|
"loss": 4.8359, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.115234375, |
|
"grad_norm": 0.5986462831497192, |
|
"learning_rate": 0.0004885285311439553, |
|
"loss": 4.7891, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.11533203125, |
|
"grad_norm": 0.49811697006225586, |
|
"learning_rate": 0.0004885064373327223, |
|
"loss": 4.7773, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.1154296875, |
|
"grad_norm": 0.38979536294937134, |
|
"learning_rate": 0.0004884843228234895, |
|
"loss": 4.8047, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.11552734375, |
|
"grad_norm": 0.40310540795326233, |
|
"learning_rate": 0.0004884621876184004, |
|
"loss": 4.8047, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.115625, |
|
"grad_norm": 0.42200982570648193, |
|
"learning_rate": 0.0004884400317196009, |
|
"loss": 4.8203, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.11572265625, |
|
"grad_norm": 0.4246567487716675, |
|
"learning_rate": 0.000488417855129239, |
|
"loss": 4.7812, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.1158203125, |
|
"grad_norm": 0.4499056935310364, |
|
"learning_rate": 0.0004883956578494645, |
|
"loss": 4.7891, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.11591796875, |
|
"grad_norm": 0.42340394854545593, |
|
"learning_rate": 0.0004883734398824294, |
|
"loss": 4.7734, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.116015625, |
|
"grad_norm": 0.42388978600502014, |
|
"learning_rate": 0.0004883512012302874, |
|
"loss": 4.8125, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.11611328125, |
|
"grad_norm": 0.4048875868320465, |
|
"learning_rate": 0.0004883289418951943, |
|
"loss": 4.7812, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.1162109375, |
|
"grad_norm": 0.39005008339881897, |
|
"learning_rate": 0.0004883066618793083, |
|
"loss": 4.7891, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.11630859375, |
|
"grad_norm": 0.38380053639411926, |
|
"learning_rate": 0.0004882843611847892, |
|
"loss": 4.7852, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.11640625, |
|
"grad_norm": 0.35080215334892273, |
|
"learning_rate": 0.0004882620398137988, |
|
"loss": 4.8125, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.11650390625, |
|
"grad_norm": 0.3683309555053711, |
|
"learning_rate": 0.00048823969776850103, |
|
"loss": 4.8086, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.1166015625, |
|
"grad_norm": 0.39639347791671753, |
|
"learning_rate": 0.00048821733505106186, |
|
"loss": 4.8164, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.11669921875, |
|
"grad_norm": 0.41327810287475586, |
|
"learning_rate": 0.0004881949516636491, |
|
"loss": 4.7539, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.116796875, |
|
"grad_norm": 0.4666888117790222, |
|
"learning_rate": 0.0004881725476084328, |
|
"loss": 4.7539, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.11689453125, |
|
"grad_norm": 0.6009286046028137, |
|
"learning_rate": 0.00048815012288758484, |
|
"loss": 4.8086, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.1169921875, |
|
"grad_norm": 0.6095237731933594, |
|
"learning_rate": 0.00048812767750327905, |
|
"loss": 4.8203, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.11708984375, |
|
"grad_norm": 0.5841214656829834, |
|
"learning_rate": 0.0004881052114576915, |
|
"loss": 4.7852, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.1171875, |
|
"grad_norm": 0.5666095614433289, |
|
"learning_rate": 0.00048808272475299994, |
|
"loss": 4.8164, |
|
"step": 1200 |
|
}, |
|
{
"epoch": 0.11728515625,
"grad_norm": 0.4737398624420166,
"learning_rate": 0.00048806021739138453,
"loss": 4.7891,
"step": 1201
},
{
"epoch": 0.1173828125,
"grad_norm": 0.48223602771759033,
"learning_rate": 0.000488037689375027,
"loss": 4.7812,
"step": 1202
},
{
"epoch": 0.11748046875,
"grad_norm": 0.5186586976051331,
"learning_rate": 0.00048801514070611143,
"loss": 4.7812,
"step": 1203
},
{
"epoch": 0.117578125,
"grad_norm": 0.499870628118515,
"learning_rate": 0.0004879925713868236,
"loss": 4.7812,
"step": 1204
},
{
"epoch": 0.11767578125,
"grad_norm": 0.42764419317245483,
"learning_rate": 0.0004879699814193517,
"loss": 4.7617,
"step": 1205
},
{
"epoch": 0.1177734375,
"grad_norm": 0.42484956979751587,
"learning_rate": 0.0004879473708058855,
"loss": 4.7852,
"step": 1206
},
{
"epoch": 0.11787109375,
"grad_norm": 0.35906827449798584,
"learning_rate": 0.00048792473954861694,
"loss": 4.7852,
"step": 1207
},
{
"epoch": 0.11796875,
"grad_norm": 0.32980072498321533,
"learning_rate": 0.00048790208764973997,
"loss": 4.7695,
"step": 1208
},
{
"epoch": 0.11806640625,
"grad_norm": 0.33791640400886536,
"learning_rate": 0.0004878794151114507,
"loss": 4.8008,
"step": 1209
},
{
"epoch": 0.1181640625,
"grad_norm": 0.30102401971817017,
"learning_rate": 0.0004878567219359469,
"loss": 4.7852,
"step": 1210
},
{
"epoch": 0.11826171875,
"grad_norm": 0.32327136397361755,
"learning_rate": 0.0004878340081254285,
"loss": 4.7852,
"step": 1211
},
{
"epoch": 0.118359375,
"grad_norm": 0.37602269649505615,
"learning_rate": 0.0004878112736820976,
"loss": 4.7852,
"step": 1212
},
{
"epoch": 0.11845703125,
"grad_norm": 0.4111282229423523,
"learning_rate": 0.000487788518608158,
"loss": 4.7734,
"step": 1213
},
{
"epoch": 0.1185546875,
"grad_norm": 0.4097578525543213,
"learning_rate": 0.0004877657429058158,
"loss": 4.7852,
"step": 1214
},
{
"epoch": 0.11865234375,
"grad_norm": 0.3875778615474701,
"learning_rate": 0.0004877429465772788,
"loss": 4.7734,
"step": 1215
},
{
"epoch": 0.11875,
"grad_norm": 0.372007817029953,
"learning_rate": 0.000487720129624757,
"loss": 4.7852,
"step": 1216
},
{
"epoch": 0.11884765625,
"grad_norm": 0.3939751088619232,
"learning_rate": 0.00048769729205046247,
"loss": 4.7773,
"step": 1217
},
{
"epoch": 0.1189453125,
"grad_norm": 0.4261675775051117,
"learning_rate": 0.000487674433856609,
"loss": 4.7812,
"step": 1218
},
{
"epoch": 0.11904296875,
"grad_norm": 0.5432133078575134,
"learning_rate": 0.00048765155504541265,
"loss": 4.8086,
"step": 1219
},
{
"epoch": 0.119140625,
"grad_norm": 0.5995720624923706,
"learning_rate": 0.0004876286556190912,
"loss": 4.8125,
"step": 1220
},
{
"epoch": 0.11923828125,
"grad_norm": 0.6350622773170471,
"learning_rate": 0.00048760573557986476,
"loss": 4.7812,
"step": 1221
},
{
"epoch": 0.1193359375,
"grad_norm": 0.5896903872489929,
"learning_rate": 0.00048758279492995527,
"loss": 4.7852,
"step": 1222
},
{
"epoch": 0.11943359375,
"grad_norm": 0.4300802946090698,
"learning_rate": 0.0004875598336715865,
"loss": 4.7773,
"step": 1223
},
{
"epoch": 0.11953125,
"grad_norm": 0.45555663108825684,
"learning_rate": 0.00048753685180698465,
"loss": 4.7852,
"step": 1224
},
{
"epoch": 0.11962890625,
"grad_norm": 0.42764827609062195,
"learning_rate": 0.00048751384933837737,
"loss": 4.7812,
"step": 1225
},
{
"epoch": 0.1197265625,
"grad_norm": 0.37654969096183777,
"learning_rate": 0.0004874908262679948,
"loss": 4.7891,
"step": 1226
},
{
"epoch": 0.11982421875,
"grad_norm": 0.32424551248550415,
"learning_rate": 0.00048746778259806876,
"loss": 4.8086,
"step": 1227
},
{
"epoch": 0.119921875,
"grad_norm": 0.35854217410087585,
"learning_rate": 0.0004874447183308333,
"loss": 4.8086,
"step": 1228
},
{
"epoch": 0.12001953125,
"grad_norm": 0.38739636540412903,
"learning_rate": 0.0004874216334685242,
"loss": 4.7812,
"step": 1229
},
{
"epoch": 0.1201171875,
"grad_norm": 0.38605567812919617,
"learning_rate": 0.0004873985280133795,
"loss": 4.8086,
"step": 1230
},
{
"epoch": 0.12021484375,
"grad_norm": 0.36838725209236145,
"learning_rate": 0.00048737540196763904,
"loss": 4.7812,
"step": 1231
},
{
"epoch": 0.1203125,
"grad_norm": 0.3206044137477875,
"learning_rate": 0.0004873522553335447,
"loss": 4.7969,
"step": 1232
},
{
"epoch": 0.12041015625,
"grad_norm": 0.3341485261917114,
"learning_rate": 0.00048732908811334046,
"loss": 4.793,
"step": 1233
},
{
"epoch": 0.1205078125,
"grad_norm": 0.38392874598503113,
"learning_rate": 0.00048730590030927217,
"loss": 4.7852,
"step": 1234
},
{
"epoch": 0.12060546875,
"grad_norm": 0.42469480633735657,
"learning_rate": 0.0004872826919235879,
"loss": 4.7578,
"step": 1235
},
{
"epoch": 0.120703125,
"grad_norm": 0.5113615989685059,
"learning_rate": 0.00048725946295853737,
"loss": 4.8008,
"step": 1236
},
{
"epoch": 0.12080078125,
"grad_norm": 0.5344850420951843,
"learning_rate": 0.0004872362134163724,
"loss": 4.7969,
"step": 1237
},
{
"epoch": 0.1208984375,
"grad_norm": 0.4661124050617218,
"learning_rate": 0.0004872129432993471,
"loss": 4.8008,
"step": 1238
},
{
"epoch": 0.12099609375,
"grad_norm": 0.45510703325271606,
"learning_rate": 0.00048718965260971726,
"loss": 4.7812,
"step": 1239
},
{
"epoch": 0.12109375,
"grad_norm": 0.4054417610168457,
"learning_rate": 0.0004871663413497407,
"loss": 4.8203,
"step": 1240
},
{
"epoch": 0.12119140625,
"grad_norm": 0.450013130903244,
"learning_rate": 0.0004871430095216773,
"loss": 4.7812,
"step": 1241
},
{
"epoch": 0.1212890625,
"grad_norm": 0.46598708629608154,
"learning_rate": 0.000487119657127789,
"loss": 4.832,
"step": 1242
},
{
"epoch": 0.12138671875,
"grad_norm": 0.4022948741912842,
"learning_rate": 0.00048709628417033956,
"loss": 4.7891,
"step": 1243
},
{
"epoch": 0.121484375,
"grad_norm": 0.3764280378818512,
"learning_rate": 0.00048707289065159486,
"loss": 4.7852,
"step": 1244
},
{
"epoch": 0.12158203125,
"grad_norm": 0.3581683039665222,
"learning_rate": 0.0004870494765738228,
"loss": 4.7578,
"step": 1245
},
{
"epoch": 0.1216796875,
"grad_norm": 0.3513264060020447,
"learning_rate": 0.0004870260419392931,
"loss": 4.7734,
"step": 1246
},
{
"epoch": 0.12177734375,
"grad_norm": 0.3424482047557831,
"learning_rate": 0.00048700258675027776,
"loss": 4.8125,
"step": 1247
},
{
"epoch": 0.121875,
"grad_norm": 0.3603288233280182,
"learning_rate": 0.0004869791110090504,
"loss": 4.7969,
"step": 1248
},
{
"epoch": 0.12197265625,
"grad_norm": 0.3708992600440979,
"learning_rate": 0.00048695561471788696,
"loss": 4.7734,
"step": 1249
},
{
"epoch": 0.1220703125,
"grad_norm": 0.4168321490287781,
"learning_rate": 0.0004869320978790653,
"loss": 4.8008,
"step": 1250
},
{
"epoch": 0.12216796875,
"grad_norm": 0.4125996530056,
"learning_rate": 0.0004869085604948651,
"loss": 4.7734,
"step": 1251
},
{
"epoch": 0.122265625,
"grad_norm": 0.42782822251319885,
"learning_rate": 0.0004868850025675681,
"loss": 4.7656,
"step": 1252
},
{
"epoch": 0.12236328125,
"grad_norm": 0.34903016686439514,
"learning_rate": 0.0004868614240994583,
"loss": 4.7812,
"step": 1253
},
{
"epoch": 0.1224609375,
"grad_norm": 0.37179067730903625,
"learning_rate": 0.00048683782509282127,
"loss": 4.7578,
"step": 1254
},
{
"epoch": 0.12255859375,
"grad_norm": 0.3796963393688202,
"learning_rate": 0.0004868142055499448,
"loss": 4.7617,
"step": 1255
},
{
"epoch": 0.12265625,
"grad_norm": 0.4198116362094879,
"learning_rate": 0.0004867905654731187,
"loss": 4.8047,
"step": 1256
},
{
"epoch": 0.12275390625,
"grad_norm": 0.4762858748435974,
"learning_rate": 0.00048676690486463474,
"loss": 4.7617,
"step": 1257
},
{
"epoch": 0.1228515625,
"grad_norm": 0.5248888731002808,
"learning_rate": 0.0004867432237267867,
"loss": 4.7383,
"step": 1258
},
{
"epoch": 0.12294921875,
"grad_norm": 0.547907829284668,
"learning_rate": 0.00048671952206187007,
"loss": 4.8047,
"step": 1259
},
{
"epoch": 0.123046875,
"grad_norm": 0.42803826928138733,
"learning_rate": 0.00048669579987218285,
"loss": 4.7969,
"step": 1260
},
{
"epoch": 0.12314453125,
"grad_norm": 0.322132408618927,
"learning_rate": 0.00048667205716002455,
"loss": 4.7852,
"step": 1261
},
{
"epoch": 0.1232421875,
"grad_norm": 0.38514775037765503,
"learning_rate": 0.0004866482939276969,
"loss": 4.7695,
"step": 1262
},
{
"epoch": 0.12333984375,
"grad_norm": 0.4669879674911499,
"learning_rate": 0.00048662451017750377,
"loss": 4.7969,
"step": 1263
},
{
"epoch": 0.1234375,
"grad_norm": 0.47549909353256226,
"learning_rate": 0.0004866007059117505,
"loss": 4.7773,
"step": 1264
},
{
"epoch": 0.12353515625,
"grad_norm": 0.3875703513622284,
"learning_rate": 0.00048657688113274507,
"loss": 4.7852,
"step": 1265
},
{
"epoch": 0.1236328125,
"grad_norm": 0.31729233264923096,
"learning_rate": 0.00048655303584279686,
"loss": 4.7773,
"step": 1266
},
{
"epoch": 0.12373046875,
"grad_norm": 0.33539122343063354,
"learning_rate": 0.0004865291700442177,
"loss": 4.7617,
"step": 1267
},
{
"epoch": 0.123828125,
"grad_norm": 0.3871065676212311,
"learning_rate": 0.0004865052837393212,
"loss": 4.7773,
"step": 1268
},
{
"epoch": 0.12392578125,
"grad_norm": 0.40682950615882874,
"learning_rate": 0.00048648137693042283,
"loss": 4.7812,
"step": 1269
},
{
"epoch": 0.1240234375,
"grad_norm": 0.4068271219730377,
"learning_rate": 0.0004864574496198404,
"loss": 4.8008,
"step": 1270
},
{
"epoch": 0.12412109375,
"grad_norm": 0.3539895713329315,
"learning_rate": 0.0004864335018098933,
"loss": 4.7891,
"step": 1271
},
{
"epoch": 0.12421875,
"grad_norm": 0.34347373247146606,
"learning_rate": 0.00048640953350290324,
"loss": 4.7773,
"step": 1272
},
{
"epoch": 0.12431640625,
"grad_norm": 0.40734362602233887,
"learning_rate": 0.0004863855447011938,
"loss": 4.7734,
"step": 1273
},
{
"epoch": 0.1244140625,
"grad_norm": 0.4735758900642395,
"learning_rate": 0.00048636153540709045,
"loss": 4.7656,
"step": 1274
},
{
"epoch": 0.12451171875,
"grad_norm": 0.5711696147918701,
"learning_rate": 0.0004863375056229208,
"loss": 4.8203,
"step": 1275
},
{
"epoch": 0.124609375,
"grad_norm": 0.5063640475273132,
"learning_rate": 0.00048631345535101426,
"loss": 4.7617,
"step": 1276
},
{
"epoch": 0.12470703125,
"grad_norm": 0.35711005330085754,
"learning_rate": 0.0004862893845937024,
"loss": 4.8125,
"step": 1277
},
{
"epoch": 0.1248046875,
"grad_norm": 0.3374655842781067,
"learning_rate": 0.0004862652933533188,
"loss": 4.7734,
"step": 1278
},
{
"epoch": 0.12490234375,
"grad_norm": 0.3303259015083313,
"learning_rate": 0.00048624118163219875,
"loss": 4.7812,
"step": 1279
},
{
"epoch": 0.125,
"grad_norm": 0.31290751695632935,
"learning_rate": 0.00048621704943267995,
"loss": 4.7539,
"step": 1280
},
{
"epoch": 0.12509765625,
"grad_norm": 0.3193962574005127,
"learning_rate": 0.00048619289675710177,
"loss": 4.7578,
"step": 1281
},
{
"epoch": 0.1251953125,
"grad_norm": 0.2991279065608978,
"learning_rate": 0.0004861687236078055,
"loss": 4.7734,
"step": 1282
},
{
"epoch": 0.12529296875,
"grad_norm": 0.3635973334312439,
"learning_rate": 0.0004861445299871348,
"loss": 4.7344,
"step": 1283
},
{
"epoch": 0.125390625,
"grad_norm": 0.4499048888683319,
"learning_rate": 0.0004861203158974349,
"loss": 4.7812,
"step": 1284
},
{
"epoch": 0.12548828125,
"grad_norm": 0.6076017022132874,
"learning_rate": 0.00048609608134105324,
"loss": 4.7773,
"step": 1285
},
{
"epoch": 0.1255859375,
"grad_norm": 0.6585017442703247,
"learning_rate": 0.0004860718263203393,
"loss": 4.7812,
"step": 1286
},
{
"epoch": 0.12568359375,
"grad_norm": 0.48385024070739746,
"learning_rate": 0.0004860475508376442,
"loss": 4.7695,
"step": 1287
},
{
"epoch": 0.12578125,
"grad_norm": 0.3241603374481201,
"learning_rate": 0.00048602325489532146,
"loss": 4.7617,
"step": 1288
},
{
"epoch": 0.12587890625,
"grad_norm": 0.4593919813632965,
"learning_rate": 0.00048599893849572646,
"loss": 4.7734,
"step": 1289
},
{
"epoch": 0.1259765625,
"grad_norm": 0.46703073382377625,
"learning_rate": 0.00048597460164121636,
"loss": 4.7617,
"step": 1290
},
{
"epoch": 0.12607421875,
"grad_norm": 0.3566134572029114,
"learning_rate": 0.00048595024433415054,
"loss": 4.7695,
"step": 1291
},
{
"epoch": 0.126171875,
"grad_norm": 0.3164132833480835,
"learning_rate": 0.0004859258665768903,
"loss": 4.793,
"step": 1292
},
{
"epoch": 0.12626953125,
"grad_norm": 0.37039509415626526,
"learning_rate": 0.00048590146837179876,
"loss": 4.7773,
"step": 1293
},
{
"epoch": 0.1263671875,
"grad_norm": 0.37872517108917236,
"learning_rate": 0.00048587704972124135,
"loss": 4.8164,
"step": 1294
},
{
"epoch": 0.12646484375,
"grad_norm": 0.35898569226264954,
"learning_rate": 0.0004858526106275851,
"loss": 4.7773,
"step": 1295
},
{
"epoch": 0.1265625,
"grad_norm": 0.33450818061828613,
"learning_rate": 0.00048582815109319936,
"loss": 4.7734,
"step": 1296
},
{
"epoch": 0.12666015625,
"grad_norm": 0.3074091970920563,
"learning_rate": 0.0004858036711204553,
"loss": 4.7656,
"step": 1297
},
{
"epoch": 0.1267578125,
"grad_norm": 0.32790830731391907,
"learning_rate": 0.000485779170711726,
"loss": 4.7852,
"step": 1298
},
{
"epoch": 0.12685546875,
"grad_norm": 0.33045750856399536,
"learning_rate": 0.00048575464986938674,
"loss": 4.7383,
"step": 1299
},
{
"epoch": 0.126953125,
"grad_norm": 0.3500734269618988,
"learning_rate": 0.0004857301085958145,
"loss": 4.7578,
"step": 1300
},
{
"epoch": 0.12705078125,
"grad_norm": 0.39776694774627686,
"learning_rate": 0.0004857055468933885,
"loss": 4.7617,
"step": 1301
},
{
"epoch": 0.1271484375,
"grad_norm": 0.36641398072242737,
"learning_rate": 0.0004856809647644897,
"loss": 4.7773,
"step": 1302
},
{
"epoch": 0.12724609375,
"grad_norm": 0.3984200656414032,
"learning_rate": 0.00048565636221150135,
"loss": 4.7852,
"step": 1303
},
{
"epoch": 0.12734375,
"grad_norm": 0.40479159355163574,
"learning_rate": 0.0004856317392368084,
"loss": 4.7578,
"step": 1304
},
{
"epoch": 0.12744140625,
"grad_norm": 0.38542646169662476,
"learning_rate": 0.0004856070958427979,
"loss": 4.7969,
"step": 1305
},
{
"epoch": 0.1275390625,
"grad_norm": 0.41619765758514404,
"learning_rate": 0.0004855824320318589,
"loss": 4.7578,
"step": 1306
},
{
"epoch": 0.12763671875,
"grad_norm": 0.5476976633071899,
"learning_rate": 0.0004855577478063822,
"loss": 4.7656,
"step": 1307
},
{
"epoch": 0.127734375,
"grad_norm": 0.7227765917778015,
"learning_rate": 0.000485533043168761,
"loss": 4.7734,
"step": 1308
},
{
"epoch": 0.12783203125,
"grad_norm": 0.6441882848739624,
"learning_rate": 0.0004855083181213902,
"loss": 4.7695,
"step": 1309
},
{
"epoch": 0.1279296875,
"grad_norm": 0.5060856938362122,
"learning_rate": 0.00048548357266666657,
"loss": 4.793,
"step": 1310
},
{
"epoch": 0.12802734375,
"grad_norm": 0.46759769320487976,
"learning_rate": 0.0004854588068069892,
"loss": 4.7773,
"step": 1311
},
{
"epoch": 0.128125,
"grad_norm": 0.40848907828330994,
"learning_rate": 0.0004854340205447589,
"loss": 4.7578,
"step": 1312
},
{
"epoch": 0.12822265625,
"grad_norm": 0.3958721160888672,
"learning_rate": 0.00048540921388237856,
"loss": 4.7773,
"step": 1313
},
{
"epoch": 0.1283203125,
"grad_norm": 0.4191996157169342,
"learning_rate": 0.0004853843868222529,
"loss": 4.7891,
"step": 1314
},
{
"epoch": 0.12841796875,
"grad_norm": 0.38634946942329407,
"learning_rate": 0.00048535953936678885,
"loss": 4.8047,
"step": 1315
},
{
"epoch": 0.128515625,
"grad_norm": 0.36390602588653564,
"learning_rate": 0.00048533467151839517,
"loss": 4.7656,
"step": 1316
},
{
"epoch": 0.12861328125,
"grad_norm": 0.32959216833114624,
"learning_rate": 0.0004853097832794827,
"loss": 4.7461,
"step": 1317
},
{
"epoch": 0.1287109375,
"grad_norm": 0.3805353045463562,
"learning_rate": 0.000485284874652464,
"loss": 4.7695,
"step": 1318
},
{
"epoch": 0.12880859375,
"grad_norm": 0.37332767248153687,
"learning_rate": 0.000485259945639754,
"loss": 4.7734,
"step": 1319
},
{
"epoch": 0.12890625,
"grad_norm": 0.3370415270328522,
"learning_rate": 0.00048523499624376925,
"loss": 4.7695,
"step": 1320
},
{
"epoch": 0.12900390625,
"grad_norm": 0.32865846157073975,
"learning_rate": 0.00048521002646692855,
"loss": 4.8008,
"step": 1321
},
{
"epoch": 0.1291015625,
"grad_norm": 0.35084402561187744,
"learning_rate": 0.0004851850363116524,
"loss": 4.7656,
"step": 1322
},
{
"epoch": 0.12919921875,
"grad_norm": 0.38480809330940247,
"learning_rate": 0.0004851600257803636,
"loss": 4.7734,
"step": 1323
},
{
"epoch": 0.129296875,
"grad_norm": 0.36452239751815796,
"learning_rate": 0.00048513499487548665,
"loss": 4.7461,
"step": 1324
},
{
"epoch": 0.12939453125,
"grad_norm": 0.36055848002433777,
"learning_rate": 0.00048510994359944804,
"loss": 4.7656,
"step": 1325
},
{
"epoch": 0.1294921875,
"grad_norm": 0.3260059952735901,
"learning_rate": 0.00048508487195467653,
"loss": 4.7344,
"step": 1326
},
{
"epoch": 0.12958984375,
"grad_norm": 0.339093953371048,
"learning_rate": 0.0004850597799436025,
"loss": 4.7617,
"step": 1327
},
{
"epoch": 0.1296875,
"grad_norm": 0.40155380964279175,
"learning_rate": 0.00048503466756865847,
"loss": 4.7695,
"step": 1328
},
{
"epoch": 0.12978515625,
"grad_norm": 0.578285813331604,
"learning_rate": 0.00048500953483227895,
"loss": 4.7344,
"step": 1329
},
{
"epoch": 0.1298828125,
"grad_norm": 0.7531477212905884,
"learning_rate": 0.0004849843817369003,
"loss": 4.7812,
"step": 1330
},
{
"epoch": 0.12998046875,
"grad_norm": 0.6945119500160217,
"learning_rate": 0.0004849592082849611,
"loss": 4.7617,
"step": 1331
},
{
"epoch": 0.130078125,
"grad_norm": 0.40566393733024597,
"learning_rate": 0.0004849340144789016,
"loss": 4.7969,
"step": 1332
},
{
"epoch": 0.13017578125,
"grad_norm": 0.4642469584941864,
"learning_rate": 0.00048490880032116425,
"loss": 4.7539,
"step": 1333
},
{
"epoch": 0.1302734375,
"grad_norm": 0.5508970618247986,
"learning_rate": 0.0004848835658141934,
"loss": 4.7539,
"step": 1334
},
{
"epoch": 0.13037109375,
"grad_norm": 0.4666309356689453,
"learning_rate": 0.00048485831096043526,
"loss": 4.8047,
"step": 1335
},
{
"epoch": 0.13046875,
"grad_norm": 0.3999099135398865,
"learning_rate": 0.0004848330357623382,
"loss": 4.7617,
"step": 1336
},
{
"epoch": 0.13056640625,
"grad_norm": 0.41524437069892883,
"learning_rate": 0.0004848077402223524,
"loss": 4.75,
"step": 1337
},
{
"epoch": 0.1306640625,
"grad_norm": 0.37947431206703186,
"learning_rate": 0.0004847824243429302,
"loss": 4.7734,
"step": 1338
},
{
"epoch": 0.13076171875,
"grad_norm": 0.39928561449050903,
"learning_rate": 0.0004847570881265259,
"loss": 4.7852,
"step": 1339
},
{
"epoch": 0.130859375,
"grad_norm": 0.3333352208137512,
"learning_rate": 0.0004847317315755953,
"loss": 4.7891,
"step": 1340
},
{
"epoch": 0.13095703125,
"grad_norm": 0.3555909991264343,
"learning_rate": 0.00048470635469259697,
"loss": 4.7344,
"step": 1341
},
{
"epoch": 0.1310546875,
"grad_norm": 0.3695686459541321,
"learning_rate": 0.00048468095747999067,
"loss": 4.75,
"step": 1342
},
{
"epoch": 0.13115234375,
"grad_norm": 0.32535481452941895,
"learning_rate": 0.00048465553994023875,
"loss": 4.7852,
"step": 1343
},
{
"epoch": 0.13125,
"grad_norm": 0.3166685402393341,
"learning_rate": 0.00048463010207580517,
"loss": 4.7656,
"step": 1344
},
{
"epoch": 0.13134765625,
"grad_norm": 0.30036693811416626,
"learning_rate": 0.000484604643889156,
"loss": 4.7773,
"step": 1345
},
{
"epoch": 0.1314453125,
"grad_norm": 0.3111598491668701,
"learning_rate": 0.0004845791653827591,
"loss": 4.7695,
"step": 1346
},
{
"epoch": 0.13154296875,
"grad_norm": 0.3160509765148163,
"learning_rate": 0.00048455366655908455,
"loss": 4.7695,
"step": 1347
},
{
"epoch": 0.131640625,
"grad_norm": 0.3423873782157898,
"learning_rate": 0.0004845281474206043,
"loss": 4.7422,
"step": 1348
},
{
"epoch": 0.13173828125,
"grad_norm": 0.34536194801330566,
"learning_rate": 0.00048450260796979223,
"loss": 4.7891,
"step": 1349
},
{
"epoch": 0.1318359375,
"grad_norm": 0.347699373960495,
"learning_rate": 0.0004844770482091242,
"loss": 4.7773,
"step": 1350
},
{
"epoch": 0.13193359375,
"grad_norm": 0.3164156675338745,
"learning_rate": 0.00048445146814107804,
"loss": 4.7656,
"step": 1351
},
{
"epoch": 0.13203125,
"grad_norm": 0.2843954265117645,
"learning_rate": 0.00048442586776813363,
"loss": 4.7695,
"step": 1352
},
{
"epoch": 0.13212890625,
"grad_norm": 0.34582918882369995,
"learning_rate": 0.00048440024709277274,
"loss": 4.7734,
"step": 1353
},
{
"epoch": 0.1322265625,
"grad_norm": 0.4413771331310272,
"learning_rate": 0.00048437460611747916,
"loss": 4.7461,
"step": 1354
},
{
"epoch": 0.13232421875,
"grad_norm": 0.4797145426273346,
"learning_rate": 0.0004843489448447385,
"loss": 4.793,
"step": 1355
},
{
"epoch": 0.132421875,
"grad_norm": 0.47246068716049194,
"learning_rate": 0.0004843232632770384,
"loss": 4.8008,
"step": 1356
},
{
"epoch": 0.13251953125,
"grad_norm": 0.39250800013542175,
"learning_rate": 0.00048429756141686863,
"loss": 4.7773,
"step": 1357
},
{
"epoch": 0.1326171875,
"grad_norm": 0.3332385718822479,
"learning_rate": 0.00048427183926672083,
"loss": 4.7773,
"step": 1358
},
{
"epoch": 0.13271484375,
"grad_norm": 0.3847937285900116,
"learning_rate": 0.00048424609682908856,
"loss": 4.7617,
"step": 1359
},
{
"epoch": 0.1328125,
"grad_norm": 0.3584742546081543,
"learning_rate": 0.0004842203341064673,
"loss": 4.7734,
"step": 1360
},
{
"epoch": 0.13291015625,
"grad_norm": 0.34857839345932007,
"learning_rate": 0.0004841945511013547,
"loss": 4.7344,
"step": 1361
},
{
"epoch": 0.1330078125,
"grad_norm": 0.3411064147949219,
"learning_rate": 0.00048416874781625016,
"loss": 4.793,
"step": 1362
},
{
"epoch": 0.13310546875,
"grad_norm": 0.3658686578273773,
"learning_rate": 0.00048414292425365507,
"loss": 4.7617,
"step": 1363
},
{
"epoch": 0.133203125,
"grad_norm": 0.40414950251579285,
"learning_rate": 0.00048411708041607305,
"loss": 4.7539,
"step": 1364
},
{
"epoch": 0.13330078125,
"grad_norm": 0.37495216727256775,
"learning_rate": 0.0004840912163060093,
"loss": 4.7773,
"step": 1365
},
{
"epoch": 0.1333984375,
"grad_norm": 0.40422677993774414,
"learning_rate": 0.00048406533192597124,
"loss": 4.7539,
"step": 1366
},
{
"epoch": 0.13349609375,
"grad_norm": 0.454258531332016,
"learning_rate": 0.0004840394272784682,
"loss": 4.7812,
"step": 1367
},
{
"epoch": 0.13359375,
"grad_norm": 0.39893728494644165,
"learning_rate": 0.00048401350236601146,
"loss": 4.7695,
"step": 1368
},
{
"epoch": 0.13369140625,
"grad_norm": 0.3277254104614258,
"learning_rate": 0.00048398755719111417,
"loss": 4.8086,
"step": 1369
},
{
"epoch": 0.1337890625,
"grad_norm": 0.436477392911911,
"learning_rate": 0.00048396159175629174,
"loss": 4.7461,
"step": 1370
},
{
"epoch": 0.13388671875,
"grad_norm": 0.33649933338165283,
"learning_rate": 0.00048393560606406114,
"loss": 4.7539,
"step": 1371
},
{
"epoch": 0.133984375,
"grad_norm": 0.34157708287239075,
"learning_rate": 0.0004839096001169416,
"loss": 4.7539,
"step": 1372
},
{
"epoch": 0.13408203125,
"grad_norm": 0.4028213322162628,
"learning_rate": 0.0004838835739174541,
"loss": 4.7773,
"step": 1373
},
{
"epoch": 0.1341796875,
"grad_norm": 0.46322396397590637,
"learning_rate": 0.0004838575274681219,
"loss": 4.7617,
"step": 1374
},
{
"epoch": 0.13427734375,
"grad_norm": 0.5127856731414795,
"learning_rate": 0.0004838314607714699,
"loss": 4.7266,
"step": 1375
},
{
"epoch": 0.134375,
"grad_norm": 0.5536565780639648,
"learning_rate": 0.00048380537383002517,
"loss": 4.75,
"step": 1376
},
{
"epoch": 0.13447265625,
"grad_norm": 0.5256456732749939,
"learning_rate": 0.0004837792666463166,
"loss": 4.7695,
"step": 1377
},
{
"epoch": 0.1345703125,
"grad_norm": 0.5643973350524902,
"learning_rate": 0.00048375313922287505,
"loss": 4.7656,
"step": 1378
},
{
"epoch": 0.13466796875,
"grad_norm": 0.8128441572189331,
"learning_rate": 0.00048372699156223355,
"loss": 4.7734,
"step": 1379
},
{
"epoch": 0.134765625,
"grad_norm": 1.026166558265686,
"learning_rate": 0.0004837008236669268,
"loss": 4.7227,
"step": 1380
},
{
"epoch": 0.13486328125,
"grad_norm": 0.4733608365058899,
"learning_rate": 0.00048367463553949166,
"loss": 4.7734,
"step": 1381
},
{
"epoch": 0.1349609375,
"grad_norm": 0.46688416600227356,
"learning_rate": 0.00048364842718246685,
"loss": 4.7461,
"step": 1382
},
{
"epoch": 0.13505859375,
"grad_norm": 0.42120811343193054,
"learning_rate": 0.00048362219859839317,
"loss": 4.7734,
"step": 1383
},
{
"epoch": 0.13515625,
"grad_norm": 0.4719356298446655,
"learning_rate": 0.0004835959497898133,
"loss": 4.7695,
"step": 1384
},
{
"epoch": 0.13525390625,
"grad_norm": 0.5680752992630005,
"learning_rate": 0.0004835696807592718,
"loss": 4.7383,
"step": 1385
},
{
"epoch": 0.1353515625,
"grad_norm": 0.7138311862945557,
"learning_rate": 0.0004835433915093153,
"loss": 4.7812,
"step": 1386
},
{
"epoch": 0.13544921875,
"grad_norm": 0.8844993114471436,
"learning_rate": 0.00048351708204249247,
"loss": 4.7383,
"step": 1387
},
{
"epoch": 0.135546875,
"grad_norm": 1.3054742813110352,
"learning_rate": 0.00048349075236135366,
"loss": 4.7773,
"step": 1388
},
{
"epoch": 0.13564453125,
"grad_norm": 0.5796347260475159,
"learning_rate": 0.0004834644024684515,
"loss": 4.7578,
"step": 1389
},
{
"epoch": 0.1357421875,
"grad_norm": 0.5956249833106995,
"learning_rate": 0.00048343803236634023,
"loss": 4.7461,
"step": 1390
},
{
"epoch": 0.13583984375,
"grad_norm": 0.53680819272995,
"learning_rate": 0.00048341164205757654,
"loss": 4.7773,
"step": 1391
},
{
"epoch": 0.1359375,
"grad_norm": 0.4875905513763428,
"learning_rate": 0.0004833852315447186,
"loss": 4.75,
"step": 1392
},
{
"epoch": 0.13603515625,
"grad_norm": 0.4100656807422638,
"learning_rate": 0.0004833588008303267,
"loss": 4.7305,
"step": 1393
},
{
"epoch": 0.1361328125,
"grad_norm": 0.344696044921875,
"learning_rate": 0.00048333234991696335,
"loss": 4.7891,
"step": 1394
},
{
"epoch": 0.13623046875,
"grad_norm": 0.37795841693878174,
"learning_rate": 0.0004833058788071925,
"loss": 4.7383,
"step": 1395
},
{
"epoch": 0.136328125,
"grad_norm": 0.41078394651412964,
"learning_rate": 0.0004832793875035805,
"loss": 4.7148,
"step": 1396
},
{
"epoch": 0.13642578125,
"grad_norm": 0.3967403471469879,
"learning_rate": 0.0004832528760086956,
"loss": 4.7734,
"step": 1397
},
{
"epoch": 0.1365234375,
"grad_norm": 0.3479665517807007,
"learning_rate": 0.00048322634432510766,
"loss": 4.7539,
"step": 1398
},
{
"epoch": 0.13662109375,
"grad_norm": 0.3690146803855896,
"learning_rate": 0.00048319979245538887,
"loss": 4.7695,
"step": 1399
},
{
"epoch": 0.13671875,
"grad_norm": 0.3363431394100189,
"learning_rate": 0.0004831732204021134,
"loss": 4.7734,
"step": 1400
}
],
"logging_steps": 1.0,
"max_steps": 10240,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 200,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 8.571508323306701e+18,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}