{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.04266712178263235,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 8.53342435652647e-05,
      "grad_norm": 0.82421875,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.8,
      "step": 1
    },
    {
      "epoch": 0.0001706684871305294,
      "grad_norm": 0.9140625,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.8859,
      "step": 2
    },
    {
      "epoch": 0.0002560027306957941,
      "grad_norm": 0.90625,
      "learning_rate": 3e-06,
      "loss": 1.7003,
      "step": 3
    },
    {
      "epoch": 0.0003413369742610588,
      "grad_norm": 0.76171875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.6892,
      "step": 4
    },
    {
      "epoch": 0.0004266712178263235,
      "grad_norm": 0.77734375,
      "learning_rate": 5e-06,
      "loss": 1.6493,
      "step": 5
    },
    {
      "epoch": 0.0005120054613915882,
      "grad_norm": 0.80859375,
      "learning_rate": 6e-06,
      "loss": 1.7291,
      "step": 6
    },
    {
      "epoch": 0.0005973397049568529,
      "grad_norm": 0.8203125,
      "learning_rate": 7.000000000000001e-06,
      "loss": 1.7985,
      "step": 7
    },
    {
      "epoch": 0.0006826739485221176,
      "grad_norm": 0.875,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.6985,
      "step": 8
    },
    {
      "epoch": 0.0007680081920873823,
      "grad_norm": 0.8671875,
      "learning_rate": 9e-06,
      "loss": 1.9519,
      "step": 9
    },
    {
      "epoch": 0.000853342435652647,
      "grad_norm": 0.83984375,
      "learning_rate": 1e-05,
      "loss": 1.8502,
      "step": 10
    },
    {
      "epoch": 0.0009386766792179117,
      "grad_norm": 0.921875,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.6864,
      "step": 11
    },
    {
      "epoch": 0.0010240109227831764,
      "grad_norm": 0.90625,
      "learning_rate": 1.2e-05,
      "loss": 2.0213,
      "step": 12
    },
    {
      "epoch": 0.001109345166348441,
      "grad_norm": 0.8046875,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.7538,
      "step": 13
    },
    {
      "epoch": 0.0011946794099137058,
      "grad_norm": 0.921875,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.8914,
      "step": 14
    },
    {
      "epoch": 0.0012800136534789704,
      "grad_norm": 0.890625,
      "learning_rate": 1.5e-05,
      "loss": 1.7576,
      "step": 15
    },
    {
      "epoch": 0.0013653478970442352,
      "grad_norm": 0.84375,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.6817,
      "step": 16
    },
    {
      "epoch": 0.0014506821406094998,
      "grad_norm": 0.91015625,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 1.9613,
      "step": 17
    },
    {
      "epoch": 0.0015360163841747646,
      "grad_norm": 0.86328125,
      "learning_rate": 1.8e-05,
      "loss": 1.692,
      "step": 18
    },
    {
      "epoch": 0.0016213506277400291,
      "grad_norm": 0.7578125,
      "learning_rate": 1.9e-05,
      "loss": 1.5385,
      "step": 19
    },
    {
      "epoch": 0.001706684871305294,
      "grad_norm": 0.8515625,
      "learning_rate": 2e-05,
      "loss": 1.8615,
      "step": 20
    },
    {
      "epoch": 0.0017920191148705585,
      "grad_norm": 0.83984375,
      "learning_rate": 2.1e-05,
      "loss": 1.7127,
      "step": 21
    },
    {
      "epoch": 0.0018773533584358233,
      "grad_norm": 0.80859375,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.8537,
      "step": 22
    },
    {
      "epoch": 0.001962687602001088,
      "grad_norm": 0.80078125,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 2.0063,
      "step": 23
    },
    {
      "epoch": 0.0020480218455663527,
      "grad_norm": 0.79296875,
      "learning_rate": 2.4e-05,
      "loss": 1.636,
      "step": 24
    },
    {
      "epoch": 0.0021333560891316173,
      "grad_norm": 0.98046875,
      "learning_rate": 2.5e-05,
      "loss": 1.7672,
      "step": 25
    },
    {
      "epoch": 0.002218690332696882,
      "grad_norm": 0.81640625,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 2.0052,
      "step": 26
    },
    {
      "epoch": 0.002304024576262147,
      "grad_norm": 0.9453125,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.9785,
      "step": 27
    },
    {
      "epoch": 0.0023893588198274115,
      "grad_norm": 0.85546875,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.7909,
      "step": 28
    },
    {
      "epoch": 0.002474693063392676,
      "grad_norm": 0.765625,
      "learning_rate": 2.9e-05,
      "loss": 1.8871,
      "step": 29
    },
    {
      "epoch": 0.0025600273069579407,
      "grad_norm": 0.8203125,
      "learning_rate": 3e-05,
      "loss": 1.707,
      "step": 30
    },
    {
      "epoch": 0.0026453615505232057,
      "grad_norm": 0.796875,
      "learning_rate": 3.1e-05,
      "loss": 1.9111,
      "step": 31
    },
    {
      "epoch": 0.0027306957940884703,
      "grad_norm": 0.73828125,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.6338,
      "step": 32
    },
    {
      "epoch": 0.002816030037653735,
      "grad_norm": 0.80078125,
      "learning_rate": 3.3e-05,
      "loss": 1.8687,
      "step": 33
    },
    {
      "epoch": 0.0029013642812189995,
      "grad_norm": 0.72265625,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.7552,
      "step": 34
    },
    {
      "epoch": 0.0029866985247842645,
      "grad_norm": 0.71875,
      "learning_rate": 3.5e-05,
      "loss": 1.6459,
      "step": 35
    },
    {
      "epoch": 0.003072032768349529,
      "grad_norm": 0.89453125,
      "learning_rate": 3.6e-05,
      "loss": 1.8945,
      "step": 36
    },
    {
      "epoch": 0.0031573670119147937,
      "grad_norm": 0.86328125,
      "learning_rate": 3.7e-05,
      "loss": 1.8111,
      "step": 37
    },
    {
      "epoch": 0.0032427012554800583,
      "grad_norm": 0.84765625,
      "learning_rate": 3.8e-05,
      "loss": 1.8449,
      "step": 38
    },
    {
      "epoch": 0.0033280354990453233,
      "grad_norm": 0.73828125,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 1.7649,
      "step": 39
    },
    {
      "epoch": 0.003413369742610588,
      "grad_norm": 0.82421875,
      "learning_rate": 4e-05,
      "loss": 1.8078,
      "step": 40
    },
    {
      "epoch": 0.0034987039861758525,
      "grad_norm": 0.74609375,
      "learning_rate": 4.1e-05,
      "loss": 1.6198,
      "step": 41
    },
    {
      "epoch": 0.003584038229741117,
      "grad_norm": 0.90625,
      "learning_rate": 4.2e-05,
      "loss": 1.7853,
      "step": 42
    },
    {
      "epoch": 0.003669372473306382,
      "grad_norm": 0.828125,
      "learning_rate": 4.3e-05,
      "loss": 1.8162,
      "step": 43
    },
    {
      "epoch": 0.0037547067168716467,
      "grad_norm": 0.7265625,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.8106,
      "step": 44
    },
    {
      "epoch": 0.0038400409604369113,
      "grad_norm": 0.69921875,
      "learning_rate": 4.5e-05,
      "loss": 1.6102,
      "step": 45
    },
    {
      "epoch": 0.003925375204002176,
      "grad_norm": 0.6875,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.6463,
      "step": 46
    },
    {
      "epoch": 0.004010709447567441,
      "grad_norm": 0.66015625,
      "learning_rate": 4.7e-05,
      "loss": 1.546,
      "step": 47
    },
    {
      "epoch": 0.0040960436911327055,
      "grad_norm": 0.703125,
      "learning_rate": 4.8e-05,
      "loss": 1.7966,
      "step": 48
    },
    {
      "epoch": 0.00418137793469797,
      "grad_norm": 0.796875,
      "learning_rate": 4.9e-05,
      "loss": 1.9185,
      "step": 49
    },
    {
      "epoch": 0.004266712178263235,
      "grad_norm": 0.70703125,
      "learning_rate": 5e-05,
      "loss": 1.6677,
      "step": 50
    },
    {
      "epoch": 0.004352046421828499,
      "grad_norm": 0.765625,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 1.7566,
      "step": 51
    },
    {
      "epoch": 0.004437380665393764,
      "grad_norm": 0.7578125,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.6925,
      "step": 52
    },
    {
      "epoch": 0.004522714908959029,
      "grad_norm": 0.8515625,
      "learning_rate": 5.300000000000001e-05,
      "loss": 1.8328,
      "step": 53
    },
    {
      "epoch": 0.004608049152524294,
      "grad_norm": 0.73046875,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.906,
      "step": 54
    },
    {
      "epoch": 0.0046933833960895585,
      "grad_norm": 0.68359375,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.6741,
      "step": 55
    },
    {
      "epoch": 0.004778717639654823,
      "grad_norm": 0.67578125,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.8366,
      "step": 56
    },
    {
      "epoch": 0.004864051883220088,
      "grad_norm": 0.69921875,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 1.733,
      "step": 57
    },
    {
      "epoch": 0.004949386126785352,
      "grad_norm": 0.6171875,
      "learning_rate": 5.8e-05,
      "loss": 1.8005,
      "step": 58
    },
    {
      "epoch": 0.005034720370350617,
      "grad_norm": 0.734375,
      "learning_rate": 5.9e-05,
      "loss": 1.7329,
      "step": 59
    },
    {
      "epoch": 0.005120054613915881,
      "grad_norm": 0.69140625,
      "learning_rate": 6e-05,
      "loss": 1.7772,
      "step": 60
    },
    {
      "epoch": 0.005205388857481147,
      "grad_norm": 0.671875,
      "learning_rate": 6.1e-05,
      "loss": 1.6311,
      "step": 61
    },
    {
      "epoch": 0.0052907231010464115,
      "grad_norm": 0.59765625,
      "learning_rate": 6.2e-05,
      "loss": 1.506,
      "step": 62
    },
    {
      "epoch": 0.005376057344611676,
      "grad_norm": 0.6796875,
      "learning_rate": 6.3e-05,
      "loss": 1.6361,
      "step": 63
    },
    {
      "epoch": 0.005461391588176941,
      "grad_norm": 0.65234375,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.6185,
      "step": 64
    },
    {
      "epoch": 0.005546725831742205,
      "grad_norm": 0.6328125,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.7215,
      "step": 65
    },
    {
      "epoch": 0.00563206007530747,
      "grad_norm": 0.671875,
      "learning_rate": 6.6e-05,
      "loss": 1.8679,
      "step": 66
    },
    {
      "epoch": 0.005717394318872734,
      "grad_norm": 0.5859375,
      "learning_rate": 6.7e-05,
      "loss": 1.7063,
      "step": 67
    },
    {
      "epoch": 0.005802728562437999,
      "grad_norm": 0.58984375,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.6245,
      "step": 68
    },
    {
      "epoch": 0.0058880628060032645,
      "grad_norm": 0.578125,
      "learning_rate": 6.9e-05,
      "loss": 1.8109,
      "step": 69
    },
    {
      "epoch": 0.005973397049568529,
      "grad_norm": 0.67578125,
      "learning_rate": 7e-05,
      "loss": 1.9045,
      "step": 70
    },
    {
      "epoch": 0.006058731293133794,
      "grad_norm": 0.70703125,
      "learning_rate": 7.1e-05,
      "loss": 1.6802,
      "step": 71
    },
    {
      "epoch": 0.006144065536699058,
      "grad_norm": 0.66015625,
      "learning_rate": 7.2e-05,
      "loss": 1.757,
      "step": 72
    },
    {
      "epoch": 0.006229399780264323,
      "grad_norm": 0.6171875,
      "learning_rate": 7.3e-05,
      "loss": 1.7513,
      "step": 73
    },
    {
      "epoch": 0.006314734023829587,
      "grad_norm": 0.60546875,
      "learning_rate": 7.4e-05,
      "loss": 1.8557,
      "step": 74
    },
    {
      "epoch": 0.006400068267394852,
      "grad_norm": 0.5625,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.6747,
      "step": 75
    },
    {
      "epoch": 0.006485402510960117,
      "grad_norm": 0.6015625,
      "learning_rate": 7.6e-05,
      "loss": 1.6629,
      "step": 76
    },
    {
      "epoch": 0.006570736754525381,
      "grad_norm": 0.78515625,
      "learning_rate": 7.7e-05,
      "loss": 1.8731,
      "step": 77
    },
    {
      "epoch": 0.006656070998090647,
      "grad_norm": 0.59765625,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.6409,
      "step": 78
    },
    {
      "epoch": 0.006741405241655911,
      "grad_norm": 0.63671875,
      "learning_rate": 7.900000000000001e-05,
      "loss": 1.6955,
      "step": 79
    },
    {
      "epoch": 0.006826739485221176,
      "grad_norm": 0.58203125,
      "learning_rate": 8e-05,
      "loss": 1.6168,
      "step": 80
    },
    {
      "epoch": 0.00691207372878644,
      "grad_norm": 0.60546875,
      "learning_rate": 8.1e-05,
      "loss": 1.8731,
      "step": 81
    },
    {
      "epoch": 0.006997407972351705,
      "grad_norm": 0.5625,
      "learning_rate": 8.2e-05,
      "loss": 1.8115,
      "step": 82
    },
    {
      "epoch": 0.00708274221591697,
      "grad_norm": 0.6328125,
      "learning_rate": 8.3e-05,
      "loss": 1.8266,
      "step": 83
    },
    {
      "epoch": 0.007168076459482234,
      "grad_norm": 0.5390625,
      "learning_rate": 8.4e-05,
      "loss": 1.6428,
      "step": 84
    },
    {
      "epoch": 0.007253410703047499,
      "grad_norm": 0.53515625,
      "learning_rate": 8.5e-05,
      "loss": 1.6117,
      "step": 85
    },
    {
      "epoch": 0.007338744946612764,
      "grad_norm": 0.5,
      "learning_rate": 8.6e-05,
      "loss": 1.5626,
      "step": 86
    },
    {
      "epoch": 0.007424079190178029,
      "grad_norm": 0.51953125,
      "learning_rate": 8.7e-05,
      "loss": 1.686,
      "step": 87
    },
    {
      "epoch": 0.007509413433743293,
      "grad_norm": 0.50390625,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.6377,
      "step": 88
    },
    {
      "epoch": 0.007594747677308558,
      "grad_norm": 0.48046875,
      "learning_rate": 8.900000000000001e-05,
      "loss": 1.5294,
      "step": 89
    },
    {
      "epoch": 0.007680081920873823,
      "grad_norm": 0.50390625,
      "learning_rate": 9e-05,
      "loss": 1.6331,
      "step": 90
    },
    {
      "epoch": 0.007765416164439087,
      "grad_norm": 0.5078125,
      "learning_rate": 9.1e-05,
      "loss": 1.725,
      "step": 91
    },
    {
      "epoch": 0.007850750408004353,
      "grad_norm": 0.5,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.6189,
      "step": 92
    },
    {
      "epoch": 0.007936084651569617,
      "grad_norm": 0.546875,
      "learning_rate": 9.300000000000001e-05,
      "loss": 1.5317,
      "step": 93
    },
    {
      "epoch": 0.008021418895134882,
      "grad_norm": 0.48828125,
      "learning_rate": 9.4e-05,
      "loss": 1.4674,
      "step": 94
    },
    {
      "epoch": 0.008106753138700146,
      "grad_norm": 0.55078125,
      "learning_rate": 9.5e-05,
      "loss": 1.8128,
      "step": 95
    },
    {
      "epoch": 0.008192087382265411,
      "grad_norm": 0.54296875,
      "learning_rate": 9.6e-05,
      "loss": 1.5884,
      "step": 96
    },
    {
      "epoch": 0.008277421625830676,
      "grad_norm": 0.46875,
      "learning_rate": 9.7e-05,
      "loss": 1.6662,
      "step": 97
    },
    {
      "epoch": 0.00836275586939594,
      "grad_norm": 0.5078125,
      "learning_rate": 9.8e-05,
      "loss": 1.5444,
      "step": 98
    },
    {
      "epoch": 0.008448090112961205,
      "grad_norm": 0.57421875,
      "learning_rate": 9.900000000000001e-05,
      "loss": 1.8447,
      "step": 99
    },
    {
      "epoch": 0.00853342435652647,
      "grad_norm": 0.515625,
      "learning_rate": 0.0001,
      "loss": 1.5976,
      "step": 100
    },
    {
      "epoch": 0.008618758600091734,
      "grad_norm": 0.6640625,
      "learning_rate": 9.999497487437187e-05,
      "loss": 1.4566,
      "step": 101
    },
    {
      "epoch": 0.008704092843656999,
      "grad_norm": 0.5078125,
      "learning_rate": 9.998994974874373e-05,
      "loss": 1.9021,
      "step": 102
    },
    {
      "epoch": 0.008789427087222263,
      "grad_norm": 0.435546875,
      "learning_rate": 9.998492462311558e-05,
      "loss": 1.4717,
      "step": 103
    },
    {
      "epoch": 0.008874761330787528,
      "grad_norm": 0.482421875,
      "learning_rate": 9.997989949748744e-05,
      "loss": 1.5066,
      "step": 104
    },
    {
      "epoch": 0.008960095574352792,
      "grad_norm": 0.578125,
      "learning_rate": 9.99748743718593e-05,
      "loss": 1.553,
      "step": 105
    },
    {
      "epoch": 0.009045429817918059,
      "grad_norm": 0.4609375,
      "learning_rate": 9.996984924623116e-05,
      "loss": 1.4151,
      "step": 106
    },
    {
      "epoch": 0.009130764061483323,
      "grad_norm": 0.4921875,
      "learning_rate": 9.996482412060301e-05,
      "loss": 1.7529,
      "step": 107
    },
    {
      "epoch": 0.009216098305048588,
      "grad_norm": 0.486328125,
      "learning_rate": 9.995979899497487e-05,
      "loss": 1.6218,
      "step": 108
    },
    {
      "epoch": 0.009301432548613852,
      "grad_norm": 0.4765625,
      "learning_rate": 9.995477386934674e-05,
      "loss": 1.6131,
      "step": 109
    },
    {
      "epoch": 0.009386766792179117,
      "grad_norm": 0.55078125,
      "learning_rate": 9.99497487437186e-05,
      "loss": 1.7024,
      "step": 110
    },
    {
      "epoch": 0.009472101035744382,
      "grad_norm": 0.4453125,
      "learning_rate": 9.994472361809045e-05,
      "loss": 1.5234,
      "step": 111
    },
    {
      "epoch": 0.009557435279309646,
      "grad_norm": 0.482421875,
      "learning_rate": 9.993969849246232e-05,
      "loss": 1.4346,
      "step": 112
    },
    {
      "epoch": 0.00964276952287491,
      "grad_norm": 0.427734375,
      "learning_rate": 9.993467336683417e-05,
      "loss": 1.5256,
      "step": 113
    },
    {
      "epoch": 0.009728103766440175,
      "grad_norm": 0.486328125,
      "learning_rate": 9.992964824120603e-05,
      "loss": 1.7505,
      "step": 114
    },
    {
      "epoch": 0.00981343801000544,
      "grad_norm": 0.462890625,
      "learning_rate": 9.99246231155779e-05,
      "loss": 1.534,
      "step": 115
    },
    {
      "epoch": 0.009898772253570704,
      "grad_norm": 0.5546875,
      "learning_rate": 9.991959798994976e-05,
      "loss": 1.7212,
      "step": 116
    },
    {
      "epoch": 0.009984106497135969,
      "grad_norm": 0.4609375,
      "learning_rate": 9.99145728643216e-05,
      "loss": 1.4882,
      "step": 117
    },
    {
      "epoch": 0.010069440740701234,
      "grad_norm": 0.443359375,
      "learning_rate": 9.990954773869348e-05,
      "loss": 1.6496,
      "step": 118
    },
    {
      "epoch": 0.010154774984266498,
      "grad_norm": 0.47265625,
      "learning_rate": 9.990452261306533e-05,
      "loss": 1.547,
      "step": 119
    },
    {
      "epoch": 0.010240109227831763,
      "grad_norm": 0.47265625,
      "learning_rate": 9.989949748743719e-05,
      "loss": 1.548,
      "step": 120
    },
    {
      "epoch": 0.010325443471397027,
      "grad_norm": 0.41015625,
      "learning_rate": 9.989447236180905e-05,
      "loss": 1.5738,
      "step": 121
    },
    {
      "epoch": 0.010410777714962294,
      "grad_norm": 0.4296875,
      "learning_rate": 9.988944723618092e-05,
      "loss": 1.4623,
      "step": 122
    },
    {
      "epoch": 0.010496111958527558,
      "grad_norm": 0.482421875,
      "learning_rate": 9.988442211055276e-05,
      "loss": 1.5758,
      "step": 123
    },
    {
      "epoch": 0.010581446202092823,
      "grad_norm": 0.396484375,
      "learning_rate": 9.987939698492463e-05,
      "loss": 1.4766,
      "step": 124
    },
    {
      "epoch": 0.010666780445658088,
      "grad_norm": 0.45703125,
      "learning_rate": 9.987437185929649e-05,
      "loss": 1.5325,
      "step": 125
    },
    {
      "epoch": 0.010752114689223352,
      "grad_norm": 0.52734375,
      "learning_rate": 9.986934673366835e-05,
      "loss": 1.5734,
      "step": 126
    },
    {
      "epoch": 0.010837448932788617,
      "grad_norm": 0.43359375,
      "learning_rate": 9.98643216080402e-05,
      "loss": 1.5174,
      "step": 127
    },
    {
      "epoch": 0.010922783176353881,
      "grad_norm": 0.384765625,
      "learning_rate": 9.985929648241207e-05,
      "loss": 1.4156,
      "step": 128
    },
    {
      "epoch": 0.011008117419919146,
      "grad_norm": 0.392578125,
      "learning_rate": 9.985427135678392e-05,
      "loss": 1.5875,
      "step": 129
    },
    {
      "epoch": 0.01109345166348441,
      "grad_norm": 0.37109375,
      "learning_rate": 9.984924623115578e-05,
      "loss": 1.4329,
      "step": 130
    },
    {
      "epoch": 0.011178785907049675,
      "grad_norm": 0.4453125,
      "learning_rate": 9.984422110552765e-05,
      "loss": 1.6382,
      "step": 131
    },
    {
      "epoch": 0.01126412015061494,
      "grad_norm": 0.435546875,
      "learning_rate": 9.983919597989951e-05,
      "loss": 1.5618,
      "step": 132
    },
    {
      "epoch": 0.011349454394180204,
      "grad_norm": 0.41796875,
      "learning_rate": 9.983417085427136e-05,
      "loss": 1.4175,
      "step": 133
    },
    {
      "epoch": 0.011434788637745469,
      "grad_norm": 0.44921875,
      "learning_rate": 9.982914572864322e-05,
      "loss": 1.5475,
      "step": 134
    },
    {
      "epoch": 0.011520122881310733,
      "grad_norm": 0.4765625,
      "learning_rate": 9.982412060301508e-05,
      "loss": 1.5458,
      "step": 135
    },
    {
      "epoch": 0.011605457124875998,
      "grad_norm": 0.46875,
      "learning_rate": 9.981909547738694e-05,
      "loss": 1.5643,
      "step": 136
    },
    {
      "epoch": 0.011690791368441263,
      "grad_norm": 0.44140625,
      "learning_rate": 9.98140703517588e-05,
      "loss": 1.7032,
      "step": 137
    },
    {
      "epoch": 0.011776125612006529,
      "grad_norm": 0.384765625,
      "learning_rate": 9.980904522613065e-05,
      "loss": 1.4243,
      "step": 138
    },
    {
      "epoch": 0.011861459855571794,
      "grad_norm": 0.392578125,
      "learning_rate": 9.980402010050252e-05,
      "loss": 1.4475,
      "step": 139
    },
    {
      "epoch": 0.011946794099137058,
      "grad_norm": 0.427734375,
      "learning_rate": 9.979899497487438e-05,
      "loss": 1.5821,
      "step": 140
    },
    {
      "epoch": 0.012032128342702323,
      "grad_norm": 0.435546875,
      "learning_rate": 9.979396984924624e-05,
      "loss": 1.5276,
      "step": 141
    },
    {
      "epoch": 0.012117462586267587,
      "grad_norm": 0.4296875,
      "learning_rate": 9.978894472361809e-05,
      "loss": 1.5591,
      "step": 142
    },
    {
      "epoch": 0.012202796829832852,
      "grad_norm": 0.392578125,
      "learning_rate": 9.978391959798995e-05,
      "loss": 1.3586,
      "step": 143
    },
    {
      "epoch": 0.012288131073398116,
      "grad_norm": 0.45703125,
      "learning_rate": 9.977889447236181e-05,
      "loss": 1.506,
      "step": 144
    },
    {
      "epoch": 0.012373465316963381,
      "grad_norm": 0.451171875,
      "learning_rate": 9.977386934673367e-05,
      "loss": 1.5435,
      "step": 145
    },
    {
      "epoch": 0.012458799560528646,
      "grad_norm": 0.431640625,
      "learning_rate": 9.976884422110552e-05,
      "loss": 1.5242,
      "step": 146
    },
    {
      "epoch": 0.01254413380409391,
      "grad_norm": 0.439453125,
      "learning_rate": 9.97638190954774e-05,
      "loss": 1.5617,
      "step": 147
    },
    {
      "epoch": 0.012629468047659175,
      "grad_norm": 0.392578125,
      "learning_rate": 9.975879396984925e-05,
      "loss": 1.6439,
      "step": 148
    },
    {
      "epoch": 0.01271480229122444,
      "grad_norm": 0.40625,
      "learning_rate": 9.975376884422111e-05,
      "loss": 1.4132,
      "step": 149
    },
    {
      "epoch": 0.012800136534789704,
      "grad_norm": 0.40234375,
      "learning_rate": 9.974874371859297e-05,
      "loss": 1.5351,
      "step": 150
    },
    {
      "epoch": 0.012885470778354969,
      "grad_norm": 0.396484375,
      "learning_rate": 9.974371859296483e-05,
      "loss": 1.5083,
      "step": 151
    },
    {
      "epoch": 0.012970805021920233,
      "grad_norm": 0.41796875,
      "learning_rate": 9.973869346733668e-05,
      "loss": 1.3979,
      "step": 152
    },
    {
      "epoch": 0.013056139265485498,
      "grad_norm": 0.43359375,
      "learning_rate": 9.973366834170856e-05,
      "loss": 1.4763,
      "step": 153
    },
    {
      "epoch": 0.013141473509050762,
      "grad_norm": 0.427734375,
      "learning_rate": 9.97286432160804e-05,
      "loss": 1.3848,
      "step": 154
    },
    {
      "epoch": 0.013226807752616029,
      "grad_norm": 0.37109375,
      "learning_rate": 9.972361809045227e-05,
      "loss": 1.4794,
      "step": 155
    },
    {
      "epoch": 0.013312141996181293,
      "grad_norm": 0.392578125,
      "learning_rate": 9.971859296482412e-05,
      "loss": 1.6615,
      "step": 156
    },
    {
      "epoch": 0.013397476239746558,
      "grad_norm": 0.404296875,
      "learning_rate": 9.971356783919599e-05,
      "loss": 1.459,
      "step": 157
    },
    {
      "epoch": 0.013482810483311822,
      "grad_norm": 0.458984375,
      "learning_rate": 9.970854271356784e-05,
      "loss": 1.4856,
      "step": 158
    },
    {
      "epoch": 0.013568144726877087,
      "grad_norm": 0.390625,
      "learning_rate": 9.97035175879397e-05,
      "loss": 1.5139,
      "step": 159
    },
    {
      "epoch": 0.013653478970442352,
      "grad_norm": 0.455078125,
      "learning_rate": 9.969849246231156e-05,
      "loss": 1.4959,
      "step": 160
    },
    {
      "epoch": 0.013738813214007616,
      "grad_norm": 0.400390625,
      "learning_rate": 9.969346733668343e-05,
      "loss": 1.5225,
      "step": 161
    },
    {
      "epoch": 0.01382414745757288,
      "grad_norm": 0.5078125,
      "learning_rate": 9.968844221105527e-05,
      "loss": 1.4846,
      "step": 162
    },
    {
      "epoch": 0.013909481701138145,
      "grad_norm": 0.369140625,
      "learning_rate": 9.968341708542715e-05,
      "loss": 1.461,
      "step": 163
    },
    {
      "epoch": 0.01399481594470341,
      "grad_norm": 0.388671875,
      "learning_rate": 9.9678391959799e-05,
      "loss": 1.6441,
      "step": 164
    },
    {
      "epoch": 0.014080150188268675,
      "grad_norm": 0.421875,
      "learning_rate": 9.967336683417086e-05,
      "loss": 1.5616,
      "step": 165
    },
    {
      "epoch": 0.01416548443183394,
      "grad_norm": 0.37890625,
      "learning_rate": 9.966834170854272e-05,
      "loss": 1.4335,
      "step": 166
    },
    {
      "epoch": 0.014250818675399204,
      "grad_norm": 0.3828125,
      "learning_rate": 9.966331658291458e-05,
      "loss": 1.3676,
      "step": 167
    },
    {
      "epoch": 0.014336152918964468,
      "grad_norm": 0.4140625,
      "learning_rate": 9.965829145728643e-05,
      "loss": 1.6303,
      "step": 168
    },
    {
      "epoch": 0.014421487162529733,
      "grad_norm": 0.380859375,
      "learning_rate": 9.96532663316583e-05,
      "loss": 1.5304,
      "step": 169
    },
    {
      "epoch": 0.014506821406094998,
      "grad_norm": 0.400390625,
      "learning_rate": 9.964824120603016e-05,
      "loss": 1.5157,
      "step": 170
    },
    {
      "epoch": 0.014592155649660264,
      "grad_norm": 0.455078125,
      "learning_rate": 9.964321608040202e-05,
      "loss": 1.5339,
      "step": 171
    },
    {
      "epoch": 0.014677489893225528,
      "grad_norm": 0.375,
      "learning_rate": 9.963819095477387e-05,
      "loss": 1.4739,
      "step": 172
    },
    {
      "epoch": 0.014762824136790793,
      "grad_norm": 0.4375,
      "learning_rate": 9.963316582914573e-05,
      "loss": 1.3935,
      "step": 173
    },
    {
      "epoch": 0.014848158380356058,
      "grad_norm": 0.37890625,
      "learning_rate": 9.962814070351759e-05,
      "loss": 1.4655,
      "step": 174
    },
    {
      "epoch": 0.014933492623921322,
      "grad_norm": 0.41796875,
      "learning_rate": 9.962311557788945e-05,
      "loss": 1.4473,
      "step": 175
    },
    {
      "epoch": 0.015018826867486587,
      "grad_norm": 0.435546875,
      "learning_rate": 9.961809045226132e-05,
      "loss": 1.4698,
      "step": 176
    },
    {
      "epoch": 0.015104161111051851,
      "grad_norm": 0.37109375,
      "learning_rate": 9.961306532663316e-05,
      "loss": 1.5137,
      "step": 177
    },
    {
      "epoch": 0.015189495354617116,
      "grad_norm": 0.3671875,
      "learning_rate": 9.960804020100503e-05,
      "loss": 1.5024,
      "step": 178
    },
    {
      "epoch": 0.01527482959818238,
      "grad_norm": 0.416015625,
      "learning_rate": 9.960301507537689e-05,
      "loss": 1.4906,
      "step": 179
    },
    {
      "epoch": 0.015360163841747645,
      "grad_norm": 0.396484375,
      "learning_rate": 9.959798994974875e-05,
      "loss": 1.4285,
      "step": 180
    },
    {
      "epoch": 0.01544549808531291,
      "grad_norm": 0.373046875,
      "learning_rate": 9.95929648241206e-05,
      "loss": 1.3331,
      "step": 181
    },
    {
      "epoch": 0.015530832328878174,
      "grad_norm": 0.421875,
      "learning_rate": 9.958793969849247e-05,
      "loss": 1.5696,
      "step": 182
    },
    {
      "epoch": 0.015616166572443439,
      "grad_norm": 0.4453125,
      "learning_rate": 9.958291457286432e-05,
      "loss": 1.7032,
      "step": 183
    },
    {
      "epoch": 0.015701500816008705,
      "grad_norm": 0.396484375,
      "learning_rate": 9.957788944723619e-05,
      "loss": 1.4965,
      "step": 184
    },
    {
      "epoch": 0.01578683505957397,
      "grad_norm": 0.3984375,
      "learning_rate": 9.957286432160805e-05,
      "loss": 1.4278,
      "step": 185
    },
    {
      "epoch": 0.015872169303139234,
      "grad_norm": 0.353515625,
      "learning_rate": 9.956783919597991e-05,
      "loss": 1.3238,
      "step": 186
    },
    {
      "epoch": 0.0159575035467045,
      "grad_norm": 0.4296875,
      "learning_rate": 9.956281407035176e-05,
      "loss": 1.5803,
      "step": 187
    },
    {
      "epoch": 0.016042837790269764,
      "grad_norm": 0.396484375,
      "learning_rate": 9.955778894472362e-05,
      "loss": 1.5097,
      "step": 188
    },
    {
      "epoch": 0.016128172033835028,
      "grad_norm": 0.376953125,
      "learning_rate": 9.955276381909548e-05,
      "loss": 1.5302,
      "step": 189
    },
    {
      "epoch": 0.016213506277400293,
      "grad_norm": 0.421875,
      "learning_rate": 9.954773869346734e-05,
      "loss": 1.4605,
      "step": 190
    },
    {
      "epoch": 0.016298840520965557,
      "grad_norm": 0.431640625,
      "learning_rate": 9.954271356783919e-05,
      "loss": 1.4818,
      "step": 191
    },
    {
      "epoch": 0.016384174764530822,
      "grad_norm": 0.44921875,
      "learning_rate": 9.953768844221107e-05,
      "loss": 1.5929,
      "step": 192
    },
    {
      "epoch": 0.016469509008096087,
      "grad_norm": 0.373046875,
      "learning_rate": 9.953266331658292e-05,
      "loss": 1.6972,
      "step": 193
    },
    {
      "epoch": 0.01655484325166135,
      "grad_norm": 0.423828125,
      "learning_rate": 9.952763819095478e-05,
      "loss": 1.7035,
      "step": 194
    },
    {
      "epoch": 0.016640177495226616,
      "grad_norm": 0.4140625,
      "learning_rate": 9.952261306532664e-05,
      "loss": 1.522,
      "step": 195
    },
    {
      "epoch": 0.01672551173879188,
      "grad_norm": 0.40234375,
      "learning_rate": 9.95175879396985e-05,
      "loss": 1.5499,
      "step": 196
    },
    {
      "epoch": 0.016810845982357145,
      "grad_norm": 0.396484375,
      "learning_rate": 9.951256281407035e-05,
      "loss": 1.3498,
      "step": 197
    },
    {
      "epoch": 0.01689618022592241,
      "grad_norm": 0.423828125,
      "learning_rate": 9.950753768844223e-05,
      "loss": 1.4946,
      "step": 198
    },
    {
      "epoch": 0.016981514469487674,
      "grad_norm": 0.412109375,
      "learning_rate": 9.950251256281408e-05,
      "loss": 1.4106,
      "step": 199
    },
    {
      "epoch": 0.01706684871305294,
      "grad_norm": 0.34375,
      "learning_rate": 9.949748743718594e-05,
      "loss": 1.3061,
      "step": 200
    },
    {
      "epoch": 0.017152182956618203,
      "grad_norm": 0.3671875,
      "learning_rate": 9.94924623115578e-05,
      "loss": 1.6192,
      "step": 201
    },
    {
      "epoch": 0.017237517200183468,
      "grad_norm": 0.376953125,
      "learning_rate": 9.948743718592966e-05,
      "loss": 1.6002,
      "step": 202
    },
    {
      "epoch": 0.017322851443748732,
      "grad_norm": 0.44921875,
      "learning_rate": 9.948241206030151e-05,
      "loss": 1.4208,
      "step": 203
    },
    {
      "epoch": 0.017408185687313997,
      "grad_norm": 0.359375,
      "learning_rate": 9.947738693467337e-05,
      "loss": 1.4471,
      "step": 204
    },
    {
      "epoch": 0.01749351993087926,
      "grad_norm": 0.3828125,
      "learning_rate": 9.947236180904523e-05,
      "loss": 1.5423,
      "step": 205
    },
    {
      "epoch": 0.017578854174444526,
      "grad_norm": 0.390625,
      "learning_rate": 9.94673366834171e-05,
      "loss": 1.4858,
      "step": 206
    },
    {
      "epoch": 0.01766418841800979,
      "grad_norm": 0.369140625,
      "learning_rate": 9.946231155778894e-05,
      "loss": 1.492,
      "step": 207
    },
    {
      "epoch": 0.017749522661575055,
      "grad_norm": 0.35546875,
      "learning_rate": 9.94572864321608e-05,
      "loss": 1.4228,
      "step": 208
    },
    {
      "epoch": 0.01783485690514032,
      "grad_norm": 0.359375,
      "learning_rate": 9.945226130653267e-05,
      "loss": 1.3717,
      "step": 209
    },
    {
      "epoch": 0.017920191148705585,
      "grad_norm": 0.36328125,
      "learning_rate": 9.944723618090453e-05,
      "loss": 1.4904,
      "step": 210
    },
    {
      "epoch": 0.018005525392270853,
      "grad_norm": 0.34375,
      "learning_rate": 9.944221105527639e-05,
      "loss": 1.3978,
      "step": 211
    },
    {
      "epoch": 0.018090859635836117,
      "grad_norm": 0.41015625,
      "learning_rate": 9.943718592964824e-05,
      "loss": 1.4583,
      "step": 212
    },
    {
      "epoch": 0.018176193879401382,
      "grad_norm": 0.36328125,
      "learning_rate": 9.94321608040201e-05,
      "loss": 1.5892,
      "step": 213
    },
    {
      "epoch": 0.018261528122966646,
      "grad_norm": 0.37109375,
      "learning_rate": 9.942713567839197e-05,
      "loss": 1.5942,
      "step": 214
    },
    {
      "epoch": 0.01834686236653191,
      "grad_norm": 0.35546875,
      "learning_rate": 9.942211055276383e-05,
      "loss": 1.5318,
      "step": 215
    },
    {
      "epoch": 0.018432196610097176,
      "grad_norm": 0.359375,
      "learning_rate": 9.941708542713568e-05,
      "loss": 1.4243,
      "step": 216
    },
    {
      "epoch": 0.01851753085366244,
      "grad_norm": 0.392578125,
      "learning_rate": 9.941206030150754e-05,
      "loss": 1.3628,
      "step": 217
    },
    {
      "epoch": 0.018602865097227705,
      "grad_norm": 0.3359375,
      "learning_rate": 9.94070351758794e-05,
      "loss": 1.4799,
      "step": 218
    },
    {
      "epoch": 0.01868819934079297,
      "grad_norm": 0.34765625,
      "learning_rate": 9.940201005025126e-05,
      "loss": 1.5227,
      "step": 219
    },
    {
      "epoch": 0.018773533584358234,
      "grad_norm": 0.341796875,
      "learning_rate": 9.939698492462311e-05,
      "loss": 1.436,
      "step": 220
    },
    {
      "epoch": 0.0188588678279235,
      "grad_norm": 0.3125,
      "learning_rate": 9.939195979899499e-05,
      "loss": 1.4322,
      "step": 221
    },
    {
      "epoch": 0.018944202071488763,
      "grad_norm": 0.373046875,
      "learning_rate": 9.938693467336683e-05,
      "loss": 1.5913,
      "step": 222
    },
    {
      "epoch": 0.019029536315054028,
      "grad_norm": 0.373046875,
      "learning_rate": 9.93819095477387e-05,
      "loss": 1.5052,
      "step": 223
    },
    {
      "epoch": 0.019114870558619292,
      "grad_norm": 0.390625,
      "learning_rate": 9.937688442211056e-05,
      "loss": 1.6388,
      "step": 224
    },
    {
      "epoch": 0.019200204802184557,
      "grad_norm": 0.416015625,
      "learning_rate": 9.937185929648242e-05,
      "loss": 1.5977,
      "step": 225
    },
    {
      "epoch": 0.01928553904574982,
      "grad_norm": 0.400390625,
      "learning_rate": 9.936683417085427e-05,
      "loss": 1.4548,
      "step": 226
    },
    {
      "epoch": 0.019370873289315086,
      "grad_norm": 0.369140625,
      "learning_rate": 9.936180904522614e-05,
      "loss": 1.4918,
      "step": 227
    },
    {
      "epoch": 0.01945620753288035,
      "grad_norm": 0.4296875,
      "learning_rate": 9.935678391959799e-05,
      "loss": 1.5212,
      "step": 228
    },
    {
      "epoch": 0.019541541776445615,
      "grad_norm": 0.5234375,
      "learning_rate": 9.935175879396985e-05,
      "loss": 1.5645,
      "step": 229
    },
    {
      "epoch": 0.01962687602001088,
      "grad_norm": 0.37109375,
      "learning_rate": 9.934673366834172e-05,
      "loss": 1.4794,
      "step": 230
    },
    {
      "epoch": 0.019712210263576144,
      "grad_norm": 0.4296875,
      "learning_rate": 9.934170854271358e-05,
      "loss": 1.4586,
      "step": 231
    },
    {
      "epoch": 0.01979754450714141,
      "grad_norm": 0.369140625,
      "learning_rate": 9.933668341708543e-05,
      "loss": 1.5006,
      "step": 232
    },
    {
      "epoch": 0.019882878750706674,
      "grad_norm": 0.388671875,
      "learning_rate": 9.933165829145729e-05,
      "loss": 1.5719,
      "step": 233
    },
    {
      "epoch": 0.019968212994271938,
      "grad_norm": 0.37109375,
      "learning_rate": 9.932663316582915e-05,
      "loss": 1.3318,
      "step": 234
    },
    {
      "epoch": 0.020053547237837203,
      "grad_norm": 0.388671875,
      "learning_rate": 9.932160804020101e-05,
      "loss": 1.5506,
      "step": 235
    },
    {
      "epoch": 0.020138881481402467,
      "grad_norm": 0.357421875,
      "learning_rate": 9.931658291457286e-05,
      "loss": 1.4031,
      "step": 236
    },
    {
      "epoch": 0.020224215724967732,
      "grad_norm": 0.3828125,
      "learning_rate": 9.931155778894474e-05,
      "loss": 1.5384,
      "step": 237
    },
    {
      "epoch": 0.020309549968532997,
      "grad_norm": 0.3515625,
      "learning_rate": 9.930653266331659e-05,
      "loss": 1.3319,
      "step": 238
    },
    {
      "epoch": 0.02039488421209826,
      "grad_norm": 0.39453125,
      "learning_rate": 9.930150753768845e-05,
      "loss": 1.441,
      "step": 239
    },
    {
      "epoch": 0.020480218455663526,
      "grad_norm": 0.353515625,
      "learning_rate": 9.929648241206031e-05,
      "loss": 1.4311,
      "step": 240
    },
    {
      "epoch": 0.02056555269922879,
      "grad_norm": 0.380859375,
      "learning_rate": 9.929145728643217e-05,
      "loss": 1.4405,
      "step": 241
    },
    {
      "epoch": 0.020650886942794055,
      "grad_norm": 0.322265625,
      "learning_rate": 9.928643216080402e-05,
      "loss": 1.3873,
      "step": 242
    },
    {
      "epoch": 0.02073622118635932,
      "grad_norm": 0.380859375,
      "learning_rate": 9.928140703517588e-05,
      "loss": 1.6589,
      "step": 243
    },
    {
      "epoch": 0.020821555429924588,
      "grad_norm": 0.396484375,
      "learning_rate": 9.927638190954774e-05,
      "loss": 1.4386,
      "step": 244
    },
    {
      "epoch": 0.020906889673489852,
      "grad_norm": 0.384765625,
      "learning_rate": 9.927135678391961e-05,
      "loss": 1.4542,
      "step": 245
    },
    {
      "epoch": 0.020992223917055117,
      "grad_norm": 0.36328125,
      "learning_rate": 9.926633165829147e-05,
      "loss": 1.4885,
      "step": 246
    },
    {
      "epoch": 0.02107755816062038,
      "grad_norm": 0.39453125,
      "learning_rate": 9.926130653266332e-05,
      "loss": 1.5343,
      "step": 247
    },
    {
      "epoch": 0.021162892404185646,
      "grad_norm": 0.38671875,
      "learning_rate": 9.925628140703518e-05,
      "loss": 1.4289,
      "step": 248
    },
    {
      "epoch": 0.02124822664775091,
      "grad_norm": 0.3515625,
      "learning_rate": 9.925125628140703e-05,
      "loss": 1.5846,
      "step": 249
    },
    {
      "epoch": 0.021333560891316175,
      "grad_norm": 0.40625,
      "learning_rate": 9.92462311557789e-05,
      "loss": 1.308,
      "step": 250
    },
    {
      "epoch": 0.02141889513488144,
      "grad_norm": 0.34375,
      "learning_rate": 9.924120603015075e-05,
      "loss": 1.6103,
      "step": 251
    },
    {
      "epoch": 0.021504229378446704,
      "grad_norm": 0.365234375,
      "learning_rate": 9.923618090452261e-05,
      "loss": 1.5454,
      "step": 252
    },
    {
      "epoch": 0.02158956362201197,
      "grad_norm": 0.380859375,
      "learning_rate": 9.923115577889448e-05,
      "loss": 1.5327,
      "step": 253
    },
    {
      "epoch": 0.021674897865577233,
      "grad_norm": 0.3828125,
      "learning_rate": 9.922613065326634e-05,
      "loss": 1.4348,
      "step": 254
    },
    {
      "epoch": 0.021760232109142498,
      "grad_norm": 0.341796875,
      "learning_rate": 9.922110552763819e-05,
      "loss": 1.5543,
      "step": 255
    },
    {
      "epoch": 0.021845566352707763,
      "grad_norm": 0.39453125,
      "learning_rate": 9.921608040201006e-05,
      "loss": 1.4348,
      "step": 256
    },
    {
      "epoch": 0.021930900596273027,
      "grad_norm": 0.34375,
      "learning_rate": 9.921105527638191e-05,
      "loss": 1.3951,
      "step": 257
    },
    {
      "epoch": 0.022016234839838292,
      "grad_norm": 0.3671875,
      "learning_rate": 9.920603015075377e-05,
      "loss": 1.3103,
      "step": 258
    },
    {
      "epoch": 0.022101569083403556,
      "grad_norm": 0.353515625,
      "learning_rate": 9.920100502512563e-05,
      "loss": 1.3765,
      "step": 259
    },
    {
      "epoch": 0.02218690332696882,
      "grad_norm": 0.341796875,
      "learning_rate": 9.91959798994975e-05,
      "loss": 1.3495,
      "step": 260
    },
    {
      "epoch": 0.022272237570534086,
      "grad_norm": 0.3515625,
      "learning_rate": 9.919095477386935e-05,
      "loss": 1.3266,
      "step": 261
    },
    {
      "epoch": 0.02235757181409935,
      "grad_norm": 0.30078125,
      "learning_rate": 9.918592964824122e-05,
      "loss": 1.2198,
      "step": 262
    },
    {
      "epoch": 0.022442906057664615,
      "grad_norm": 0.32421875,
      "learning_rate": 9.918090452261307e-05,
      "loss": 1.419,
      "step": 263
    },
    {
      "epoch": 0.02252824030122988,
      "grad_norm": 0.34765625,
      "learning_rate": 9.917587939698493e-05,
      "loss": 1.341,
      "step": 264
    },
    {
      "epoch": 0.022613574544795144,
      "grad_norm": 0.30859375,
      "learning_rate": 9.917085427135678e-05,
      "loss": 1.323,
      "step": 265
    },
    {
      "epoch": 0.02269890878836041,
      "grad_norm": 0.37890625,
      "learning_rate": 9.916582914572866e-05,
      "loss": 1.3533,
      "step": 266
    },
    {
      "epoch": 0.022784243031925673,
      "grad_norm": 0.3515625,
      "learning_rate": 9.91608040201005e-05,
      "loss": 1.3956,
      "step": 267
    },
    {
      "epoch": 0.022869577275490938,
      "grad_norm": 0.41015625,
      "learning_rate": 9.915577889447237e-05,
      "loss": 1.5848,
      "step": 268
    },
    {
      "epoch": 0.022954911519056202,
      "grad_norm": 0.40625,
      "learning_rate": 9.915075376884423e-05,
      "loss": 1.499,
      "step": 269
    },
    {
      "epoch": 0.023040245762621467,
      "grad_norm": 0.3671875,
      "learning_rate": 9.914572864321609e-05,
      "loss": 1.4106,
      "step": 270
    },
    {
      "epoch": 0.02312558000618673,
      "grad_norm": 0.37109375,
      "learning_rate": 9.914070351758794e-05,
      "loss": 1.4622,
      "step": 271
    },
    {
      "epoch": 0.023210914249751996,
      "grad_norm": 0.359375,
      "learning_rate": 9.913567839195981e-05,
      "loss": 1.3194,
      "step": 272
    },
    {
      "epoch": 0.02329624849331726,
      "grad_norm": 0.37890625,
      "learning_rate": 9.913065326633166e-05,
      "loss": 1.432,
      "step": 273
    },
    {
      "epoch": 0.023381582736882525,
      "grad_norm": 0.376953125,
      "learning_rate": 9.912562814070352e-05,
      "loss": 1.4197,
      "step": 274
    },
    {
      "epoch": 0.02346691698044779,
      "grad_norm": 0.345703125,
      "learning_rate": 9.912060301507539e-05,
      "loss": 1.3807,
      "step": 275
    },
    {
      "epoch": 0.023552251224013058,
      "grad_norm": 0.380859375,
      "learning_rate": 9.911557788944725e-05,
      "loss": 1.4024,
      "step": 276
    },
    {
      "epoch": 0.023637585467578322,
      "grad_norm": 0.380859375,
      "learning_rate": 9.91105527638191e-05,
      "loss": 1.4863,
      "step": 277
    },
    {
      "epoch": 0.023722919711143587,
      "grad_norm": 0.375,
      "learning_rate": 9.910552763819096e-05,
      "loss": 1.314,
      "step": 278
    },
    {
      "epoch": 0.02380825395470885,
      "grad_norm": 0.375,
      "learning_rate": 9.910050251256282e-05,
      "loss": 1.4456,
      "step": 279
    },
    {
      "epoch": 0.023893588198274116,
      "grad_norm": 0.333984375,
      "learning_rate": 9.909547738693468e-05,
      "loss": 1.5877,
      "step": 280
    },
    {
      "epoch": 0.02397892244183938,
      "grad_norm": 0.400390625,
      "learning_rate": 9.909045226130653e-05,
      "loss": 1.5869,
      "step": 281
    },
    {
      "epoch": 0.024064256685404645,
      "grad_norm": 0.357421875,
      "learning_rate": 9.90854271356784e-05,
      "loss": 1.5391,
      "step": 282
    },
    {
      "epoch": 0.02414959092896991,
      "grad_norm": 0.322265625,
      "learning_rate": 9.908040201005026e-05,
      "loss": 1.3901,
      "step": 283
    },
    {
      "epoch": 0.024234925172535175,
      "grad_norm": 0.345703125,
      "learning_rate": 9.90753768844221e-05,
      "loss": 1.4891,
      "step": 284
    },
    {
      "epoch": 0.02432025941610044,
      "grad_norm": 0.306640625,
      "learning_rate": 9.907035175879398e-05,
      "loss": 1.3531,
      "step": 285
    },
    {
      "epoch": 0.024405593659665704,
      "grad_norm": 0.421875,
      "learning_rate": 9.906532663316583e-05,
      "loss": 1.3986,
      "step": 286
    },
    {
      "epoch": 0.02449092790323097,
      "grad_norm": 0.34765625,
      "learning_rate": 9.906030150753769e-05,
      "loss": 1.5053,
      "step": 287
    },
    {
      "epoch": 0.024576262146796233,
      "grad_norm": 0.361328125,
      "learning_rate": 9.905527638190955e-05,
      "loss": 1.4861,
      "step": 288
    },
    {
      "epoch": 0.024661596390361498,
      "grad_norm": 0.361328125,
      "learning_rate": 9.905025125628141e-05,
      "loss": 1.3849,
      "step": 289
    },
    {
      "epoch": 0.024746930633926762,
      "grad_norm": 0.357421875,
      "learning_rate": 9.904522613065326e-05,
      "loss": 1.4285,
      "step": 290
    },
    {
      "epoch": 0.024832264877492027,
      "grad_norm": 0.341796875,
      "learning_rate": 9.904020100502514e-05,
      "loss": 1.3654,
      "step": 291
    },
    {
      "epoch": 0.02491759912105729,
      "grad_norm": 0.357421875,
      "learning_rate": 9.903517587939699e-05,
      "loss": 1.453,
      "step": 292
    },
    {
      "epoch": 0.025002933364622556,
      "grad_norm": 0.3515625,
      "learning_rate": 9.903015075376885e-05,
      "loss": 1.4705,
      "step": 293
    },
    {
      "epoch": 0.02508826760818782,
      "grad_norm": 0.353515625,
      "learning_rate": 9.902512562814071e-05,
      "loss": 1.4251,
      "step": 294
    },
    {
      "epoch": 0.025173601851753085,
      "grad_norm": 0.310546875,
      "learning_rate": 9.902010050251257e-05,
      "loss": 1.2653,
      "step": 295
    },
    {
      "epoch": 0.02525893609531835,
      "grad_norm": 0.34765625,
      "learning_rate": 9.901507537688442e-05,
      "loss": 1.4037,
      "step": 296
    },
    {
      "epoch": 0.025344270338883614,
      "grad_norm": 0.345703125,
      "learning_rate": 9.901005025125628e-05,
      "loss": 1.522,
      "step": 297
    },
    {
      "epoch": 0.02542960458244888,
      "grad_norm": 0.369140625,
      "learning_rate": 9.900502512562815e-05,
      "loss": 1.2837,
      "step": 298
    },
    {
      "epoch": 0.025514938826014143,
      "grad_norm": 0.33984375,
      "learning_rate": 9.900000000000001e-05,
      "loss": 1.3896,
      "step": 299
    },
    {
      "epoch": 0.025600273069579408,
      "grad_norm": 0.3984375,
      "learning_rate": 9.899497487437186e-05,
      "loss": 1.5719,
      "step": 300
    },
    {
      "epoch": 0.025685607313144673,
      "grad_norm": 0.4296875,
      "learning_rate": 9.898994974874373e-05,
      "loss": 1.6433,
      "step": 301
    },
    {
      "epoch": 0.025770941556709937,
      "grad_norm": 0.3984375,
      "learning_rate": 9.898492462311558e-05,
      "loss": 1.4654,
      "step": 302
    },
    {
      "epoch": 0.025856275800275202,
      "grad_norm": 0.34375,
      "learning_rate": 9.897989949748744e-05,
      "loss": 1.5617,
      "step": 303
    },
    {
      "epoch": 0.025941610043840466,
      "grad_norm": 0.3671875,
      "learning_rate": 9.89748743718593e-05,
      "loss": 1.3666,
      "step": 304
    },
    {
      "epoch": 0.02602694428740573,
      "grad_norm": 0.423828125,
      "learning_rate": 9.896984924623117e-05,
      "loss": 1.3791,
      "step": 305
    },
    {
      "epoch": 0.026112278530970996,
      "grad_norm": 0.326171875,
      "learning_rate": 9.896482412060301e-05,
      "loss": 1.9473,
      "step": 306
    },
    {
      "epoch": 0.02619761277453626,
      "grad_norm": 0.400390625,
      "learning_rate": 9.895979899497489e-05,
      "loss": 1.3326,
      "step": 307
    },
    {
      "epoch": 0.026282947018101525,
      "grad_norm": 0.341796875,
      "learning_rate": 9.895477386934674e-05,
      "loss": 1.4549,
      "step": 308
    },
    {
      "epoch": 0.026368281261666793,
      "grad_norm": 0.357421875,
      "learning_rate": 9.89497487437186e-05,
      "loss": 1.3408,
      "step": 309
    },
    {
      "epoch": 0.026453615505232057,
      "grad_norm": 0.375,
      "learning_rate": 9.894472361809046e-05,
      "loss": 1.4272,
      "step": 310
    },
    {
      "epoch": 0.026538949748797322,
      "grad_norm": 0.322265625,
      "learning_rate": 9.893969849246232e-05,
      "loss": 1.3771,
      "step": 311
    },
    {
      "epoch": 0.026624283992362587,
      "grad_norm": 0.384765625,
      "learning_rate": 9.893467336683417e-05,
      "loss": 1.406,
      "step": 312
    },
    {
      "epoch": 0.02670961823592785,
      "grad_norm": 0.32421875,
      "learning_rate": 9.892964824120604e-05,
      "loss": 1.2111,
      "step": 313
    },
    {
      "epoch": 0.026794952479493116,
      "grad_norm": 0.50390625,
      "learning_rate": 9.89246231155779e-05,
      "loss": 1.44,
      "step": 314
    },
    {
      "epoch": 0.02688028672305838,
      "grad_norm": 0.34765625,
      "learning_rate": 9.891959798994975e-05,
      "loss": 1.3375,
      "step": 315
    },
    {
      "epoch": 0.026965620966623645,
      "grad_norm": 0.4453125,
      "learning_rate": 9.891457286432161e-05,
      "loss": 1.3964,
      "step": 316
    },
    {
      "epoch": 0.02705095521018891,
      "grad_norm": 0.40234375,
      "learning_rate": 9.890954773869347e-05,
      "loss": 1.2975,
      "step": 317
    },
    {
      "epoch": 0.027136289453754174,
      "grad_norm": 0.43359375,
      "learning_rate": 9.890452261306533e-05,
      "loss": 1.5118,
      "step": 318
    },
    {
      "epoch": 0.02722162369731944,
      "grad_norm": 0.322265625,
      "learning_rate": 9.889949748743718e-05,
      "loss": 1.4032,
      "step": 319
    },
    {
      "epoch": 0.027306957940884703,
      "grad_norm": 0.330078125,
      "learning_rate": 9.889447236180906e-05,
      "loss": 1.355,
      "step": 320
    },
    {
      "epoch": 0.027392292184449968,
      "grad_norm": 0.328125,
      "learning_rate": 9.88894472361809e-05,
      "loss": 1.4382,
      "step": 321
    },
    {
      "epoch": 0.027477626428015232,
      "grad_norm": 0.322265625,
      "learning_rate": 9.888442211055277e-05,
      "loss": 1.3795,
      "step": 322
    },
    {
      "epoch": 0.027562960671580497,
      "grad_norm": 0.37109375,
      "learning_rate": 9.887939698492463e-05,
      "loss": 1.4173,
      "step": 323
    },
    {
      "epoch": 0.02764829491514576,
      "grad_norm": 0.37109375,
      "learning_rate": 9.887437185929649e-05,
      "loss": 1.6702,
      "step": 324
    },
    {
      "epoch": 0.027733629158711026,
      "grad_norm": 0.341796875,
      "learning_rate": 9.886934673366834e-05,
      "loss": 1.5437,
      "step": 325
    },
    {
      "epoch": 0.02781896340227629,
      "grad_norm": 0.357421875,
      "learning_rate": 9.886432160804021e-05,
      "loss": 1.5578,
      "step": 326
    },
    {
      "epoch": 0.027904297645841555,
      "grad_norm": 0.45703125,
      "learning_rate": 9.885929648241206e-05,
      "loss": 1.5042,
      "step": 327
    },
    {
      "epoch": 0.02798963188940682,
      "grad_norm": 0.349609375,
      "learning_rate": 9.885427135678393e-05,
      "loss": 1.5055,
      "step": 328
    },
    {
      "epoch": 0.028074966132972085,
      "grad_norm": 0.34765625,
      "learning_rate": 9.884924623115577e-05,
      "loss": 1.4351,
      "step": 329
    },
    {
      "epoch": 0.02816030037653735,
      "grad_norm": 0.326171875,
      "learning_rate": 9.884422110552765e-05,
      "loss": 1.2851,
      "step": 330
    },
    {
      "epoch": 0.028245634620102614,
      "grad_norm": 0.39453125,
      "learning_rate": 9.88391959798995e-05,
      "loss": 1.5372,
      "step": 331
    },
    {
      "epoch": 0.02833096886366788,
      "grad_norm": 0.35546875,
      "learning_rate": 9.883417085427136e-05,
      "loss": 1.4504,
      "step": 332
    },
    {
      "epoch": 0.028416303107233143,
      "grad_norm": 0.361328125,
      "learning_rate": 9.882914572864322e-05,
      "loss": 1.4873,
      "step": 333
    },
    {
      "epoch": 0.028501637350798407,
      "grad_norm": 0.34765625,
      "learning_rate": 9.882412060301508e-05,
      "loss": 1.5475,
      "step": 334
    },
    {
      "epoch": 0.028586971594363672,
      "grad_norm": 0.349609375,
      "learning_rate": 9.881909547738693e-05,
      "loss": 1.3558,
      "step": 335
    },
    {
      "epoch": 0.028672305837928937,
      "grad_norm": 0.33203125,
      "learning_rate": 9.881407035175881e-05,
      "loss": 1.441,
      "step": 336
    },
    {
      "epoch": 0.0287576400814942,
      "grad_norm": 0.337890625,
      "learning_rate": 9.880904522613066e-05,
      "loss": 1.4192,
      "step": 337
    },
    {
      "epoch": 0.028842974325059466,
      "grad_norm": 0.37109375,
      "learning_rate": 9.880402010050252e-05,
      "loss": 1.4601,
      "step": 338
    },
    {
      "epoch": 0.02892830856862473,
      "grad_norm": 0.37109375,
      "learning_rate": 9.879899497487438e-05,
      "loss": 1.4178,
      "step": 339
    },
    {
      "epoch": 0.029013642812189995,
      "grad_norm": 0.318359375,
      "learning_rate": 9.879396984924624e-05,
      "loss": 1.416,
      "step": 340
    },
    {
      "epoch": 0.029098977055755263,
      "grad_norm": 0.50390625,
      "learning_rate": 9.878894472361809e-05,
      "loss": 1.3266,
      "step": 341
    },
    {
      "epoch": 0.029184311299320528,
      "grad_norm": 0.373046875,
      "learning_rate": 9.878391959798995e-05,
      "loss": 1.4582,
      "step": 342
    },
    {
      "epoch": 0.029269645542885792,
      "grad_norm": 0.34375,
      "learning_rate": 9.877889447236182e-05,
      "loss": 1.461,
      "step": 343
    },
    {
      "epoch": 0.029354979786451057,
      "grad_norm": 0.3359375,
      "learning_rate": 9.877386934673368e-05,
      "loss": 1.4722,
      "step": 344
    },
    {
      "epoch": 0.02944031403001632,
      "grad_norm": 0.33203125,
      "learning_rate": 9.876884422110553e-05,
      "loss": 1.2707,
      "step": 345
    },
    {
      "epoch": 0.029525648273581586,
      "grad_norm": 0.32421875,
      "learning_rate": 9.87638190954774e-05,
      "loss": 1.2089,
      "step": 346
    },
    {
      "epoch": 0.02961098251714685,
      "grad_norm": 0.384765625,
      "learning_rate": 9.875879396984925e-05,
      "loss": 1.5013,
      "step": 347
    },
    {
      "epoch": 0.029696316760712115,
      "grad_norm": 0.34375,
      "learning_rate": 9.875376884422111e-05,
      "loss": 1.46,
      "step": 348
    },
    {
      "epoch": 0.02978165100427738,
      "grad_norm": 0.361328125,
      "learning_rate": 9.874874371859297e-05,
      "loss": 1.527,
      "step": 349
    },
    {
      "epoch": 0.029866985247842644,
      "grad_norm": 0.35546875,
      "learning_rate": 9.874371859296482e-05,
      "loss": 1.4721,
      "step": 350
    },
    {
      "epoch": 0.02995231949140791,
      "grad_norm": 0.33984375,
      "learning_rate": 9.873869346733668e-05,
      "loss": 1.4308,
      "step": 351
    },
    {
      "epoch": 0.030037653734973174,
      "grad_norm": 0.34375,
      "learning_rate": 9.873366834170855e-05,
      "loss": 1.423,
      "step": 352
    },
    {
      "epoch": 0.030122987978538438,
      "grad_norm": 0.361328125,
      "learning_rate": 9.872864321608041e-05,
      "loss": 1.3206,
      "step": 353
    },
    {
      "epoch": 0.030208322222103703,
      "grad_norm": 0.3671875,
      "learning_rate": 9.872361809045226e-05,
      "loss": 1.5606,
      "step": 354
    },
    {
      "epoch": 0.030293656465668967,
      "grad_norm": 0.373046875,
      "learning_rate": 9.871859296482413e-05,
      "loss": 1.4185,
      "step": 355
    },
    {
      "epoch": 0.030378990709234232,
      "grad_norm": 0.357421875,
      "learning_rate": 9.871356783919598e-05,
      "loss": 1.3768,
      "step": 356
    },
    {
      "epoch": 0.030464324952799497,
      "grad_norm": 0.341796875,
      "learning_rate": 9.870854271356784e-05,
      "loss": 1.4831,
      "step": 357
    },
    {
      "epoch": 0.03054965919636476,
      "grad_norm": 0.318359375,
      "learning_rate": 9.870351758793969e-05,
      "loss": 1.4651,
      "step": 358
    },
    {
      "epoch": 0.030634993439930026,
      "grad_norm": 0.328125,
      "learning_rate": 9.869849246231157e-05,
      "loss": 1.3874,
      "step": 359
    },
    {
      "epoch": 0.03072032768349529,
      "grad_norm": 0.322265625,
      "learning_rate": 9.869346733668342e-05,
      "loss": 1.5274,
      "step": 360
    },
    {
      "epoch": 0.030805661927060555,
      "grad_norm": 0.337890625,
      "learning_rate": 9.868844221105528e-05,
      "loss": 1.462,
      "step": 361
    },
    {
      "epoch": 0.03089099617062582,
      "grad_norm": 0.37890625,
      "learning_rate": 9.868341708542714e-05,
      "loss": 1.5312,
      "step": 362
    },
    {
      "epoch": 0.030976330414191084,
      "grad_norm": 0.375,
      "learning_rate": 9.8678391959799e-05,
      "loss": 1.44,
      "step": 363
    },
    {
      "epoch": 0.03106166465775635,
      "grad_norm": 0.33203125,
      "learning_rate": 9.867336683417085e-05,
      "loss": 1.4294,
      "step": 364
    },
    {
      "epoch": 0.031146998901321613,
      "grad_norm": 0.390625,
      "learning_rate": 9.866834170854273e-05,
      "loss": 1.4829,
      "step": 365
    },
    {
      "epoch": 0.031232333144886878,
      "grad_norm": 0.37109375,
      "learning_rate": 9.866331658291457e-05,
      "loss": 1.5956,
      "step": 366
    },
    {
      "epoch": 0.031317667388452146,
      "grad_norm": 0.333984375,
      "learning_rate": 9.865829145728644e-05,
      "loss": 1.4825,
      "step": 367
    },
    {
      "epoch": 0.03140300163201741,
      "grad_norm": 0.369140625,
      "learning_rate": 9.86532663316583e-05,
      "loss": 1.4988,
      "step": 368
    },
    {
      "epoch": 0.031488335875582675,
      "grad_norm": 0.3046875,
      "learning_rate": 9.864824120603016e-05,
      "loss": 1.3063,
      "step": 369
    },
    {
      "epoch": 0.03157367011914794,
      "grad_norm": 0.3125,
      "learning_rate": 9.864321608040201e-05,
      "loss": 1.267,
      "step": 370
    },
    {
      "epoch": 0.031659004362713204,
      "grad_norm": 0.333984375,
      "learning_rate": 9.863819095477388e-05,
      "loss": 1.2518,
      "step": 371
    },
    {
      "epoch": 0.03174433860627847,
      "grad_norm": 0.337890625,
      "learning_rate": 9.863316582914573e-05,
      "loss": 1.3486,
      "step": 372
    },
    {
      "epoch": 0.03182967284984373,
      "grad_norm": 0.35546875,
      "learning_rate": 9.86281407035176e-05,
      "loss": 1.4999,
      "step": 373
    },
    {
      "epoch": 0.031915007093409,
      "grad_norm": 0.3671875,
      "learning_rate": 9.862311557788944e-05,
      "loss": 1.4815,
      "step": 374
    },
    {
      "epoch": 0.03200034133697426,
      "grad_norm": 0.35546875,
      "learning_rate": 9.861809045226132e-05,
      "loss": 1.5226,
      "step": 375
    },
    {
      "epoch": 0.03208567558053953,
      "grad_norm": 0.359375,
      "learning_rate": 9.861306532663317e-05,
      "loss": 1.4169,
      "step": 376
    },
    {
      "epoch": 0.03217100982410479,
      "grad_norm": 0.3359375,
      "learning_rate": 9.860804020100503e-05,
      "loss": 1.2659,
      "step": 377
    },
    {
      "epoch": 0.032256344067670056,
      "grad_norm": 0.328125,
      "learning_rate": 9.860301507537689e-05,
      "loss": 1.3325,
      "step": 378
    },
    {
      "epoch": 0.03234167831123532,
      "grad_norm": 0.3359375,
      "learning_rate": 9.859798994974875e-05,
      "loss": 1.2241,
      "step": 379
    },
    {
      "epoch": 0.032427012554800586,
      "grad_norm": 0.390625,
      "learning_rate": 9.85929648241206e-05,
      "loss": 1.6017,
      "step": 380
    },
    {
      "epoch": 0.03251234679836585,
      "grad_norm": 0.359375,
      "learning_rate": 9.858793969849246e-05,
      "loss": 1.436,
      "step": 381
    },
    {
      "epoch": 0.032597681041931115,
      "grad_norm": 0.337890625,
      "learning_rate": 9.858291457286433e-05,
      "loss": 1.4302,
      "step": 382
    },
    {
      "epoch": 0.03268301528549638,
      "grad_norm": 0.353515625,
      "learning_rate": 9.857788944723619e-05,
      "loss": 1.3834,
      "step": 383
    },
    {
      "epoch": 0.032768349529061644,
      "grad_norm": 0.3359375,
      "learning_rate": 9.857286432160805e-05,
      "loss": 1.3019,
      "step": 384
    },
    {
      "epoch": 0.03285368377262691,
      "grad_norm": 0.380859375,
      "learning_rate": 9.85678391959799e-05,
      "loss": 1.487,
      "step": 385
    },
    {
      "epoch": 0.03293901801619217,
      "grad_norm": 0.318359375,
      "learning_rate": 9.856281407035176e-05,
      "loss": 1.3938,
      "step": 386
    },
    {
      "epoch": 0.03302435225975744,
      "grad_norm": 0.43359375,
      "learning_rate": 9.855778894472362e-05,
      "loss": 1.488,
      "step": 387
    },
    {
      "epoch": 0.0331096865033227,
      "grad_norm": 0.337890625,
      "learning_rate": 9.855276381909548e-05,
      "loss": 1.3579,
      "step": 388
    },
    {
      "epoch": 0.03319502074688797,
      "grad_norm": 0.326171875,
      "learning_rate": 9.854773869346733e-05,
      "loss": 1.3319,
      "step": 389
    },
    {
      "epoch": 0.03328035499045323,
      "grad_norm": 0.345703125,
      "learning_rate": 9.85427135678392e-05,
      "loss": 1.3677,
      "step": 390
    },
    {
      "epoch": 0.033365689234018496,
      "grad_norm": 0.380859375,
      "learning_rate": 9.853768844221106e-05,
      "loss": 1.4472,
      "step": 391
    },
    {
      "epoch": 0.03345102347758376,
      "grad_norm": 0.326171875,
|
"learning_rate": 9.853266331658292e-05, |
|
"loss": 1.388, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.033536357721149025, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.852763819095477e-05, |
|
"loss": 1.2981, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.03362169196471429, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.852261306532664e-05, |
|
"loss": 1.5465, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.033707026208279554, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.851758793969849e-05, |
|
"loss": 1.3837, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.03379236045184482, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 9.851256281407035e-05, |
|
"loss": 1.5756, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.033877694695410084, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.850753768844222e-05, |
|
"loss": 1.3774, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.03396302893897535, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.850251256281408e-05, |
|
"loss": 1.3807, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.03404836318254061, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 9.849748743718593e-05, |
|
"loss": 1.4118, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.03413369742610588, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.84924623115578e-05, |
|
"loss": 1.5277, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.03421903166967114, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 9.848743718592965e-05, |
|
"loss": 1.5215, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.034304365913236406, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.848241206030151e-05, |
|
"loss": 1.5074, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.03438970015680167, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 9.847738693467337e-05, |
|
"loss": 1.3304, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.034475034400366936, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.847236180904524e-05, |
|
"loss": 1.3198, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.0345603686439322, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 9.846733668341709e-05, |
|
"loss": 1.3872, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.034645702887497465, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 9.846231155778895e-05, |
|
"loss": 1.4763, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.03473103713106273, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.845728643216081e-05, |
|
"loss": 1.366, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.034816371374627994, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 9.845226130653267e-05, |
|
"loss": 1.3394, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.03490170561819326, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.844723618090452e-05, |
|
"loss": 1.4859, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.03498703986175852, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 9.84422110552764e-05, |
|
"loss": 1.4893, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.03507237410532379, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 9.843718592964824e-05, |
|
"loss": 1.4897, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.03515770834888905, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 9.84321608040201e-05, |
|
"loss": 1.4898, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.03524304259245432, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.842713567839197e-05, |
|
"loss": 1.2922, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.03532837683601958, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.842211055276383e-05, |
|
"loss": 1.4774, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.035413711079584846, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 9.841708542713568e-05, |
|
"loss": 1.3156, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.03549904532315011, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.841206030150754e-05, |
|
"loss": 1.219, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.035584379566715375, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.84070351758794e-05, |
|
"loss": 1.346, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.03566971381028064, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 9.840201005025126e-05, |
|
"loss": 1.3559, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.035755048053845905, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 9.839698492462313e-05, |
|
"loss": 1.3243, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.03584038229741117, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.839195979899497e-05, |
|
"loss": 1.492, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.035925716540976434, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.838693467336684e-05, |
|
"loss": 1.324, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.036011050784541705, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 9.83819095477387e-05, |
|
"loss": 1.4559, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.03609638502810697, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.837688442211056e-05, |
|
"loss": 1.2916, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.036181719271672234, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 9.837185929648241e-05, |
|
"loss": 1.5289, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.0362670535152375, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.836683417085427e-05, |
|
"loss": 1.4793, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.036352387758802764, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.836180904522613e-05, |
|
"loss": 1.3065, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.03643772200236803, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 9.8356783919598e-05, |
|
"loss": 1.5644, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.03652305624593329, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 9.835175879396984e-05, |
|
"loss": 1.4515, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.03660839048949856, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 9.834673366834172e-05, |
|
"loss": 1.3688, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.03669372473306382, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.834170854271357e-05, |
|
"loss": 1.4453, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.036779058976629087, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 9.833668341708543e-05, |
|
"loss": 1.4015, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.03686439322019435, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.833165829145729e-05, |
|
"loss": 1.3695, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.036949727463759616, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.832663316582915e-05, |
|
"loss": 1.2991, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.03703506170732488, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 9.8321608040201e-05, |
|
"loss": 1.4742, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.037120395950890145, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.831658291457288e-05, |
|
"loss": 1.3426, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.03720573019445541, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 9.831155778894473e-05, |
|
"loss": 1.3282, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.037291064438020674, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.830653266331659e-05, |
|
"loss": 1.4286, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.03737639868158594, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.830150753768844e-05, |
|
"loss": 1.5547, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.0374617329251512, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.829648241206031e-05, |
|
"loss": 1.3978, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.03754706716871647, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.829145728643216e-05, |
|
"loss": 1.4236, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.03763240141228173, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.828643216080402e-05, |
|
"loss": 1.3891, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.037717735655847, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 9.828140703517589e-05, |
|
"loss": 1.5679, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.03780306989941226, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 9.827638190954775e-05, |
|
"loss": 1.377, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.037888404142977526, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.82713567839196e-05, |
|
"loss": 1.4544, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.03797373838654279, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.826633165829147e-05, |
|
"loss": 1.4464, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.038059072630108055, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 9.826130653266332e-05, |
|
"loss": 1.3857, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.03814440687367332, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.825628140703518e-05, |
|
"loss": 1.4554, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.038229741117238585, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 9.825125628140704e-05, |
|
"loss": 1.4405, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.03831507536080385, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.82462311557789e-05, |
|
"loss": 1.4376, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.038400409604369114, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 9.824120603015075e-05, |
|
"loss": 1.5418, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.03848574384793438, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.823618090452262e-05, |
|
"loss": 1.3599, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.03857107809149964, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 9.823115577889448e-05, |
|
"loss": 1.2787, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.03865641233506491, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.822613065326634e-05, |
|
"loss": 1.3217, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.03874174657863017, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 9.822110552763819e-05, |
|
"loss": 1.2746, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.03882708082219544, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 9.821608040201005e-05, |
|
"loss": 1.3369, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.0389124150657607, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.821105527638191e-05, |
|
"loss": 1.4238, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.038997749309325966, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 9.820603015075378e-05, |
|
"loss": 1.4062, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.03908308355289123, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 9.820100502512564e-05, |
|
"loss": 1.5235, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.039168417796456495, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.819597989949749e-05, |
|
"loss": 1.411, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.03925375204002176, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 9.819095477386935e-05, |
|
"loss": 1.3977, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.039339086283587024, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.818592964824121e-05, |
|
"loss": 1.5666, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.03942442052715229, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.818090452261307e-05, |
|
"loss": 1.4605, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.03950975477071755, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 9.817587939698492e-05, |
|
"loss": 1.4669, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.03959508901428282, |
|
"grad_norm": 0.2578125, |
|
"learning_rate": 9.81708542713568e-05, |
|
"loss": 2.2431, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.03968042325784808, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 9.816582914572864e-05, |
|
"loss": 1.3993, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.03976575750141335, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.81608040201005e-05, |
|
"loss": 1.3912, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.03985109174497861, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.815577889447236e-05, |
|
"loss": 1.3504, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.039936425988543876, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.815075376884423e-05, |
|
"loss": 1.2783, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.04002176023210914, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 9.814572864321608e-05, |
|
"loss": 1.4368, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.040107094475674405, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.814070351758794e-05, |
|
"loss": 1.2483, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.04019242871923967, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 9.81356783919598e-05, |
|
"loss": 1.2745, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.040277762962804935, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 9.813065326633167e-05, |
|
"loss": 1.1939, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.0403630972063702, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.812562814070351e-05, |
|
"loss": 1.3728, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.040448431449935464, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.812060301507539e-05, |
|
"loss": 1.4527, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.04053376569350073, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.811557788944724e-05, |
|
"loss": 1.5409, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.04061909993706599, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.81105527638191e-05, |
|
"loss": 1.4426, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.04070443418063126, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.810552763819096e-05, |
|
"loss": 1.3423, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.04078976842419652, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 9.810050251256282e-05, |
|
"loss": 1.4133, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.04087510266776179, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 9.809547738693467e-05, |
|
"loss": 1.3434, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.04096043691132705, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 9.809045226130655e-05, |
|
"loss": 1.5089, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.041045771154892316, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.80854271356784e-05, |
|
"loss": 1.185, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.04113110539845758, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 9.808040201005026e-05, |
|
"loss": 1.4114, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.041216439642022845, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 9.807537688442211e-05, |
|
"loss": 1.2592, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.04130177388558811, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 9.807035175879398e-05, |
|
"loss": 1.3393, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.041387108129153374, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.806532663316583e-05, |
|
"loss": 1.5252, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.04147244237271864, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 9.806030150753769e-05, |
|
"loss": 1.5749, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.04155777661628391, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 9.805527638190956e-05, |
|
"loss": 1.3505, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.041643110859849175, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 9.805025125628142e-05, |
|
"loss": 1.3747, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.04172844510341444, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 9.804522613065327e-05, |
|
"loss": 1.393, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.041813779346979704, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 9.804020100502513e-05, |
|
"loss": 1.3142, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.04189911359054497, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.803517587939699e-05, |
|
"loss": 1.2547, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.04198444783411023, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.803015075376885e-05, |
|
"loss": 1.3805, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.0420697820776755, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 9.802512562814071e-05, |
|
"loss": 1.4887, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.04215511632124076, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.802010050251256e-05, |
|
"loss": 1.5492, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.04224045056480603, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 9.801507537688442e-05, |
|
"loss": 1.4367, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.04232578480837129, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.801005025125629e-05, |
|
"loss": 1.3598, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.042411119051936556, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 9.800502512562815e-05, |
|
"loss": 1.5329, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.04249645329550182, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.3892, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.042581787539067086, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 9.799497487437186e-05, |
|
"loss": 1.4771, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.04266712178263235, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.798994974874372e-05, |
|
"loss": 1.4531, |
|
"step": 500 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 20000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.29157249892352e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}