|
{ |
|
"best_metric": 0.6085956692695618, |
|
"best_model_checkpoint": "outputs/checkpoint-544", |
|
"epoch": 1.0055452865064696, |
|
"eval_steps": 136, |
|
"global_step": 544, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0018484288354898336, |
|
"grad_norm": 0.466289758682251, |
|
"learning_rate": 0.001, |
|
"loss": 1.4884, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0036968576709796672, |
|
"grad_norm": 0.15260683000087738, |
|
"learning_rate": 0.001, |
|
"loss": 1.2641, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005545286506469501, |
|
"grad_norm": 2.4310109615325928, |
|
"learning_rate": 0.001, |
|
"loss": 0.8826, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0073937153419593345, |
|
"grad_norm": 0.3376069664955139, |
|
"learning_rate": 0.001, |
|
"loss": 0.9718, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009242144177449169, |
|
"grad_norm": 0.5089592337608337, |
|
"learning_rate": 0.001, |
|
"loss": 1.0333, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.011090573012939002, |
|
"grad_norm": 0.7866702079772949, |
|
"learning_rate": 0.001, |
|
"loss": 0.7162, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012939001848428836, |
|
"grad_norm": 0.2517491579055786, |
|
"learning_rate": 0.001, |
|
"loss": 0.7631, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014787430683918669, |
|
"grad_norm": 0.306551992893219, |
|
"learning_rate": 0.001, |
|
"loss": 0.6545, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0166358595194085, |
|
"grad_norm": 0.32659947872161865, |
|
"learning_rate": 0.001, |
|
"loss": 0.5165, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.018484288354898338, |
|
"grad_norm": 0.30095258355140686, |
|
"learning_rate": 0.001, |
|
"loss": 0.6372, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02033271719038817, |
|
"grad_norm": 0.33985891938209534, |
|
"learning_rate": 0.001, |
|
"loss": 0.8448, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.022181146025878003, |
|
"grad_norm": 0.42997440695762634, |
|
"learning_rate": 0.001, |
|
"loss": 0.8519, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.024029574861367836, |
|
"grad_norm": 0.4363529682159424, |
|
"learning_rate": 0.001, |
|
"loss": 0.7915, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.025878003696857672, |
|
"grad_norm": 0.2837740182876587, |
|
"learning_rate": 0.001, |
|
"loss": 0.5573, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.027726432532347505, |
|
"grad_norm": 0.33749231696128845, |
|
"learning_rate": 0.001, |
|
"loss": 0.6843, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.029574861367837338, |
|
"grad_norm": 0.6295365691184998, |
|
"learning_rate": 0.001, |
|
"loss": 0.6791, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.031423290203327174, |
|
"grad_norm": 0.2831462323665619, |
|
"learning_rate": 0.001, |
|
"loss": 0.6832, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.033271719038817, |
|
"grad_norm": 0.5176781415939331, |
|
"learning_rate": 0.001, |
|
"loss": 0.4944, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03512014787430684, |
|
"grad_norm": 0.5044294595718384, |
|
"learning_rate": 0.001, |
|
"loss": 0.8668, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.036968576709796676, |
|
"grad_norm": 0.27560847997665405, |
|
"learning_rate": 0.001, |
|
"loss": 0.697, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.038817005545286505, |
|
"grad_norm": 2.1971335411071777, |
|
"learning_rate": 0.001, |
|
"loss": 0.6876, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04066543438077634, |
|
"grad_norm": 0.4692804217338562, |
|
"learning_rate": 0.001, |
|
"loss": 0.6055, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04251386321626617, |
|
"grad_norm": 1.2793192863464355, |
|
"learning_rate": 0.001, |
|
"loss": 0.8302, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04436229205175601, |
|
"grad_norm": 0.7404688000679016, |
|
"learning_rate": 0.001, |
|
"loss": 0.4546, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04621072088724584, |
|
"grad_norm": 0.2830832600593567, |
|
"learning_rate": 0.001, |
|
"loss": 0.6068, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04805914972273567, |
|
"grad_norm": 0.3706754744052887, |
|
"learning_rate": 0.001, |
|
"loss": 0.5629, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04990757855822551, |
|
"grad_norm": 0.5734543800354004, |
|
"learning_rate": 0.001, |
|
"loss": 0.7772, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.051756007393715345, |
|
"grad_norm": 0.4173612594604492, |
|
"learning_rate": 0.001, |
|
"loss": 0.7023, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.053604436229205174, |
|
"grad_norm": 0.35146352648735046, |
|
"learning_rate": 0.001, |
|
"loss": 0.7729, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05545286506469501, |
|
"grad_norm": 0.3884046673774719, |
|
"learning_rate": 0.001, |
|
"loss": 0.7233, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05730129390018484, |
|
"grad_norm": 0.34868955612182617, |
|
"learning_rate": 0.001, |
|
"loss": 0.5834, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.059149722735674676, |
|
"grad_norm": 1.9348665475845337, |
|
"learning_rate": 0.001, |
|
"loss": 0.6, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06099815157116451, |
|
"grad_norm": 0.49200278520584106, |
|
"learning_rate": 0.001, |
|
"loss": 0.4783, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06284658040665435, |
|
"grad_norm": 0.4063400626182556, |
|
"learning_rate": 0.001, |
|
"loss": 0.5797, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06469500924214418, |
|
"grad_norm": 0.4601685702800751, |
|
"learning_rate": 0.001, |
|
"loss": 0.5789, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.066543438077634, |
|
"grad_norm": 0.5618835687637329, |
|
"learning_rate": 0.001, |
|
"loss": 0.7446, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06839186691312385, |
|
"grad_norm": 0.5381736755371094, |
|
"learning_rate": 0.001, |
|
"loss": 1.1037, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07024029574861368, |
|
"grad_norm": 0.579923689365387, |
|
"learning_rate": 0.001, |
|
"loss": 0.5367, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07208872458410351, |
|
"grad_norm": 0.4076865613460541, |
|
"learning_rate": 0.001, |
|
"loss": 0.6835, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07393715341959335, |
|
"grad_norm": 0.3602221608161926, |
|
"learning_rate": 0.001, |
|
"loss": 0.5843, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07578558225508318, |
|
"grad_norm": 0.4595842957496643, |
|
"learning_rate": 0.001, |
|
"loss": 0.8391, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07763401109057301, |
|
"grad_norm": 1.456554651260376, |
|
"learning_rate": 0.001, |
|
"loss": 0.8826, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07948243992606285, |
|
"grad_norm": 0.40481042861938477, |
|
"learning_rate": 0.001, |
|
"loss": 0.6189, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08133086876155268, |
|
"grad_norm": 2.4103527069091797, |
|
"learning_rate": 0.001, |
|
"loss": 0.7887, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08317929759704251, |
|
"grad_norm": 0.42522993683815, |
|
"learning_rate": 0.001, |
|
"loss": 0.6804, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08502772643253234, |
|
"grad_norm": 0.5652860999107361, |
|
"learning_rate": 0.001, |
|
"loss": 0.5217, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08687615526802218, |
|
"grad_norm": 0.6977353096008301, |
|
"learning_rate": 0.001, |
|
"loss": 0.7222, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08872458410351201, |
|
"grad_norm": 0.49619829654693604, |
|
"learning_rate": 0.001, |
|
"loss": 0.8871, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09057301293900184, |
|
"grad_norm": 0.3837599456310272, |
|
"learning_rate": 0.001, |
|
"loss": 0.745, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09242144177449169, |
|
"grad_norm": 0.35798409581184387, |
|
"learning_rate": 0.001, |
|
"loss": 0.7044, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09426987060998152, |
|
"grad_norm": 0.3777969777584076, |
|
"learning_rate": 0.001, |
|
"loss": 0.5723, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09611829944547134, |
|
"grad_norm": 0.35432809591293335, |
|
"learning_rate": 0.001, |
|
"loss": 0.6851, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09796672828096119, |
|
"grad_norm": 0.4954202175140381, |
|
"learning_rate": 0.001, |
|
"loss": 0.8028, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09981515711645102, |
|
"grad_norm": 0.3448912501335144, |
|
"learning_rate": 0.001, |
|
"loss": 0.6723, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.10166358595194085, |
|
"grad_norm": 0.4123232960700989, |
|
"learning_rate": 0.001, |
|
"loss": 0.6049, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.10351201478743069, |
|
"grad_norm": 0.37372708320617676, |
|
"learning_rate": 0.001, |
|
"loss": 0.5747, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10536044362292052, |
|
"grad_norm": 0.4807383120059967, |
|
"learning_rate": 0.001, |
|
"loss": 0.6363, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10720887245841035, |
|
"grad_norm": 0.8781284093856812, |
|
"learning_rate": 0.001, |
|
"loss": 2.1231, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10905730129390019, |
|
"grad_norm": 0.5836363434791565, |
|
"learning_rate": 0.001, |
|
"loss": 0.7727, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.11090573012939002, |
|
"grad_norm": 0.41094064712524414, |
|
"learning_rate": 0.001, |
|
"loss": 0.6618, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11275415896487985, |
|
"grad_norm": 0.4478781521320343, |
|
"learning_rate": 0.001, |
|
"loss": 0.73, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.11460258780036968, |
|
"grad_norm": 0.36589401960372925, |
|
"learning_rate": 0.001, |
|
"loss": 0.5947, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11645101663585952, |
|
"grad_norm": 0.5363914966583252, |
|
"learning_rate": 0.001, |
|
"loss": 0.5481, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11829944547134935, |
|
"grad_norm": 0.42670416831970215, |
|
"learning_rate": 0.001, |
|
"loss": 0.508, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.12014787430683918, |
|
"grad_norm": 0.7648505568504333, |
|
"learning_rate": 0.001, |
|
"loss": 0.6343, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.12199630314232902, |
|
"grad_norm": 0.4689379334449768, |
|
"learning_rate": 0.001, |
|
"loss": 0.7469, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12384473197781885, |
|
"grad_norm": 0.44720444083213806, |
|
"learning_rate": 0.001, |
|
"loss": 0.6952, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.1256931608133087, |
|
"grad_norm": 0.35768693685531616, |
|
"learning_rate": 0.001, |
|
"loss": 0.432, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12754158964879853, |
|
"grad_norm": 0.4105698764324188, |
|
"learning_rate": 0.001, |
|
"loss": 0.6861, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12939001848428835, |
|
"grad_norm": 0.38207560777664185, |
|
"learning_rate": 0.001, |
|
"loss": 0.5908, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.13123844731977818, |
|
"grad_norm": 0.46701478958129883, |
|
"learning_rate": 0.001, |
|
"loss": 0.6832, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.133086876155268, |
|
"grad_norm": 0.501502513885498, |
|
"learning_rate": 0.001, |
|
"loss": 0.7041, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13493530499075784, |
|
"grad_norm": 0.3278079926967621, |
|
"learning_rate": 0.001, |
|
"loss": 0.4946, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1367837338262477, |
|
"grad_norm": 0.4573265612125397, |
|
"learning_rate": 0.001, |
|
"loss": 0.6106, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13863216266173753, |
|
"grad_norm": 0.37268367409706116, |
|
"learning_rate": 0.001, |
|
"loss": 0.4687, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.14048059149722736, |
|
"grad_norm": 0.41542476415634155, |
|
"learning_rate": 0.001, |
|
"loss": 0.7345, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1423290203327172, |
|
"grad_norm": 0.36338284611701965, |
|
"learning_rate": 0.001, |
|
"loss": 0.5258, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14417744916820702, |
|
"grad_norm": 0.35523292422294617, |
|
"learning_rate": 0.001, |
|
"loss": 0.5006, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14602587800369685, |
|
"grad_norm": 0.7458345293998718, |
|
"learning_rate": 0.001, |
|
"loss": 1.6438, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1478743068391867, |
|
"grad_norm": 0.4105502963066101, |
|
"learning_rate": 0.001, |
|
"loss": 0.5162, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14972273567467653, |
|
"grad_norm": 0.4603978395462036, |
|
"learning_rate": 0.001, |
|
"loss": 0.7375, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.15157116451016636, |
|
"grad_norm": 0.3389419913291931, |
|
"learning_rate": 0.001, |
|
"loss": 0.4315, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1534195933456562, |
|
"grad_norm": 0.4641773998737335, |
|
"learning_rate": 0.001, |
|
"loss": 0.5741, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15526802218114602, |
|
"grad_norm": 0.7335869073867798, |
|
"learning_rate": 0.001, |
|
"loss": 0.6726, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15711645101663585, |
|
"grad_norm": 0.45697247982025146, |
|
"learning_rate": 0.001, |
|
"loss": 0.6194, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1589648798521257, |
|
"grad_norm": 0.5353591442108154, |
|
"learning_rate": 0.001, |
|
"loss": 0.6411, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.16081330868761554, |
|
"grad_norm": 0.5396345257759094, |
|
"learning_rate": 0.001, |
|
"loss": 0.6453, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.16266173752310537, |
|
"grad_norm": 0.5480281114578247, |
|
"learning_rate": 0.001, |
|
"loss": 0.6838, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1645101663585952, |
|
"grad_norm": 0.455941379070282, |
|
"learning_rate": 0.001, |
|
"loss": 0.58, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16635859519408502, |
|
"grad_norm": 0.8435553312301636, |
|
"learning_rate": 0.001, |
|
"loss": 0.8912, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16820702402957485, |
|
"grad_norm": 0.44801416993141174, |
|
"learning_rate": 0.001, |
|
"loss": 0.6285, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.17005545286506468, |
|
"grad_norm": 0.4071008563041687, |
|
"learning_rate": 0.001, |
|
"loss": 0.6715, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.17190388170055454, |
|
"grad_norm": 0.41972634196281433, |
|
"learning_rate": 0.001, |
|
"loss": 0.453, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.17375231053604437, |
|
"grad_norm": 0.60628741979599, |
|
"learning_rate": 0.001, |
|
"loss": 0.7723, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.1756007393715342, |
|
"grad_norm": 0.4178489148616791, |
|
"learning_rate": 0.001, |
|
"loss": 0.4684, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17744916820702403, |
|
"grad_norm": 0.5033894777297974, |
|
"learning_rate": 0.001, |
|
"loss": 0.6145, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17929759704251386, |
|
"grad_norm": 0.6618285179138184, |
|
"learning_rate": 0.001, |
|
"loss": 0.9387, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.18114602587800369, |
|
"grad_norm": 0.47585925459861755, |
|
"learning_rate": 0.001, |
|
"loss": 0.7797, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.18299445471349354, |
|
"grad_norm": 0.41978687047958374, |
|
"learning_rate": 0.001, |
|
"loss": 0.5475, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18484288354898337, |
|
"grad_norm": 0.5080726742744446, |
|
"learning_rate": 0.001, |
|
"loss": 0.5243, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1866913123844732, |
|
"grad_norm": 0.38001152873039246, |
|
"learning_rate": 0.001, |
|
"loss": 0.532, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.18853974121996303, |
|
"grad_norm": 0.5791141986846924, |
|
"learning_rate": 0.001, |
|
"loss": 0.7756, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.19038817005545286, |
|
"grad_norm": 0.42902910709381104, |
|
"learning_rate": 0.001, |
|
"loss": 0.6425, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1922365988909427, |
|
"grad_norm": 0.5157350301742554, |
|
"learning_rate": 0.001, |
|
"loss": 0.819, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.19408502772643252, |
|
"grad_norm": 12.421481132507324, |
|
"learning_rate": 0.001, |
|
"loss": 0.8929, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.19593345656192238, |
|
"grad_norm": 0.5132973790168762, |
|
"learning_rate": 0.001, |
|
"loss": 0.6348, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1977818853974122, |
|
"grad_norm": 0.4973335862159729, |
|
"learning_rate": 0.001, |
|
"loss": 0.6071, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.19963031423290203, |
|
"grad_norm": 0.326150119304657, |
|
"learning_rate": 0.001, |
|
"loss": 0.6531, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.20147874306839186, |
|
"grad_norm": 0.4327104985713959, |
|
"learning_rate": 0.001, |
|
"loss": 0.6975, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.2033271719038817, |
|
"grad_norm": 0.5976394414901733, |
|
"learning_rate": 0.001, |
|
"loss": 0.7276, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.20517560073937152, |
|
"grad_norm": 0.35819554328918457, |
|
"learning_rate": 0.001, |
|
"loss": 0.5249, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.20702402957486138, |
|
"grad_norm": 0.35644710063934326, |
|
"learning_rate": 0.001, |
|
"loss": 0.4523, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2088724584103512, |
|
"grad_norm": 0.45812854170799255, |
|
"learning_rate": 0.001, |
|
"loss": 0.7442, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.21072088724584104, |
|
"grad_norm": 0.5240729451179504, |
|
"learning_rate": 0.001, |
|
"loss": 0.6039, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.21256931608133087, |
|
"grad_norm": 0.6027355790138245, |
|
"learning_rate": 0.001, |
|
"loss": 0.7314, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2144177449168207, |
|
"grad_norm": 0.4197920560836792, |
|
"learning_rate": 0.001, |
|
"loss": 0.644, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21626617375231053, |
|
"grad_norm": 0.5917842388153076, |
|
"learning_rate": 0.001, |
|
"loss": 0.6872, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21811460258780038, |
|
"grad_norm": 0.49236243963241577, |
|
"learning_rate": 0.001, |
|
"loss": 0.9002, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2199630314232902, |
|
"grad_norm": 0.6250895857810974, |
|
"learning_rate": 0.001, |
|
"loss": 0.6905, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.22181146025878004, |
|
"grad_norm": 0.5853796005249023, |
|
"learning_rate": 0.001, |
|
"loss": 0.7602, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.22365988909426987, |
|
"grad_norm": 0.5543091297149658, |
|
"learning_rate": 0.001, |
|
"loss": 0.7356, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2255083179297597, |
|
"grad_norm": 0.4340505003929138, |
|
"learning_rate": 0.001, |
|
"loss": 0.6715, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.22735674676524953, |
|
"grad_norm": 0.4405982792377472, |
|
"learning_rate": 0.001, |
|
"loss": 0.6898, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.22920517560073936, |
|
"grad_norm": 0.5105734467506409, |
|
"learning_rate": 0.001, |
|
"loss": 0.7762, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.23105360443622922, |
|
"grad_norm": 0.4798790216445923, |
|
"learning_rate": 0.001, |
|
"loss": 0.5982, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.23290203327171904, |
|
"grad_norm": 0.4916364252567291, |
|
"learning_rate": 0.001, |
|
"loss": 0.4894, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.23475046210720887, |
|
"grad_norm": 0.5022260546684265, |
|
"learning_rate": 0.001, |
|
"loss": 0.614, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2365988909426987, |
|
"grad_norm": 0.4792012870311737, |
|
"learning_rate": 0.001, |
|
"loss": 0.6265, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23844731977818853, |
|
"grad_norm": 0.5663838982582092, |
|
"learning_rate": 0.001, |
|
"loss": 0.5971, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.24029574861367836, |
|
"grad_norm": 0.49800997972488403, |
|
"learning_rate": 0.001, |
|
"loss": 0.6318, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.24214417744916822, |
|
"grad_norm": 0.35220399498939514, |
|
"learning_rate": 0.001, |
|
"loss": 0.3083, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.24399260628465805, |
|
"grad_norm": 0.4706744849681854, |
|
"learning_rate": 0.001, |
|
"loss": 0.6436, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24584103512014788, |
|
"grad_norm": 0.5163246393203735, |
|
"learning_rate": 0.001, |
|
"loss": 0.7155, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.2476894639556377, |
|
"grad_norm": 0.4994561970233917, |
|
"learning_rate": 0.001, |
|
"loss": 0.8529, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24953789279112754, |
|
"grad_norm": 0.47701308131217957, |
|
"learning_rate": 0.001, |
|
"loss": 0.6465, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2513863216266174, |
|
"grad_norm": 0.5003355145454407, |
|
"learning_rate": 0.001, |
|
"loss": 0.6787, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2513863216266174, |
|
"eval_loss": 0.6116308569908142, |
|
"eval_runtime": 37.1894, |
|
"eval_samples_per_second": 1.479, |
|
"eval_steps_per_second": 0.188, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2532347504621072, |
|
"grad_norm": 0.46851846575737, |
|
"learning_rate": 0.001, |
|
"loss": 0.7461, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.25508317929759705, |
|
"grad_norm": 0.703341007232666, |
|
"learning_rate": 0.001, |
|
"loss": 0.709, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25693160813308685, |
|
"grad_norm": 0.48569703102111816, |
|
"learning_rate": 0.001, |
|
"loss": 0.7207, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2587800369685767, |
|
"grad_norm": 0.3450978398323059, |
|
"learning_rate": 0.001, |
|
"loss": 0.5599, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.26062846580406657, |
|
"grad_norm": 0.5488552451133728, |
|
"learning_rate": 0.001, |
|
"loss": 0.8029, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.26247689463955637, |
|
"grad_norm": 0.43643897771835327, |
|
"learning_rate": 0.001, |
|
"loss": 0.6259, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2643253234750462, |
|
"grad_norm": 0.4441094398498535, |
|
"learning_rate": 0.001, |
|
"loss": 0.688, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.266173752310536, |
|
"grad_norm": 0.40471598505973816, |
|
"learning_rate": 0.001, |
|
"loss": 0.666, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2680221811460259, |
|
"grad_norm": 0.5318337678909302, |
|
"learning_rate": 0.001, |
|
"loss": 0.6774, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2698706099815157, |
|
"grad_norm": 0.3838026225566864, |
|
"learning_rate": 0.001, |
|
"loss": 0.5882, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.27171903881700554, |
|
"grad_norm": 757.2123413085938, |
|
"learning_rate": 0.001, |
|
"loss": 3.2426, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2735674676524954, |
|
"grad_norm": 0.4157928228378296, |
|
"learning_rate": 0.001, |
|
"loss": 0.5649, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.2754158964879852, |
|
"grad_norm": 0.3751083314418793, |
|
"learning_rate": 0.001, |
|
"loss": 0.5098, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27726432532347506, |
|
"grad_norm": 0.38269737362861633, |
|
"learning_rate": 0.001, |
|
"loss": 0.6048, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.27911275415896486, |
|
"grad_norm": 0.9310194849967957, |
|
"learning_rate": 0.001, |
|
"loss": 0.5548, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2809611829944547, |
|
"grad_norm": 0.6662898063659668, |
|
"learning_rate": 0.001, |
|
"loss": 1.0353, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2828096118299446, |
|
"grad_norm": 0.5112884640693665, |
|
"learning_rate": 0.001, |
|
"loss": 0.5875, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.2846580406654344, |
|
"grad_norm": 0.682369589805603, |
|
"learning_rate": 0.001, |
|
"loss": 0.7255, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.28650646950092423, |
|
"grad_norm": 0.37935149669647217, |
|
"learning_rate": 0.001, |
|
"loss": 0.453, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28835489833641403, |
|
"grad_norm": 0.5815888047218323, |
|
"learning_rate": 0.001, |
|
"loss": 0.6359, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2902033271719039, |
|
"grad_norm": 0.5233355164527893, |
|
"learning_rate": 0.001, |
|
"loss": 0.588, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2920517560073937, |
|
"grad_norm": 0.5995686650276184, |
|
"learning_rate": 0.001, |
|
"loss": 0.6695, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.29390018484288355, |
|
"grad_norm": 0.5288013815879822, |
|
"learning_rate": 0.001, |
|
"loss": 0.6172, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2957486136783734, |
|
"grad_norm": 0.4866679310798645, |
|
"learning_rate": 0.001, |
|
"loss": 0.6383, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2975970425138632, |
|
"grad_norm": 0.4867928922176361, |
|
"learning_rate": 0.001, |
|
"loss": 0.6561, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.29944547134935307, |
|
"grad_norm": 85.95575714111328, |
|
"learning_rate": 0.001, |
|
"loss": 7.2015, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.30129390018484287, |
|
"grad_norm": 0.9067592620849609, |
|
"learning_rate": 0.001, |
|
"loss": 0.7286, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.3031423290203327, |
|
"grad_norm": 0.8694595098495483, |
|
"learning_rate": 0.001, |
|
"loss": 0.5795, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.3049907578558225, |
|
"grad_norm": 0.5374184846878052, |
|
"learning_rate": 0.001, |
|
"loss": 0.5078, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3068391866913124, |
|
"grad_norm": 0.42466646432876587, |
|
"learning_rate": 0.001, |
|
"loss": 0.5351, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.30868761552680224, |
|
"grad_norm": 0.707733690738678, |
|
"learning_rate": 0.001, |
|
"loss": 0.8727, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.31053604436229204, |
|
"grad_norm": 0.7297586798667908, |
|
"learning_rate": 0.001, |
|
"loss": 0.6873, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3123844731977819, |
|
"grad_norm": 0.587944507598877, |
|
"learning_rate": 0.001, |
|
"loss": 2.0879, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3142329020332717, |
|
"grad_norm": 0.9605491161346436, |
|
"learning_rate": 0.001, |
|
"loss": 1.2765, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.31608133086876156, |
|
"grad_norm": 0.5181152820587158, |
|
"learning_rate": 0.001, |
|
"loss": 0.6069, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3179297597042514, |
|
"grad_norm": 2.2382378578186035, |
|
"learning_rate": 0.001, |
|
"loss": 0.7284, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3197781885397412, |
|
"grad_norm": 0.42649027705192566, |
|
"learning_rate": 0.001, |
|
"loss": 0.5089, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.32162661737523107, |
|
"grad_norm": 0.5831953287124634, |
|
"learning_rate": 0.001, |
|
"loss": 0.6019, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3234750462107209, |
|
"grad_norm": 11.331945419311523, |
|
"learning_rate": 0.001, |
|
"loss": 0.7763, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.32532347504621073, |
|
"grad_norm": 0.4289061725139618, |
|
"learning_rate": 0.001, |
|
"loss": 0.4493, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.32717190388170053, |
|
"grad_norm": 0.7920455932617188, |
|
"learning_rate": 0.001, |
|
"loss": 0.7945, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3290203327171904, |
|
"grad_norm": 1.0137889385223389, |
|
"learning_rate": 0.001, |
|
"loss": 0.7158, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.33086876155268025, |
|
"grad_norm": 1.1164557933807373, |
|
"learning_rate": 0.001, |
|
"loss": 0.7007, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.33271719038817005, |
|
"grad_norm": 0.5898825526237488, |
|
"learning_rate": 0.001, |
|
"loss": 0.7362, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3345656192236599, |
|
"grad_norm": 2144.9521484375, |
|
"learning_rate": 0.001, |
|
"loss": 5.333, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3364140480591497, |
|
"grad_norm": 1.0912034511566162, |
|
"learning_rate": 0.001, |
|
"loss": 0.8123, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.33826247689463956, |
|
"grad_norm": 0.5837512612342834, |
|
"learning_rate": 0.001, |
|
"loss": 0.738, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.34011090573012936, |
|
"grad_norm": 0.5561927556991577, |
|
"learning_rate": 0.001, |
|
"loss": 0.5875, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.3419593345656192, |
|
"grad_norm": 3.1537868976593018, |
|
"learning_rate": 0.001, |
|
"loss": 0.8469, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3438077634011091, |
|
"grad_norm": 0.751020610332489, |
|
"learning_rate": 0.001, |
|
"loss": 0.6567, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3456561922365989, |
|
"grad_norm": 2.071126699447632, |
|
"learning_rate": 0.001, |
|
"loss": 0.7666, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.34750462107208874, |
|
"grad_norm": 0.8807157874107361, |
|
"learning_rate": 0.001, |
|
"loss": 1.0371, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.34935304990757854, |
|
"grad_norm": 2.8368020057678223, |
|
"learning_rate": 0.001, |
|
"loss": 0.9064, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.3512014787430684, |
|
"grad_norm": 16.864635467529297, |
|
"learning_rate": 0.001, |
|
"loss": 0.9614, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.35304990757855825, |
|
"grad_norm": 0.7199917435646057, |
|
"learning_rate": 0.001, |
|
"loss": 0.7248, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.35489833641404805, |
|
"grad_norm": 0.6968075633049011, |
|
"learning_rate": 0.001, |
|
"loss": 0.6518, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3567467652495379, |
|
"grad_norm": 0.7219743728637695, |
|
"learning_rate": 0.001, |
|
"loss": 0.7583, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.3585951940850277, |
|
"grad_norm": 0.5318032503128052, |
|
"learning_rate": 0.001, |
|
"loss": 0.638, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.36044362292051757, |
|
"grad_norm": 0.740088701248169, |
|
"learning_rate": 0.001, |
|
"loss": 0.6744, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.36229205175600737, |
|
"grad_norm": 0.743021547794342, |
|
"learning_rate": 0.001, |
|
"loss": 0.6755, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.36414048059149723, |
|
"grad_norm": 4.2798004150390625, |
|
"learning_rate": 0.001, |
|
"loss": 0.6902, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.3659889094269871, |
|
"grad_norm": 0.7763252854347229, |
|
"learning_rate": 0.001, |
|
"loss": 0.7363, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.3678373382624769, |
|
"grad_norm": 0.8664296865463257, |
|
"learning_rate": 0.001, |
|
"loss": 0.9194, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.36968576709796674, |
|
"grad_norm": 1.4949946403503418, |
|
"learning_rate": 0.001, |
|
"loss": 0.6825, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.37153419593345655, |
|
"grad_norm": 0.47144782543182373, |
|
"learning_rate": 0.001, |
|
"loss": 0.4892, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.3733826247689464, |
|
"grad_norm": 2.078660249710083, |
|
"learning_rate": 0.001, |
|
"loss": 0.9382, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.3752310536044362, |
|
"grad_norm": 0.8569644689559937, |
|
"learning_rate": 0.001, |
|
"loss": 0.6567, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.37707948243992606, |
|
"grad_norm": 0.7167682647705078, |
|
"learning_rate": 0.001, |
|
"loss": 0.8144, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.3789279112754159, |
|
"grad_norm": 2.2069590091705322, |
|
"learning_rate": 0.001, |
|
"loss": 0.8378, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3807763401109057, |
|
"grad_norm": 4.755973815917969, |
|
"learning_rate": 0.001, |
|
"loss": 0.6908, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.3826247689463956, |
|
"grad_norm": 0.6981136202812195, |
|
"learning_rate": 0.001, |
|
"loss": 0.5822, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3844731977818854, |
|
"grad_norm": 0.7765632271766663, |
|
"learning_rate": 0.001, |
|
"loss": 0.8156, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.38632162661737524, |
|
"grad_norm": 2.0891387462615967, |
|
"learning_rate": 0.001, |
|
"loss": 0.558, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.38817005545286504, |
|
"grad_norm": 0.7609770894050598, |
|
"learning_rate": 0.001, |
|
"loss": 0.6879, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.3900184842883549, |
|
"grad_norm": 0.6121675372123718, |
|
"learning_rate": 0.001, |
|
"loss": 0.8023, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.39186691312384475, |
|
"grad_norm": 1.2764517068862915, |
|
"learning_rate": 0.001, |
|
"loss": 0.671, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.39371534195933455, |
|
"grad_norm": 0.9839686155319214, |
|
"learning_rate": 0.001, |
|
"loss": 0.6025, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3955637707948244, |
|
"grad_norm": 0.8942749500274658, |
|
"learning_rate": 0.001, |
|
"loss": 0.7837, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3974121996303142, |
|
"grad_norm": 8.554291725158691, |
|
"learning_rate": 0.001, |
|
"loss": 0.9271, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.39926062846580407, |
|
"grad_norm": 36.47178649902344, |
|
"learning_rate": 0.001, |
|
"loss": 1.0102, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.4011090573012939, |
|
"grad_norm": 4.423219203948975, |
|
"learning_rate": 0.001, |
|
"loss": 0.614, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.4029574861367837, |
|
"grad_norm": 0.673527181148529, |
|
"learning_rate": 0.001, |
|
"loss": 0.6804, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.4048059149722736, |
|
"grad_norm": 0.8505294322967529, |
|
"learning_rate": 0.001, |
|
"loss": 0.6223, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.4066543438077634, |
|
"grad_norm": 0.6489795446395874, |
|
"learning_rate": 0.001, |
|
"loss": 0.6429, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.40850277264325324, |
|
"grad_norm": 0.8948644399642944, |
|
"learning_rate": 0.001, |
|
"loss": 1.4723, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.41035120147874304, |
|
"grad_norm": 210.18814086914062, |
|
"learning_rate": 0.001, |
|
"loss": 1.6274, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4121996303142329, |
|
"grad_norm": 23.37410545349121, |
|
"learning_rate": 0.001, |
|
"loss": 0.6821, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.41404805914972276, |
|
"grad_norm": 25.335840225219727, |
|
"learning_rate": 0.001, |
|
"loss": 1.5518, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.41589648798521256, |
|
"grad_norm": 1.2805469036102295, |
|
"learning_rate": 0.001, |
|
"loss": 0.755, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4177449168207024, |
|
"grad_norm": 3.5078518390655518, |
|
"learning_rate": 0.001, |
|
"loss": 0.765, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4195933456561922, |
|
"grad_norm": 2.665682315826416, |
|
"learning_rate": 0.001, |
|
"loss": 1.0047, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4214417744916821, |
|
"grad_norm": 0.8399984240531921, |
|
"learning_rate": 0.001, |
|
"loss": 0.6717, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4232902033271719, |
|
"grad_norm": 0.5785517692565918, |
|
"learning_rate": 0.001, |
|
"loss": 0.6277, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.42513863216266173, |
|
"grad_norm": 0.7574679851531982, |
|
"learning_rate": 0.001, |
|
"loss": 0.6001, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4269870609981516, |
|
"grad_norm": 1.1060686111450195, |
|
"learning_rate": 0.001, |
|
"loss": 0.4657, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.4288354898336414, |
|
"grad_norm": 0.8456437587738037, |
|
"learning_rate": 0.001, |
|
"loss": 0.6504, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.43068391866913125, |
|
"grad_norm": 1.1631237268447876, |
|
"learning_rate": 0.001, |
|
"loss": 0.6474, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.43253234750462105, |
|
"grad_norm": 0.8575699925422668, |
|
"learning_rate": 0.001, |
|
"loss": 0.8597, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4343807763401109, |
|
"grad_norm": 1.1094099283218384, |
|
"learning_rate": 0.001, |
|
"loss": 0.6128, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.43622920517560076, |
|
"grad_norm": 0.765487015247345, |
|
"learning_rate": 0.001, |
|
"loss": 0.685, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.43807763401109057, |
|
"grad_norm": 0.713035523891449, |
|
"learning_rate": 0.001, |
|
"loss": 0.6375, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4399260628465804, |
|
"grad_norm": 1.1346995830535889, |
|
"learning_rate": 0.001, |
|
"loss": 0.8121, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4417744916820702, |
|
"grad_norm": 10.435094833374023, |
|
"learning_rate": 0.001, |
|
"loss": 0.7499, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.4436229205175601, |
|
"grad_norm": 1.442493200302124, |
|
"learning_rate": 0.001, |
|
"loss": 0.6899, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.4454713493530499, |
|
"grad_norm": 1.5144755840301514, |
|
"learning_rate": 0.001, |
|
"loss": 0.566, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.44731977818853974, |
|
"grad_norm": 1.2616835832595825, |
|
"learning_rate": 0.001, |
|
"loss": 0.6848, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.4491682070240296, |
|
"grad_norm": 1.6803869009017944, |
|
"learning_rate": 0.001, |
|
"loss": 1.9734, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.4510166358595194, |
|
"grad_norm": 1.6784708499908447, |
|
"learning_rate": 0.001, |
|
"loss": 0.8113, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.45286506469500926, |
|
"grad_norm": 0.6301059126853943, |
|
"learning_rate": 0.001, |
|
"loss": 0.5922, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.45471349353049906, |
|
"grad_norm": 2.435321807861328, |
|
"learning_rate": 0.001, |
|
"loss": 0.7648, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.4565619223659889, |
|
"grad_norm": 0.9158065915107727, |
|
"learning_rate": 0.001, |
|
"loss": 0.6473, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4584103512014787, |
|
"grad_norm": 1.1794534921646118, |
|
"learning_rate": 0.001, |
|
"loss": 0.6715, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.4602587800369686, |
|
"grad_norm": 5.524970054626465, |
|
"learning_rate": 0.001, |
|
"loss": 0.7828, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.46210720887245843, |
|
"grad_norm": 1.7100472450256348, |
|
"learning_rate": 0.001, |
|
"loss": 0.8253, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.46395563770794823, |
|
"grad_norm": 43.962615966796875, |
|
"learning_rate": 0.001, |
|
"loss": 7.6281, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4658040665434381, |
|
"grad_norm": 8.905851364135742, |
|
"learning_rate": 0.001, |
|
"loss": 1.2425, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.4676524953789279, |
|
"grad_norm": 1.4537878036499023, |
|
"learning_rate": 0.001, |
|
"loss": 0.6607, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.46950092421441775, |
|
"grad_norm": 1.0906081199645996, |
|
"learning_rate": 0.001, |
|
"loss": 0.7711, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.4713493530499076, |
|
"grad_norm": 4.024237155914307, |
|
"learning_rate": 0.001, |
|
"loss": 0.8851, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.4731977818853974, |
|
"grad_norm": 0.7822116017341614, |
|
"learning_rate": 0.001, |
|
"loss": 0.5662, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.47504621072088726, |
|
"grad_norm": 1.1747384071350098, |
|
"learning_rate": 0.001, |
|
"loss": 0.6728, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.47689463955637706, |
|
"grad_norm": 1.1935127973556519, |
|
"learning_rate": 0.001, |
|
"loss": 0.5782, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4787430683918669, |
|
"grad_norm": 0.5861594080924988, |
|
"learning_rate": 0.001, |
|
"loss": 0.6337, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.4805914972273567, |
|
"grad_norm": 1.6729638576507568, |
|
"learning_rate": 0.001, |
|
"loss": 0.7732, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4824399260628466, |
|
"grad_norm": 6.095468044281006, |
|
"learning_rate": 0.001, |
|
"loss": 0.8832, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.48428835489833644, |
|
"grad_norm": 366.1634521484375, |
|
"learning_rate": 0.001, |
|
"loss": 1.8354, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.48613678373382624, |
|
"grad_norm": 119.24848175048828, |
|
"learning_rate": 0.001, |
|
"loss": 2.4536, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.4879852125693161, |
|
"grad_norm": 1.6337554454803467, |
|
"learning_rate": 0.001, |
|
"loss": 0.7948, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.4898336414048059, |
|
"grad_norm": 0.7200238704681396, |
|
"learning_rate": 0.001, |
|
"loss": 0.5746, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.49168207024029575, |
|
"grad_norm": 0.9898024201393127, |
|
"learning_rate": 0.001, |
|
"loss": 0.9548, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.49353049907578556, |
|
"grad_norm": 48.905426025390625, |
|
"learning_rate": 0.001, |
|
"loss": 2.7047, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.4953789279112754, |
|
"grad_norm": 2.0077579021453857, |
|
"learning_rate": 0.001, |
|
"loss": 0.9014, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.49722735674676527, |
|
"grad_norm": 0.7598840594291687, |
|
"learning_rate": 0.001, |
|
"loss": 0.6205, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.49907578558225507, |
|
"grad_norm": 0.6317667365074158, |
|
"learning_rate": 0.001, |
|
"loss": 0.6323, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5009242144177449, |
|
"grad_norm": 1.1243677139282227, |
|
"learning_rate": 0.001, |
|
"loss": 0.699, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5027726432532348, |
|
"grad_norm": 0.7714710235595703, |
|
"learning_rate": 0.001, |
|
"loss": 0.6488, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5027726432532348, |
|
"eval_loss": 2.399911403656006, |
|
"eval_runtime": 36.8979, |
|
"eval_samples_per_second": 1.491, |
|
"eval_steps_per_second": 0.19, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5046210720887245, |
|
"grad_norm": 5.736697196960449, |
|
"learning_rate": 0.001, |
|
"loss": 0.7481, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5064695009242144, |
|
"grad_norm": 0.6620627641677856, |
|
"learning_rate": 0.001, |
|
"loss": 0.6781, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.5083179297597042, |
|
"grad_norm": 1.1013222932815552, |
|
"learning_rate": 0.001, |
|
"loss": 0.8447, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5101663585951941, |
|
"grad_norm": 0.6351478099822998, |
|
"learning_rate": 0.001, |
|
"loss": 0.486, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.512014787430684, |
|
"grad_norm": 0.7216677069664001, |
|
"learning_rate": 0.001, |
|
"loss": 0.6427, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5138632162661737, |
|
"grad_norm": 0.9114554524421692, |
|
"learning_rate": 0.001, |
|
"loss": 2.2048, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5157116451016636, |
|
"grad_norm": 0.9319707155227661, |
|
"learning_rate": 0.001, |
|
"loss": 0.6169, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5175600739371534, |
|
"grad_norm": 1.0675102472305298, |
|
"learning_rate": 0.001, |
|
"loss": 0.8317, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5194085027726433, |
|
"grad_norm": 1.91627037525177, |
|
"learning_rate": 0.001, |
|
"loss": 0.7027, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5212569316081331, |
|
"grad_norm": 1.4550344944000244, |
|
"learning_rate": 0.001, |
|
"loss": 0.7876, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5231053604436229, |
|
"grad_norm": 1.139053463935852, |
|
"learning_rate": 0.001, |
|
"loss": 2.3306, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5249537892791127, |
|
"grad_norm": 1.3426302671432495, |
|
"learning_rate": 0.001, |
|
"loss": 0.4473, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5268022181146026, |
|
"grad_norm": 1.4722453355789185, |
|
"learning_rate": 0.001, |
|
"loss": 0.7528, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5286506469500925, |
|
"grad_norm": 0.6636189222335815, |
|
"learning_rate": 0.001, |
|
"loss": 0.7003, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5304990757855823, |
|
"grad_norm": 0.6289682388305664, |
|
"learning_rate": 0.001, |
|
"loss": 0.6891, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.532347504621072, |
|
"grad_norm": 0.518354058265686, |
|
"learning_rate": 0.001, |
|
"loss": 0.6492, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5341959334565619, |
|
"grad_norm": 0.6105506420135498, |
|
"learning_rate": 0.001, |
|
"loss": 0.642, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5360443622920518, |
|
"grad_norm": 0.663215160369873, |
|
"learning_rate": 0.001, |
|
"loss": 0.7104, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5378927911275416, |
|
"grad_norm": 0.6891048550605774, |
|
"learning_rate": 0.001, |
|
"loss": 0.7431, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5397412199630314, |
|
"grad_norm": 1.533583641052246, |
|
"learning_rate": 0.001, |
|
"loss": 1.886, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5415896487985212, |
|
"grad_norm": 0.933929979801178, |
|
"learning_rate": 0.001, |
|
"loss": 0.6183, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5434380776340111, |
|
"grad_norm": 0.7768216729164124, |
|
"learning_rate": 0.001, |
|
"loss": 0.7606, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5452865064695009, |
|
"grad_norm": 2.2823166847229004, |
|
"learning_rate": 0.001, |
|
"loss": 0.7254, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5471349353049908, |
|
"grad_norm": 0.7304850816726685, |
|
"learning_rate": 0.001, |
|
"loss": 0.7621, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5489833641404805, |
|
"grad_norm": 0.593639075756073, |
|
"learning_rate": 0.001, |
|
"loss": 0.6813, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5508317929759704, |
|
"grad_norm": 2.0610456466674805, |
|
"learning_rate": 0.001, |
|
"loss": 1.0654, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5526802218114603, |
|
"grad_norm": 1.1367557048797607, |
|
"learning_rate": 0.001, |
|
"loss": 0.6536, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5545286506469501, |
|
"grad_norm": 5.493245601654053, |
|
"learning_rate": 0.001, |
|
"loss": 0.6969, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.55637707948244, |
|
"grad_norm": 0.56303471326828, |
|
"learning_rate": 0.001, |
|
"loss": 0.7435, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5582255083179297, |
|
"grad_norm": 0.5343788266181946, |
|
"learning_rate": 0.001, |
|
"loss": 0.6129, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5600739371534196, |
|
"grad_norm": 0.6194228529930115, |
|
"learning_rate": 0.001, |
|
"loss": 0.6848, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5619223659889094, |
|
"grad_norm": 0.7855530977249146, |
|
"learning_rate": 0.001, |
|
"loss": 0.8936, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5637707948243993, |
|
"grad_norm": 0.5858953595161438, |
|
"learning_rate": 0.001, |
|
"loss": 0.7653, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5656192236598891, |
|
"grad_norm": 48.524574279785156, |
|
"learning_rate": 0.001, |
|
"loss": 9.1166, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5674676524953789, |
|
"grad_norm": 0.6151651740074158, |
|
"learning_rate": 0.001, |
|
"loss": 0.7313, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5693160813308688, |
|
"grad_norm": 0.9248428344726562, |
|
"learning_rate": 0.001, |
|
"loss": 0.5781, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5711645101663586, |
|
"grad_norm": 0.49555471539497375, |
|
"learning_rate": 0.001, |
|
"loss": 0.5315, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5730129390018485, |
|
"grad_norm": 0.5464370250701904, |
|
"learning_rate": 0.001, |
|
"loss": 0.6076, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5748613678373382, |
|
"grad_norm": 0.6241145730018616, |
|
"learning_rate": 0.001, |
|
"loss": 0.3794, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5767097966728281, |
|
"grad_norm": 0.6810182929039001, |
|
"learning_rate": 0.001, |
|
"loss": 0.8058, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5785582255083179, |
|
"grad_norm": 0.6118370890617371, |
|
"learning_rate": 0.001, |
|
"loss": 0.6892, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5804066543438078, |
|
"grad_norm": 0.8108903169631958, |
|
"learning_rate": 0.001, |
|
"loss": 0.7409, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5822550831792976, |
|
"grad_norm": 0.6275304555892944, |
|
"learning_rate": 0.001, |
|
"loss": 0.5313, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5841035120147874, |
|
"grad_norm": 0.8600186705589294, |
|
"learning_rate": 0.001, |
|
"loss": 0.7863, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5859519408502772, |
|
"grad_norm": 1.5396201610565186, |
|
"learning_rate": 0.001, |
|
"loss": 0.8304, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5878003696857671, |
|
"grad_norm": 0.6472541689872742, |
|
"learning_rate": 0.001, |
|
"loss": 0.5984, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.589648798521257, |
|
"grad_norm": 1.398506760597229, |
|
"learning_rate": 0.001, |
|
"loss": 0.9278, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5914972273567468, |
|
"grad_norm": 0.8885821104049683, |
|
"learning_rate": 0.001, |
|
"loss": 0.7355, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5933456561922366, |
|
"grad_norm": 0.6919491291046143, |
|
"learning_rate": 0.001, |
|
"loss": 0.7955, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5951940850277264, |
|
"grad_norm": 0.5543484687805176, |
|
"learning_rate": 0.001, |
|
"loss": 0.5661, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5970425138632163, |
|
"grad_norm": 0.9218202829360962, |
|
"learning_rate": 0.001, |
|
"loss": 0.6754, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5988909426987061, |
|
"grad_norm": 0.7282502055168152, |
|
"learning_rate": 0.001, |
|
"loss": 0.7551, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.600739371534196, |
|
"grad_norm": 0.8452831506729126, |
|
"learning_rate": 0.001, |
|
"loss": 0.7975, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6025878003696857, |
|
"grad_norm": 0.6374185085296631, |
|
"learning_rate": 0.001, |
|
"loss": 0.6155, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.6044362292051756, |
|
"grad_norm": 0.8224793076515198, |
|
"learning_rate": 0.001, |
|
"loss": 0.8842, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.6062846580406654, |
|
"grad_norm": 0.8151664137840271, |
|
"learning_rate": 0.001, |
|
"loss": 0.6684, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.6081330868761553, |
|
"grad_norm": 0.8286120295524597, |
|
"learning_rate": 0.001, |
|
"loss": 0.6752, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.609981515711645, |
|
"grad_norm": 0.6680192351341248, |
|
"learning_rate": 0.001, |
|
"loss": 0.5938, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6118299445471349, |
|
"grad_norm": 0.6495516896247864, |
|
"learning_rate": 0.001, |
|
"loss": 0.6121, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.6136783733826248, |
|
"grad_norm": 0.484153151512146, |
|
"learning_rate": 0.001, |
|
"loss": 0.5139, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6155268022181146, |
|
"grad_norm": 0.7952211499214172, |
|
"learning_rate": 0.001, |
|
"loss": 1.9693, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6173752310536045, |
|
"grad_norm": 0.6049392819404602, |
|
"learning_rate": 0.001, |
|
"loss": 0.6355, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6192236598890942, |
|
"grad_norm": 0.5543565154075623, |
|
"learning_rate": 0.001, |
|
"loss": 0.5789, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6210720887245841, |
|
"grad_norm": 0.6608275771141052, |
|
"learning_rate": 0.001, |
|
"loss": 0.7809, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6229205175600739, |
|
"grad_norm": 0.7678483724594116, |
|
"learning_rate": 0.001, |
|
"loss": 0.901, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6247689463955638, |
|
"grad_norm": 0.7045119404792786, |
|
"learning_rate": 0.001, |
|
"loss": 0.6893, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6266173752310537, |
|
"grad_norm": 0.6836982369422913, |
|
"learning_rate": 0.001, |
|
"loss": 0.6864, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6284658040665434, |
|
"grad_norm": 0.5288094878196716, |
|
"learning_rate": 0.001, |
|
"loss": 0.5089, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6303142329020333, |
|
"grad_norm": 0.6091610193252563, |
|
"learning_rate": 0.001, |
|
"loss": 0.8265, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6321626617375231, |
|
"grad_norm": 0.7190775871276855, |
|
"learning_rate": 0.001, |
|
"loss": 0.5927, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.634011090573013, |
|
"grad_norm": 16.79271697998047, |
|
"learning_rate": 0.001, |
|
"loss": 2.4376, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6358595194085028, |
|
"grad_norm": 0.6407002806663513, |
|
"learning_rate": 0.001, |
|
"loss": 0.6132, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6377079482439926, |
|
"grad_norm": 0.8412754535675049, |
|
"learning_rate": 0.001, |
|
"loss": 0.6561, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6395563770794824, |
|
"grad_norm": 0.6675889492034912, |
|
"learning_rate": 0.001, |
|
"loss": 0.5961, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6414048059149723, |
|
"grad_norm": 0.6772903203964233, |
|
"learning_rate": 0.001, |
|
"loss": 0.7657, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6432532347504621, |
|
"grad_norm": 0.568170964717865, |
|
"learning_rate": 0.001, |
|
"loss": 0.5673, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6451016635859519, |
|
"grad_norm": 0.7691311240196228, |
|
"learning_rate": 0.001, |
|
"loss": 0.6749, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6469500924214417, |
|
"grad_norm": 1.38419771194458, |
|
"learning_rate": 0.001, |
|
"loss": 0.7093, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6487985212569316, |
|
"grad_norm": 17.263277053833008, |
|
"learning_rate": 0.001, |
|
"loss": 1.152, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6506469500924215, |
|
"grad_norm": 1.9294712543487549, |
|
"learning_rate": 0.001, |
|
"loss": 0.857, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6524953789279113, |
|
"grad_norm": 0.6687116026878357, |
|
"learning_rate": 0.001, |
|
"loss": 0.7798, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6543438077634011, |
|
"grad_norm": 0.600409984588623, |
|
"learning_rate": 0.001, |
|
"loss": 0.5035, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6561922365988909, |
|
"grad_norm": 0.7064170837402344, |
|
"learning_rate": 0.001, |
|
"loss": 0.8408, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6580406654343808, |
|
"grad_norm": 0.5997954607009888, |
|
"learning_rate": 0.001, |
|
"loss": 0.7668, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6598890942698706, |
|
"grad_norm": 0.8595522046089172, |
|
"learning_rate": 0.001, |
|
"loss": 0.6487, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6617375231053605, |
|
"grad_norm": 0.8264375329017639, |
|
"learning_rate": 0.001, |
|
"loss": 0.8053, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6635859519408502, |
|
"grad_norm": 0.45927339792251587, |
|
"learning_rate": 0.001, |
|
"loss": 0.6095, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6654343807763401, |
|
"grad_norm": 0.734035849571228, |
|
"learning_rate": 0.001, |
|
"loss": 0.6616, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.66728280961183, |
|
"grad_norm": 0.639272928237915, |
|
"learning_rate": 0.001, |
|
"loss": 0.7055, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6691312384473198, |
|
"grad_norm": 0.5212395787239075, |
|
"learning_rate": 0.001, |
|
"loss": 0.6112, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6709796672828097, |
|
"grad_norm": 0.5960317850112915, |
|
"learning_rate": 0.001, |
|
"loss": 0.6009, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6728280961182994, |
|
"grad_norm": 0.7983746528625488, |
|
"learning_rate": 0.001, |
|
"loss": 0.8488, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6746765249537893, |
|
"grad_norm": 0.5350251793861389, |
|
"learning_rate": 0.001, |
|
"loss": 0.6422, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6765249537892791, |
|
"grad_norm": 34.95353698730469, |
|
"learning_rate": 0.001, |
|
"loss": 2.1827, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.678373382624769, |
|
"grad_norm": 0.6277118921279907, |
|
"learning_rate": 0.001, |
|
"loss": 0.636, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6802218114602587, |
|
"grad_norm": 0.5147830843925476, |
|
"learning_rate": 0.001, |
|
"loss": 0.6665, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6820702402957486, |
|
"grad_norm": 0.9916329979896545, |
|
"learning_rate": 0.001, |
|
"loss": 0.8003, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6839186691312384, |
|
"grad_norm": 0.49549809098243713, |
|
"learning_rate": 0.001, |
|
"loss": 0.7006, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6857670979667283, |
|
"grad_norm": 0.6405476331710815, |
|
"learning_rate": 0.001, |
|
"loss": 0.6889, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6876155268022182, |
|
"grad_norm": 17.999553680419922, |
|
"learning_rate": 0.001, |
|
"loss": 1.08, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6894639556377079, |
|
"grad_norm": 0.6254942417144775, |
|
"learning_rate": 0.001, |
|
"loss": 0.7336, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6913123844731978, |
|
"grad_norm": 0.7496931552886963, |
|
"learning_rate": 0.001, |
|
"loss": 0.7513, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6931608133086876, |
|
"grad_norm": 0.7643265128135681, |
|
"learning_rate": 0.001, |
|
"loss": 0.6091, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6950092421441775, |
|
"grad_norm": 0.6992492079734802, |
|
"learning_rate": 0.001, |
|
"loss": 0.6633, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6968576709796673, |
|
"grad_norm": 1.2083274126052856, |
|
"learning_rate": 0.001, |
|
"loss": 0.9582, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6987060998151571, |
|
"grad_norm": 1.052863359451294, |
|
"learning_rate": 0.001, |
|
"loss": 0.7435, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.7005545286506469, |
|
"grad_norm": 5.199072360992432, |
|
"learning_rate": 0.001, |
|
"loss": 1.9021, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.7024029574861368, |
|
"grad_norm": 0.6658142805099487, |
|
"learning_rate": 0.001, |
|
"loss": 0.578, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7042513863216266, |
|
"grad_norm": 0.9965769052505493, |
|
"learning_rate": 0.001, |
|
"loss": 0.6766, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7060998151571165, |
|
"grad_norm": 1.3572355508804321, |
|
"learning_rate": 0.001, |
|
"loss": 0.925, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7079482439926063, |
|
"grad_norm": 0.796585202217102, |
|
"learning_rate": 0.001, |
|
"loss": 0.8363, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7097966728280961, |
|
"grad_norm": 1.294584035873413, |
|
"learning_rate": 0.001, |
|
"loss": 0.6018, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.711645101663586, |
|
"grad_norm": 0.5120808482170105, |
|
"learning_rate": 0.001, |
|
"loss": 0.6919, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7134935304990758, |
|
"grad_norm": 0.6790513396263123, |
|
"learning_rate": 0.001, |
|
"loss": 0.7894, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7153419593345656, |
|
"grad_norm": 0.796168863773346, |
|
"learning_rate": 0.001, |
|
"loss": 0.7301, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7171903881700554, |
|
"grad_norm": 0.7078443169593811, |
|
"learning_rate": 0.001, |
|
"loss": 0.614, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7190388170055453, |
|
"grad_norm": 0.656707227230072, |
|
"learning_rate": 0.001, |
|
"loss": 0.7422, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7208872458410351, |
|
"grad_norm": 1.1826304197311401, |
|
"learning_rate": 0.001, |
|
"loss": 0.9052, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.722735674676525, |
|
"grad_norm": 1.317765474319458, |
|
"learning_rate": 0.001, |
|
"loss": 0.7672, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7245841035120147, |
|
"grad_norm": 0.6513253450393677, |
|
"learning_rate": 0.001, |
|
"loss": 0.6832, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7264325323475046, |
|
"grad_norm": 0.6068792939186096, |
|
"learning_rate": 0.001, |
|
"loss": 0.6591, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7282809611829945, |
|
"grad_norm": 0.7942211031913757, |
|
"learning_rate": 0.001, |
|
"loss": 2.1942, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7301293900184843, |
|
"grad_norm": 0.7121322751045227, |
|
"learning_rate": 0.001, |
|
"loss": 0.7641, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7319778188539742, |
|
"grad_norm": 0.5792383551597595, |
|
"learning_rate": 0.001, |
|
"loss": 0.6137, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7338262476894639, |
|
"grad_norm": 0.40939533710479736, |
|
"learning_rate": 0.001, |
|
"loss": 0.4706, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7356746765249538, |
|
"grad_norm": 0.8896576762199402, |
|
"learning_rate": 0.001, |
|
"loss": 0.7001, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7375231053604436, |
|
"grad_norm": 0.5687329173088074, |
|
"learning_rate": 0.001, |
|
"loss": 0.6926, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7393715341959335, |
|
"grad_norm": 0.7827416658401489, |
|
"learning_rate": 0.001, |
|
"loss": 0.8797, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7412199630314233, |
|
"grad_norm": 0.6891829371452332, |
|
"learning_rate": 0.001, |
|
"loss": 0.6442, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7430683918669131, |
|
"grad_norm": 0.9651913046836853, |
|
"learning_rate": 0.001, |
|
"loss": 0.7578, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.744916820702403, |
|
"grad_norm": 1.134435772895813, |
|
"learning_rate": 0.001, |
|
"loss": 0.7586, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7467652495378928, |
|
"grad_norm": 0.7820314764976501, |
|
"learning_rate": 0.001, |
|
"loss": 0.7503, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7486136783733827, |
|
"grad_norm": 1.1618527173995972, |
|
"learning_rate": 0.001, |
|
"loss": 0.7242, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7504621072088724, |
|
"grad_norm": 0.8468978404998779, |
|
"learning_rate": 0.001, |
|
"loss": 0.804, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7523105360443623, |
|
"grad_norm": 0.6495232582092285, |
|
"learning_rate": 0.001, |
|
"loss": 0.4844, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7541589648798521, |
|
"grad_norm": 62.157989501953125, |
|
"learning_rate": 0.001, |
|
"loss": 2.3715, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7541589648798521, |
|
"eval_loss": 0.6921550631523132, |
|
"eval_runtime": 36.9225, |
|
"eval_samples_per_second": 1.49, |
|
"eval_steps_per_second": 0.19, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.756007393715342, |
|
"grad_norm": 0.9253891110420227, |
|
"learning_rate": 0.001, |
|
"loss": 1.0238, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7578558225508318, |
|
"grad_norm": 1.3141051530838013, |
|
"learning_rate": 0.001, |
|
"loss": 1.2068, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7597042513863216, |
|
"grad_norm": 0.5310596227645874, |
|
"learning_rate": 0.001, |
|
"loss": 0.5432, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7615526802218114, |
|
"grad_norm": 0.9271712899208069, |
|
"learning_rate": 0.001, |
|
"loss": 0.7643, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7634011090573013, |
|
"grad_norm": 0.9340744018554688, |
|
"learning_rate": 0.001, |
|
"loss": 0.8286, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7652495378927912, |
|
"grad_norm": 1.0287261009216309, |
|
"learning_rate": 0.001, |
|
"loss": 0.9113, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.767097966728281, |
|
"grad_norm": 0.6159982085227966, |
|
"learning_rate": 0.001, |
|
"loss": 0.6518, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7689463955637708, |
|
"grad_norm": 0.6261630654335022, |
|
"learning_rate": 0.001, |
|
"loss": 0.8691, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7707948243992606, |
|
"grad_norm": 0.5993965268135071, |
|
"learning_rate": 0.001, |
|
"loss": 0.7519, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7726432532347505, |
|
"grad_norm": 0.738401472568512, |
|
"learning_rate": 0.001, |
|
"loss": 0.6912, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7744916820702403, |
|
"grad_norm": 0.7634181976318359, |
|
"learning_rate": 0.001, |
|
"loss": 0.5789, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7763401109057301, |
|
"grad_norm": 1.065432071685791, |
|
"learning_rate": 0.001, |
|
"loss": 0.7503, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7781885397412199, |
|
"grad_norm": 0.9008420705795288, |
|
"learning_rate": 0.001, |
|
"loss": 0.5917, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7800369685767098, |
|
"grad_norm": 0.6660546660423279, |
|
"learning_rate": 0.001, |
|
"loss": 0.685, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7818853974121996, |
|
"grad_norm": 0.6387668251991272, |
|
"learning_rate": 0.001, |
|
"loss": 0.7161, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7837338262476895, |
|
"grad_norm": 1.0390757322311401, |
|
"learning_rate": 0.001, |
|
"loss": 0.8252, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7855822550831792, |
|
"grad_norm": 0.7057231664657593, |
|
"learning_rate": 0.001, |
|
"loss": 0.6607, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7874306839186691, |
|
"grad_norm": 1.4526139497756958, |
|
"learning_rate": 0.001, |
|
"loss": 0.7296, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.789279112754159, |
|
"grad_norm": 0.6918472051620483, |
|
"learning_rate": 0.001, |
|
"loss": 0.6665, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7911275415896488, |
|
"grad_norm": 0.7483262419700623, |
|
"learning_rate": 0.001, |
|
"loss": 0.7229, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7929759704251387, |
|
"grad_norm": 0.5912255048751831, |
|
"learning_rate": 0.001, |
|
"loss": 0.765, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7948243992606284, |
|
"grad_norm": 0.8008376955986023, |
|
"learning_rate": 0.001, |
|
"loss": 0.7337, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7966728280961183, |
|
"grad_norm": 0.687903106212616, |
|
"learning_rate": 0.001, |
|
"loss": 0.7869, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7985212569316081, |
|
"grad_norm": 1.3599655628204346, |
|
"learning_rate": 0.001, |
|
"loss": 1.124, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.800369685767098, |
|
"grad_norm": 0.8680609464645386, |
|
"learning_rate": 0.001, |
|
"loss": 0.7317, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8022181146025879, |
|
"grad_norm": 0.712820291519165, |
|
"learning_rate": 0.001, |
|
"loss": 0.6848, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8040665434380776, |
|
"grad_norm": 0.5497101545333862, |
|
"learning_rate": 0.001, |
|
"loss": 0.7699, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8059149722735675, |
|
"grad_norm": 0.657538115978241, |
|
"learning_rate": 0.001, |
|
"loss": 0.6637, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8077634011090573, |
|
"grad_norm": 0.5894818305969238, |
|
"learning_rate": 0.001, |
|
"loss": 0.7878, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8096118299445472, |
|
"grad_norm": 0.596224844455719, |
|
"learning_rate": 0.001, |
|
"loss": 0.7473, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8114602587800369, |
|
"grad_norm": 1.2735570669174194, |
|
"learning_rate": 0.001, |
|
"loss": 0.6826, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8133086876155268, |
|
"grad_norm": 0.9592067003250122, |
|
"learning_rate": 0.001, |
|
"loss": 0.9536, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8151571164510166, |
|
"grad_norm": 0.7559153437614441, |
|
"learning_rate": 0.001, |
|
"loss": 0.8591, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8170055452865065, |
|
"grad_norm": 0.49736127257347107, |
|
"learning_rate": 0.001, |
|
"loss": 0.6603, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8188539741219963, |
|
"grad_norm": 36.26533126831055, |
|
"learning_rate": 0.001, |
|
"loss": 2.834, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8207024029574861, |
|
"grad_norm": 0.7133930325508118, |
|
"learning_rate": 0.001, |
|
"loss": 0.6979, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.822550831792976, |
|
"grad_norm": 0.5370646119117737, |
|
"learning_rate": 0.001, |
|
"loss": 0.549, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8243992606284658, |
|
"grad_norm": 0.4864414930343628, |
|
"learning_rate": 0.001, |
|
"loss": 0.516, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8262476894639557, |
|
"grad_norm": 1.0422941446304321, |
|
"learning_rate": 0.001, |
|
"loss": 0.7549, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8280961182994455, |
|
"grad_norm": 0.796935498714447, |
|
"learning_rate": 0.001, |
|
"loss": 1.1122, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8299445471349353, |
|
"grad_norm": 0.708077073097229, |
|
"learning_rate": 0.001, |
|
"loss": 0.5895, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8317929759704251, |
|
"grad_norm": 0.6083373427391052, |
|
"learning_rate": 0.001, |
|
"loss": 0.7167, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.833641404805915, |
|
"grad_norm": 0.568143904209137, |
|
"learning_rate": 0.001, |
|
"loss": 0.5395, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8354898336414048, |
|
"grad_norm": 1.630936861038208, |
|
"learning_rate": 0.001, |
|
"loss": 0.9712, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8373382624768947, |
|
"grad_norm": 0.8898182511329651, |
|
"learning_rate": 0.001, |
|
"loss": 0.7966, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8391866913123844, |
|
"grad_norm": 0.6210492253303528, |
|
"learning_rate": 0.001, |
|
"loss": 0.7789, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8410351201478743, |
|
"grad_norm": 0.5374466180801392, |
|
"learning_rate": 0.001, |
|
"loss": 0.5124, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8428835489833642, |
|
"grad_norm": 0.731884241104126, |
|
"learning_rate": 0.001, |
|
"loss": 0.657, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.844731977818854, |
|
"grad_norm": 0.6285644173622131, |
|
"learning_rate": 0.001, |
|
"loss": 0.878, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8465804066543438, |
|
"grad_norm": 0.6428564786911011, |
|
"learning_rate": 0.001, |
|
"loss": 0.6888, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8484288354898336, |
|
"grad_norm": 0.9038918614387512, |
|
"learning_rate": 0.001, |
|
"loss": 0.7343, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8502772643253235, |
|
"grad_norm": 0.6318102478981018, |
|
"learning_rate": 0.001, |
|
"loss": 0.803, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8521256931608133, |
|
"grad_norm": 0.7558817863464355, |
|
"learning_rate": 0.001, |
|
"loss": 0.847, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8539741219963032, |
|
"grad_norm": 0.6051446199417114, |
|
"learning_rate": 0.001, |
|
"loss": 0.7294, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8558225508317929, |
|
"grad_norm": 0.8236597776412964, |
|
"learning_rate": 0.001, |
|
"loss": 0.8389, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8576709796672828, |
|
"grad_norm": 0.5667428970336914, |
|
"learning_rate": 0.001, |
|
"loss": 0.5978, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8595194085027726, |
|
"grad_norm": 0.6218537092208862, |
|
"learning_rate": 0.001, |
|
"loss": 0.6539, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8613678373382625, |
|
"grad_norm": 0.6580603718757629, |
|
"learning_rate": 0.001, |
|
"loss": 0.5846, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8632162661737524, |
|
"grad_norm": 0.8344849348068237, |
|
"learning_rate": 0.001, |
|
"loss": 0.819, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8650646950092421, |
|
"grad_norm": 0.6353901028633118, |
|
"learning_rate": 0.001, |
|
"loss": 0.6255, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.866913123844732, |
|
"grad_norm": 0.5879977345466614, |
|
"learning_rate": 0.001, |
|
"loss": 0.8907, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8687615526802218, |
|
"grad_norm": 0.7070827484130859, |
|
"learning_rate": 0.001, |
|
"loss": 0.7093, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8706099815157117, |
|
"grad_norm": 0.6677271127700806, |
|
"learning_rate": 0.001, |
|
"loss": 0.6718, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8724584103512015, |
|
"grad_norm": 0.5725215673446655, |
|
"learning_rate": 0.001, |
|
"loss": 0.7003, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8743068391866913, |
|
"grad_norm": 0.6160258054733276, |
|
"learning_rate": 0.001, |
|
"loss": 0.6447, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8761552680221811, |
|
"grad_norm": 0.8822168111801147, |
|
"learning_rate": 0.001, |
|
"loss": 0.6368, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.878003696857671, |
|
"grad_norm": 1.4470586776733398, |
|
"learning_rate": 0.001, |
|
"loss": 0.9277, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8798521256931608, |
|
"grad_norm": 0.6264105439186096, |
|
"learning_rate": 0.001, |
|
"loss": 0.6375, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8817005545286506, |
|
"grad_norm": 0.7535873651504517, |
|
"learning_rate": 0.001, |
|
"loss": 0.6433, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8835489833641405, |
|
"grad_norm": 0.6162166595458984, |
|
"learning_rate": 0.001, |
|
"loss": 0.6918, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8853974121996303, |
|
"grad_norm": 0.6916104555130005, |
|
"learning_rate": 0.001, |
|
"loss": 0.6636, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8872458410351202, |
|
"grad_norm": 0.7060137987136841, |
|
"learning_rate": 0.001, |
|
"loss": 0.4684, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.88909426987061, |
|
"grad_norm": 0.6374285221099854, |
|
"learning_rate": 0.001, |
|
"loss": 0.9017, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8909426987060998, |
|
"grad_norm": 0.5305145382881165, |
|
"learning_rate": 0.001, |
|
"loss": 0.5699, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8927911275415896, |
|
"grad_norm": 0.6026877164840698, |
|
"learning_rate": 0.001, |
|
"loss": 0.9496, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8946395563770795, |
|
"grad_norm": 0.6786549091339111, |
|
"learning_rate": 0.001, |
|
"loss": 0.804, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8964879852125693, |
|
"grad_norm": 0.7018754482269287, |
|
"learning_rate": 0.001, |
|
"loss": 0.737, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8983364140480592, |
|
"grad_norm": 0.4089026153087616, |
|
"learning_rate": 0.001, |
|
"loss": 0.5953, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9001848428835489, |
|
"grad_norm": 0.48161014914512634, |
|
"learning_rate": 0.001, |
|
"loss": 0.5167, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9020332717190388, |
|
"grad_norm": 0.7609049677848816, |
|
"learning_rate": 0.001, |
|
"loss": 0.7631, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9038817005545287, |
|
"grad_norm": 0.8507420420646667, |
|
"learning_rate": 0.001, |
|
"loss": 0.8331, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9057301293900185, |
|
"grad_norm": 0.549686074256897, |
|
"learning_rate": 0.001, |
|
"loss": 0.5609, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9075785582255084, |
|
"grad_norm": 0.8549586534500122, |
|
"learning_rate": 0.001, |
|
"loss": 0.7975, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.9094269870609981, |
|
"grad_norm": 0.5464617609977722, |
|
"learning_rate": 0.001, |
|
"loss": 0.7603, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.911275415896488, |
|
"grad_norm": 0.8106308579444885, |
|
"learning_rate": 0.001, |
|
"loss": 0.5753, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9131238447319778, |
|
"grad_norm": 1.0989980697631836, |
|
"learning_rate": 0.001, |
|
"loss": 0.8119, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.9149722735674677, |
|
"grad_norm": 53.603729248046875, |
|
"learning_rate": 0.001, |
|
"loss": 3.5664, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9168207024029574, |
|
"grad_norm": 0.6865960359573364, |
|
"learning_rate": 0.001, |
|
"loss": 0.6459, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9186691312384473, |
|
"grad_norm": 0.7828227877616882, |
|
"learning_rate": 0.001, |
|
"loss": 0.9751, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9205175600739371, |
|
"grad_norm": 0.8201976418495178, |
|
"learning_rate": 0.001, |
|
"loss": 0.9485, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.922365988909427, |
|
"grad_norm": 0.7697083353996277, |
|
"learning_rate": 0.001, |
|
"loss": 0.8864, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9242144177449169, |
|
"grad_norm": 0.4081413447856903, |
|
"learning_rate": 0.001, |
|
"loss": 0.5863, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9260628465804066, |
|
"grad_norm": 0.592320442199707, |
|
"learning_rate": 0.001, |
|
"loss": 0.7524, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9279112754158965, |
|
"grad_norm": 0.4706171452999115, |
|
"learning_rate": 0.001, |
|
"loss": 0.5649, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9297597042513863, |
|
"grad_norm": 0.4379749000072479, |
|
"learning_rate": 0.001, |
|
"loss": 0.5178, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9316081330868762, |
|
"grad_norm": 0.6452725529670715, |
|
"learning_rate": 0.001, |
|
"loss": 0.6012, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.933456561922366, |
|
"grad_norm": 0.7428618669509888, |
|
"learning_rate": 0.001, |
|
"loss": 0.6258, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9353049907578558, |
|
"grad_norm": 0.7185813188552856, |
|
"learning_rate": 0.001, |
|
"loss": 0.899, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9371534195933456, |
|
"grad_norm": 0.8039231896400452, |
|
"learning_rate": 0.001, |
|
"loss": 0.8567, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9390018484288355, |
|
"grad_norm": 0.6219761371612549, |
|
"learning_rate": 0.001, |
|
"loss": 0.6672, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9408502772643254, |
|
"grad_norm": 0.8613112568855286, |
|
"learning_rate": 0.001, |
|
"loss": 1.0144, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9426987060998152, |
|
"grad_norm": 0.6800719499588013, |
|
"learning_rate": 0.001, |
|
"loss": 0.6831, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.944547134935305, |
|
"grad_norm": 0.8042735457420349, |
|
"learning_rate": 0.001, |
|
"loss": 0.8125, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9463955637707948, |
|
"grad_norm": 0.8199442625045776, |
|
"learning_rate": 0.001, |
|
"loss": 1.2534, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9482439926062847, |
|
"grad_norm": 6.547449111938477, |
|
"learning_rate": 0.001, |
|
"loss": 0.8561, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9500924214417745, |
|
"grad_norm": 1.0684571266174316, |
|
"learning_rate": 0.001, |
|
"loss": 0.7813, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9519408502772643, |
|
"grad_norm": 4.597997665405273, |
|
"learning_rate": 0.001, |
|
"loss": 0.9715, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9537892791127541, |
|
"grad_norm": 1.0539919137954712, |
|
"learning_rate": 0.001, |
|
"loss": 0.7927, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.955637707948244, |
|
"grad_norm": 0.7296995520591736, |
|
"learning_rate": 0.001, |
|
"loss": 0.7152, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9574861367837338, |
|
"grad_norm": 0.6043654084205627, |
|
"learning_rate": 0.001, |
|
"loss": 0.6799, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9593345656192237, |
|
"grad_norm": 0.5910910964012146, |
|
"learning_rate": 0.001, |
|
"loss": 0.6224, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9611829944547134, |
|
"grad_norm": 2.4806816577911377, |
|
"learning_rate": 0.001, |
|
"loss": 0.4854, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9630314232902033, |
|
"grad_norm": 1.4178262948989868, |
|
"learning_rate": 0.001, |
|
"loss": 0.9269, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9648798521256932, |
|
"grad_norm": 0.5048137307167053, |
|
"learning_rate": 0.001, |
|
"loss": 0.5043, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.966728280961183, |
|
"grad_norm": 1.0623221397399902, |
|
"learning_rate": 0.001, |
|
"loss": 1.0081, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9685767097966729, |
|
"grad_norm": 0.6304468512535095, |
|
"learning_rate": 0.001, |
|
"loss": 0.6194, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9704251386321626, |
|
"grad_norm": 0.5621396899223328, |
|
"learning_rate": 0.001, |
|
"loss": 0.4772, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9722735674676525, |
|
"grad_norm": 0.7014063000679016, |
|
"learning_rate": 0.001, |
|
"loss": 0.7481, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9741219963031423, |
|
"grad_norm": 0.7803249359130859, |
|
"learning_rate": 0.001, |
|
"loss": 0.6818, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9759704251386322, |
|
"grad_norm": 0.56339430809021, |
|
"learning_rate": 0.001, |
|
"loss": 0.529, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.977818853974122, |
|
"grad_norm": 1.3150532245635986, |
|
"learning_rate": 0.001, |
|
"loss": 0.85, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9796672828096118, |
|
"grad_norm": 0.73233562707901, |
|
"learning_rate": 0.001, |
|
"loss": 0.6697, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9815157116451017, |
|
"grad_norm": 15.183834075927734, |
|
"learning_rate": 0.001, |
|
"loss": 0.6668, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9833641404805915, |
|
"grad_norm": 0.710564136505127, |
|
"learning_rate": 0.001, |
|
"loss": 0.5376, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9852125693160814, |
|
"grad_norm": 0.9392178654670715, |
|
"learning_rate": 0.001, |
|
"loss": 0.8452, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9870609981515711, |
|
"grad_norm": 0.6461498737335205, |
|
"learning_rate": 0.001, |
|
"loss": 0.7486, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.988909426987061, |
|
"grad_norm": 0.8957709670066833, |
|
"learning_rate": 0.001, |
|
"loss": 0.6943, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9907578558225508, |
|
"grad_norm": 1.0718097686767578, |
|
"learning_rate": 0.001, |
|
"loss": 0.7135, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9926062846580407, |
|
"grad_norm": 0.8124735355377197, |
|
"learning_rate": 0.001, |
|
"loss": 0.9046, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9944547134935305, |
|
"grad_norm": 0.6564399600028992, |
|
"learning_rate": 0.001, |
|
"loss": 0.7277, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9963031423290203, |
|
"grad_norm": 4.252345085144043, |
|
"learning_rate": 0.001, |
|
"loss": 0.9342, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9981515711645101, |
|
"grad_norm": 0.8855632543563843, |
|
"learning_rate": 0.001, |
|
"loss": 0.7394, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.0222349166870117, |
|
"learning_rate": 0.001, |
|
"loss": 0.8512, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.0018484288354899, |
|
"grad_norm": 0.7332062721252441, |
|
"learning_rate": 0.001, |
|
"loss": 0.7089, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.0036968576709797, |
|
"grad_norm": 0.7275331020355225, |
|
"learning_rate": 0.001, |
|
"loss": 0.6, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.0055452865064696, |
|
"grad_norm": 0.7912113070487976, |
|
"learning_rate": 0.001, |
|
"loss": 0.7719, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.0055452865064696, |
|
"eval_loss": 0.6085956692695618, |
|
"eval_runtime": 36.9382, |
|
"eval_samples_per_second": 1.489, |
|
"eval_steps_per_second": 0.19, |
|
"step": 544 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1623, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 136, |
|
"total_flos": 7.871105024380109e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|