|
{ |
|
"best_metric": 0.24498265981674194, |
|
"best_model_checkpoint": "PhoBert-MultiLabel/checkpoint-7386", |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 7386, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010154346060113728, |
|
"grad_norm": 3.567084312438965, |
|
"learning_rate": 1.6914749661705008e-06, |
|
"loss": 1.7799, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.020308692120227456, |
|
"grad_norm": 3.705420732498169, |
|
"learning_rate": 3.3829499323410016e-06, |
|
"loss": 1.6722, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.030463038180341188, |
|
"grad_norm": 3.097123384475708, |
|
"learning_rate": 5.074424898511502e-06, |
|
"loss": 1.5722, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04061738424045491, |
|
"grad_norm": 14.346776008605957, |
|
"learning_rate": 6.765899864682003e-06, |
|
"loss": 1.5003, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05077173030056864, |
|
"grad_norm": 3.2664577960968018, |
|
"learning_rate": 8.457374830852504e-06, |
|
"loss": 1.4085, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.060926076360682375, |
|
"grad_norm": 3.7164502143859863, |
|
"learning_rate": 1.0148849797023005e-05, |
|
"loss": 1.3505, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0710804224207961, |
|
"grad_norm": 3.933560848236084, |
|
"learning_rate": 1.1840324763193506e-05, |
|
"loss": 1.217, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.08123476848090982, |
|
"grad_norm": 5.295672416687012, |
|
"learning_rate": 1.3531799729364006e-05, |
|
"loss": 1.0736, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09138911454102355, |
|
"grad_norm": 12.415136337280273, |
|
"learning_rate": 1.5223274695534506e-05, |
|
"loss": 1.1182, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.10154346060113728, |
|
"grad_norm": 10.874429702758789, |
|
"learning_rate": 1.6914749661705008e-05, |
|
"loss": 1.123, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.11169780666125101, |
|
"grad_norm": 6.618790149688721, |
|
"learning_rate": 1.8606224627875505e-05, |
|
"loss": 1.1378, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.12185215272136475, |
|
"grad_norm": 9.960480690002441, |
|
"learning_rate": 2.029769959404601e-05, |
|
"loss": 1.0658, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13200649878147847, |
|
"grad_norm": 3.2485241889953613, |
|
"learning_rate": 2.198917456021651e-05, |
|
"loss": 0.9226, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.1421608448415922, |
|
"grad_norm": 14.158449172973633, |
|
"learning_rate": 2.368064952638701e-05, |
|
"loss": 0.93, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.15231519090170592, |
|
"grad_norm": 2.6080679893493652, |
|
"learning_rate": 2.5372124492557515e-05, |
|
"loss": 0.7421, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.16246953696181965, |
|
"grad_norm": 4.75559663772583, |
|
"learning_rate": 2.7063599458728013e-05, |
|
"loss": 0.8481, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.17262388302193338, |
|
"grad_norm": 13.718697547912598, |
|
"learning_rate": 2.8755074424898514e-05, |
|
"loss": 0.8549, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.1827782290820471, |
|
"grad_norm": 5.8711466789245605, |
|
"learning_rate": 3.044654939106901e-05, |
|
"loss": 0.911, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.19293257514216083, |
|
"grad_norm": 4.887762546539307, |
|
"learning_rate": 3.2138024357239515e-05, |
|
"loss": 0.8915, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.20308692120227456, |
|
"grad_norm": 5.983439922332764, |
|
"learning_rate": 3.3829499323410016e-05, |
|
"loss": 0.8632, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2132412672623883, |
|
"grad_norm": 2.231613874435425, |
|
"learning_rate": 3.552097428958052e-05, |
|
"loss": 0.7462, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.22339561332250202, |
|
"grad_norm": 13.936301231384277, |
|
"learning_rate": 3.721244925575101e-05, |
|
"loss": 0.7668, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.23354995938261575, |
|
"grad_norm": 15.290505409240723, |
|
"learning_rate": 3.890392422192152e-05, |
|
"loss": 0.7142, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.2437043054427295, |
|
"grad_norm": 33.517913818359375, |
|
"learning_rate": 4.059539918809202e-05, |
|
"loss": 0.6321, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.2538586515028432, |
|
"grad_norm": 8.801501274108887, |
|
"learning_rate": 4.228687415426252e-05, |
|
"loss": 0.7626, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.26401299756295693, |
|
"grad_norm": 16.529769897460938, |
|
"learning_rate": 4.397834912043302e-05, |
|
"loss": 0.811, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.27416734362307066, |
|
"grad_norm": 15.602673530578613, |
|
"learning_rate": 4.566982408660352e-05, |
|
"loss": 0.8739, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.2843216896831844, |
|
"grad_norm": 4.3945393562316895, |
|
"learning_rate": 4.736129905277402e-05, |
|
"loss": 0.8839, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.2944760357432981, |
|
"grad_norm": 6.59278678894043, |
|
"learning_rate": 4.9052774018944517e-05, |
|
"loss": 0.7022, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.30463038180341184, |
|
"grad_norm": 42.47446060180664, |
|
"learning_rate": 4.991725590491951e-05, |
|
"loss": 0.8486, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.31478472786352557, |
|
"grad_norm": 7.104182243347168, |
|
"learning_rate": 4.972920114337295e-05, |
|
"loss": 0.8155, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3249390739236393, |
|
"grad_norm": 2.5681631565093994, |
|
"learning_rate": 4.954114638182639e-05, |
|
"loss": 0.6559, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.335093419983753, |
|
"grad_norm": 7.099673271179199, |
|
"learning_rate": 4.935309162027983e-05, |
|
"loss": 0.6985, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.34524776604386676, |
|
"grad_norm": 6.138451099395752, |
|
"learning_rate": 4.9165036858733264e-05, |
|
"loss": 0.8701, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.3554021121039805, |
|
"grad_norm": 2.4096810817718506, |
|
"learning_rate": 4.89769820971867e-05, |
|
"loss": 0.5512, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.3655564581640942, |
|
"grad_norm": 17.88941192626953, |
|
"learning_rate": 4.8788927335640136e-05, |
|
"loss": 0.8775, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.37571080422420794, |
|
"grad_norm": 3.2206945419311523, |
|
"learning_rate": 4.860087257409358e-05, |
|
"loss": 0.6589, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.38586515028432167, |
|
"grad_norm": 12.562983512878418, |
|
"learning_rate": 4.8412817812547015e-05, |
|
"loss": 0.6729, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3960194963444354, |
|
"grad_norm": 0.8055168986320496, |
|
"learning_rate": 4.8224763051000455e-05, |
|
"loss": 0.6476, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.4061738424045491, |
|
"grad_norm": 3.906461715698242, |
|
"learning_rate": 4.803670828945389e-05, |
|
"loss": 0.8366, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.41632818846466285, |
|
"grad_norm": 10.65334415435791, |
|
"learning_rate": 4.784865352790733e-05, |
|
"loss": 0.7228, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.4264825345247766, |
|
"grad_norm": 3.829928398132324, |
|
"learning_rate": 4.766059876636077e-05, |
|
"loss": 0.5464, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.4366368805848903, |
|
"grad_norm": 14.11302375793457, |
|
"learning_rate": 4.747254400481421e-05, |
|
"loss": 0.7691, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.44679122664500404, |
|
"grad_norm": 7.801684379577637, |
|
"learning_rate": 4.7284489243267647e-05, |
|
"loss": 0.7456, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.45694557270511776, |
|
"grad_norm": 21.654991149902344, |
|
"learning_rate": 4.709643448172108e-05, |
|
"loss": 0.6827, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.4670999187652315, |
|
"grad_norm": 5.018062114715576, |
|
"learning_rate": 4.690837972017452e-05, |
|
"loss": 0.7129, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.4772542648253452, |
|
"grad_norm": 1.8878921270370483, |
|
"learning_rate": 4.672032495862795e-05, |
|
"loss": 0.8354, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.487408610885459, |
|
"grad_norm": 28.074739456176758, |
|
"learning_rate": 4.653227019708139e-05, |
|
"loss": 0.6568, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.49756295694557273, |
|
"grad_norm": 16.48390769958496, |
|
"learning_rate": 4.634421543553483e-05, |
|
"loss": 0.5789, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.5077173030056864, |
|
"grad_norm": 6.18392276763916, |
|
"learning_rate": 4.615616067398827e-05, |
|
"loss": 0.6729, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.5178716490658002, |
|
"grad_norm": 5.78372859954834, |
|
"learning_rate": 4.5968105912441704e-05, |
|
"loss": 0.7166, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.5280259951259139, |
|
"grad_norm": 21.222583770751953, |
|
"learning_rate": 4.5780051150895143e-05, |
|
"loss": 0.8119, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5381803411860276, |
|
"grad_norm": 23.661069869995117, |
|
"learning_rate": 4.5591996389348576e-05, |
|
"loss": 0.5273, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.5483346872461413, |
|
"grad_norm": 0.8338175415992737, |
|
"learning_rate": 4.5403941627802016e-05, |
|
"loss": 0.5088, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.5584890333062551, |
|
"grad_norm": 12.653846740722656, |
|
"learning_rate": 4.5215886866255456e-05, |
|
"loss": 0.7058, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.5686433793663688, |
|
"grad_norm": 12.017010688781738, |
|
"learning_rate": 4.5027832104708895e-05, |
|
"loss": 0.4224, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5787977254264826, |
|
"grad_norm": 19.048152923583984, |
|
"learning_rate": 4.483977734316233e-05, |
|
"loss": 0.7842, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.5889520714865962, |
|
"grad_norm": 7.068014621734619, |
|
"learning_rate": 4.465172258161577e-05, |
|
"loss": 0.68, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.59910641754671, |
|
"grad_norm": 10.340466499328613, |
|
"learning_rate": 4.446366782006921e-05, |
|
"loss": 0.8232, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.6092607636068237, |
|
"grad_norm": 23.22475814819336, |
|
"learning_rate": 4.427561305852264e-05, |
|
"loss": 0.5502, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.6194151096669375, |
|
"grad_norm": 1.5949617624282837, |
|
"learning_rate": 4.408755829697609e-05, |
|
"loss": 0.5688, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.6295694557270511, |
|
"grad_norm": 2.9755280017852783, |
|
"learning_rate": 4.389950353542952e-05, |
|
"loss": 0.6534, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.6397238017871649, |
|
"grad_norm": 3.379225492477417, |
|
"learning_rate": 4.371144877388296e-05, |
|
"loss": 0.7196, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.6498781478472786, |
|
"grad_norm": 9.963435173034668, |
|
"learning_rate": 4.352339401233639e-05, |
|
"loss": 0.6822, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.6600324939073924, |
|
"grad_norm": 7.474037170410156, |
|
"learning_rate": 4.333533925078983e-05, |
|
"loss": 0.7572, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.670186839967506, |
|
"grad_norm": 2.3705031871795654, |
|
"learning_rate": 4.3147284489243265e-05, |
|
"loss": 0.7087, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6803411860276198, |
|
"grad_norm": 18.592605590820312, |
|
"learning_rate": 4.295922972769671e-05, |
|
"loss": 0.8911, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.6904955320877335, |
|
"grad_norm": 3.978590965270996, |
|
"learning_rate": 4.2771174966150144e-05, |
|
"loss": 0.5713, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.7006498781478473, |
|
"grad_norm": 65.83806610107422, |
|
"learning_rate": 4.2583120204603584e-05, |
|
"loss": 0.6628, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.710804224207961, |
|
"grad_norm": 16.577617645263672, |
|
"learning_rate": 4.2395065443057016e-05, |
|
"loss": 0.8093, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.7209585702680747, |
|
"grad_norm": 14.462265968322754, |
|
"learning_rate": 4.2207010681510456e-05, |
|
"loss": 0.6299, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.7311129163281884, |
|
"grad_norm": 2.203425407409668, |
|
"learning_rate": 4.2018955919963896e-05, |
|
"loss": 0.6891, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.7412672623883022, |
|
"grad_norm": 9.452720642089844, |
|
"learning_rate": 4.1830901158417335e-05, |
|
"loss": 0.7139, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.7514216084484159, |
|
"grad_norm": 5.190005302429199, |
|
"learning_rate": 4.164284639687077e-05, |
|
"loss": 0.6791, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.7615759545085297, |
|
"grad_norm": 31.0141544342041, |
|
"learning_rate": 4.145479163532421e-05, |
|
"loss": 0.8139, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.7717303005686433, |
|
"grad_norm": 2.5944440364837646, |
|
"learning_rate": 4.126673687377765e-05, |
|
"loss": 0.6534, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.7818846466287571, |
|
"grad_norm": 75.8336181640625, |
|
"learning_rate": 4.107868211223108e-05, |
|
"loss": 0.7027, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.7920389926888708, |
|
"grad_norm": 5.533077239990234, |
|
"learning_rate": 4.089062735068453e-05, |
|
"loss": 0.5782, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.8021933387489846, |
|
"grad_norm": 1.3959672451019287, |
|
"learning_rate": 4.070257258913796e-05, |
|
"loss": 0.7102, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.8123476848090982, |
|
"grad_norm": 3.2874767780303955, |
|
"learning_rate": 4.05145178275914e-05, |
|
"loss": 0.3529, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.822502030869212, |
|
"grad_norm": 6.342546463012695, |
|
"learning_rate": 4.032646306604483e-05, |
|
"loss": 0.8077, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.8326563769293257, |
|
"grad_norm": 10.019979476928711, |
|
"learning_rate": 4.013840830449827e-05, |
|
"loss": 0.7253, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.8428107229894395, |
|
"grad_norm": 7.302906513214111, |
|
"learning_rate": 3.9950353542951705e-05, |
|
"loss": 0.707, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.8529650690495532, |
|
"grad_norm": 2.442748546600342, |
|
"learning_rate": 3.976229878140515e-05, |
|
"loss": 0.6455, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.8631194151096669, |
|
"grad_norm": 32.23735809326172, |
|
"learning_rate": 3.9574244019858584e-05, |
|
"loss": 0.5444, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.8732737611697806, |
|
"grad_norm": 32.863529205322266, |
|
"learning_rate": 3.9386189258312024e-05, |
|
"loss": 0.6351, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.8834281072298944, |
|
"grad_norm": 2.3048183917999268, |
|
"learning_rate": 3.919813449676546e-05, |
|
"loss": 0.5735, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.8935824532900081, |
|
"grad_norm": 2.6550726890563965, |
|
"learning_rate": 3.9010079735218896e-05, |
|
"loss": 0.7734, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.9037367993501219, |
|
"grad_norm": 9.991121292114258, |
|
"learning_rate": 3.8822024973672336e-05, |
|
"loss": 0.8157, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.9138911454102355, |
|
"grad_norm": 13.777947425842285, |
|
"learning_rate": 3.8633970212125776e-05, |
|
"loss": 0.5001, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.9240454914703493, |
|
"grad_norm": 21.878284454345703, |
|
"learning_rate": 3.844591545057921e-05, |
|
"loss": 0.7351, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.934199837530463, |
|
"grad_norm": 25.31257438659668, |
|
"learning_rate": 3.825786068903265e-05, |
|
"loss": 0.7697, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.9443541835905768, |
|
"grad_norm": 11.627613067626953, |
|
"learning_rate": 3.806980592748609e-05, |
|
"loss": 0.6896, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.9545085296506904, |
|
"grad_norm": 3.0957629680633545, |
|
"learning_rate": 3.788175116593952e-05, |
|
"loss": 0.7445, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.9646628757108042, |
|
"grad_norm": 0.6913394927978516, |
|
"learning_rate": 3.769369640439296e-05, |
|
"loss": 0.6054, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.974817221770918, |
|
"grad_norm": 18.786376953125, |
|
"learning_rate": 3.75056416428464e-05, |
|
"loss": 0.8017, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.9849715678310317, |
|
"grad_norm": 4.412717342376709, |
|
"learning_rate": 3.731758688129984e-05, |
|
"loss": 0.6515, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.9951259138911455, |
|
"grad_norm": 12.414152145385742, |
|
"learning_rate": 3.712953211975327e-05, |
|
"loss": 0.6862, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8931388955295243, |
|
"eval_f1_macro": 0.8264839311317734, |
|
"eval_f1_micro": 0.8931388955295243, |
|
"eval_f1_weighted": 0.8989228701567059, |
|
"eval_loss": 0.3588244616985321, |
|
"eval_precision_macro": 0.805949898458524, |
|
"eval_precision_micro": 0.8931388955295243, |
|
"eval_precision_weighted": 0.9113590658232466, |
|
"eval_recall_macro": 0.8824212045028931, |
|
"eval_recall_micro": 0.8931388955295243, |
|
"eval_recall_weighted": 0.8931388955295243, |
|
"eval_runtime": 771.6514, |
|
"eval_samples_per_second": 5.421, |
|
"eval_steps_per_second": 0.34, |
|
"step": 2462 |
|
}, |
|
{ |
|
"epoch": 1.0052802599512591, |
|
"grad_norm": 24.999309539794922, |
|
"learning_rate": 3.694147735820671e-05, |
|
"loss": 0.54, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.0154346060113728, |
|
"grad_norm": 8.744691848754883, |
|
"learning_rate": 3.6753422596660145e-05, |
|
"loss": 0.6095, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.0255889520714867, |
|
"grad_norm": 0.3475080132484436, |
|
"learning_rate": 3.6565367835113585e-05, |
|
"loss": 0.5011, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.0357432981316004, |
|
"grad_norm": 6.633773326873779, |
|
"learning_rate": 3.6377313073567024e-05, |
|
"loss": 0.5915, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.045897644191714, |
|
"grad_norm": 29.779521942138672, |
|
"learning_rate": 3.6189258312020464e-05, |
|
"loss": 0.5742, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.0560519902518277, |
|
"grad_norm": 7.780822277069092, |
|
"learning_rate": 3.60012035504739e-05, |
|
"loss": 0.6241, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.0662063363119416, |
|
"grad_norm": 4.101163864135742, |
|
"learning_rate": 3.5813148788927336e-05, |
|
"loss": 0.6369, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.0763606823720553, |
|
"grad_norm": 1.4056626558303833, |
|
"learning_rate": 3.5625094027380776e-05, |
|
"loss": 0.528, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.086515028432169, |
|
"grad_norm": 3.9203994274139404, |
|
"learning_rate": 3.543703926583421e-05, |
|
"loss": 0.5903, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.0966693744922826, |
|
"grad_norm": 65.12606811523438, |
|
"learning_rate": 3.5248984504287655e-05, |
|
"loss": 0.5847, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.1068237205523965, |
|
"grad_norm": 0.19117872416973114, |
|
"learning_rate": 3.506092974274109e-05, |
|
"loss": 0.6722, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.1169780666125102, |
|
"grad_norm": 20.18349838256836, |
|
"learning_rate": 3.487287498119453e-05, |
|
"loss": 0.6545, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.1271324126726239, |
|
"grad_norm": 6.374608993530273, |
|
"learning_rate": 3.468482021964796e-05, |
|
"loss": 0.5155, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.1372867587327375, |
|
"grad_norm": 1.4384276866912842, |
|
"learning_rate": 3.44967654581014e-05, |
|
"loss": 0.5108, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.1474411047928514, |
|
"grad_norm": 2.850290298461914, |
|
"learning_rate": 3.430871069655484e-05, |
|
"loss": 0.698, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.1575954508529651, |
|
"grad_norm": 0.23200608789920807, |
|
"learning_rate": 3.412065593500828e-05, |
|
"loss": 0.5569, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.1677497969130788, |
|
"grad_norm": 21.061634063720703, |
|
"learning_rate": 3.393260117346171e-05, |
|
"loss": 0.6227, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.1779041429731925, |
|
"grad_norm": 13.794580459594727, |
|
"learning_rate": 3.374454641191515e-05, |
|
"loss": 0.5611, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.1880584890333061, |
|
"grad_norm": 2.9508566856384277, |
|
"learning_rate": 3.3556491650368585e-05, |
|
"loss": 0.5975, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.19821283509342, |
|
"grad_norm": 6.502300262451172, |
|
"learning_rate": 3.3368436888822025e-05, |
|
"loss": 0.5307, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.2083671811535337, |
|
"grad_norm": 0.9668971300125122, |
|
"learning_rate": 3.3180382127275464e-05, |
|
"loss": 0.3585, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.2185215272136474, |
|
"grad_norm": 87.11711883544922, |
|
"learning_rate": 3.2992327365728904e-05, |
|
"loss": 0.5552, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.2286758732737613, |
|
"grad_norm": 46.89622116088867, |
|
"learning_rate": 3.280427260418234e-05, |
|
"loss": 0.5145, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.238830219333875, |
|
"grad_norm": 8.770398139953613, |
|
"learning_rate": 3.261621784263578e-05, |
|
"loss": 0.6135, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.2489845653939886, |
|
"grad_norm": 50.35541915893555, |
|
"learning_rate": 3.2428163081089216e-05, |
|
"loss": 0.5482, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.2591389114541023, |
|
"grad_norm": 4.785242557525635, |
|
"learning_rate": 3.224010831954265e-05, |
|
"loss": 0.5847, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.269293257514216, |
|
"grad_norm": 78.41075134277344, |
|
"learning_rate": 3.2052053557996096e-05, |
|
"loss": 0.7707, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.2794476035743299, |
|
"grad_norm": 15.466938018798828, |
|
"learning_rate": 3.186399879644953e-05, |
|
"loss": 0.533, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.2896019496344435, |
|
"grad_norm": 54.91196060180664, |
|
"learning_rate": 3.167594403490297e-05, |
|
"loss": 0.3996, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.2997562956945572, |
|
"grad_norm": 6.946578502655029, |
|
"learning_rate": 3.14878892733564e-05, |
|
"loss": 0.5956, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.309910641754671, |
|
"grad_norm": 46.90751647949219, |
|
"learning_rate": 3.129983451180984e-05, |
|
"loss": 0.5211, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.3200649878147848, |
|
"grad_norm": 1.6958080530166626, |
|
"learning_rate": 3.1111779750263274e-05, |
|
"loss": 0.7166, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.3302193338748984, |
|
"grad_norm": 15.985214233398438, |
|
"learning_rate": 3.092372498871672e-05, |
|
"loss": 0.6351, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.340373679935012, |
|
"grad_norm": 13.915630340576172, |
|
"learning_rate": 3.073567022717015e-05, |
|
"loss": 0.5382, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.3505280259951258, |
|
"grad_norm": 5.265201568603516, |
|
"learning_rate": 3.054761546562359e-05, |
|
"loss": 0.5942, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.3606823720552397, |
|
"grad_norm": 15.926793098449707, |
|
"learning_rate": 3.035956070407703e-05, |
|
"loss": 0.5539, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.3708367181153533, |
|
"grad_norm": 0.9688799381256104, |
|
"learning_rate": 3.0171505942530465e-05, |
|
"loss": 0.5066, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.380991064175467, |
|
"grad_norm": 3.8147382736206055, |
|
"learning_rate": 2.99834511809839e-05, |
|
"loss": 0.6402, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.391145410235581, |
|
"grad_norm": 54.062618255615234, |
|
"learning_rate": 2.9795396419437344e-05, |
|
"loss": 0.6448, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.4012997562956946, |
|
"grad_norm": 2.0628786087036133, |
|
"learning_rate": 2.960734165789078e-05, |
|
"loss": 0.7356, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.4114541023558083, |
|
"grad_norm": 23.24148178100586, |
|
"learning_rate": 2.9419286896344217e-05, |
|
"loss": 0.6129, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.421608448415922, |
|
"grad_norm": 61.582332611083984, |
|
"learning_rate": 2.9231232134797653e-05, |
|
"loss": 0.6045, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.4317627944760356, |
|
"grad_norm": 21.00168228149414, |
|
"learning_rate": 2.904317737325109e-05, |
|
"loss": 0.6363, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 1.4419171405361495, |
|
"grad_norm": 3.503591299057007, |
|
"learning_rate": 2.8855122611704526e-05, |
|
"loss": 0.5809, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.4520714865962632, |
|
"grad_norm": 39.078346252441406, |
|
"learning_rate": 2.866706785015797e-05, |
|
"loss": 0.6195, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 1.4622258326563768, |
|
"grad_norm": 5.405745983123779, |
|
"learning_rate": 2.8479013088611405e-05, |
|
"loss": 0.5682, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.4723801787164907, |
|
"grad_norm": 2.4771933555603027, |
|
"learning_rate": 2.829095832706484e-05, |
|
"loss": 0.7841, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 1.4825345247766044, |
|
"grad_norm": 39.39826202392578, |
|
"learning_rate": 2.810290356551828e-05, |
|
"loss": 0.6259, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.492688870836718, |
|
"grad_norm": 15.72121810913086, |
|
"learning_rate": 2.7914848803971717e-05, |
|
"loss": 0.5468, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 1.502843216896832, |
|
"grad_norm": 22.185754776000977, |
|
"learning_rate": 2.7726794042425157e-05, |
|
"loss": 0.4637, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.5129975629569454, |
|
"grad_norm": 3.0825910568237305, |
|
"learning_rate": 2.7538739280878596e-05, |
|
"loss": 0.5223, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 1.5231519090170593, |
|
"grad_norm": 5.231273174285889, |
|
"learning_rate": 2.7350684519332033e-05, |
|
"loss": 0.6151, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.533306255077173, |
|
"grad_norm": 0.22756709158420563, |
|
"learning_rate": 2.716262975778547e-05, |
|
"loss": 0.6018, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 1.5434606011372867, |
|
"grad_norm": 0.42972368001937866, |
|
"learning_rate": 2.6974574996238905e-05, |
|
"loss": 0.5058, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.5536149471974006, |
|
"grad_norm": 21.145174026489258, |
|
"learning_rate": 2.678652023469234e-05, |
|
"loss": 0.7302, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 1.5637692932575142, |
|
"grad_norm": 2.556708812713623, |
|
"learning_rate": 2.6598465473145784e-05, |
|
"loss": 0.4961, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.573923639317628, |
|
"grad_norm": 24.811382293701172, |
|
"learning_rate": 2.641041071159922e-05, |
|
"loss": 0.5132, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 1.5840779853777418, |
|
"grad_norm": 2.887291669845581, |
|
"learning_rate": 2.6222355950052657e-05, |
|
"loss": 0.6373, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.5942323314378553, |
|
"grad_norm": 3.0445516109466553, |
|
"learning_rate": 2.6034301188506093e-05, |
|
"loss": 0.5825, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 1.6043866774979691, |
|
"grad_norm": 6.381601333618164, |
|
"learning_rate": 2.584624642695953e-05, |
|
"loss": 0.6217, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.6145410235580828, |
|
"grad_norm": 2.4623990058898926, |
|
"learning_rate": 2.5658191665412966e-05, |
|
"loss": 0.6727, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 1.6246953696181965, |
|
"grad_norm": 21.089733123779297, |
|
"learning_rate": 2.547013690386641e-05, |
|
"loss": 0.5148, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.6348497156783104, |
|
"grad_norm": 1.0210952758789062, |
|
"learning_rate": 2.5282082142319845e-05, |
|
"loss": 0.5116, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 1.645004061738424, |
|
"grad_norm": 6.323028564453125, |
|
"learning_rate": 2.509402738077328e-05, |
|
"loss": 0.4013, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.6551584077985377, |
|
"grad_norm": 15.812409400939941, |
|
"learning_rate": 2.490597261922672e-05, |
|
"loss": 0.4226, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 1.6653127538586516, |
|
"grad_norm": 4.813828945159912, |
|
"learning_rate": 2.471791785768016e-05, |
|
"loss": 0.5652, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.675467099918765, |
|
"grad_norm": 0.253533273935318, |
|
"learning_rate": 2.4529863096133597e-05, |
|
"loss": 0.5531, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 1.685621445978879, |
|
"grad_norm": 0.4044888913631439, |
|
"learning_rate": 2.4341808334587033e-05, |
|
"loss": 0.494, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.6957757920389926, |
|
"grad_norm": 3.0946457386016846, |
|
"learning_rate": 2.4153753573040473e-05, |
|
"loss": 0.6151, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 1.7059301380991063, |
|
"grad_norm": 1.8007519245147705, |
|
"learning_rate": 2.396569881149391e-05, |
|
"loss": 0.7379, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.7160844841592202, |
|
"grad_norm": 7.954397201538086, |
|
"learning_rate": 2.3777644049947345e-05, |
|
"loss": 0.5232, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 1.7262388302193339, |
|
"grad_norm": 1.1328774690628052, |
|
"learning_rate": 2.3589589288400785e-05, |
|
"loss": 0.5858, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.7363931762794476, |
|
"grad_norm": 22.545045852661133, |
|
"learning_rate": 2.340153452685422e-05, |
|
"loss": 0.5951, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 1.7465475223395615, |
|
"grad_norm": 8.525069236755371, |
|
"learning_rate": 2.3213479765307658e-05, |
|
"loss": 0.4659, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.756701868399675, |
|
"grad_norm": 2.1010477542877197, |
|
"learning_rate": 2.3025425003761097e-05, |
|
"loss": 0.4882, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 1.7668562144597888, |
|
"grad_norm": 10.944445610046387, |
|
"learning_rate": 2.2837370242214533e-05, |
|
"loss": 0.5209, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.7770105605199025, |
|
"grad_norm": 34.85451889038086, |
|
"learning_rate": 2.264931548066797e-05, |
|
"loss": 0.6565, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 1.7871649065800161, |
|
"grad_norm": 11.516502380371094, |
|
"learning_rate": 2.246126071912141e-05, |
|
"loss": 0.6836, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.79731925264013, |
|
"grad_norm": 1.427713394165039, |
|
"learning_rate": 2.2273205957574846e-05, |
|
"loss": 0.5473, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 1.8074735987002437, |
|
"grad_norm": 22.646167755126953, |
|
"learning_rate": 2.2085151196028285e-05, |
|
"loss": 0.6735, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.8176279447603574, |
|
"grad_norm": 2.9643514156341553, |
|
"learning_rate": 2.189709643448172e-05, |
|
"loss": 0.4089, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 1.8277822908204713, |
|
"grad_norm": 5.935336589813232, |
|
"learning_rate": 2.170904167293516e-05, |
|
"loss": 0.6947, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.8379366368805847, |
|
"grad_norm": 5.152228832244873, |
|
"learning_rate": 2.1520986911388597e-05, |
|
"loss": 0.4509, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 1.8480909829406986, |
|
"grad_norm": 13.407612800598145, |
|
"learning_rate": 2.1332932149842037e-05, |
|
"loss": 0.5937, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.8582453290008123, |
|
"grad_norm": 3.732926368713379, |
|
"learning_rate": 2.1144877388295473e-05, |
|
"loss": 0.5812, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 1.868399675060926, |
|
"grad_norm": 10.97332763671875, |
|
"learning_rate": 2.095682262674891e-05, |
|
"loss": 0.4815, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.8785540211210399, |
|
"grad_norm": 0.9578274488449097, |
|
"learning_rate": 2.076876786520235e-05, |
|
"loss": 0.4805, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 1.8887083671811535, |
|
"grad_norm": 4.262126445770264, |
|
"learning_rate": 2.0580713103655786e-05, |
|
"loss": 0.6214, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.8988627132412672, |
|
"grad_norm": 2.12185001373291, |
|
"learning_rate": 2.0392658342109222e-05, |
|
"loss": 0.4471, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 1.909017059301381, |
|
"grad_norm": 2.6735076904296875, |
|
"learning_rate": 2.020460358056266e-05, |
|
"loss": 0.427, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.9191714053614946, |
|
"grad_norm": 18.710649490356445, |
|
"learning_rate": 2.0016548819016098e-05, |
|
"loss": 0.6322, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 1.9293257514216084, |
|
"grad_norm": 8.641931533813477, |
|
"learning_rate": 1.9828494057469534e-05, |
|
"loss": 0.5399, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.9394800974817223, |
|
"grad_norm": 3.205247640609741, |
|
"learning_rate": 1.9640439295922974e-05, |
|
"loss": 0.5887, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 1.9496344435418358, |
|
"grad_norm": 1.3619911670684814, |
|
"learning_rate": 1.945238453437641e-05, |
|
"loss": 0.5996, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.9597887896019497, |
|
"grad_norm": 6.821827411651611, |
|
"learning_rate": 1.9264329772829846e-05, |
|
"loss": 0.4801, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 1.9699431356620634, |
|
"grad_norm": 2.5747969150543213, |
|
"learning_rate": 1.9076275011283286e-05, |
|
"loss": 0.5523, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.980097481722177, |
|
"grad_norm": 13.470471382141113, |
|
"learning_rate": 1.8888220249736725e-05, |
|
"loss": 0.5534, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 1.990251827782291, |
|
"grad_norm": 12.533121109008789, |
|
"learning_rate": 1.8700165488190162e-05, |
|
"loss": 0.6055, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.9266076978245279, |
|
"eval_f1_macro": 0.9119749098386866, |
|
"eval_f1_micro": 0.9266076978245279, |
|
"eval_f1_weighted": 0.926625315413396, |
|
"eval_loss": 0.2657528519630432, |
|
"eval_precision_macro": 0.928091787369496, |
|
"eval_precision_micro": 0.9266076978245279, |
|
"eval_precision_weighted": 0.9272787083264409, |
|
"eval_recall_macro": 0.8972230372979126, |
|
"eval_recall_micro": 0.9266076978245279, |
|
"eval_recall_weighted": 0.9266076978245279, |
|
"eval_runtime": 805.7166, |
|
"eval_samples_per_second": 5.192, |
|
"eval_steps_per_second": 0.325, |
|
"step": 4924 |
|
}, |
|
{ |
|
"epoch": 2.0004061738424044, |
|
"grad_norm": 0.43904566764831543, |
|
"learning_rate": 1.85121107266436e-05, |
|
"loss": 0.5567, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 2.0105605199025183, |
|
"grad_norm": 25.5123291015625, |
|
"learning_rate": 1.8324055965097038e-05, |
|
"loss": 0.4346, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.020714865962632, |
|
"grad_norm": 15.89987850189209, |
|
"learning_rate": 1.8136001203550474e-05, |
|
"loss": 0.5333, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 2.0308692120227456, |
|
"grad_norm": 2.8034896850585938, |
|
"learning_rate": 1.7947946442003914e-05, |
|
"loss": 0.4765, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.0410235580828595, |
|
"grad_norm": 2.8935348987579346, |
|
"learning_rate": 1.775989168045735e-05, |
|
"loss": 0.4422, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 2.0511779041429734, |
|
"grad_norm": 8.14145565032959, |
|
"learning_rate": 1.757183691891079e-05, |
|
"loss": 0.5282, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.061332250203087, |
|
"grad_norm": 6.452521800994873, |
|
"learning_rate": 1.7383782157364226e-05, |
|
"loss": 0.4882, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 2.0714865962632008, |
|
"grad_norm": 3.1078271865844727, |
|
"learning_rate": 1.7195727395817662e-05, |
|
"loss": 0.4243, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.081640942323314, |
|
"grad_norm": 4.052325248718262, |
|
"learning_rate": 1.70076726342711e-05, |
|
"loss": 0.3915, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 2.091795288383428, |
|
"grad_norm": 0.33482909202575684, |
|
"learning_rate": 1.6819617872724538e-05, |
|
"loss": 0.4279, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.101949634443542, |
|
"grad_norm": 0.27442964911460876, |
|
"learning_rate": 1.6631563111177974e-05, |
|
"loss": 0.4043, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 2.1121039805036554, |
|
"grad_norm": 0.5872493982315063, |
|
"learning_rate": 1.6443508349631414e-05, |
|
"loss": 0.5726, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.1222583265637693, |
|
"grad_norm": 2.22892689704895, |
|
"learning_rate": 1.625545358808485e-05, |
|
"loss": 0.52, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 2.1324126726238832, |
|
"grad_norm": 2.56426739692688, |
|
"learning_rate": 1.6067398826538286e-05, |
|
"loss": 0.4345, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.1425670186839967, |
|
"grad_norm": 13.771442413330078, |
|
"learning_rate": 1.5879344064991726e-05, |
|
"loss": 0.4729, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 2.1527213647441106, |
|
"grad_norm": 0.1530083268880844, |
|
"learning_rate": 1.5691289303445166e-05, |
|
"loss": 0.446, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.162875710804224, |
|
"grad_norm": 2.819397211074829, |
|
"learning_rate": 1.5503234541898602e-05, |
|
"loss": 0.7085, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 2.173030056864338, |
|
"grad_norm": 12.847749710083008, |
|
"learning_rate": 1.531517978035204e-05, |
|
"loss": 0.6421, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 2.183184402924452, |
|
"grad_norm": 9.512947082519531, |
|
"learning_rate": 1.5127125018805476e-05, |
|
"loss": 0.535, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 2.1933387489845653, |
|
"grad_norm": 27.62982749938965, |
|
"learning_rate": 1.4939070257258914e-05, |
|
"loss": 0.5626, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.203493095044679, |
|
"grad_norm": 10.81323528289795, |
|
"learning_rate": 1.4751015495712354e-05, |
|
"loss": 0.4339, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 2.213647441104793, |
|
"grad_norm": 3.875645637512207, |
|
"learning_rate": 1.456296073416579e-05, |
|
"loss": 0.4962, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 2.2238017871649065, |
|
"grad_norm": 0.1911429613828659, |
|
"learning_rate": 1.4374905972619226e-05, |
|
"loss": 0.5207, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 2.2339561332250204, |
|
"grad_norm": 96.48568725585938, |
|
"learning_rate": 1.4186851211072666e-05, |
|
"loss": 0.5451, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.244110479285134, |
|
"grad_norm": 2.502739191055298, |
|
"learning_rate": 1.3998796449526102e-05, |
|
"loss": 0.4988, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 2.2542648253452477, |
|
"grad_norm": 5.941136360168457, |
|
"learning_rate": 1.3810741687979538e-05, |
|
"loss": 0.4889, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 2.2644191714053616, |
|
"grad_norm": 20.13005828857422, |
|
"learning_rate": 1.3622686926432978e-05, |
|
"loss": 0.5544, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 2.274573517465475, |
|
"grad_norm": 5.842233657836914, |
|
"learning_rate": 1.3434632164886416e-05, |
|
"loss": 0.5751, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.284727863525589, |
|
"grad_norm": 11.564367294311523, |
|
"learning_rate": 1.3246577403339852e-05, |
|
"loss": 0.5099, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 2.294882209585703, |
|
"grad_norm": 0.15794846415519714, |
|
"learning_rate": 1.3058522641793292e-05, |
|
"loss": 0.4425, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 2.3050365556458163, |
|
"grad_norm": 4.0853142738342285, |
|
"learning_rate": 1.2870467880246728e-05, |
|
"loss": 0.3685, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 2.3151909017059302, |
|
"grad_norm": 9.024371147155762, |
|
"learning_rate": 1.2682413118700164e-05, |
|
"loss": 0.4964, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.3253452477660437, |
|
"grad_norm": 15.076045989990234, |
|
"learning_rate": 1.2494358357153604e-05, |
|
"loss": 0.5681, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 2.3354995938261576, |
|
"grad_norm": 0.19159801304340363, |
|
"learning_rate": 1.230630359560704e-05, |
|
"loss": 0.3851, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 2.3456539398862715, |
|
"grad_norm": 54.02906036376953, |
|
"learning_rate": 1.2118248834060478e-05, |
|
"loss": 0.5483, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 2.355808285946385, |
|
"grad_norm": 7.049926280975342, |
|
"learning_rate": 1.1930194072513918e-05, |
|
"loss": 0.5981, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.365962632006499, |
|
"grad_norm": 16.41009521484375, |
|
"learning_rate": 1.1742139310967354e-05, |
|
"loss": 0.4977, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 2.3761169780666123, |
|
"grad_norm": 0.19413629174232483, |
|
"learning_rate": 1.1554084549420792e-05, |
|
"loss": 0.5617, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.386271324126726, |
|
"grad_norm": 1.1825770139694214, |
|
"learning_rate": 1.136602978787423e-05, |
|
"loss": 0.4328, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 2.39642567018684, |
|
"grad_norm": 8.703160285949707, |
|
"learning_rate": 1.1177975026327666e-05, |
|
"loss": 0.4351, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.406580016246954, |
|
"grad_norm": 3.950902223587036, |
|
"learning_rate": 1.0989920264781104e-05, |
|
"loss": 0.4248, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 2.4167343623070674, |
|
"grad_norm": 2.6033504009246826, |
|
"learning_rate": 1.0801865503234542e-05, |
|
"loss": 0.4334, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.4268887083671813, |
|
"grad_norm": 0.4781394600868225, |
|
"learning_rate": 1.061381074168798e-05, |
|
"loss": 0.4309, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 2.4370430544272947, |
|
"grad_norm": 1.0080628395080566, |
|
"learning_rate": 1.0425755980141418e-05, |
|
"loss": 0.4494, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.4471974004874086, |
|
"grad_norm": 11.38261890411377, |
|
"learning_rate": 1.0237701218594856e-05, |
|
"loss": 0.5053, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 2.4573517465475225, |
|
"grad_norm": 1.4227663278579712, |
|
"learning_rate": 1.0049646457048292e-05, |
|
"loss": 0.4257, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 2.467506092607636, |
|
"grad_norm": 0.9921310544013977, |
|
"learning_rate": 9.86159169550173e-06, |
|
"loss": 0.5595, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 2.47766043866775, |
|
"grad_norm": 14.428972244262695, |
|
"learning_rate": 9.673536933955168e-06, |
|
"loss": 0.3958, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.4878147847278633, |
|
"grad_norm": 1.1158331632614136, |
|
"learning_rate": 9.485482172408605e-06, |
|
"loss": 0.4468, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 2.497969130787977, |
|
"grad_norm": 0.15840868651866913, |
|
"learning_rate": 9.297427410862043e-06, |
|
"loss": 0.4436, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 2.508123476848091, |
|
"grad_norm": 3.016507387161255, |
|
"learning_rate": 9.10937264931548e-06, |
|
"loss": 0.5268, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 2.5182778229082046, |
|
"grad_norm": 0.9153080582618713, |
|
"learning_rate": 8.921317887768919e-06, |
|
"loss": 0.5499, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.5284321689683185, |
|
"grad_norm": 2.3704733848571777, |
|
"learning_rate": 8.733263126222356e-06, |
|
"loss": 0.4143, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 2.538586515028432, |
|
"grad_norm": 2.596069574356079, |
|
"learning_rate": 8.545208364675794e-06, |
|
"loss": 0.388, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 2.548740861088546, |
|
"grad_norm": 14.82557487487793, |
|
"learning_rate": 8.357153603129232e-06, |
|
"loss": 0.4274, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 2.5588952071486597, |
|
"grad_norm": 1.2737632989883423, |
|
"learning_rate": 8.169098841582669e-06, |
|
"loss": 0.3534, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.5690495532087736, |
|
"grad_norm": 18.806808471679688, |
|
"learning_rate": 7.981044080036107e-06, |
|
"loss": 0.6733, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 2.579203899268887, |
|
"grad_norm": 21.513843536376953, |
|
"learning_rate": 7.792989318489545e-06, |
|
"loss": 0.4451, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 2.589358245329001, |
|
"grad_norm": 4.2879815101623535, |
|
"learning_rate": 7.604934556942982e-06, |
|
"loss": 0.545, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 2.5995125913891144, |
|
"grad_norm": 2.1117591857910156, |
|
"learning_rate": 7.41687979539642e-06, |
|
"loss": 0.4982, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.6096669374492283, |
|
"grad_norm": 25.265079498291016, |
|
"learning_rate": 7.228825033849858e-06, |
|
"loss": 0.4269, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 2.619821283509342, |
|
"grad_norm": 0.9816861152648926, |
|
"learning_rate": 7.040770272303295e-06, |
|
"loss": 0.4884, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 2.6299756295694556, |
|
"grad_norm": 2.7001571655273438, |
|
"learning_rate": 6.852715510756733e-06, |
|
"loss": 0.6128, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 2.6401299756295695, |
|
"grad_norm": 17.872499465942383, |
|
"learning_rate": 6.664660749210171e-06, |
|
"loss": 0.4402, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.650284321689683, |
|
"grad_norm": 3.858314275741577, |
|
"learning_rate": 6.476605987663608e-06, |
|
"loss": 0.5872, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 2.660438667749797, |
|
"grad_norm": 16.253244400024414, |
|
"learning_rate": 6.288551226117046e-06, |
|
"loss": 0.5678, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 2.6705930138099108, |
|
"grad_norm": 0.6387231349945068, |
|
"learning_rate": 6.100496464570484e-06, |
|
"loss": 0.383, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 2.680747359870024, |
|
"grad_norm": 27.796384811401367, |
|
"learning_rate": 5.912441703023921e-06, |
|
"loss": 0.4691, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.690901705930138, |
|
"grad_norm": 1.4365334510803223, |
|
"learning_rate": 5.724386941477358e-06, |
|
"loss": 0.4579, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 2.7010560519902516, |
|
"grad_norm": 6.362787246704102, |
|
"learning_rate": 5.536332179930797e-06, |
|
"loss": 0.5751, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 2.7112103980503655, |
|
"grad_norm": 5.281644344329834, |
|
"learning_rate": 5.348277418384234e-06, |
|
"loss": 0.5035, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 2.7213647441104794, |
|
"grad_norm": 2.8617539405822754, |
|
"learning_rate": 5.160222656837671e-06, |
|
"loss": 0.373, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.7315190901705932, |
|
"grad_norm": 6.853511810302734, |
|
"learning_rate": 4.972167895291109e-06, |
|
"loss": 0.4706, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 2.7416734362307067, |
|
"grad_norm": 2.8362090587615967, |
|
"learning_rate": 4.784113133744547e-06, |
|
"loss": 0.6053, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 2.7518277822908206, |
|
"grad_norm": 6.8812031745910645, |
|
"learning_rate": 4.596058372197985e-06, |
|
"loss": 0.433, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 2.761982128350934, |
|
"grad_norm": 0.7493718862533569, |
|
"learning_rate": 4.408003610651422e-06, |
|
"loss": 0.6035, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.772136474411048, |
|
"grad_norm": 12.810718536376953, |
|
"learning_rate": 4.219948849104859e-06, |
|
"loss": 0.4921, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 2.782290820471162, |
|
"grad_norm": 4.376484394073486, |
|
"learning_rate": 4.031894087558298e-06, |
|
"loss": 0.453, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 2.7924451665312753, |
|
"grad_norm": 5.702888011932373, |
|
"learning_rate": 3.843839326011735e-06, |
|
"loss": 0.487, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 2.802599512591389, |
|
"grad_norm": 1.284523844718933, |
|
"learning_rate": 3.655784564465172e-06, |
|
"loss": 0.4004, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.8127538586515026, |
|
"grad_norm": 43.9447135925293, |
|
"learning_rate": 3.4677298029186104e-06, |
|
"loss": 0.5632, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 2.8229082047116165, |
|
"grad_norm": 1.2876818180084229, |
|
"learning_rate": 3.2796750413720475e-06, |
|
"loss": 0.3177, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 2.8330625507717304, |
|
"grad_norm": 0.49143844842910767, |
|
"learning_rate": 3.0916202798254854e-06, |
|
"loss": 0.4961, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 2.843216896831844, |
|
"grad_norm": 9.238175392150879, |
|
"learning_rate": 2.903565518278923e-06, |
|
"loss": 0.4309, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.8533712428919578, |
|
"grad_norm": 1.5201661586761475, |
|
"learning_rate": 2.715510756732361e-06, |
|
"loss": 0.4751, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 2.863525588952071, |
|
"grad_norm": 0.13578936457633972, |
|
"learning_rate": 2.527455995185798e-06, |
|
"loss": 0.34, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 2.873679935012185, |
|
"grad_norm": 12.835733413696289, |
|
"learning_rate": 2.339401233639236e-06, |
|
"loss": 0.2968, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 2.883834281072299, |
|
"grad_norm": 0.2961287796497345, |
|
"learning_rate": 2.1513464720926735e-06, |
|
"loss": 0.4396, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.893988627132413, |
|
"grad_norm": 12.266148567199707, |
|
"learning_rate": 1.9632917105461115e-06, |
|
"loss": 0.5608, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 2.9041429731925263, |
|
"grad_norm": 13.638496398925781, |
|
"learning_rate": 1.7752369489995486e-06, |
|
"loss": 0.4852, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 2.9142973192526402, |
|
"grad_norm": 8.737442016601562, |
|
"learning_rate": 1.5871821874529863e-06, |
|
"loss": 0.5331, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 2.9244516653127537, |
|
"grad_norm": 1.0912017822265625, |
|
"learning_rate": 1.399127425906424e-06, |
|
"loss": 0.4314, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.9346060113728676, |
|
"grad_norm": 4.351292610168457, |
|
"learning_rate": 1.2110726643598616e-06, |
|
"loss": 0.5376, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 2.9447603574329815, |
|
"grad_norm": 6.2538065910339355, |
|
"learning_rate": 1.0230179028132994e-06, |
|
"loss": 0.4093, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 2.954914703493095, |
|
"grad_norm": 5.2288126945495605, |
|
"learning_rate": 8.349631412667369e-07, |
|
"loss": 0.7437, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 2.965069049553209, |
|
"grad_norm": 1.296960711479187, |
|
"learning_rate": 6.469083797201745e-07, |
|
"loss": 0.396, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 2.9752233956133223, |
|
"grad_norm": 5.880867958068848, |
|
"learning_rate": 4.5885361817361217e-07, |
|
"loss": 0.4873, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 2.985377741673436, |
|
"grad_norm": 2.8329901695251465, |
|
"learning_rate": 2.707988566270498e-07, |
|
"loss": 0.5592, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 2.99553208773355, |
|
"grad_norm": 5.462076663970947, |
|
"learning_rate": 8.274409508048744e-08, |
|
"loss": 0.627, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.9390389672483863, |
|
"eval_f1_macro": 0.9244631728799089, |
|
"eval_f1_micro": 0.9390389672483863, |
|
"eval_f1_weighted": 0.9389134636561386, |
|
"eval_loss": 0.24498265981674194, |
|
"eval_precision_macro": 0.9458913396195016, |
|
"eval_precision_micro": 0.9390389672483863, |
|
"eval_precision_weighted": 0.9393976940878069, |
|
"eval_recall_macro": 0.9051122705051101, |
|
"eval_recall_micro": 0.9390389672483863, |
|
"eval_recall_weighted": 0.9390389672483863, |
|
"eval_runtime": 773.6973, |
|
"eval_samples_per_second": 5.407, |
|
"eval_steps_per_second": 0.339, |
|
"step": 7386 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 7386, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.01 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3885829395485184.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |