{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.900480377050666,
  "eval_steps": 500,
  "global_step": 16000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "learning_rate": 4.999549488698767e-05, "loss": 0.9367, "step": 100},
    {"epoch": 0.04, "learning_rate": 4.998198117163414e-05, "loss": 0.8539, "step": 200},
    {"epoch": 0.05, "learning_rate": 4.995946372440459e-05, "loss": 0.8563, "step": 300},
    {"epoch": 0.07, "learning_rate": 4.99279506607906e-05, "loss": 0.8552, "step": 400},
    {"epoch": 0.09, "learning_rate": 4.9887453338385195e-05, "loss": 0.8377, "step": 500},
    {"epoch": 0.11, "learning_rate": 4.98379863527895e-05, "loss": 0.8346, "step": 600},
    {"epoch": 0.13, "learning_rate": 4.9779567532352364e-05, "loss": 0.8338, "step": 700},
    {"epoch": 0.15, "learning_rate": 4.971221793174483e-05, "loss": 0.8176, "step": 800},
    {"epoch": 0.16, "learning_rate": 4.9635961824371865e-05, "loss": 0.8297, "step": 900},
    {"epoch": 0.18, "learning_rate": 4.9550826693624e-05, "loss": 0.8337, "step": 1000},
    {"epoch": 0.2, "learning_rate": 4.945684322297207e-05, "loss": 0.8243, "step": 1100},
    {"epoch": 0.22, "learning_rate": 4.935404528490858e-05, "loss": 0.8289, "step": 1200},
    {"epoch": 0.24, "learning_rate": 4.924246992873982e-05, "loss": 0.818, "step": 1300},
    {"epoch": 0.25, "learning_rate": 4.9122157367232894e-05, "loss": 0.8361, "step": 1400},
    {"epoch": 0.27, "learning_rate": 4.899315096212272e-05, "loss": 0.8055, "step": 1500},
    {"epoch": 0.29, "learning_rate": 4.8855497208484046e-05, "loss": 0.8284, "step": 1600},
    {"epoch": 0.31, "learning_rate": 4.8709245717974204e-05, "loss": 0.8214, "step": 1700},
    {"epoch": 0.33, "learning_rate": 4.8554449200952635e-05, "loss": 0.8243, "step": 1800},
    {"epoch": 0.34, "learning_rate": 4.8391163447483587e-05, "loss": 0.8191, "step": 1900},
    {"epoch": 0.36, "learning_rate": 4.821944730722887e-05, "loss": 0.8106, "step": 2000},
    {"epoch": 0.38, "learning_rate": 4.803936266823792e-05, "loss": 0.8217, "step": 2100},
    {"epoch": 0.4, "learning_rate": 4.785097443464276e-05, "loss": 0.8071, "step": 2200},
    {"epoch": 0.42, "learning_rate": 4.765435050326602e-05, "loss": 0.8164, "step": 2300},
    {"epoch": 0.44, "learning_rate": 4.744956173915022e-05, "loss": 0.8264, "step": 2400},
    {"epoch": 0.45, "learning_rate": 4.7236681950017445e-05, "loss": 0.8183, "step": 2500},
    {"epoch": 0.47, "learning_rate": 4.701578785966834e-05, "loss": 0.8035, "step": 2600},
    {"epoch": 0.49, "learning_rate": 4.678695908033017e-05, "loss": 0.8019, "step": 2700},
    {"epoch": 0.51, "learning_rate": 4.655027808396385e-05, "loss": 0.8084, "step": 2800},
    {"epoch": 0.53, "learning_rate": 4.6305830172540295e-05, "loss": 0.8124, "step": 2900},
    {"epoch": 0.54, "learning_rate": 4.6053703447296826e-05, "loss": 0.8206, "step": 3000},
    {"epoch": 0.56, "learning_rate": 4.579398877698471e-05, "loss": 0.8137, "step": 3100},
    {"epoch": 0.58, "learning_rate": 4.55267797651192e-05, "loss": 0.803, "step": 3200},
    {"epoch": 0.6, "learning_rate": 4.5252172716244e-05, "loss": 0.8108, "step": 3300},
    {"epoch": 0.62, "learning_rate": 4.4970266601222243e-05, "loss": 0.8154, "step": 3400},
    {"epoch": 0.63, "learning_rate": 4.4681163021566496e-05, "loss": 0.7998, "step": 3500},
    {"epoch": 0.65, "learning_rate": 4.4384966172820644e-05, "loss": 0.8109, "step": 3600},
    {"epoch": 0.67, "learning_rate": 4.40817828070069e-05, "loss": 0.8102, "step": 3700},
    {"epoch": 0.69, "learning_rate": 4.377172219415136e-05, "loss": 0.8119, "step": 3800},
    {"epoch": 0.71, "learning_rate": 4.345489608290216e-05, "loss": 0.8041, "step": 3900},
    {"epoch": 0.73, "learning_rate": 4.313141866025423e-05, "loss": 0.8149, "step": 4000},
    {"epoch": 0.74, "learning_rate": 4.280140651039522e-05, "loss": 0.8043, "step": 4100},
    {"epoch": 0.76, "learning_rate": 4.246497857268759e-05, "loss": 0.7933, "step": 4200},
    {"epoch": 0.78, "learning_rate": 4.2122256098801726e-05, "loss": 0.8022, "step": 4300},
    {"epoch": 0.8, "learning_rate": 4.177336260901578e-05, "loss": 0.8083, "step": 4400},
    {"epoch": 0.82, "learning_rate": 4.1418423847697786e-05, "loss": 0.8082, "step": 4500},
    {"epoch": 0.83, "learning_rate": 4.105756773798633e-05, "loss": 0.7956, "step": 4600},
    {"epoch": 0.85, "learning_rate": 4.0690924335685865e-05, "loss": 0.8072, "step": 4700},
    {"epoch": 0.87, "learning_rate": 4.031862578239338e-05, "loss": 0.7954, "step": 4800},
    {"epoch": 0.89, "learning_rate": 3.994080625787343e-05, "loss": 0.8025, "step": 4900},
    {"epoch": 0.91, "learning_rate": 3.955760193169851e-05, "loss": 0.796, "step": 5000},
    {"epoch": 0.92, "learning_rate": 3.9169150914172324e-05, "loss": 0.7924, "step": 5100},
    {"epoch": 0.94, "learning_rate": 3.8775593206553575e-05, "loss": 0.8076, "step": 5200},
    {"epoch": 0.96, "learning_rate": 3.837707065059823e-05, "loss": 0.7929, "step": 5300},
    {"epoch": 0.98, "learning_rate": 3.79737268774385e-05, "loss": 0.7966, "step": 5400},
    {"epoch": 1.0, "learning_rate": 3.756570725581684e-05, "loss": 0.812, "step": 5500},
    {"epoch": 1.02, "learning_rate": 3.71531588396938e-05, "loss": 0.7952, "step": 5600},
    {"epoch": 1.03, "learning_rate": 3.673623031524838e-05, "loss": 0.807, "step": 5700},
    {"epoch": 1.05, "learning_rate": 3.6315071947290255e-05, "loss": 0.7919, "step": 5800},
    {"epoch": 1.07, "learning_rate": 3.588983552510291e-05, "loss": 0.8051, "step": 5900},
    {"epoch": 1.09, "learning_rate": 3.546067430773747e-05, "loss": 0.8067, "step": 6000},
    {"epoch": 1.11, "learning_rate": 3.50277429687767e-05, "loss": 0.7982, "step": 6100},
    {"epoch": 1.12, "learning_rate": 3.459119754058929e-05, "loss": 0.7965, "step": 6200},
    {"epoch": 1.14, "learning_rate": 3.4151195358094365e-05, "loss": 0.7829, "step": 6300},
    {"epoch": 1.16, "learning_rate": 3.370789500205656e-05, "loss": 0.7905, "step": 6400},
    {"epoch": 1.18, "learning_rate": 3.326593563722561e-05, "loss": 0.7904, "step": 6500},
    {"epoch": 1.2, "learning_rate": 3.281654834858534e-05, "loss": 0.7972, "step": 6600},
    {"epoch": 1.21, "learning_rate": 3.236434390525093e-05, "loss": 0.7995, "step": 6700},
    {"epoch": 1.23, "learning_rate": 3.190948528579214e-05, "loss": 0.7965, "step": 6800},
    {"epoch": 1.25, "learning_rate": 3.1452136425367785e-05, "loss": 0.7956, "step": 6900},
    {"epoch": 1.27, "learning_rate": 3.099246215664205e-05, "loss": 0.7894, "step": 7000},
    {"epoch": 1.29, "learning_rate": 3.0530628150377295e-05, "loss": 0.8024, "step": 7100},
    {"epoch": 1.31, "learning_rate": 3.0066800855724798e-05, "loss": 0.7726, "step": 7200},
    {"epoch": 1.32, "learning_rate": 2.960114744023503e-05, "loss": 0.7881, "step": 7300},
    {"epoch": 1.34, "learning_rate": 2.913383572960889e-05, "loss": 0.7993, "step": 7400},
    {"epoch": 1.36, "learning_rate": 2.866503414721184e-05, "loss": 0.8094, "step": 7500},
    {"epoch": 1.38, "learning_rate": 2.8194911653372613e-05, "loss": 0.7923, "step": 7600},
    {"epoch": 1.4, "learning_rate": 2.7723637684488356e-05, "loss": 0.8058, "step": 7700},
    {"epoch": 1.41, "learning_rate": 2.725138209195824e-05, "loss": 0.7852, "step": 7800},
    {"epoch": 1.43, "learning_rate": 2.677831508096747e-05, "loss": 0.7888, "step": 7900},
    {"epoch": 1.45, "learning_rate": 2.630460714914379e-05, "loss": 0.8021, "step": 8000},
    {"epoch": 1.47, "learning_rate": 2.5830429025108627e-05, "loss": 0.7901, "step": 8100},
    {"epoch": 1.49, "learning_rate": 2.5355951606944913e-05, "loss": 0.8147, "step": 8200},
    {"epoch": 1.5, "learning_rate": 2.488134590060391e-05, "loss": 0.7986, "step": 8300},
    {"epoch": 1.52, "learning_rate": 2.441152781437193e-05, "loss": 0.8005, "step": 8400},
    {"epoch": 1.54, "learning_rate": 2.393717568838824e-05, "loss": 0.7923, "step": 8500},
    {"epoch": 1.56, "learning_rate": 2.346320661389542e-05, "loss": 0.789, "step": 8600},
    {"epoch": 1.58, "learning_rate": 2.2989791413633083e-05, "loss": 0.7746, "step": 8700},
    {"epoch": 1.6, "learning_rate": 2.2517100710719537e-05, "loss": 0.7861, "step": 8800},
    {"epoch": 1.61, "learning_rate": 2.2045304867157723e-05, "loss": 0.7853, "step": 8900},
    {"epoch": 1.63, "learning_rate": 2.157457392243513e-05, "loss": 0.7903, "step": 9000},
    {"epoch": 1.65, "learning_rate": 2.110507753224012e-05, "loss": 0.7986, "step": 9100},
    {"epoch": 1.67, "learning_rate": 2.0636984907316447e-05, "loss": 0.7961, "step": 9200},
    {"epoch": 1.69, "learning_rate": 2.017046475247813e-05, "loss": 0.7958, "step": 9300},
    {"epoch": 1.7, "learning_rate": 1.9705685205806772e-05, "loss": 0.7866, "step": 9400},
    {"epoch": 1.72, "learning_rate": 1.9242813778053085e-05, "loss": 0.7816, "step": 9500},
    {"epoch": 1.74, "learning_rate": 1.8782017292264426e-05, "loss": 0.7848, "step": 9600},
    {"epoch": 1.76, "learning_rate": 1.8323461823660335e-05, "loss": 0.7935, "step": 9700},
    {"epoch": 1.78, "learning_rate": 1.786731263977748e-05, "loss": 0.7978, "step": 9800},
    {"epoch": 1.79, "learning_rate": 1.7413734140905783e-05, "loss": 0.7936, "step": 9900},
    {"epoch": 1.81, "learning_rate": 1.696288980083704e-05, "loss": 0.7996, "step": 10000},
    {"epoch": 1.83, "learning_rate": 1.6514942107947466e-05, "loss": 0.7798, "step": 10100},
    {"epoch": 1.85, "learning_rate": 1.6070052506635487e-05, "loss": 0.786, "step": 10200},
    {"epoch": 1.87, "learning_rate": 1.5628381339135647e-05, "loss": 0.7912, "step": 10300},
    {"epoch": 1.89, "learning_rate": 1.5190087787729873e-05, "loss": 0.7767, "step": 10400},
    {"epoch": 1.9, "learning_rate": 1.4755329817376672e-05, "loss": 0.7891, "step": 10500},
    {"epoch": 1.92, "learning_rate": 1.4324264118779218e-05, "loss": 0.7885, "step": 10600},
    {"epoch": 1.94, "learning_rate": 1.389704605191254e-05, "loss": 0.7969, "step": 10700},
    {"epoch": 1.96, "learning_rate": 1.3473829590030412e-05, "loss": 0.788, "step": 10800},
    {"epoch": 1.98, "learning_rate": 1.3054767264171989e-05, "loss": 0.7861, "step": 10900},
    {"epoch": 1.99, "learning_rate": 1.2640010108188234e-05, "loss": 0.7864, "step": 11000},
    {"epoch": 2.01, "learning_rate": 1.2229707604308002e-05, "loss": 0.7989, "step": 11100},
    {"epoch": 2.03, "learning_rate": 1.1824007629263217e-05, "loss": 0.7764, "step": 11200},
    {"epoch": 2.05, "learning_rate": 1.1423056400992816e-05, "loss": 0.7934, "step": 11300},
    {"epoch": 2.07, "learning_rate": 1.1026998425944449e-05, "loss": 0.786, "step": 11400},
    {"epoch": 2.08, "learning_rate": 1.0635976446993102e-05, "loss": 0.7922, "step": 11500},
    {"epoch": 2.1, "learning_rate": 1.02501313919952e-05, "loss": 0.7842, "step": 11600},
    {"epoch": 2.12, "learning_rate": 9.869602322996985e-06, "loss": 0.7932, "step": 11700},
    {"epoch": 2.14, "learning_rate": 9.494526386115285e-06, "loss": 0.7754, "step": 11800},
    {"epoch": 2.16, "learning_rate": 9.125038762108796e-06, "loss": 0.7921, "step": 11900},
    {"epoch": 2.18, "learning_rate": 8.76127261765777e-06, "loss": 0.7935, "step": 12000},
    {"epoch": 2.19, "learning_rate": 8.403359057369443e-06, "loss": 0.7857, "step": 12100},
    {"epoch": 2.21, "learning_rate": 8.05142707652685e-06, "loss": 0.7884, "step": 12200},
    {"epoch": 2.23, "learning_rate": 7.705603514597697e-06, "loss": 0.7896, "step": 12300},
    {"epoch": 2.25, "learning_rate": 7.366013009520287e-06, "loss": 0.7823, "step": 12400},
    {"epoch": 2.27, "learning_rate": 7.032777952782879e-06, "loss": 0.7743, "step": 12500},
    {"epoch": 2.28, "learning_rate": 6.7060184453127065e-06, "loss": 0.7912, "step": 12600},
    {"epoch": 2.3, "learning_rate": 6.385852254190483e-06, "loss": 0.7814, "step": 12700},
    {"epoch": 2.32, "learning_rate": 6.072394770206108e-06, "loss": 0.7882, "step": 12800},
    {"epoch": 2.34, "learning_rate": 5.7657589662707655e-06, "loss": 0.7895, "step": 12900},
    {"epoch": 2.36, "learning_rate": 5.4660553567005e-06, "loss": 0.7872, "step": 13000},
    {"epoch": 2.37, "learning_rate": 5.173391957385812e-06, "loss": 0.7932, "step": 13100},
    {"epoch": 2.39, "learning_rate": 4.887874246861776e-06, "loss": 0.7979, "step": 13200},
    {"epoch": 2.41, "learning_rate": 4.609605128292627e-06, "loss": 0.7816, "step": 13300},
    {"epoch": 2.43, "learning_rate": 4.33868489238452e-06, "loss": 0.798, "step": 13400},
    {"epoch": 2.45, "learning_rate": 4.0752111812398825e-06, "loss": 0.7787, "step": 13500},
    {"epoch": 2.47, "learning_rate": 3.819278953166247e-06, "loss": 0.7929, "step": 13600},
    {"epoch": 2.48, "learning_rate": 3.570980448452507e-06, "loss": 0.7856, "step": 13700},
    {"epoch": 2.5, "learning_rate": 3.3304051561246193e-06, "loss": 0.7891, "step": 13800},
    {"epoch": 2.52, "learning_rate": 3.0976397816929847e-06, "loss": 0.7871, "step": 13900},
    {"epoch": 2.54, "learning_rate": 2.872768215902963e-06, "loss": 0.7771, "step": 14000},
    {"epoch": 2.56, "learning_rate": 2.6558715044999407e-06, "loss": 0.7852, "step": 14100},
    {"epoch": 2.57, "learning_rate": 2.4470278190196565e-06, "loss": 0.7848, "step": 14200},
    {"epoch": 2.59, "learning_rate": 2.246312428614519e-06, "loss": 0.7832, "step": 14300},
    {"epoch": 2.61, "learning_rate": 2.0537976729258834e-06, "loss": 0.7903, "step": 14400},
    {"epoch": 2.63, "learning_rate": 1.8695529360122367e-06, "loss": 0.7787, "step": 14500},
    {"epoch": 2.65, "learning_rate": 1.6936446213425079e-06, "loss": 0.7867, "step": 14600},
    {"epoch": 2.66, "learning_rate": 1.5261361278636949e-06, "loss": 0.7915, "step": 14700},
    {"epoch": 2.68, "learning_rate": 1.367087827151284e-06, "loss": 0.7798, "step": 14800},
    {"epoch": 2.7, "learning_rate": 1.2165570416508038e-06, "loss": 0.78, "step": 14900},
    {"epoch": 2.72, "learning_rate": 1.074598024018303e-06, "loss": 0.7942, "step": 15000},
    {"epoch": 2.74, "learning_rate": 9.412619375671838e-07, "loss": 0.7871, "step": 15100},
    {"epoch": 2.76, "learning_rate": 8.165968378284899e-07, "loss": 0.7874, "step": 15200},
    {"epoch": 2.77, "learning_rate": 7.006476552312597e-07, "loss": 0.8047, "step": 15300},
    {"epoch": 2.79, "learning_rate": 5.934561789092075e-07, "loss": 0.7972, "step": 15400},
    {"epoch": 2.81, "learning_rate": 4.950610416395052e-07, "loss": 0.7809, "step": 15500},
    {"epoch": 2.83, "learning_rate": 4.0549770591921944e-07, "loss": 0.7863, "step": 15600},
    {"epoch": 2.85, "learning_rate": 3.2479845118428556e-07, "loss": 0.7823, "step": 15700},
    {"epoch": 2.86, "learning_rate": 2.529923621757052e-07, "loss": 0.7795, "step": 15800},
    {"epoch": 2.88, "learning_rate": 1.901053184571594e-07, "loss": 0.7921, "step": 15900},
    {"epoch": 2.9, "learning_rate": 1.361599850877665e-07, "loss": 0.7846, "step": 16000}
  ],
  "logging_steps": 100,
  "max_steps": 16548,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "total_flos": 1.443182508167324e+20,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}