{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.997677659080353,
  "eval_steps": 500,
  "global_step": 6725,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.04, "learning_rate": 4.9628252788104095e-06, "loss": 1.9064, "step": 50 },
    { "epoch": 0.07, "learning_rate": 4.925650557620818e-06, "loss": 1.8717, "step": 100 },
    { "epoch": 0.11, "learning_rate": 4.888475836431228e-06, "loss": 1.863, "step": 150 },
    { "epoch": 0.15, "learning_rate": 4.851301115241636e-06, "loss": 1.8702, "step": 200 },
    { "epoch": 0.19, "learning_rate": 4.814126394052045e-06, "loss": 1.8228, "step": 250 },
    { "epoch": 0.22, "learning_rate": 4.776951672862453e-06, "loss": 1.8228, "step": 300 },
    { "epoch": 0.26, "learning_rate": 4.739776951672863e-06, "loss": 1.8336, "step": 350 },
    { "epoch": 0.3, "learning_rate": 4.702602230483272e-06, "loss": 1.8533, "step": 400 },
    { "epoch": 0.33, "learning_rate": 4.665427509293681e-06, "loss": 1.8284, "step": 450 },
    { "epoch": 0.37, "learning_rate": 4.62825278810409e-06, "loss": 1.7964, "step": 500 },
    { "epoch": 0.41, "learning_rate": 4.591078066914498e-06, "loss": 1.8245, "step": 550 },
    { "epoch": 0.45, "learning_rate": 4.553903345724908e-06, "loss": 1.8342, "step": 600 },
    { "epoch": 0.48, "learning_rate": 4.516728624535316e-06, "loss": 1.8119, "step": 650 },
    { "epoch": 0.52, "learning_rate": 4.479553903345725e-06, "loss": 1.8344, "step": 700 },
    { "epoch": 0.56, "learning_rate": 4.442379182156134e-06, "loss": 1.7995, "step": 750 },
    { "epoch": 0.59, "learning_rate": 4.405204460966543e-06, "loss": 1.8055, "step": 800 },
    { "epoch": 0.63, "learning_rate": 4.368029739776952e-06, "loss": 1.8143, "step": 850 },
    { "epoch": 0.67, "learning_rate": 4.330855018587361e-06, "loss": 1.7934, "step": 900 },
    { "epoch": 0.71, "learning_rate": 4.29368029739777e-06, "loss": 1.8042, "step": 950 },
    { "epoch": 0.74, "learning_rate": 4.256505576208178e-06, "loss": 1.8193, "step": 1000 },
    { "epoch": 0.78, "learning_rate": 4.219330855018588e-06, "loss": 1.788, "step": 1050 },
    { "epoch": 0.82, "learning_rate": 4.182156133828997e-06, "loss": 1.8064, "step": 1100 },
    { "epoch": 0.85, "learning_rate": 4.144981412639406e-06, "loss": 1.8078, "step": 1150 },
    { "epoch": 0.89, "learning_rate": 4.107806691449814e-06, "loss": 1.7875, "step": 1200 },
    { "epoch": 0.93, "learning_rate": 4.070631970260223e-06, "loss": 1.7841, "step": 1250 },
    { "epoch": 0.97, "learning_rate": 4.033457249070632e-06, "loss": 1.8112, "step": 1300 },
    { "epoch": 1.0, "learning_rate": 3.996282527881041e-06, "loss": 1.7616, "step": 1350 },
    { "epoch": 1.04, "learning_rate": 3.9591078066914504e-06, "loss": 1.7768, "step": 1400 },
    { "epoch": 1.08, "learning_rate": 3.921933085501859e-06, "loss": 1.7868, "step": 1450 },
    { "epoch": 1.11, "learning_rate": 3.884758364312268e-06, "loss": 1.7582, "step": 1500 },
    { "epoch": 1.15, "learning_rate": 3.847583643122677e-06, "loss": 1.765, "step": 1550 },
    { "epoch": 1.19, "learning_rate": 3.810408921933086e-06, "loss": 1.745, "step": 1600 },
    { "epoch": 1.23, "learning_rate": 3.7732342007434947e-06, "loss": 1.744, "step": 1650 },
    { "epoch": 1.26, "learning_rate": 3.7360594795539034e-06, "loss": 1.7699, "step": 1700 },
    { "epoch": 1.3, "learning_rate": 3.698884758364313e-06, "loss": 1.7533, "step": 1750 },
    { "epoch": 1.34, "learning_rate": 3.6617100371747216e-06, "loss": 1.7777, "step": 1800 },
    { "epoch": 1.37, "learning_rate": 3.6245353159851303e-06, "loss": 1.7703, "step": 1850 },
    { "epoch": 1.41, "learning_rate": 3.5873605947955394e-06, "loss": 1.7683, "step": 1900 },
    { "epoch": 1.45, "learning_rate": 3.550185873605948e-06, "loss": 1.7347, "step": 1950 },
    { "epoch": 1.49, "learning_rate": 3.5130111524163572e-06, "loss": 1.795, "step": 2000 },
    { "epoch": 1.52, "learning_rate": 3.4758364312267663e-06, "loss": 1.7617, "step": 2050 },
    { "epoch": 1.56, "learning_rate": 3.438661710037175e-06, "loss": 1.779, "step": 2100 },
    { "epoch": 1.6, "learning_rate": 3.4014869888475837e-06, "loss": 1.7368, "step": 2150 },
    { "epoch": 1.63, "learning_rate": 3.3643122676579924e-06, "loss": 1.7389, "step": 2200 },
    { "epoch": 1.67, "learning_rate": 3.327137546468402e-06, "loss": 1.7511, "step": 2250 },
    { "epoch": 1.71, "learning_rate": 3.2899628252788106e-06, "loss": 1.7434, "step": 2300 },
    { "epoch": 1.75, "learning_rate": 3.2527881040892197e-06, "loss": 1.7498, "step": 2350 },
    { "epoch": 1.78, "learning_rate": 3.2156133828996284e-06, "loss": 1.7628, "step": 2400 },
    { "epoch": 1.82, "learning_rate": 3.1784386617100375e-06, "loss": 1.7477, "step": 2450 },
    { "epoch": 1.86, "learning_rate": 3.1412639405204466e-06, "loss": 1.7427, "step": 2500 },
    { "epoch": 1.9, "learning_rate": 3.1040892193308553e-06, "loss": 1.7498, "step": 2550 },
    { "epoch": 1.93, "learning_rate": 3.066914498141264e-06, "loss": 1.7589, "step": 2600 },
    { "epoch": 1.97, "learning_rate": 3.0297397769516727e-06, "loss": 1.7384, "step": 2650 },
    { "epoch": 2.01, "learning_rate": 2.9925650557620822e-06, "loss": 1.7577, "step": 2700 },
    { "epoch": 2.04, "learning_rate": 2.955390334572491e-06, "loss": 1.7454, "step": 2750 },
    { "epoch": 2.08, "learning_rate": 2.9182156133829e-06, "loss": 1.7235, "step": 2800 },
    { "epoch": 2.12, "learning_rate": 2.8810408921933087e-06, "loss": 1.7398, "step": 2850 },
    { "epoch": 2.16, "learning_rate": 2.843866171003718e-06, "loss": 1.762, "step": 2900 },
    { "epoch": 2.19, "learning_rate": 2.806691449814127e-06, "loss": 1.7521, "step": 2950 },
    { "epoch": 2.23, "learning_rate": 2.7695167286245356e-06, "loss": 1.7414, "step": 3000 },
    { "epoch": 2.27, "learning_rate": 2.7323420074349443e-06, "loss": 1.7409, "step": 3050 },
    { "epoch": 2.3, "learning_rate": 2.695167286245353e-06, "loss": 1.7334, "step": 3100 },
    { "epoch": 2.34, "learning_rate": 2.6579925650557625e-06, "loss": 1.7119, "step": 3150 },
    { "epoch": 2.38, "learning_rate": 2.6208178438661712e-06, "loss": 1.7257, "step": 3200 },
    { "epoch": 2.42, "learning_rate": 2.5836431226765803e-06, "loss": 1.7319, "step": 3250 },
    { "epoch": 2.45, "learning_rate": 2.546468401486989e-06, "loss": 1.7527, "step": 3300 },
    { "epoch": 2.49, "learning_rate": 2.5092936802973977e-06, "loss": 1.7386, "step": 3350 },
    { "epoch": 2.53, "learning_rate": 2.472118959107807e-06, "loss": 1.7368, "step": 3400 },
    { "epoch": 2.56, "learning_rate": 2.434944237918216e-06, "loss": 1.7521, "step": 3450 },
    { "epoch": 2.6, "learning_rate": 2.3977695167286246e-06, "loss": 1.7251, "step": 3500 },
    { "epoch": 2.64, "learning_rate": 2.3605947955390337e-06, "loss": 1.7315, "step": 3550 },
    { "epoch": 2.68, "learning_rate": 2.3234200743494424e-06, "loss": 1.7337, "step": 3600 },
    { "epoch": 2.71, "learning_rate": 2.2862453531598515e-06, "loss": 1.71, "step": 3650 },
    { "epoch": 2.75, "learning_rate": 2.2490706319702602e-06, "loss": 1.7086, "step": 3700 },
    { "epoch": 2.79, "learning_rate": 2.2118959107806693e-06, "loss": 1.7308, "step": 3750 },
    { "epoch": 2.82, "learning_rate": 2.1747211895910784e-06, "loss": 1.7246, "step": 3800 },
    { "epoch": 2.86, "learning_rate": 2.137546468401487e-06, "loss": 1.7339, "step": 3850 },
    { "epoch": 2.9, "learning_rate": 2.1003717472118962e-06, "loss": 1.7404, "step": 3900 },
    { "epoch": 2.94, "learning_rate": 2.063197026022305e-06, "loss": 1.7253, "step": 3950 },
    { "epoch": 2.97, "learning_rate": 2.0260223048327136e-06, "loss": 1.716, "step": 4000 },
    { "epoch": 3.01, "learning_rate": 1.9888475836431227e-06, "loss": 1.7423, "step": 4050 },
    { "epoch": 3.05, "learning_rate": 1.951672862453532e-06, "loss": 1.7203, "step": 4100 },
    { "epoch": 3.08, "learning_rate": 1.9144981412639405e-06, "loss": 1.7063, "step": 4150 },
    { "epoch": 3.12, "learning_rate": 1.8773234200743496e-06, "loss": 1.7304, "step": 4200 },
    { "epoch": 3.16, "learning_rate": 1.8401486988847585e-06, "loss": 1.7125, "step": 4250 },
    { "epoch": 3.2, "learning_rate": 1.8029739776951674e-06, "loss": 1.7448, "step": 4300 },
    { "epoch": 3.23, "learning_rate": 1.7657992565055765e-06, "loss": 1.7127, "step": 4350 },
    { "epoch": 3.27, "learning_rate": 1.7286245353159852e-06, "loss": 1.7353, "step": 4400 },
    { "epoch": 3.31, "learning_rate": 1.6914498141263941e-06, "loss": 1.7128, "step": 4450 },
    { "epoch": 3.34, "learning_rate": 1.6542750929368032e-06, "loss": 1.7262, "step": 4500 },
    { "epoch": 3.38, "learning_rate": 1.617100371747212e-06, "loss": 1.7183, "step": 4550 },
    { "epoch": 3.42, "learning_rate": 1.579925650557621e-06, "loss": 1.6963, "step": 4600 },
    { "epoch": 3.46, "learning_rate": 1.54275092936803e-06, "loss": 1.7084, "step": 4650 },
    { "epoch": 3.49, "learning_rate": 1.5055762081784388e-06, "loss": 1.7101, "step": 4700 },
    { "epoch": 3.53, "learning_rate": 1.4684014869888477e-06, "loss": 1.7223, "step": 4750 },
    { "epoch": 3.57, "learning_rate": 1.4312267657992564e-06, "loss": 1.7015, "step": 4800 },
    { "epoch": 3.6, "learning_rate": 1.3940520446096655e-06, "loss": 1.7044, "step": 4850 },
    { "epoch": 3.64, "learning_rate": 1.3568773234200744e-06, "loss": 1.7352, "step": 4900 },
    { "epoch": 3.68, "learning_rate": 1.3197026022304835e-06, "loss": 1.7363, "step": 4950 },
    { "epoch": 3.72, "learning_rate": 1.2825278810408922e-06, "loss": 1.7145, "step": 5000 },
    { "epoch": 3.75, "learning_rate": 1.2453531598513011e-06, "loss": 1.7229, "step": 5050 },
    { "epoch": 3.79, "learning_rate": 1.20817843866171e-06, "loss": 1.715, "step": 5100 },
    { "epoch": 3.83, "learning_rate": 1.1710037174721191e-06, "loss": 1.7354, "step": 5150 },
    { "epoch": 3.86, "learning_rate": 1.133828996282528e-06, "loss": 1.7074, "step": 5200 },
    { "epoch": 3.9, "learning_rate": 1.096654275092937e-06, "loss": 1.7159, "step": 5250 },
    { "epoch": 3.94, "learning_rate": 1.0594795539033458e-06, "loss": 1.7391, "step": 5300 },
    { "epoch": 3.98, "learning_rate": 1.0223048327137547e-06, "loss": 1.688, "step": 5350 },
    { "epoch": 4.01, "learning_rate": 9.851301115241636e-07, "loss": 1.7221, "step": 5400 },
    { "epoch": 4.05, "learning_rate": 9.479553903345725e-07, "loss": 1.7209, "step": 5450 },
    { "epoch": 4.09, "learning_rate": 9.107806691449815e-07, "loss": 1.7422, "step": 5500 },
    { "epoch": 4.12, "learning_rate": 8.736059479553904e-07, "loss": 1.703, "step": 5550 },
    { "epoch": 4.16, "learning_rate": 8.364312267657993e-07, "loss": 1.7399, "step": 5600 },
    { "epoch": 4.2, "learning_rate": 7.992565055762081e-07, "loss": 1.7112, "step": 5650 },
    { "epoch": 4.24, "learning_rate": 7.620817843866171e-07, "loss": 1.7074, "step": 5700 },
    { "epoch": 4.27, "learning_rate": 7.24907063197026e-07, "loss": 1.7104, "step": 5750 },
    { "epoch": 4.31, "learning_rate": 6.87732342007435e-07, "loss": 1.7184, "step": 5800 },
    { "epoch": 4.35, "learning_rate": 6.505576208178439e-07, "loss": 1.7118, "step": 5850 },
    { "epoch": 4.38, "learning_rate": 6.133828996282528e-07, "loss": 1.6882, "step": 5900 },
    { "epoch": 4.42, "learning_rate": 5.762081784386617e-07, "loss": 1.6995, "step": 5950 },
    { "epoch": 4.46, "learning_rate": 5.390334572490706e-07, "loss": 1.694, "step": 6000 },
    { "epoch": 4.5, "learning_rate": 5.018587360594796e-07, "loss": 1.6957, "step": 6050 },
    { "epoch": 4.53, "learning_rate": 4.646840148698885e-07, "loss": 1.7316, "step": 6100 },
    { "epoch": 4.57, "learning_rate": 4.2750929368029745e-07, "loss": 1.6981, "step": 6150 },
    { "epoch": 4.61, "learning_rate": 3.9033457249070635e-07, "loss": 1.7133, "step": 6200 },
    { "epoch": 4.64, "learning_rate": 3.531598513011153e-07, "loss": 1.6907, "step": 6250 },
    { "epoch": 4.68, "learning_rate": 3.1598513011152414e-07, "loss": 1.7058, "step": 6300 },
    { "epoch": 4.72, "learning_rate": 2.788104089219331e-07, "loss": 1.7117, "step": 6350 },
    { "epoch": 4.76, "learning_rate": 2.4163568773234205e-07, "loss": 1.7143, "step": 6400 },
    { "epoch": 4.79, "learning_rate": 2.0446096654275095e-07, "loss": 1.7068, "step": 6450 },
    { "epoch": 4.83, "learning_rate": 1.6728624535315985e-07, "loss": 1.6976, "step": 6500 },
    { "epoch": 4.87, "learning_rate": 1.3011152416356877e-07, "loss": 1.7176, "step": 6550 },
    { "epoch": 4.9, "learning_rate": 9.293680297397771e-08, "loss": 1.717, "step": 6600 },
    { "epoch": 4.94, "learning_rate": 5.576208178438662e-08, "loss": 1.7004, "step": 6650 },
    { "epoch": 4.98, "learning_rate": 1.858736059479554e-08, "loss": 1.7247, "step": 6700 },
    {
      "epoch": 5.0,
      "step": 6725,
      "total_flos": 9.348094067612314e+16,
      "train_loss": 1.74816911605211,
      "train_runtime": 8144.5226,
      "train_samples_per_second": 158.606,
      "train_steps_per_second": 0.826
    }
  ],
  "logging_steps": 50,
  "max_steps": 6725,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 9.348094067612314e+16,
  "trial_name": null,
  "trial_params": null
}