{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.999602322436968,
  "eval_steps": 500,
  "global_step": 31430,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 4.99e-05,
      "loss": 0.0788,
      "step": 500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.996789608200495e-05,
      "loss": 0.1012,
      "step": 1000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9871409692106585e-05,
      "loss": 0.1124,
      "step": 1500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.971078950263544e-05,
      "loss": 0.1131,
      "step": 2000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.948644969128254e-05,
      "loss": 0.1204,
      "step": 2500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.919960625509548e-05,
      "loss": 0.1196,
      "step": 3000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.884984942748463e-05,
      "loss": 0.1266,
      "step": 3500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.8438593011311706e-05,
      "loss": 0.1261,
      "step": 4000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.7966897478696884e-05,
      "loss": 0.1257,
      "step": 4500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.743597915099559e-05,
      "loss": 0.1238,
      "step": 5000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.684720706237356e-05,
      "loss": 0.1308,
      "step": 5500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.620209942959426e-05,
      "loss": 0.131,
      "step": 6000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.55023197371218e-05,
      "loss": 0.114,
      "step": 6500
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.475278582120946e-05,
      "loss": 0.0905,
      "step": 7000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.394941138799278e-05,
      "loss": 0.0941,
      "step": 7500
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.309717371111503e-05,
      "loss": 0.0923,
      "step": 8000
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.2198270383757855e-05,
      "loss": 0.0946,
      "step": 8500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.125501933186698e-05,
      "loss": 0.0961,
      "step": 9000
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.026985283711271e-05,
      "loss": 0.0989,
      "step": 9500
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.9247397904690526e-05,
      "loss": 0.0976,
      "step": 10000
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.8186193919443034e-05,
      "loss": 0.0958,
      "step": 10500
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.709098781174144e-05,
      "loss": 0.0979,
      "step": 11000
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.596460369690013e-05,
      "loss": 0.0966,
      "step": 11500
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.4812281650876075e-05,
      "loss": 0.0948,
      "step": 12000
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.363237553847716e-05,
      "loss": 0.0966,
      "step": 12500
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.243263434452087e-05,
      "loss": 0.0669,
      "step": 13000
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.1211344193786635e-05,
      "loss": 0.0671,
      "step": 13500
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.997403737537923e-05,
      "loss": 0.0643,
      "step": 14000
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.8723904427691284e-05,
      "loss": 0.066,
      "step": 14500
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.7464168962769692e-05,
      "loss": 0.0672,
      "step": 15000
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.6198079353859955e-05,
      "loss": 0.0659,
      "step": 15500
    },
    {
      "epoch": 2.55,
      "learning_rate": 2.4928900359101e-05,
      "loss": 0.0655,
      "step": 16000
    },
    {
      "epoch": 2.62,
      "learning_rate": 2.3662440335427397e-05,
      "loss": 0.0664,
      "step": 16500
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.239689009727201e-05,
      "loss": 0.0613,
      "step": 17000
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.113805227834954e-05,
      "loss": 0.0625,
      "step": 17500
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.988917293722174e-05,
      "loss": 0.0614,
      "step": 18000
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.865347245336633e-05,
      "loss": 0.0607,
      "step": 18500
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.7434137223056952e-05,
      "loss": 0.055,
      "step": 19000
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.6236689558415462e-05,
      "loss": 0.045,
      "step": 19500
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.5059418860613778e-05,
      "loss": 0.0438,
      "step": 20000
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.3907781098231621e-05,
      "loss": 0.041,
      "step": 20500
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.2784745902116096e-05,
      "loss": 0.0413,
      "step": 21000
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.16953589010944e-05,
      "loss": 0.0406,
      "step": 21500
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.0638063865042836e-05,
      "loss": 0.0405,
      "step": 22000
    },
    {
      "epoch": 3.58,
      "learning_rate": 9.617802738406081e-06,
      "loss": 0.0431,
      "step": 22500
    },
    {
      "epoch": 3.66,
      "learning_rate": 8.637206382217675e-06,
      "loss": 0.0427,
      "step": 23000
    },
    {
      "epoch": 3.74,
      "learning_rate": 7.698803377323965e-06,
      "loss": 0.0409,
      "step": 23500
    },
    {
      "epoch": 3.82,
      "learning_rate": 6.805013504146729e-06,
      "loss": 0.0395,
      "step": 24000
    },
    {
      "epoch": 3.9,
      "learning_rate": 5.9597869507761e-06,
      "loss": 0.0363,
      "step": 24500
    },
    {
      "epoch": 3.98,
      "learning_rate": 5.161916279430623e-06,
      "loss": 0.0402,
      "step": 25000
    },
    {
      "epoch": 4.06,
      "learning_rate": 4.4152003958914255e-06,
      "loss": 0.0328,
      "step": 25500
    },
    {
      "epoch": 4.14,
      "learning_rate": 3.7228978782059888e-06,
      "loss": 0.0296,
      "step": 26000
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.0840197463041203e-06,
      "loss": 0.0313,
      "step": 26500
    },
    {
      "epoch": 4.29,
      "learning_rate": 2.5016544981684596e-06,
      "loss": 0.03,
      "step": 27000
    },
    {
      "epoch": 4.37,
      "learning_rate": 1.9773038297850347e-06,
      "loss": 0.029,
      "step": 27500
    },
    {
      "epoch": 4.45,
      "learning_rate": 1.512319839852258e-06,
      "loss": 0.0268,
      "step": 28000
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.1079015432383654e-06,
      "loss": 0.0274,
      "step": 28500
    },
    {
      "epoch": 4.61,
      "learning_rate": 7.650917791832607e-07,
      "loss": 0.031,
      "step": 29000
    },
    {
      "epoch": 4.69,
      "learning_rate": 4.852722817364125e-07,
      "loss": 0.0259,
      "step": 29500
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.680433182677189e-07,
      "loss": 0.0286,
      "step": 30000
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.145885586943618e-07,
      "loss": 0.0267,
      "step": 30500
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.5303703819626323e-08,
      "loss": 0.0264,
      "step": 31000
    }
  ],
  "logging_steps": 500,
  "max_steps": 31430,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 2.6646082174451712e+17,
  "trial_name": null,
  "trial_params": null
}