TinyLlama-1.1B-Chat-rust-cpp-encodings / LORAs / tinyllama-encoder_4e-5 / checkpoint-1484 / trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1484,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 3.999950204782701e-05, "loss": 1.7807, "step": 10 },
    { "epoch": 0.03, "learning_rate": 3.999800821610369e-05, "loss": 1.781, "step": 20 },
    { "epoch": 0.04, "learning_rate": 3.999551857921571e-05, "loss": 1.8258, "step": 30 },
    { "epoch": 0.05, "learning_rate": 3.999203326113507e-05, "loss": 1.5688, "step": 40 },
    { "epoch": 0.07, "learning_rate": 3.9987552435413944e-05, "loss": 1.5824, "step": 50 },
    { "epoch": 0.08, "learning_rate": 3.9982076325176035e-05, "loss": 1.7159, "step": 60 },
    { "epoch": 0.09, "learning_rate": 3.9975605203105434e-05, "loss": 1.7095, "step": 70 },
    { "epoch": 0.11, "learning_rate": 3.996813939143307e-05, "loss": 1.6194, "step": 80 },
    { "epoch": 0.12, "learning_rate": 3.9959679261920665e-05, "loss": 1.6602, "step": 90 },
    { "epoch": 0.13, "learning_rate": 3.995022523584219e-05, "loss": 1.6734, "step": 100 },
    { "epoch": 0.15, "learning_rate": 3.9939777783962946e-05, "loss": 1.715, "step": 110 },
    { "epoch": 0.16, "learning_rate": 3.992833742651606e-05, "loss": 1.7322, "step": 120 },
    { "epoch": 0.18, "learning_rate": 3.9915904733176614e-05, "loss": 1.6645, "step": 130 },
    { "epoch": 0.19, "learning_rate": 3.9902480323033285e-05, "loss": 1.6249, "step": 140 },
    { "epoch": 0.2, "learning_rate": 3.9888064864557486e-05, "loss": 1.6279, "step": 150 },
    { "epoch": 0.22, "learning_rate": 3.987265907557011e-05, "loss": 1.5166, "step": 160 },
    { "epoch": 0.23, "learning_rate": 3.9856263723205755e-05, "loss": 1.4946, "step": 170 },
    { "epoch": 0.24, "learning_rate": 3.983887962387457e-05, "loss": 1.5583, "step": 180 },
    { "epoch": 0.26, "learning_rate": 3.982050764322154e-05, "loss": 1.526, "step": 190 },
    { "epoch": 0.27, "learning_rate": 3.9801148696083455e-05, "loss": 1.6003, "step": 200 },
    { "epoch": 0.28, "learning_rate": 3.9780803746443284e-05, "loss": 1.6403, "step": 210 },
    { "epoch": 0.3, "learning_rate": 3.9759473807382214e-05, "loss": 1.5128, "step": 220 },
    { "epoch": 0.31, "learning_rate": 3.97371599410292e-05, "loss": 1.4739, "step": 230 },
    { "epoch": 0.32, "learning_rate": 3.9713863258508064e-05, "loss": 1.5466, "step": 240 },
    { "epoch": 0.34, "learning_rate": 3.968958491988216e-05, "loss": 1.5307, "step": 250 },
    { "epoch": 0.35, "learning_rate": 3.966432613409667e-05, "loss": 1.4508, "step": 260 },
    { "epoch": 0.36, "learning_rate": 3.9638088158918285e-05, "loss": 1.5163, "step": 270 },
    { "epoch": 0.38, "learning_rate": 3.9610872300872704e-05, "loss": 1.5583, "step": 280 },
    { "epoch": 0.39, "learning_rate": 3.958267991517948e-05, "loss": 1.3893, "step": 290 },
    { "epoch": 0.4, "learning_rate": 3.955351240568459e-05, "loss": 1.4718, "step": 300 },
    { "epoch": 0.42, "learning_rate": 3.9523371224790505e-05, "loss": 1.479, "step": 310 },
    { "epoch": 0.43, "learning_rate": 3.949225787338388e-05, "loss": 1.4874, "step": 320 },
    { "epoch": 0.44, "learning_rate": 3.946017390076081e-05, "loss": 1.2723, "step": 330 },
    { "epoch": 0.46, "learning_rate": 3.942712090454968e-05, "loss": 1.3925, "step": 340 },
    { "epoch": 0.47, "learning_rate": 3.939310053063161e-05, "loss": 1.3833, "step": 350 },
    { "epoch": 0.49, "learning_rate": 3.935811447305853e-05, "loss": 1.4989, "step": 360 },
    { "epoch": 0.5, "learning_rate": 3.9322164473968774e-05, "loss": 1.4961, "step": 370 },
    { "epoch": 0.51, "learning_rate": 3.928525232350035e-05, "loss": 1.5295, "step": 380 },
    { "epoch": 0.53, "learning_rate": 3.924737985970182e-05, "loss": 1.4051, "step": 390 },
    { "epoch": 0.54, "learning_rate": 3.920854896844074e-05, "loss": 1.3306, "step": 400 },
    { "epoch": 0.55, "learning_rate": 3.916876158330979e-05, "loss": 1.3522, "step": 410 },
    { "epoch": 0.57, "learning_rate": 3.912801968553045e-05, "loss": 1.4479, "step": 420 },
    { "epoch": 0.58, "learning_rate": 3.908632530385438e-05, "loss": 1.5481, "step": 430 },
    { "epoch": 0.59, "learning_rate": 3.9043680514462366e-05, "loss": 1.5375, "step": 440 },
    { "epoch": 0.61, "learning_rate": 3.900008744086097e-05, "loss": 1.321, "step": 450 },
    { "epoch": 0.62, "learning_rate": 3.895554825377676e-05, "loss": 1.4584, "step": 460 },
    { "epoch": 0.63, "learning_rate": 3.891006517104823e-05, "loss": 1.4188, "step": 470 },
    { "epoch": 0.65, "learning_rate": 3.886364045751538e-05, "loss": 1.2557, "step": 480 },
    { "epoch": 0.66, "learning_rate": 3.881627642490691e-05, "loss": 1.412, "step": 490 },
    { "epoch": 0.67, "learning_rate": 3.876797543172511e-05, "loss": 1.3267, "step": 500 },
    { "epoch": 0.69, "learning_rate": 3.871873988312842e-05, "loss": 1.3768, "step": 510 },
    { "epoch": 0.7, "learning_rate": 3.86685722308117e-05, "loss": 1.5065, "step": 520 },
    { "epoch": 0.71, "learning_rate": 3.861747497288409e-05, "loss": 1.382, "step": 530 },
    { "epoch": 0.73, "learning_rate": 3.856545065374465e-05, "loss": 1.3336, "step": 540 },
    { "epoch": 0.74, "learning_rate": 3.851250186395565e-05, "loss": 1.2626, "step": 550 },
    { "epoch": 0.75, "learning_rate": 3.845863124011361e-05, "loss": 1.4174, "step": 560 },
    { "epoch": 0.77, "learning_rate": 3.840384146471792e-05, "loss": 1.3371, "step": 570 },
    { "epoch": 0.78, "learning_rate": 3.8348135266037364e-05, "loss": 1.3496, "step": 580 },
    { "epoch": 0.8, "learning_rate": 3.829151541797421e-05, "loss": 1.2245, "step": 590 },
    { "epoch": 0.81, "learning_rate": 3.82339847399261e-05, "loss": 1.2381, "step": 600 },
    { "epoch": 0.82, "learning_rate": 3.817554609664564e-05, "loss": 1.2805, "step": 610 },
    { "epoch": 0.84, "learning_rate": 3.811620239809778e-05, "loss": 1.2055, "step": 620 },
    { "epoch": 0.85, "learning_rate": 3.805595659931487e-05, "loss": 1.3493, "step": 630 },
    { "epoch": 0.86, "learning_rate": 3.799481170024957e-05, "loss": 1.4545, "step": 640 },
    { "epoch": 0.88, "learning_rate": 3.7932770745625406e-05, "loss": 1.3633, "step": 650 },
    { "epoch": 0.89, "learning_rate": 3.786983682478519e-05, "loss": 1.2677, "step": 660 },
    { "epoch": 0.9, "learning_rate": 3.78060130715372e-05, "loss": 1.2218, "step": 670 },
    { "epoch": 0.92, "learning_rate": 3.7741302663999085e-05, "loss": 1.4738, "step": 680 },
    { "epoch": 0.93, "learning_rate": 3.7675708824439656e-05, "loss": 1.191, "step": 690 },
    { "epoch": 0.94, "learning_rate": 3.76092348191184e-05, "loss": 1.2747, "step": 700 },
    { "epoch": 0.96, "learning_rate": 3.7541883958122864e-05, "loss": 1.2833, "step": 710 },
    { "epoch": 0.97, "learning_rate": 3.7473659595203806e-05, "loss": 1.3725, "step": 720 },
    { "epoch": 0.98, "learning_rate": 3.74045651276082e-05, "loss": 1.31, "step": 730 },
    { "epoch": 1.0, "learning_rate": 3.7334603995910075e-05, "loss": 1.2406, "step": 740 },
    { "epoch": 1.01, "learning_rate": 3.726377968383919e-05, "loss": 1.1966, "step": 750 },
    { "epoch": 1.02, "learning_rate": 3.719209571810755e-05, "loss": 1.2898, "step": 760 },
    { "epoch": 1.04, "learning_rate": 3.711955566823381e-05, "loss": 1.3516, "step": 770 },
    { "epoch": 1.05, "learning_rate": 3.704616314636551e-05, "loss": 1.4107, "step": 780 },
    { "epoch": 1.06, "learning_rate": 3.697192180709923e-05, "loss": 1.3388, "step": 790 },
    { "epoch": 1.08, "learning_rate": 3.6896835347298583e-05, "loss": 1.1733, "step": 800 },
    { "epoch": 1.09, "learning_rate": 3.682090750591016e-05, "loss": 1.3502, "step": 810 },
    { "epoch": 1.11, "learning_rate": 3.674414206377732e-05, "loss": 1.3909, "step": 820 },
    { "epoch": 1.12, "learning_rate": 3.6666542843451936e-05, "loss": 1.2328, "step": 830 },
    { "epoch": 1.13, "learning_rate": 3.658811370900404e-05, "loss": 1.1578, "step": 840 },
    { "epoch": 1.15, "learning_rate": 3.650885856582943e-05, "loss": 1.3599, "step": 850 },
    { "epoch": 1.16, "learning_rate": 3.6428781360455176e-05, "loss": 1.1904, "step": 860 },
    { "epoch": 1.17, "learning_rate": 3.6347886080343135e-05, "loss": 1.2668, "step": 870 },
    { "epoch": 1.19, "learning_rate": 3.626617675369135e-05, "loss": 1.215, "step": 880 },
    { "epoch": 1.2, "learning_rate": 3.6183657449233484e-05, "loss": 1.2978, "step": 890 },
    { "epoch": 1.21, "learning_rate": 3.610033227603626e-05, "loss": 1.3659, "step": 900 },
    { "epoch": 1.23, "learning_rate": 3.601620538329476e-05, "loss": 1.2659, "step": 910 },
    { "epoch": 1.24, "learning_rate": 3.593128096012589e-05, "loss": 1.2425, "step": 920 },
    { "epoch": 1.25, "learning_rate": 3.584556323535978e-05, "loss": 1.362, "step": 930 },
    { "epoch": 1.27, "learning_rate": 3.5759056477329135e-05, "loss": 1.1444, "step": 940 },
    { "epoch": 1.28, "learning_rate": 3.5671764993656784e-05, "loss": 1.3044, "step": 950 },
    { "epoch": 1.29, "learning_rate": 3.558369313104112e-05, "loss": 1.288, "step": 960 },
    { "epoch": 1.31, "learning_rate": 3.5494845275039676e-05, "loss": 1.338, "step": 970 },
    { "epoch": 1.32, "learning_rate": 3.5405225849850754e-05, "loss": 1.4229, "step": 980 },
    { "epoch": 1.33, "learning_rate": 3.531483931809311e-05, "loss": 1.2542, "step": 990 },
    { "epoch": 1.35, "learning_rate": 3.5223690180583717e-05, "loss": 1.3793, "step": 1000 },
    { "epoch": 1.36, "learning_rate": 3.513178297611369e-05, "loss": 1.2778, "step": 1010 },
    { "epoch": 1.37, "learning_rate": 3.503912228122226e-05, "loss": 1.384, "step": 1020 },
    { "epoch": 1.39, "learning_rate": 3.494571270996885e-05, "loss": 1.1319, "step": 1030 },
    { "epoch": 1.4, "learning_rate": 3.4851558913703367e-05, "loss": 1.312, "step": 1040 },
    { "epoch": 1.42, "learning_rate": 3.475666558083455e-05, "loss": 1.1727, "step": 1050 },
    { "epoch": 1.43, "learning_rate": 3.4661037436596526e-05, "loss": 1.1568, "step": 1060 },
    { "epoch": 1.44, "learning_rate": 3.456467924281353e-05, "loss": 1.2801, "step": 1070 },
    { "epoch": 1.46, "learning_rate": 3.446759579766275e-05, "loss": 1.2211, "step": 1080 },
    { "epoch": 1.47, "learning_rate": 3.436979193543543e-05, "loss": 1.2206, "step": 1090 },
    { "epoch": 1.48, "learning_rate": 3.427127252629616e-05, "loss": 1.2377, "step": 1100 },
    { "epoch": 1.5, "learning_rate": 3.417204247604031e-05, "loss": 1.2548, "step": 1110 },
    { "epoch": 1.51, "learning_rate": 3.407210672584979e-05, "loss": 1.1323, "step": 1120 },
    { "epoch": 1.52, "learning_rate": 3.397147025204701e-05, "loss": 1.1272, "step": 1130 },
    { "epoch": 1.54, "learning_rate": 3.387013806584705e-05, "loss": 1.1798, "step": 1140 },
    { "epoch": 1.55, "learning_rate": 3.376811521310814e-05, "loss": 1.4287, "step": 1150 },
    { "epoch": 1.56, "learning_rate": 3.36654067740804e-05, "loss": 1.1023, "step": 1160 },
    { "epoch": 1.58, "learning_rate": 3.3562017863152867e-05, "loss": 1.3098, "step": 1170 },
    { "epoch": 1.59, "learning_rate": 3.3457953628598826e-05, "loss": 1.3028, "step": 1180 },
    { "epoch": 1.6, "learning_rate": 3.335321925231946e-05, "loss": 1.1454, "step": 1190 },
    { "epoch": 1.62, "learning_rate": 3.3247819949585776e-05, "loss": 1.1599, "step": 1200 },
    { "epoch": 1.63, "learning_rate": 3.314176096877898e-05, "loss": 1.2164, "step": 1210 },
    { "epoch": 1.64, "learning_rate": 3.3035047591129054e-05, "loss": 1.266, "step": 1220 },
    { "epoch": 1.66, "learning_rate": 3.292768513045183e-05, "loss": 1.2517, "step": 1230 },
    { "epoch": 1.67, "learning_rate": 3.281967893288436e-05, "loss": 1.1866, "step": 1240 },
    { "epoch": 1.68, "learning_rate": 3.271103437661873e-05, "loss": 1.1079, "step": 1250 },
    { "epoch": 1.7, "learning_rate": 3.260175687163423e-05, "loss": 1.2, "step": 1260 },
    { "epoch": 1.71, "learning_rate": 3.249185185942795e-05, "loss": 1.1048, "step": 1270 },
    { "epoch": 1.73, "learning_rate": 3.2381324812743875e-05, "loss": 1.3801, "step": 1280 },
    { "epoch": 1.74, "learning_rate": 3.22701812353003e-05, "loss": 1.1595, "step": 1290 },
    { "epoch": 1.75, "learning_rate": 3.215842666151582e-05, "loss": 0.9625, "step": 1300 },
    { "epoch": 1.77, "learning_rate": 3.2046066656233726e-05, "loss": 1.0588, "step": 1310 },
    { "epoch": 1.78, "learning_rate": 3.193310681444488e-05, "loss": 1.1553, "step": 1320 },
    { "epoch": 1.79, "learning_rate": 3.181955276100917e-05, "loss": 1.0569, "step": 1330 },
    { "epoch": 1.81, "learning_rate": 3.170541015037535e-05, "loss": 1.1522, "step": 1340 },
    { "epoch": 1.82, "learning_rate": 3.159068466629951e-05, "loss": 1.0199, "step": 1350 },
    { "epoch": 1.83, "learning_rate": 3.147538202156208e-05, "loss": 1.2719, "step": 1360 },
    { "epoch": 1.85, "learning_rate": 3.135950795768331e-05, "loss": 1.1117, "step": 1370 },
    { "epoch": 1.86, "learning_rate": 3.1243068244637364e-05, "loss": 1.0545, "step": 1380 },
    { "epoch": 1.87, "learning_rate": 3.112606868056508e-05, "loss": 1.1852, "step": 1390 },
    { "epoch": 1.89, "learning_rate": 3.100851509148517e-05, "loss": 1.0035, "step": 1400 },
    { "epoch": 1.9, "learning_rate": 3.089041333100414e-05, "loss": 1.2372, "step": 1410 },
    { "epoch": 1.91, "learning_rate": 3.077176928002482e-05, "loss": 1.2852, "step": 1420 },
    { "epoch": 1.93, "learning_rate": 3.065258884645351e-05, "loss": 1.1826, "step": 1430 },
    { "epoch": 1.94, "learning_rate": 3.05328779649058e-05, "loss": 1.2395, "step": 1440 },
    { "epoch": 1.95, "learning_rate": 3.041264259641104e-05, "loss": 1.3701, "step": 1450 },
    { "epoch": 1.97, "learning_rate": 3.029188872811554e-05, "loss": 1.0938, "step": 1460 },
    { "epoch": 1.98, "learning_rate": 3.017062237298441e-05, "loss": 1.2473, "step": 1470 },
    { "epoch": 1.99, "learning_rate": 3.0048849569502158e-05, "loss": 1.2034, "step": 1480 }
  ],
  "logging_steps": 10,
  "max_steps": 4452,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 7755360321208320.0,
  "trial_name": null,
  "trial_params": null
}